column            type            range / classes
repo_name         stringlengths   5 to 92
path              stringlengths   4 to 221
copies            stringclasses   19 values
size              stringlengths   4 to 6
content           stringlengths   766 to 896k
license           stringclasses   15 values
hash              int64           -9,223,277,421,539,062,000 to 9,223,102,107B
line_mean         float64         6.51 to 99.9
line_max          int64           32 to 997
alpha_frac        float64         0.25 to 0.96
autogenerated     bool            1 class
ratio             float64         1.5 to 13.6
config_test       bool            2 classes
has_no_keywords   bool            2 classes
few_assignments   bool            1 class
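Each record pairs a source file (repo_name, path, content, license) with simple quality statistics (line_mean, line_max, alpha_frac, ratio) and boolean filter flags (autogenerated, config_test, has_no_keywords, few_assignments). As a minimal sketch of how a table with these columns might be consumed, assuming it is published as a Hugging Face dataset (the dataset identifier below is a placeholder, not the real name), the rows can be loaded and filtered on those statistics; the sample records follow after the sketch.

from datasets import load_dataset  # Hugging Face "datasets" library

# "org/python-code-corpus" is a hypothetical identifier used only for illustration.
ds = load_dataset("org/python-code-corpus", split="train")

def looks_clean(row):
    # Keep hand-written, non-test files with moderate line lengths,
    # using the statistics columns described above.
    return (not row["autogenerated"]
            and not row["config_test"]
            and row["line_max"] <= 200
            and row["alpha_frac"] >= 0.25)

clean = ds.filter(looks_clean)
print(clean[0]["repo_name"], clean[0]["path"], clean[0]["license"])
print(clean[0]["content"][:200])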
buckets1337/UOMUMM
src/Renderer.py
1
5959
# Renderer.py
# Various ways to format text output to players


class Renderer():
    '''
    A renderer component just contains methods for formatting text output
    in various ways
    '''

    def __init__(self, server):
        self.owner = server

    def formatMessage(self, message, width):
        '''
        splits a <message> string into lines that are <width> characters long
        without breaking words apart across lines. Broken apart single lines
        are slightly indented on every line other than the first in the final
        formatted message. Returns the formatted message string.
        '''
        count = 0
        formatted = ''
        if message == None:
            message = 'None'
        for character in range(0,len(message)):
            char = message[character]
            if char != '\n':
                if count < width:
                    formatted += char
                    count += 1
                    #print formatted
                else:
                    if message[character] == ' ':
                        formatted += "\n" + char
                        count = 2
                        #print 'da TRUTH'
                    else:
                        collecting = True
                        coll = ''
                        i = 1
                        while collecting:
                            if message[character-i] != '\n':
                                coll += message[character-i]
                                i += 1
                            else:
                                collecting = False
                        if ' ' not in coll.strip():
                            #print 'TRUE'
                            formatted += "\n " + char
                            count = 2
                        else:
                            #print 'checking...'
                            checking = True
                            i = 1
                            while checking:
                                msg = message.strip()
                                chk = msg[character-i]
                                #print chk
                                if chk == ' ':
                                    #print formatted
                                    formatted = formatted[:-i] + "\n" + formatted[-i:] + char
                                    #print formatted
                                    count = i + 1
                                    checking = False
                                else:
                                    i += 1
            else:
                formatted += char
                count = 0
        return formatted

    def messageBox(self, client, title, message):
        '''
        displays a simple <message> in a box for <client>. The box resizes to
        fit the message and title. Has a <title> at the top of the box along
        the border.
        '''
        message = self.formatMessage(message, 76)
        #print message
        if message.endswith("\n"):
            message = message[:-1]
        msgLines = message.split('\n')
        #print msgLines
        finalMsg = ''
        longest = 0
        for line in msgLines:
            if len(line) > longest:
                longest = len(line)
        for line in msgLines:
            if longest > len(str(title)):
                if longest > len(line):
                    mod = longest - len(line)
                    line = line + ((mod) * " ")
            # else:
            #     line = line + ((len(str(title)) - 4) * " ")
            else:
                mod = (len(str(title)) + 2) - len(line)
                line = line + (mod * " ")
            line = " | " + line + " |\n"
            finalMsg += line
        #print int((0.5)*float(longest))
        if longest >= len(str(title)):
            titleLine = "\n " + (int((0.5)*float(longest - len(str(title)))+1)* "_") + "^!"+str(title)+"^~" + (int((0.5)*float(longest - len(str(title)))+1)* "_") + "\n"
            titleLineLen = len(titleLine) - 6
            if titleLineLen > (longest + 2):
                #print len(titleLine)
                #print longest + 2
                diff = titleLineLen - (longest + 2) - 1
                if not diff <= 0:
                    titleLine = titleLine[:-diff] + "\n"
                if diff == 0:
                    titleLine = titleLine[:-1] + "_\n"
            elif (longest + 2) >= titleLineLen:
                diff = (longest + 2) - titleLineLen
                if titleLine.endswith("\n"):
                    titleLine = titleLine[:-1]
                titleLine += (diff * "_") + "\n"
            client.send_cc(titleLine)
            client.send_cc(" |" + ((longest + 2)*" ") + "|\n")
            client.send_cc(finalMsg)
            client.send_cc(" |" + ((longest + 2)*"_") + "|\n\n")
        else:
            client.send_cc("\n __^!" + str(title) + "^~__\n")
            client.send_cc(" |" + ((4 + len(str(title))) * " ") + "|\n")
            client.send_cc(finalMsg)
            client.send_cc(" |" + ((4 + len(str(title))) * "_") + "|\n\n")

    def roomDisplay(self, client, room):
        '''
        renders the typical display for a room to client
        '''
        namePad = 80 - len(room.name) - 2
        client.send_cc("\n")
        message = "+" + ("-" * (int(0.5 *namePad)-1)) + "^! " + str(room.name) + " ^~" + ("-" * (int(0.5* namePad)-1)) + "+" + "\n"
        if len(message) < 81:
            message = "+" + ("-" * (int(0.5 *namePad)-1)) + "^! " + str(room.name) + " ^~" + ("-" * (int(0.5* namePad)-1)) + "-+" + "\n"
        client.send_cc(message)
        # client.send_cc("|" + (" " * 78) + "|" + "\n")
        descrip = self.formatMessage(room.description, 76)
        desc = descrip.split("\\n")
        #print desc
        for line in desc:
            linePad = 80 - len(line) - 2
            if len(line) > 0:
                message = "|" +(" " * (int(0.5 *linePad))) + line +(" " * (int(0.5 *linePad))) + "|" + "\n"
                if len(message) < 81:
                    message = ("|" +(" " * (int(0.5 *linePad))) + line +(" " * (int(0.5 *linePad))) + " |" + "\n")
                client.send_cc(message)
            else:
                client.send_cc("|" + (" " * 78) + "|" + "\n")
        client.send_cc("+" + ("-" * 78) + "+" + "\n")
        client.send_cc("|" + (" " * 78) + "|" + "\n")
        #print "players: " + str(room.players)
        for player in room.players:
            if player.connection != client:
                playerPad = int(80 - len(player.name) - 3)
                client.send_cc("| " + "^C" + str(player.name) + "^~" + (" " * playerPad) + "|" + "\n")
            else:
                client.send_cc("|" + (" " * 78) + "|" + "\n")
        client.send_cc("|" + (" " * 78) + "|" + "\n")
        client.send_cc("|" + (" " * 78) + "|" + "\n")
        exitList = []
        if room.orderedExits == []:
            #print 'test'
            #print room.exits
            for exit in room.exits:
                #print room.exits[exit]
                exitList.append(str(room.exits[exit]))
            room.orderedExits = exitList
        else:
            for rm in room.orderedExits:
                exitList.append(str(rm[1]))
        #print exitList
        if exitList != []:
            lenExit = len(exitList[0])
        else:
            lenExit = 0
        firstPad = int(80 - lenExit - 12)
        if exitList != []:
            msg = "| " + "^!exits:^~ 1." + exitList[0] + (" " * firstPad) + "|" + "\n"
            client.send_cc(msg)
            i = 2
            for exit in exitList[1:]:
                pad = int(80 - len(exitList[i-1]) - 12)
                client.send_cc("| " + str(i) + "." + exitList[i-1] + (" " * pad) + "|" + "\n")
                i += 1
        else:
            client.send_cc("|" + (" " * 78) + "|" + "\n")
        client.send_cc("+" + ("-" * 78) + "+" + "\n")
apache-2.0
-8,190,471,305,137,985,000
28.8
161
0.52492
false
2.802916
false
false
false
tdlong/YeastRobot
UserPrograms/ASE/Rearray_day3_pilot_1.py
1
1439
import sys
# where RobotControl.py, etc lives
sys.path.append('/home/pi/Desktop/ADL/YeastRobot/PythonLibrary')
from RobotControl import *

#################################
### Define Deck Layout
#################################
deck="""\
DW96W SW96P SW96P SW96P SW96P SW96P SW96P BLANK
BLANK BLANK BLANK BLANK BLANK BLANK BLANK BLANK
BLANK BLANK BLANK BLANK BLANK BLANK BLANK BLANK
BLANK BLANK BLANK BLANK BLANK BLANK BLANK BLANK
"""
# 2 3 4 5 6
# note the 1st user defined column is "2" not zero or one, since tips are at 0 & 1
##################################

myvol = 140

# 1 = UL of BoxA, 2 = UR of BoxA, 3 = LL of BoxA, etc.
OffsetDict = {0: 'UL', 1: 'UR', 2: 'LL', 3: 'LR'}

# read in deck, etc
DefineDeck(deck)
printDeck()
InitializeRobot()
CurrentTipPosition = 1

for offset in [0,1,2,3]:
    CurrentTipPosition = retrieveTips(CurrentTipPosition)
    extraSeatTips()
    # initial mix
    position(0,2, position = OffsetDict[offset])
    mix(300,98,100,5)
    # From DW96W to SW96P with 140ul of glycerol
    # 6 replicate glycerol stocks
    for i in [3,4,5,6,7,8]:
        position(0,2, position = OffsetDict[offset])
        aspirate(myvol,depth=99,speed=50, mix=3)
        position(0,i, position = OffsetDict[offset])
        moveDispense(myvol, startdepth = 95, enddepth=60, speed = 50)
    disposeTips()

position(0,0)
ShutDownRobot()
quit()
gpl-3.0
-2,579,611,862,031,048,700
27.215686
84
0.603892
false
2.699812
false
false
false
cchristelis/watchkeeper
django_project/healthsites/utils.py
1
1525
__author__ = 'Irwan Fathurrahman <[email protected]>'
__date__ = '25/04/16'
__license__ = "GPL"
__copyright__ = 'kartoza.com'

import os
import json
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from healthsites.map_clustering import cluster, parse_bbox
from healthsites.models.healthsite import Healthsite


def healthsites_clustering(bbox, zoom, iconsize):
    # parse request params
    if zoom <= settings.CLUSTER_CACHE_MAX_ZOOM:
        # if geoname and tag are not set we can return the cached layer
        # try to read healthsites from disk
        filename = os.path.join(
            settings.CLUSTER_CACHE_DIR,
            '{}_{}_{}_healthsites.json'.format(zoom, *iconsize)
        )
        try:
            cached_locs = open(filename, 'rb')
            cached_data = cached_locs.read()
            return cached_data
        except IOError as e:
            localities = Healthsite.objects.all()
            object_list = cluster(localities, zoom, *iconsize)
            # create the missing cache
            with open(filename, 'wb') as cache_file:
                json_dump = json.dump(object_list, cache_file)
            return json_dump
    else:
        # make polygon
        bbox_poly = parse_bbox(bbox)
        # cluster healthsites for a view
        healthsites = Healthsite.objects.filter(point_geometry__contained=bbox_poly)
        object_list = cluster(healthsites, zoom, *iconsize)
        return json.dumps(object_list, cls=DjangoJSONEncoder)
bsd-2-clause
-7,391,638,456,330,655,000
34.465116
84
0.64
false
3.84131
false
false
false
kmerenkov/clitter
setup.py
1
1981
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2008, Konstantin Merenkov <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution.
#     * Neither the name of the <organization> nor the
#       names of its contributors may be used to endorse or promote products
#       derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY Konstantin Merenkov <[email protected]> ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Konstantin Merenkov <[email protected]> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from distutils.core import setup

setup(name='clitter',
      version='0.1',
      description='Command line twitter client',
      author='Konstantin Merenkov',
      author_email='[email protected]',
      url='http://github.com/kmerenkov/clitter/',
      packages=['clitter/twitter', 'clitter'],
      scripts=['bin/clitter'])
bsd-3-clause
7,848,016,887,700,294,000
52.540541
91
0.745583
false
4.084536
false
false
false
Debian/dak
daklib/termcolor.py
1
1725
# vim:set et sw=4:

"""
TermColor utils for dak

@contact: Debian FTP Master <[email protected]>
@copyright: 2019 Mo Zhou <[email protected]>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

###############################################################################

__all__ = []

###############################################################################

_COLORS_ = ('red', 'green', 'yellow', 'blue', 'violet', 'cyan', 'white')
_COLOR_CODES_ = {k: 31 + _COLORS_.index(k) for k in _COLORS_}


def colorize(s, fg, bg=None, bold=False, ul=False):
    '''
    s: str -- string to be colorized
    fg: str/int -- foreground color. See _COLORS_ for choices
    bg: str/int -- background color. See _COLORS_ for choices
    bold: bool -- bold font?
    ul: bool -- underline?
    '''
    if fg not in _COLORS_:
        raise ValueError("Unsupported foreground Color!")
    if (bg is not None) or any((bold, ul)):
        raise NotImplementedError
    return "\x1b[{}m{}\x1b[0;m".format(_COLOR_CODES_[fg], s)
gpl-2.0
-1,461,835,152,391,277,600
36.5
79
0.624928
false
3.938356
false
false
false
alissonpintor/stoky
app/basemodel.py
1
1966
from flask_sqlalchemy import Model
from sqlalchemy import exc as core_exc
from sqlalchemy.orm import exc


class Result(object):
    """
    Classe que recebe o resultado
    """
    def __init__(self, status, message):
        self.status = status
        self.message = message


class BaseModel(Model):
    """
    classe Model base que contem metodos comuns
    como delete, search by id, update
    """
    def update(self):
        from app import db

        try:
            db.session.add(self)
            db.session.commit()
            return Result(status=True, message='Registro realizado com sucesso')
        except Exception as e:
            return Result(status=False, message=str(e))

    def delete(self):
        from app import db

        try:
            db.session.delete(self)
            db.session.commit()
            return Result(status=True, message='Registro excluído com sucesso')
        except core_exc.IntegrityError:
            return Result(status=False, message='Não foi possível excluir. Erro de Integridade')
        except Exception as e:
            return Result(status=False, message=str(e))

    @classmethod
    def by_id(cls, id):
        from app import db

        primary_key = db.inspect(cls).primary_key[0]
        data = db.session.query(
            cls
        ).filter(
            primary_key==id
        ).first()

        return data

    @classmethod
    def by(cls, **kwargs):
        from app import db

        data = db.session.query(cls)
        for k, v in kwargs.items():
            if k.upper() in cls.__table__.columns.keys():
                column = cls.__table__.columns[k.upper()]
                data = data.filter(column==v)
        data = data.first()
        return data

    @classmethod
    def all(cls):
        from app import db

        data = cls.query.all()
        return data
gpl-3.0
-4,292,930,550,900,183,600
24.842105
96
0.545593
false
4.248918
false
false
false
brigittebigi/proceed
proceed/src/wxgui/sp_icons.py
1
2562
import os.path
from sp_glob import ICONS_PATH

# Frames
APP_ICON = os.path.join(ICONS_PATH, "app.ico")
APP_CHECK_ICON = os.path.join(ICONS_PATH, "appcheck.ico")
APP_EXPORT_PDF_ICON = os.path.join(ICONS_PATH, "appexport-pdf.ico")

# For the toolbar of the main frame
EXIT_ICON = os.path.join(ICONS_PATH, "exit.png")
OPEN_ICON = os.path.join(ICONS_PATH, "open.png")
SAVE_ICON = os.path.join(ICONS_PATH, "save.png")
CHECK_ICON = os.path.join(ICONS_PATH, "check.png")
EXPORT_ICON = os.path.join(ICONS_PATH, "export.png")
ADD_ICON = os.path.join(ICONS_PATH, "add.png")
EDIT_ICON = os.path.join(ICONS_PATH, "edit.png")
DELETE_ICON = os.path.join(ICONS_PATH, "delete.png")
ABOUT_ICON = os.path.join(ICONS_PATH, "about.png")
FEEDBACK_ICON = os.path.join(ICONS_PATH, "feedback.png")
CANCEL_ICON = os.path.join(ICONS_PATH, "cancel.png")
APPLY_ICON = os.path.join(ICONS_PATH, "apply.png")
HELP_ICON = os.path.join(ICONS_PATH, "help.png")
FORWARD_ICON = os.path.join(ICONS_PATH, "forward.png")
BACKWARD_ICON = os.path.join(ICONS_PATH, "backward.png")
NEXT_ICON = os.path.join(ICONS_PATH, "next.png")
PREVIOUS_ICON = os.path.join(ICONS_PATH, "previous.png")
HOME_ICON = os.path.join(ICONS_PATH, "home.png")
LOGOUT_ICON = os.path.join(ICONS_PATH, "logout.png")
SETTINGS_ICON = os.path.join(ICONS_PATH, "settings.png")

# For the other frames
AUTHOR_ICON = os.path.join(ICONS_PATH, "author.png")
DOCUMENT_ICON = os.path.join(ICONS_PATH, "document.png")
SESSION_ICON = os.path.join(ICONS_PATH, "session.png")
CONFERENCE_ICON = os.path.join(ICONS_PATH, "conference.png")
IMPORT_EXPORT_ICON = os.path.join(ICONS_PATH, "import-export.png")
GRID_ICON = os.path.join(ICONS_PATH, "grid.png")
TEX_ICON = os.path.join(ICONS_PATH, "tex.png")
WWW_ICON = os.path.join(ICONS_PATH, "www.png")
PROCESS_ICON = os.path.join(ICONS_PATH, "process.png")

# For the Feedback form
MAIL_DEFAULT_ICON = os.path.join(ICONS_PATH, "maildefault.png")
MAIL_GMAIL_ICON = os.path.join(ICONS_PATH, "mailgoogle.png")
MAIL_OTHER_ICON = os.path.join(ICONS_PATH, "mailother.png")

CHECKED_ICON = os.path.join(ICONS_PATH, "check.ico")
UNCHECKED_ICON = os.path.join(ICONS_PATH, "uncheck.ico")
RADIOCHECKED_ICON = os.path.join(ICONS_PATH, "radiocheck.ico")
RADIOUNCHECKED_ICON = os.path.join(ICONS_PATH, "radiouncheck.ico")
gpl-3.0
8,382,389,665,049,526,000
46.444444
67
0.639344
false
2.660436
false
false
false
bdang2012/taiga-back-casting
taiga/external_apps/serializers.py
1
2126
# Copyright (C) 2014-2015 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2015 Jesús Espino <[email protected]>
# Copyright (C) 2014-2015 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import json

from taiga.base.api import serializers

from . import models
from . import services

from django.utils.translation import ugettext as _


class ApplicationSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.Application
        fields = ("id", "name", "web", "description", "icon_url")


class ApplicationTokenSerializer(serializers.ModelSerializer):
    cyphered_token = serializers.CharField(source="cyphered_token", read_only=True)
    next_url = serializers.CharField(source="next_url", read_only=True)
    application = ApplicationSerializer(read_only=True)

    class Meta:
        model = models.ApplicationToken
        fields = ("user", "id", "application", "auth_code", "next_url")


class AuthorizationCodeSerializer(serializers.ModelSerializer):
    next_url = serializers.CharField(source="next_url", read_only=True)

    class Meta:
        model = models.ApplicationToken
        fields = ("auth_code", "state", "next_url")


class AccessTokenSerializer(serializers.ModelSerializer):
    cyphered_token = serializers.CharField(source="cyphered_token", read_only=True)
    next_url = serializers.CharField(source="next_url", read_only=True)

    class Meta:
        model = models.ApplicationToken
        fields = ("cyphered_token", )
agpl-3.0
-6,451,691,023,716,321,000
36.928571
83
0.733992
false
3.827027
false
false
false
9wfox/mvc
utility.py
1
6713
# -*- coding:utf-8 -*- """ 工具 历史: 2011-08-03 + 重构 get_pys_members。 2011-08-15 * 修改 con_mongo_object,支持 objectid。 2011-08-20 + 增加 template_path, static_path。 2011-08-25 * 将参数检查函数从 loigc 转移过来。 2011-08-27 * 重构 get_pys_members,改名 get_members。 """ from datetime import datetime from sys import argv from os import walk, listdir from os.path import abspath, join as path_join, dirname, basename, splitext from fnmatch import fnmatch from hashlib import md5 from base64 import b64encode, b64decode from inspect import ismodule, getmembers from bson.objectid import ObjectId try: from pyDes import des, triple_des, PAD_PKCS5, CBC _enc_key = lambda length: __conf__.ENCRYPT_KEY.zfill(length)[:length] _cipher = lambda: des(_enc_key(8), mode = CBC, IV = "\0" * 8, padmode = PAD_PKCS5) except: pass ### 应用程序路径函数 #################################################################################### ROOT_PATH = dirname(abspath(argv[0])) app_path = lambda n: path_join(ROOT_PATH, n) template_path = lambda n: path_join(ROOT_PATH, "{0}/{1}".format(__conf__.TEMPLATE_DIR_NAME, n)) static_path = lambda n: path_join(ROOT_PATH, "{0}/{1}".format(__conf__.STATIC_DIR_NAME, n)) ### 装饰器 ############################################################################################# def staticclass(cls): def new(cls, *args, **kwargs): raise RuntimeError("Static Class") setattr(cls, "__new__", staticmethod(new)) return cls class sealedclass(type): """ metaclass: Sealed Class """ _types = set() def __init__(cls, name, bases, attr): for t in bases: if t in cls._types: raise SyntaxError("sealed class") cls._types.add(cls) class partialclass(type): """ metaclass: Partial Class class A(object): y = 456 def test(self): print "test" class B(object): __metaclass__ = partialclass __mainclass__ = A x = 1234 def do(self): self.test() print self.x, self.y A().do() """ def __init__(cls, name, bases, attr): print "cls:", cls print "name:", name print "bases:", bases print "attr:", attr main_class = attr.pop("__mainclass__") map(lambda a: setattr(main_class, a[0], a[1]), [(k, v) for k, v in attr.items() if "__" not in k]) ### 杂类函数 ############################################################################################ def get_modules(pkg_name, module_filter = None): """ 返回包中所有符合条件的模块。 参数: pkg_name 包名称 module_filter 模块名过滤器 def (module_name) """ path = app_path(pkg_name) #py_filter = lambda f: all((fnmatch(f, "*.py"), not f.startswith("__"), module_filter and module_filter(f) or True)) py_filter = lambda f: all((fnmatch(f, "*.pyc") or fnmatch(f, "*.py"), not f.startswith("__"), module_filter and module_filter(f) or True)) names = [splitext(n)[0] for n in listdir(path) if py_filter(n)] return [__import__("{0}.{1}".format(pkg_name, n)).__dict__[n] for n in names] def get_members(pkg_name, module_filter = None, member_filter = None): """ 返回包中所有符合条件的模块成员。 参数: pkg_name 包名称 module_filter 模块名过滤器 def (module_name) member_filter 成员过滤器 def member_filter(module_member_object) """ modules = get_modules(pkg_name, module_filter) ret = {} for m in modules: members = dict(("{0}.{1}".format(v.__module__, k), v) for k, v in getmembers(m, member_filter)) ret.update(members) return ret def set_default_encoding(): """ 设置系统默认编码 """ import sys, locale reload(sys) lang, coding = locale.getdefaultlocale() #sys.setdefaultencoding(coding) def conv_mongo_object(d): """ 将 MongoDB 返回结果中的: (1) Unicode 还原为 str。 (2) ObjectId 还原为 str。 """ if isinstance(d, (unicode, ObjectId, datetime)): return str(d) elif isinstance(d, (list, tuple)): return [conv_mongo_object(x) for x in d] elif 
isinstance(d, dict): return dict([(conv_mongo_object(k), conv_mongo_object(v)) for k, v in d.items()]) else: return d mongo_conv = conv_mongo_object ### 哈希加密函数 ######################################################################################## def hash2(o): """ 哈希函数 """ return md5(str(o)).hexdigest() def encrypt(s, base64 = False): """ 对称加密函数 """ e = _cipher().encrypt(s) return base64 and b64encode(e) or e def decrypt(s, base64 = False): """ 对称解密函数 """ return _cipher().decrypt(base64 and b64decode(s) or s) ### 参数检查函数 ######################################################################################## def not_null(*args): """ 检查参数不为None """ if not all(map(lambda v: v is not None, args)): raise ValueError("Argument must be not None/Null!") def not_empty(*args): """ 检查参数不为空 """ if not all(args): raise ValueError("Argument must be not None/Null/Zero/Empty!") def args_range(min_value, max_value, *args): """ 检查参数范围 """ not_null(*args) if not all(map(lambda v: min_value <= v <= max_value, args)): raise ValueError("Argument must be between {0} and {1}!".format(min_value, max_value)) def args_length(min_len, max_len, *args): """ 检查参数长度 """ not_null(*args) if not all(map(lambda v: min_len <= len(v) <= max_len, args)): raise ValueError("Argument length must be between {0} and {1}!".format(min_len, max_len)) __all__ = ["ROOT_PATH", "app_path", "template_path", "static_path", "staticclass", "sealedclass", "partialclass", "get_modules", "get_members", "conv_mongo_object", "mongo_conv", "set_default_encoding", "hash2", "encrypt", "decrypt", "not_null", "not_empty", "args_range", "args_length"]
mit
6,438,945,186,189,151,000
24.227273
142
0.507799
false
3.233316
false
false
false
leanix/leanix-sdk-python
src/leanix/DocumentsApi.py
1
11645
#!/usr/bin/env python """ The MIT License (MIT) Copyright (c) 2017 LeanIX GmbH Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ import sys import os from models import * class DocumentsApi(object): def __init__(self, apiClient): self.apiClient = apiClient def getDocuments(self, **kwargs): """ Read all documents Args: relations, bool: If set to true, all relations of the Fact Sheet are fetched as well. Fetching all relations can be slower. Default: false. (optional) filter, str: Full-text filter (optional) referenceSystem, str: Reference system filter, e.g. Signavio (optional) referenceID, str: ReferenceID, e.g. Signavio ID (optional) factSheetID, str: FactSheetID, e.g. LeanIX ID (optional) Returns: Array[Document] """ allParams = ['relations', 'filter', 'referenceSystem', 'referenceID', 'factSheetID'] params = locals() for (key, val) in params['kwargs'].iteritems(): if key not in allParams: raise TypeError("Got an unexpected keyword argument '%s' to method getDocuments" % key) params[key] = val del params['kwargs'] resourcePath = '/documents' resourcePath = resourcePath.replace('{format}', 'json') method = 'GET' queryParams = {} headerParams = {} formParams = {} bodyParam = None if ('relations' in params): queryParams['relations'] = self.apiClient.toPathValue(params['relations']) if ('filter' in params): queryParams['filter'] = self.apiClient.toPathValue(params['filter']) if ('referenceSystem' in params): queryParams['referenceSystem'] = self.apiClient.toPathValue(params['referenceSystem']) if ('referenceID' in params): queryParams['referenceID'] = self.apiClient.toPathValue(params['referenceID']) if ('factSheetID' in params): queryParams['factSheetID'] = self.apiClient.toPathValue(params['factSheetID']) if formParams: headerParams['Content-type'] = 'application/x-www-form-urlencoded' # postData = (formParams if formParams else bodyParam) postData = params['body'] if 'body' in params else None response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams) if not response: return None responseObject = self.apiClient.deserialize(response, 'Array[Document]') return responseObject def createDocument(self, **kwargs): """ Create a new Document Args: body, Document: Message-Body (optional) Returns: Document """ allParams = ['body'] params = locals() for (key, val) in params['kwargs'].iteritems(): if key not in allParams: raise TypeError("Got an unexpected keyword argument '%s' to method createDocument" % 
key) params[key] = val del params['kwargs'] resourcePath = '/documents' resourcePath = resourcePath.replace('{format}', 'json') method = 'POST' queryParams = {} headerParams = {} formParams = {} bodyParam = None if formParams: headerParams['Content-type'] = 'application/x-www-form-urlencoded' if ('' in params): bodyParam = params[''] # postData = (formParams if formParams else bodyParam) postData = params['body'] if 'body' in params else None response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams) if not response: return None responseObject = self.apiClient.deserialize(response, 'Document') return responseObject def getDocument(self, ID, **kwargs): """ Read a Document by a given ID Args: ID, str: Unique ID (required) relations, bool: If set to true, all relations of the Fact Sheet are fetched as well. Fetching all relations can be slower. Default: false. (optional) Returns: Document """ allParams = ['ID', 'relations'] params = locals() for (key, val) in params['kwargs'].iteritems(): if key not in allParams: raise TypeError("Got an unexpected keyword argument '%s' to method getDocument" % key) params[key] = val del params['kwargs'] resourcePath = '/documents/{ID}' resourcePath = resourcePath.replace('{format}', 'json') method = 'GET' queryParams = {} headerParams = {} formParams = {} bodyParam = None if ('relations' in params): queryParams['relations'] = self.apiClient.toPathValue(params['relations']) if ('ID' in params): replacement = str(self.apiClient.toPathValue(params['ID'])) resourcePath = resourcePath.replace('{' + 'ID' + '}', replacement) if formParams: headerParams['Content-type'] = 'application/x-www-form-urlencoded' # postData = (formParams if formParams else bodyParam) postData = params['body'] if 'body' in params else None response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams) if not response: return None responseObject = self.apiClient.deserialize(response, 'Document') return responseObject def updateDocument(self, ID, **kwargs): """ Update a Document by a given ID Args: ID, str: Unique ID (required) body, Document: Message-Body (optional) Returns: Document """ allParams = ['ID', 'body'] params = locals() for (key, val) in params['kwargs'].iteritems(): if key not in allParams: raise TypeError("Got an unexpected keyword argument '%s' to method updateDocument" % key) params[key] = val del params['kwargs'] resourcePath = '/documents/{ID}' resourcePath = resourcePath.replace('{format}', 'json') method = 'PUT' queryParams = {} headerParams = {} formParams = {} bodyParam = None if ('ID' in params): replacement = str(self.apiClient.toPathValue(params['ID'])) resourcePath = resourcePath.replace('{' + 'ID' + '}', replacement) if formParams: headerParams['Content-type'] = 'application/x-www-form-urlencoded' if ('' in params): bodyParam = params[''] # postData = (formParams if formParams else bodyParam) postData = params['body'] if 'body' in params else None response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams) if not response: return None responseObject = self.apiClient.deserialize(response, 'Document') return responseObject def deleteDocument(self, ID, **kwargs): """ Delete a Document by a given ID Args: ID, str: Unique ID (required) Returns: """ allParams = ['ID'] params = locals() for (key, val) in params['kwargs'].iteritems(): if key not in allParams: raise TypeError("Got an unexpected keyword argument '%s' to method deleteDocument" % key) params[key] = val del 
params['kwargs'] resourcePath = '/documents/{ID}' resourcePath = resourcePath.replace('{format}', 'json') method = 'DELETE' queryParams = {} headerParams = {} formParams = {} bodyParam = None if ('ID' in params): replacement = str(self.apiClient.toPathValue(params['ID'])) resourcePath = resourcePath.replace('{' + 'ID' + '}', replacement) if formParams: headerParams['Content-type'] = 'application/x-www-form-urlencoded' # postData = (formParams if formParams else bodyParam) postData = params['body'] if 'body' in params else None response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams) def updateDataObject(self, ID, **kwargs): """ Update the data object for the given document ID Args: ID, str: Unique ID (required) body, DataObject: Message-Body (optional) Returns: DataObject """ allParams = ['ID', 'body'] params = locals() for (key, val) in params['kwargs'].iteritems(): if key not in allParams: raise TypeError("Got an unexpected keyword argument '%s' to method updateDataObject" % key) params[key] = val del params['kwargs'] resourcePath = '/documents/{ID}/dataobjects' resourcePath = resourcePath.replace('{format}', 'json') method = 'PUT' queryParams = {} headerParams = {} formParams = {} bodyParam = None if ('ID' in params): replacement = str(self.apiClient.toPathValue(params['ID'])) resourcePath = resourcePath.replace('{' + 'ID' + '}', replacement) if formParams: headerParams['Content-type'] = 'application/x-www-form-urlencoded' if ('' in params): bodyParam = params[''] # postData = (formParams if formParams else bodyParam) postData = params['body'] if 'body' in params else None response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams) if not response: return None responseObject = self.apiClient.deserialize(response, 'DataObject') return responseObject
mit
-3,196,921,589,828,660,000
30.136364
162
0.575784
false
4.722222
false
false
false
mvaled/sentry
src/sentry/south_migrations/0480_incidentactivity.py
1
136910
# -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): # Flag to indicate if this migration is too risky # to run online and needs to be coordinated for offline is_dangerous = False def forwards(self, orm): # Adding model 'IncidentActivity' db.create_table('sentry_incidentactivity', ( ('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)), ('incident', self.gf('sentry.db.models.fields.foreignkey.FlexibleForeignKey')( to=orm['sentry.Incident'])), ('user', self.gf('sentry.db.models.fields.foreignkey.FlexibleForeignKey')( to=orm['sentry.User'], null=True)), ('type', self.gf('django.db.models.fields.IntegerField')()), ('value', self.gf('django.db.models.fields.TextField')(null=True)), ('previous_value', self.gf('django.db.models.fields.TextField')(null=True)), ('comment', self.gf('django.db.models.fields.TextField')(null=True)), ('event_stats_snapshot', self.gf('sentry.db.models.fields.foreignkey.FlexibleForeignKey')( to=orm['sentry.TimeSeriesSnapshot'], null=True)), ('date_added', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)), )) db.send_create_signal('sentry', ['IncidentActivity']) # Adding model 'TimeSeriesSnapshot' db.create_table('sentry_timeseriessnapshot', ( ('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)), ('start', self.gf('django.db.models.fields.DateTimeField')()), ('end', self.gf('django.db.models.fields.DateTimeField')()), ('values', self.gf('sentry.db.models.fields.array.ArrayField')( of=(u'django.db.models.fields.IntegerField', [], {}))), ('period', self.gf('django.db.models.fields.IntegerField')()), ('date_added', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)), )) db.send_create_signal('sentry', ['TimeSeriesSnapshot']) def backwards(self, orm): # Deleting model 'IncidentActivity' db.delete_table('sentry_incidentactivity') # Deleting model 'TimeSeriesSnapshot' db.delete_table('sentry_timeseriessnapshot') models = { 'sentry.activity': { 'Meta': {'unique_together': '()', 'object_name': 'Activity', 'index_together': '()'}, 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}), 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}), 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}) }, 'sentry.apiapplication': { 'Meta': {'unique_together': '()', 'object_name': 'ApiApplication', 'index_together': '()'}, 'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'client_id': ('django.db.models.fields.CharField', [], {'default': "'12bc89ca7374404ea6921393b99c2e83ca9087accd2345a19bc5c5fc3892410a'", 'unique': 'True', 'max_length': '64'}), 'client_secret': ('sentry.db.models.fields.encrypted.EncryptedTextField', [], {'default': 
"'3b5eb3fdb9a44c908cc9392a5fd7b133e999526dea0d455ea24fc3cd719a22c0'"}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'homepage_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'default': "'Quiet Spaniel'", 'max_length': '64', 'blank': 'True'}), 'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}), 'privacy_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}), 'redirect_uris': ('django.db.models.fields.TextField', [], {}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}), 'terms_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}) }, 'sentry.apiauthorization': { 'Meta': {'unique_together': "(('user', 'application'),)", 'object_name': 'ApiAuthorization', 'index_together': '()'}, 'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': (u'django.db.models.fields.TextField', [], {})}), 'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.apigrant': { 'Meta': {'unique_together': '()', 'object_name': 'ApiGrant', 'index_together': '()'}, 'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']"}), 'code': ('django.db.models.fields.CharField', [], {'default': "'20e6168c01b8433daaf1d95b568cec7e'", 'max_length': '64', 'db_index': 'True'}), 'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2019, 5, 16, 0, 0)', 'db_index': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'redirect_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': (u'django.db.models.fields.TextField', [], {})}), 'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.apikey': { 'Meta': {'unique_together': '()', 'object_name': 'ApiKey', 'index_together': '()'}, 'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}), 'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'key_set'", 'to': "orm['sentry.Organization']"}), 'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': 
(u'django.db.models.fields.TextField', [], {})}), 'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}) }, 'sentry.apitoken': { 'Meta': {'unique_together': '()', 'object_name': 'ApiToken', 'index_together': '()'}, 'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2019, 6, 15, 0, 0)', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'refresh_token': ('django.db.models.fields.CharField', [], {'default': "'1baed9fb48f145d2ac57b013160dc650e4c940d6c5f14789a331cf28b3af7c45'", 'max_length': '64', 'unique': 'True', 'null': 'True'}), 'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': (u'django.db.models.fields.TextField', [], {})}), 'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}), 'token': ('django.db.models.fields.CharField', [], {'default': "'cde3d55c0f444c42acd08de782b5f7fcf3a0c44d35a94cb4b40472b82a437a0d'", 'unique': 'True', 'max_length': '64'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.assistantactivity': { 'Meta': {'unique_together': "(('user', 'guide_id'),)", 'object_name': 'AssistantActivity', 'db_table': "'sentry_assistant_activity'", 'index_together': '()'}, 'dismissed_ts': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'guide_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'useful': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}), 'viewed_ts': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}) }, 'sentry.auditlogentry': { 'Meta': {'unique_together': '()', 'object_name': 'AuditLogEntry', 'index_together': '()'}, 'actor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "u'audit_actors'", 'null': 'True', 'to': "orm['sentry.User']"}), 'actor_key': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiKey']", 'null': 'True', 'blank': 'True'}), 'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}), 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}), 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'target_user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "u'audit_targets'", 
'null': 'True', 'to': "orm['sentry.User']"}) }, 'sentry.authenticator': { 'Meta': {'unique_together': "(('user', 'type'),)", 'object_name': 'Authenticator', 'db_table': "'auth_authenticator'", 'index_together': '()'}, 'config': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}), 'last_used_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.authidentity': { 'Meta': {'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity', 'index_together': '()'}, 'auth_provider': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.AuthProvider']"}), 'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ident': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.authprovider': { 'Meta': {'unique_together': '()', 'object_name': 'AuthProvider', 'index_together': '()'}, 'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'default_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}), 'default_teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}), 'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'unique': 'True'}), 'provider': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}) }, 'sentry.broadcast': { 'Meta': {'unique_together': '()', 'object_name': 'Broadcast', 'index_together': '()'}, 'cta': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_expires': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2019, 5, 23, 0, 0)', 'null': 'True', 'blank': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 
'True'}), 'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'message': ('django.db.models.fields.CharField', [], {'max_length': '256'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'upstream_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}) }, 'sentry.broadcastseen': { 'Meta': {'unique_together': "(('broadcast', 'user'),)", 'object_name': 'BroadcastSeen', 'index_together': '()'}, 'broadcast': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Broadcast']"}), 'date_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.commit': { 'Meta': {'unique_together': "(('repository_id', 'key'),)", 'object_name': 'Commit', 'index_together': "(('repository_id', 'date_added'),)"}, 'author': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.CommitAuthor']", 'null': 'True'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'message': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}) }, 'sentry.commitauthor': { 'Meta': {'unique_together': "(('organization_id', 'email'), ('organization_id', 'external_id'))", 'object_name': 'CommitAuthor', 'index_together': '()'}, 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), 'external_id': ('django.db.models.fields.CharField', [], {'max_length': '164', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}) }, 'sentry.commitfilechange': { 'Meta': {'unique_together': "(('commit', 'filename'),)", 'object_name': 'CommitFileChange', 'index_together': '()'}, 'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}), 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'type': ('django.db.models.fields.CharField', [], {'max_length': '1'}) }, 'sentry.counter': { 'Meta': {'unique_together': '()', 'object_name': 'Counter', 'db_table': "'sentry_projectcounter'", 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'unique': 'True'}), 'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.dashboard': { 'Meta': {'unique_together': "(('organization', 'title'),)", 'object_name': 'Dashboard', 
'index_together': '()'}, 'created_by': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'sentry.deletedorganization': { 'Meta': {'unique_together': '()', 'object_name': 'DeletedOrganization', 'index_together': '()'}, 'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}), 'actor_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}), 'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'date_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'date_deleted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}) }, 'sentry.deletedproject': { 'Meta': {'unique_together': '()', 'object_name': 'DeletedProject', 'index_together': '()'}, 'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}), 'actor_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}), 'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'date_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'date_deleted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}), 'organization_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'organization_slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}), 'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}) }, 'sentry.deletedteam': { 'Meta': {'unique_together': '()', 'object_name': 'DeletedTeam', 'index_together': '()'}, 'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}), 'actor_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}), 'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'date_created': 
('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'date_deleted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}), 'organization_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'organization_slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}), 'reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}) }, 'sentry.deploy': { 'Meta': {'unique_together': '()', 'object_name': 'Deploy', 'index_together': '()'}, 'date_finished': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}), 'notified': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'db_index': 'True', 'blank': 'True'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}) }, 'sentry.discoversavedquery': { 'Meta': {'unique_together': '()', 'object_name': 'DiscoverSavedQuery', 'index_together': '()'}, 'created_by': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'through': "orm['sentry.DiscoverSavedQueryProject']", 'symmetrical': 'False'}), 'query': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}) }, 'sentry.discoversavedqueryproject': { 'Meta': {'unique_together': "(('project', 'discover_saved_query'),)", 'object_name': 'DiscoverSavedQueryProject', 'index_together': '()'}, 'discover_saved_query': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.DiscoverSavedQuery']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}) }, 
'sentry.distribution': { 'Meta': {'unique_together': "(('release', 'name'),)", 'object_name': 'Distribution', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}) }, 'sentry.email': { 'Meta': {'unique_together': '()', 'object_name': 'Email', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('sentry.db.models.fields.citext.CIEmailField', [], {'unique': 'True', 'max_length': '75'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}) }, 'sentry.environment': { 'Meta': {'unique_together': "(('organization_id', 'name'),)", 'object_name': 'Environment', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'through': "orm['sentry.EnvironmentProject']", 'symmetrical': 'False'}) }, 'sentry.environmentproject': { 'Meta': {'unique_together': "(('project', 'environment'),)", 'object_name': 'EnvironmentProject', 'index_together': '()'}, 'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'is_hidden': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}) }, 'sentry.event': { 'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group_id', 'datetime'),)"}, 'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}), 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}), 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'message': ('django.db.models.fields.TextField', [], {}), 'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}), 'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'null': 'True'}) }, 'sentry.eventattachment': { 'Meta': {'unique_together': "(('project_id', 'event_id', 'file'),)", 'object_name': 
'EventAttachment', 'index_together': "(('project_id', 'date_added'),)"}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}), 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}), 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'db_index': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.TextField', [], {}), 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.eventmapping': { 'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'EventMapping', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.eventprocessingissue': { 'Meta': {'unique_together': "(('raw_event', 'processing_issue'),)", 'object_name': 'EventProcessingIssue', 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'processing_issue': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProcessingIssue']"}), 'raw_event': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.RawEvent']"}) }, 'sentry.eventtag': { 'Meta': {'unique_together': "(('event_id', 'key_id', 'value_id'),)", 'object_name': 'EventTag', 'index_together': "(('group_id', 'key_id', 'value_id'),)"}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.eventuser': { 'Meta': {'unique_together': "(('project_id', 'ident'), ('project_id', 'hash'))", 'object_name': 'EventUser', 'index_together': "(('project_id', 'email'), ('project_id', 'username'), ('project_id', 'ip_address'))"}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True'}), 'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ident': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}), 'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}), 'project_id': 
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}) }, 'sentry.externalissue': { 'Meta': {'unique_together': "(('organization_id', 'integration_id', 'key'),)", 'object_name': 'ExternalIssue', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'integration_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'metadata': ('sentry.db.models.fields.jsonfield.JSONField', [], {'null': 'True'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'title': ('django.db.models.fields.TextField', [], {'null': 'True'}) }, 'sentry.featureadoption': { 'Meta': {'unique_together': "(('organization', 'feature_id'),)", 'object_name': 'FeatureAdoption', 'index_together': '()'}, 'applicable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}), 'date_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'feature_id': ('django.db.models.fields.PositiveIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}) }, 'sentry.file': { 'Meta': {'unique_together': '()', 'object_name': 'File', 'index_together': '()'}, 'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'legacy_blob'", 'null': 'True', 'to': "orm['sentry.FileBlob']"}), 'blobs': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.FileBlob']", 'through': "orm['sentry.FileBlobIndex']", 'symmetrical': 'False'}), 'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'db_index': 'True'}), 'headers': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.TextField', [], {}), 'path': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'type': ('django.db.models.fields.CharField', [], {'max_length': '64'}) }, 'sentry.fileblob': { 'Meta': {'unique_together': '()', 'object_name': 'FileBlob', 'index_together': '()'}, 'checksum': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'path': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}) }, 'sentry.fileblobindex': { 'Meta': 
{'unique_together': "(('file', 'blob', 'offset'),)", 'object_name': 'FileBlobIndex', 'index_together': '()'}, 'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.FileBlob']"}), 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}) }, 'sentry.fileblobowner': { 'Meta': {'unique_together': "(('blob', 'organization'),)", 'object_name': 'FileBlobOwner', 'index_together': '()'}, 'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.FileBlob']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}) }, 'sentry.group': { 'Meta': {'unique_together': "(('project', 'short_id'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'", 'index_together': "(('project', 'first_release'),)"}, 'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}), 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}), 'first_release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']", 'null': 'True', 'on_delete': 'models.PROTECT'}), 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}), 'logger': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}), 'message': ('django.db.models.fields.TextField', [], {}), 'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}), 'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}), 'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}), 'short_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}), 'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}), 'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}), 'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1', 'db_index': 'True'}) }, 'sentry.groupassignee': { 'Meta': {'unique_together': '()', 'object_name': 'GroupAssignee', 
'db_table': "'sentry_groupasignee'", 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'assignee_set'", 'to': "orm['sentry.Project']"}), 'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'sentry_assignee_set'", 'null': 'True', 'to': "orm['sentry.Team']"}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'sentry_assignee_set'", 'null': 'True', 'to': "orm['sentry.User']"}) }, 'sentry.groupbookmark': { 'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}), 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'bookmark_set'", 'to': "orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'bookmark_set'", 'to': "orm['sentry.Project']"}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'sentry_bookmark_set'", 'to': "orm['sentry.User']"}) }, 'sentry.groupcommitresolution': { 'Meta': {'unique_together': "(('group_id', 'commit_id'),)", 'object_name': 'GroupCommitResolution', 'index_together': '()'}, 'commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}) }, 'sentry.groupemailthread': { 'Meta': {'unique_together': "(('email', 'group'), ('email', 'msgid'))", 'object_name': 'GroupEmailThread', 'index_together': '()'}, 'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'groupemail_set'", 'to': "orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'msgid': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'groupemail_set'", 'to': "orm['sentry.Project']"}) }, 'sentry.groupenvironment': { 'Meta': {'unique_together': "(('group', 'environment'),)", 'object_name': 'GroupEnvironment', 'index_together': "(('environment', 'first_release'),)"}, 'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']"}), 'first_release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']", 'null': 'True', 'on_delete': 'models.DO_NOTHING'}), 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 
'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}), 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}) }, 'sentry.grouphash': { 'Meta': {'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash', 'index_together': '()'}, 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}), 'group_tombstone_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}), 'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}), 'state': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}) }, 'sentry.grouplink': { 'Meta': {'unique_together': "(('group_id', 'linked_type', 'linked_id'),)", 'object_name': 'GroupLink', 'index_together': '()'}, 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}), 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'linked_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'linked_type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'db_index': 'True'}), 'relationship': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '2'}) }, 'sentry.groupmeta': { 'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta', 'index_together': '()'}, 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'value': ('django.db.models.fields.TextField', [], {}) }, 'sentry.groupredirect': { 'Meta': {'unique_together': '()', 'object_name': 'GroupRedirect', 'index_together': '()'}, 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'db_index': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'previous_group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'unique': 'True'}) }, 'sentry.grouprelease': { 'Meta': {'unique_together': "(('group_id', 'release_id', 'environment'),)", 'object_name': 'GroupRelease', 'index_together': '()'}, 'environment': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64'}), 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 
'release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}) }, 'sentry.groupresolution': { 'Meta': {'unique_together': '()', 'object_name': 'GroupResolution', 'index_together': '()'}, 'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}), 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}) }, 'sentry.grouprulestatus': { 'Meta': {'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}), 'rule': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}), 'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}) }, 'sentry.groupseen': { 'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen', 'index_together': '()'}, 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'}) }, 'sentry.groupshare': { 'Meta': {'unique_together': '()', 'object_name': 'GroupShare', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}), 'uuid': ('django.db.models.fields.CharField', [], {'default': "'993f599bf9114fe1b88e46386a3514da'", 'unique': 'True', 'max_length': '32'}) }, 'sentry.groupsnooze': { 'Meta': {'unique_together': '()', 'object_name': 'GroupSnooze', 'index_together': '()'}, 'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'group': 
('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'state': ('sentry.db.models.fields.jsonfield.JSONField', [], {'null': 'True'}), 'until': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'user_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'user_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}) }, 'sentry.groupsubscription': { 'Meta': {'unique_together': "(('group', 'user'),)", 'object_name': 'GroupSubscription', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}), 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'subscription_set'", 'to': "orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'subscription_set'", 'to': "orm['sentry.Project']"}), 'reason': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.grouptagkey': { 'Meta': {'unique_together': "(('project_id', 'group_id', 'key'),)", 'object_name': 'GroupTagKey', 'index_together': '()'}, 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}), 'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}) }, 'sentry.grouptagvalue': { 'Meta': {'unique_together': "(('group_id', 'key', 'value'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'", 'index_together': "(('project_id', 'key', 'value', 'last_seen'),)"}, 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}), 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}), 'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '200'}) }, 'sentry.grouptombstone': { 'Meta': {'unique_together': '()', 'object_name': 'GroupTombstone', 'index_together': '()'}, 'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'culprit': ('django.db.models.fields.CharField', 
[], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'blank': 'True'}), 'message': ('django.db.models.fields.TextField', [], {}), 'previous_group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'unique': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}) }, 'sentry.identity': { 'Meta': {'unique_together': "(('idp', 'external_id'), ('idp', 'user'))", 'object_name': 'Identity', 'index_together': '()'}, 'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'idp': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.IdentityProvider']"}), 'scopes': ('sentry.db.models.fields.array.ArrayField', [], {'of': (u'django.db.models.fields.TextField', [], {})}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.identityprovider': { 'Meta': {'unique_together': "(('type', 'external_id'),)", 'object_name': 'IdentityProvider', 'index_together': '()'}, 'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}), 'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'type': ('django.db.models.fields.CharField', [], {'max_length': '64'}) }, 'sentry.incident': { 'Meta': {'unique_together': "(('organization', 'identifier'),)", 'object_name': 'Incident', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_closed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'date_detected': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_started': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'detection_uuid': ('sentry.db.models.fields.uuid.UUIDField', [], {'max_length': '32', 'null': 'True', 'db_index': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'incidents'", 'symmetrical': 'False', 'through': "orm['sentry.IncidentGroup']", 'to': "orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'identifier': ('django.db.models.fields.IntegerField', [], {}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'projects': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'incidents'", 'symmetrical': 'False', 'through': 
"orm['sentry.IncidentProject']", 'to': "orm['sentry.Project']"}), 'query': ('django.db.models.fields.TextField', [], {}), 'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {}), 'title': ('django.db.models.fields.TextField', [], {}) }, 'sentry.incidentactivity': { 'Meta': {'unique_together': '()', 'object_name': 'IncidentActivity', 'index_together': '()'}, 'comment': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'event_stats_snapshot': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.TimeSeriesSnapshot']", 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'incident': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Incident']"}), 'previous_value': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'type': ('django.db.models.fields.IntegerField', [], {}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}), 'value': ('django.db.models.fields.TextField', [], {'null': 'True'}) }, 'sentry.incidentgroup': { 'Meta': {'unique_together': "(('group', 'incident'),)", 'object_name': 'IncidentGroup', 'index_together': '()'}, 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'db_index': 'False'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'incident': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Incident']"}) }, 'sentry.incidentproject': { 'Meta': {'unique_together': "(('project', 'incident'),)", 'object_name': 'IncidentProject', 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'incident': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Incident']"}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'db_index': 'False'}) }, 'sentry.incidentseen': { 'Meta': {'unique_together': "(('user', 'incident'),)", 'object_name': 'IncidentSeen', 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'incident': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Incident']"}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'}) }, 'sentry.integration': { 'Meta': {'unique_together': "(('provider', 'external_id'),)", 'object_name': 'Integration', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}), 'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'metadata': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'organizations': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'integrations'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationIntegration']", 'to': "orm['sentry.Organization']"}), 'projects': 
('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'integrations'", 'symmetrical': 'False', 'through': "orm['sentry.ProjectIntegration']", 'to': "orm['sentry.Project']"}), 'provider': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}) }, 'sentry.integrationexternalproject': { 'Meta': {'unique_together': "(('organization_integration_id', 'external_id'),)", 'object_name': 'IntegrationExternalProject', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'organization_integration_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'resolved_status': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'unresolved_status': ('django.db.models.fields.CharField', [], {'max_length': '64'}) }, 'sentry.latestrelease': { 'Meta': {'unique_together': "(('repository_id', 'environment_id'),)", 'object_name': 'LatestRelease', 'index_together': '()'}, 'commit_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}), 'deploy_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}), 'environment_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'release_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'repository_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.lostpasswordhash': { 'Meta': {'unique_together': '()', 'object_name': 'LostPasswordHash', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'}) }, 'sentry.monitor': { 'Meta': {'unique_together': '()', 'object_name': 'Monitor', 'index_together': "(('type', 'next_checkin'),)"}, 'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'guid': ('sentry.db.models.fields.uuid.UUIDField', [], {'auto_add': "'uuid:uuid4'", 'unique': 'True', 'max_length': '32'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'last_checkin': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'next_checkin': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}), 'type': 
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}) }, 'sentry.monitorcheckin': { 'Meta': {'unique_together': '()', 'object_name': 'MonitorCheckIn', 'index_together': '()'}, 'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'duration': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'guid': ('sentry.db.models.fields.uuid.UUIDField', [], {'auto_add': "'uuid:uuid4'", 'unique': 'True', 'max_length': '32'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'location': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.MonitorLocation']", 'null': 'True'}), 'monitor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Monitor']"}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}) }, 'sentry.monitorlocation': { 'Meta': {'unique_together': '()', 'object_name': 'MonitorLocation', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'guid': ('sentry.db.models.fields.uuid.UUIDField', [], {'auto_add': "'uuid:uuid4'", 'unique': 'True', 'max_length': '32'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}) }, 'sentry.option': { 'Meta': {'unique_together': '()', 'object_name': 'Option', 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}), 'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}) }, 'sentry.organization': { 'Meta': {'unique_together': '()', 'object_name': 'Organization', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'default_role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}), 'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']"}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}) }, 'sentry.organizationaccessrequest': { 'Meta': {'unique_together': "(('team', 'member'),)", 'object_name': 'OrganizationAccessRequest', 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'member': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': 
"orm['sentry.OrganizationMember']"}), 'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}) }, 'sentry.organizationavatar': { 'Meta': {'unique_together': '()', 'object_name': 'OrganizationAvatar', 'index_together': '()'}, 'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}), 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'avatar'", 'unique': 'True', 'to': "orm['sentry.Organization']"}) }, 'sentry.organizationintegration': { 'Meta': {'unique_together': "(('organization', 'integration'),)", 'object_name': 'OrganizationIntegration', 'index_together': '()'}, 'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}), 'default_auth_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'integration': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Integration']"}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}) }, 'sentry.organizationmember': { 'Meta': {'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), 'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}), 'has_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'member_set'", 'to': "orm['sentry.Organization']"}), 'role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}), 'teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMemberTeam']", 'blank': 'True'}), 'token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}), 'token_expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}), 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50', 'blank': 'True'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "u'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']"}) }, 'sentry.organizationmemberteam': { 'Meta': {'unique_together': "(('team', 'organizationmember'),)", 'object_name': 
'OrganizationMemberTeam', 'db_table': "'sentry_organizationmember_teams'", 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'organizationmember': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}), 'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}) }, 'sentry.organizationonboardingtask': { 'Meta': {'unique_together': "(('organization', 'task'),)", 'object_name': 'OrganizationOnboardingTask', 'index_together': '()'}, 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}), 'date_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}) }, 'sentry.organizationoption': { 'Meta': {'unique_together': "(('organization', 'key'),)", 'object_name': 'OrganizationOption', 'db_table': "'sentry_organizationoptions'", 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}) }, 'sentry.platformexternalissue': { 'Meta': {'unique_together': "(('group_id', 'service_type'),)", 'object_name': 'PlatformExternalIssue', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'display_name': ('django.db.models.fields.TextField', [], {}), 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'service_type': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'web_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}) }, 'sentry.processingissue': { 'Meta': {'unique_together': "(('project', 'checksum', 'type'),)", 'object_name': 'ProcessingIssue', 'index_together': '()'}, 'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}), 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}), 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}), 'type': ('django.db.models.fields.CharField', [], {'max_length': '30'}) }, 'sentry.project': { 'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Project', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', 
[], {'default': 'datetime.datetime.now'}), 'first_event': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0', 'null': 'True'}), 'forced_color': ('django.db.models.fields.CharField', [], {'max_length': '6', 'null': 'True', 'blank': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}), 'teams': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'teams'", 'symmetrical': 'False', 'through': "orm['sentry.ProjectTeam']", 'to': "orm['sentry.Team']"}) }, 'sentry.projectavatar': { 'Meta': {'unique_together': '()', 'object_name': 'ProjectAvatar', 'index_together': '()'}, 'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}), 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'avatar'", 'unique': 'True', 'to': "orm['sentry.Project']"}) }, 'sentry.projectbookmark': { 'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'ProjectBookmark', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True', 'blank': 'True'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.projectcficachefile': { 'Meta': {'unique_together': "(('project', 'debug_file'),)", 'object_name': 'ProjectCfiCacheFile', 'index_together': '()'}, 'cache_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}), 'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'debug_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProjectDebugFile']", 'on_delete': 'models.DO_NOTHING', 'db_column': "'dsym_file_id'"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}), 'version': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}) }, 'sentry.projectdebugfile': { 'Meta': {'unique_together': '()', 'object_name': 'ProjectDebugFile', 'db_table': "'sentry_projectdsymfile'", 'index_together': "(('project', 'debug_id'), ('project', 'code_id'))"}, 'code_id': 
('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'cpu_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'null': 'True'}), 'debug_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_column': "'uuid'"}), 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'object_name': ('django.db.models.fields.TextField', [], {}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}) }, 'sentry.projectintegration': { 'Meta': {'unique_together': "(('project', 'integration'),)", 'object_name': 'ProjectIntegration', 'index_together': '()'}, 'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'integration': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Integration']"}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}) }, 'sentry.projectkey': { 'Meta': {'unique_together': '()', 'object_name': 'ProjectKey', 'index_together': '()'}, 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'key_set'", 'to': "orm['sentry.Project']"}), 'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}), 'rate_limit_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'rate_limit_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), 'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}) }, 'sentry.projectoption': { 'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'", 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}), 'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}) }, 'sentry.projectownership': { 'Meta': {'unique_together': '()', 'object_name': 'ProjectOwnership', 'index_together': '()'}, 'auto_assignment': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'fallthrough': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], 
{'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'unique': 'True'}), 'raw': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'schema': ('sentry.db.models.fields.jsonfield.JSONField', [], {'null': 'True'}) }, 'sentry.projectplatform': { 'Meta': {'unique_together': "(('project_id', 'platform'),)", 'object_name': 'ProjectPlatform', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'platform': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.projectredirect': { 'Meta': {'unique_together': "(('organization', 'redirect_slug'),)", 'object_name': 'ProjectRedirect', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}), 'redirect_slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}) }, 'sentry.projectsymcachefile': { 'Meta': {'unique_together': "(('project', 'debug_file'),)", 'object_name': 'ProjectSymCacheFile', 'index_together': '()'}, 'cache_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}), 'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'debug_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProjectDebugFile']", 'on_delete': 'models.DO_NOTHING', 'db_column': "'dsym_file_id'"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}), 'version': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}) }, 'sentry.projectteam': { 'Meta': {'unique_together': "(('project', 'team'),)", 'object_name': 'ProjectTeam', 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}), 'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}) }, 'sentry.promptsactivity': { 'Meta': {'unique_together': "(('user', 'feature', 'organization_id', 'project_id'),)", 'object_name': 'PromptsActivity', 'index_together': '()'}, 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'feature': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'organization_id': 
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.pullrequest': { 'Meta': {'unique_together': "(('repository_id', 'key'),)", 'object_name': 'PullRequest', 'db_table': "'sentry_pull_request'", 'index_together': "(('repository_id', 'date_added'), ('organization_id', 'merge_commit_sha'))"}, 'author': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.CommitAuthor']", 'null': 'True'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'merge_commit_sha': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'message': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'title': ('django.db.models.fields.TextField', [], {'null': 'True'}) }, 'sentry.pullrequestcommit': { 'Meta': {'unique_together': "(('pull_request', 'commit'),)", 'object_name': 'PullRequestCommit', 'db_table': "'sentry_pullrequest_commit'", 'index_together': '()'}, 'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'pull_request': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.PullRequest']"}) }, 'sentry.rawevent': { 'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'RawEvent', 'index_together': '()'}, 'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}), 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}) }, 'sentry.recentsearch': { 'Meta': {'unique_together': "(('user', 'organization', 'type', 'query_hash'),)", 'object_name': 'RecentSearch', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'query': ('django.db.models.fields.TextField', [], {}), 'query_hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'type': ('django.db.models.fields.PositiveSmallIntegerField', [], {}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'}) }, 'sentry.relay': { 'Meta': {'unique_together': '()', 'object_name': 'Relay', 'index_together': '()'}, 'first_seen': 
('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'is_internal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'public_key': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'relay_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}) }, 'sentry.release': { 'Meta': {'unique_together': "(('organization', 'version'),)", 'object_name': 'Release', 'index_together': '()'}, 'authors': ('sentry.db.models.fields.array.ArrayField', [], {'of': (u'django.db.models.fields.TextField', [], {})}), 'commit_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}), 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_released': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'last_commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'last_deploy_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'projects': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'releases'", 'symmetrical': 'False', 'through': "orm['sentry.ReleaseProject']", 'to': "orm['sentry.Project']"}), 'ref': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}), 'total_deploys': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'version': ('django.db.models.fields.CharField', [], {'max_length': '250'}) }, 'sentry.releasecommit': { 'Meta': {'unique_together': "(('release', 'commit'), ('release', 'order'))", 'object_name': 'ReleaseCommit', 'index_together': '()'}, 'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}) }, 'sentry.releaseenvironment': { 'Meta': {'unique_together': "(('organization', 'release', 'environment'),)", 
'object_name': 'ReleaseEnvironment', 'db_table': "'sentry_environmentrelease'", 'index_together': '()'}, 'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']"}), 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}) }, 'sentry.releasefile': { 'Meta': {'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile', 'index_together': "(('release', 'name'),)"}, 'dist': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Distribution']", 'null': 'True'}), 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ident': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'name': ('django.db.models.fields.TextField', [], {}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}) }, 'sentry.releaseheadcommit': { 'Meta': {'unique_together': "(('repository_id', 'release'),)", 'object_name': 'ReleaseHeadCommit', 'index_together': '()'}, 'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}), 'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}) }, 'sentry.releaseproject': { 'Meta': {'unique_together': "(('project', 'release'),)", 'object_name': 'ReleaseProject', 'db_table': "'sentry_release_project'", 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}), 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}) }, 'sentry.releaseprojectenvironment': { 'Meta': {'unique_together': "(('project', 'release', 'environment'),)", 'object_name': 'ReleaseProjectEnvironment', 'index_together': '()'}, 'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']"}), 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'last_deploy_id': 
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'new_issues_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}), 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}) }, 'sentry.repository': { 'Meta': {'unique_together': "(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))", 'object_name': 'Repository', 'index_together': '()'}, 'config': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'integration_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'provider': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}) }, 'sentry.reprocessingreport': { 'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'ReprocessingReport', 'index_together': '()'}, 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}) }, 'sentry.rule': { 'Meta': {'unique_together': '()', 'object_name': 'Rule', 'index_together': '()'}, 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}) }, 'sentry.savedsearch': { 'Meta': {'unique_together': "(('project', 'name'), ('organization', 'owner', 'type'))", 'object_name': 'SavedSearch', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_global': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 
'True', 'db_index': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'null': 'True'}), 'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}), 'query': ('django.db.models.fields.TextField', [], {}), 'type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True'}) }, 'sentry.savedsearchuserdefault': { 'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'SavedSearchUserDefault', 'db_table': "'sentry_savedsearch_userdefault'", 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}), 'savedsearch': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.SavedSearch']"}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.scheduleddeletion': { 'Meta': {'unique_together': "(('app_label', 'model_name', 'object_id'),)", 'object_name': 'ScheduledDeletion', 'index_together': '()'}, 'aborted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}), 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_scheduled': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2019, 6, 15, 0, 0)'}), 'guid': ('django.db.models.fields.CharField', [], {'default': "'a426ce10c7824ca2a31b88c01cf51105'", 'unique': 'True', 'max_length': '32'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'in_progress': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'model_name': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'object_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.scheduledjob': { 'Meta': {'unique_together': '()', 'object_name': 'ScheduledJob', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_scheduled': ('django.db.models.fields.DateTimeField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'payload': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}) }, 'sentry.sentryapp': { 'Meta': {'unique_together': '()', 'object_name': 'SentryApp', 'index_together': '()'}, 'application': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'sentry_app'", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['sentry.ApiApplication']"}), 'author': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_deleted': ('django.db.models.fields.DateTimeField', [], 
{'null': 'True', 'blank': 'True'}), 'date_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'events': ('sentry.db.models.fields.array.ArrayField', [], {'of': (u'django.db.models.fields.TextField', [], {})}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'is_alertable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'name': ('django.db.models.fields.TextField', [], {}), 'overview': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'owned_sentry_apps'", 'to': "orm['sentry.Organization']"}), 'proxy_user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'sentry_app'", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['sentry.User']"}), 'redirect_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}), 'schema': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}), 'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': (u'django.db.models.fields.TextField', [], {})}), 'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}), 'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}), 'uuid': ('django.db.models.fields.CharField', [], {'default': "'95221a10-3d96-4af7-8670-be0f643dd7a1'", 'max_length': '64'}), 'webhook_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}) }, 'sentry.sentryappavatar': { 'Meta': {'unique_together': '()', 'object_name': 'SentryAppAvatar', 'index_together': '()'}, 'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}), 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}), 'sentry_app': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'avatar'", 'unique': 'True', 'to': "orm['sentry.SentryApp']"}) }, 'sentry.sentryappcomponent': { 'Meta': {'unique_together': '()', 'object_name': 'SentryAppComponent', 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'schema': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}), 'sentry_app': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'components'", 'to': "orm['sentry.SentryApp']"}), 'type': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'uuid': ('sentry.db.models.fields.uuid.UUIDField', [], {'auto_add': "'uuid:uuid4'", 'unique': 'True', 'max_length': '32'}) }, 'sentry.sentryappinstallation': { 'Meta': {'unique_together': '()', 'object_name': 'SentryAppInstallation', 'index_together': '()'}, 'api_grant': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'sentry_app_installation'", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['sentry.ApiGrant']"}), 'api_token': ('django.db.models.fields.related.OneToOneField', [], {'related_name': 
"u'sentry_app_installation'", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['sentry.ApiToken']"}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_deleted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'date_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'sentry_app_installations'", 'to': "orm['sentry.Organization']"}), 'sentry_app': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'installations'", 'to': "orm['sentry.SentryApp']"}), 'uuid': ('django.db.models.fields.CharField', [], {'default': "'c025a308-74d1-4b11-95f4-f74b0dba0a37'", 'max_length': '64'}) }, 'sentry.servicehook': { 'Meta': {'unique_together': '()', 'object_name': 'ServiceHook', 'index_together': '()'}, 'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'events': ('sentry.db.models.fields.array.ArrayField', [], {'of': (u'django.db.models.fields.TextField', [], {})}), 'guid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'secret': ('sentry.db.models.fields.encrypted.EncryptedTextField', [], {'default': "'351f827fb83b47d2b3d4c8a8a379cc632e91af06c5d44946abd9396a33877cc8'"}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '512'}), 'version': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}) }, 'sentry.servicehookproject': { 'Meta': {'unique_together': "(('service_hook', 'project_id'),)", 'object_name': 'ServiceHookProject', 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'service_hook': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ServiceHook']"}) }, 'sentry.tagkey': { 'Meta': {'unique_together': "(('project_id', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'", 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}), 'values_seen': 
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}) }, 'sentry.tagvalue': { 'Meta': {'unique_together': "(('project_id', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'", 'index_together': "(('project_id', 'key', 'last_seen'),)"}, 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}), 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}), 'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '200'}) }, 'sentry.team': { 'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}), 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}) }, 'sentry.teamavatar': { 'Meta': {'unique_together': '()', 'object_name': 'TeamAvatar', 'index_together': '()'}, 'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}), 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}), 'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'avatar'", 'unique': 'True', 'to': "orm['sentry.Team']"}) }, 'sentry.timeseriessnapshot': { 'Meta': {'unique_together': '()', 'object_name': 'TimeSeriesSnapshot', 'index_together': '()'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'end': ('django.db.models.fields.DateTimeField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'period': ('django.db.models.fields.IntegerField', [], {}), 'start': ('django.db.models.fields.DateTimeField', [], {}), 'values': ('sentry.db.models.fields.array.ArrayField', [], {'of': (u'sentry.db.models.fields.array.ArrayField', [], {'null': 'True'})}) }, 'sentry.user': { 'Meta': {'unique_together': '()', 'object_name': 'User', 'db_table': "'auth_user'", 'index_together': '()'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'flags': 
('django.db.models.fields.BigIntegerField', [], {'default': '0', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_password_expired': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_sentry_app': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_active': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'last_password_change': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_column': "'first_name'", 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'session_nonce': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}) }, 'sentry.useravatar': { 'Meta': {'unique_together': '()', 'object_name': 'UserAvatar', 'index_together': '()'}, 'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}), 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'avatar'", 'unique': 'True', 'to': "orm['sentry.User']"}) }, 'sentry.useremail': { 'Meta': {'unique_together': "(('user', 'email'),)", 'object_name': 'UserEmail', 'index_together': '()'}, 'date_hash_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'is_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "u'emails'", 'to': "orm['sentry.User']"}), 'validation_hash': ('django.db.models.fields.CharField', [], {'default': "u'tfq6tE9Duz48Ehl7NuSBrIVlGLs4yM09'", 'max_length': '32'}) }, 'sentry.userip': { 'Meta': {'unique_together': "(('user', 'ip_address'),)", 'object_name': 'UserIP', 'index_together': '()'}, 'country_code': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True'}), 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'region_code': ('django.db.models.fields.CharField', [], {'max_length': 
'16', 'null': 'True'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.useroption': { 'Meta': {'unique_together': "(('user', 'project', 'key'), ('user', 'organization', 'key'))", 'object_name': 'UserOption', 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'null': 'True'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}), 'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}) }, 'sentry.userpermission': { 'Meta': {'unique_together': "(('user', 'permission'),)", 'object_name': 'UserPermission', 'index_together': '()'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'permission': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}) }, 'sentry.userreport': { 'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'UserReport', 'index_together': "(('project', 'event_id'), ('project', 'date_added'))"}, 'comments': ('django.db.models.fields.TextField', [], {}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), 'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']", 'null': 'True'}), 'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'event_user_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}), 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}) }, 'sentry.widget': { 'Meta': {'unique_together': "(('dashboard', 'order'), ('dashboard', 'title'))", 'object_name': 'Widget', 'index_together': '()'}, 'dashboard': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Dashboard']"}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'display_options': ('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}), 'display_type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'sentry.widgetdatasource': { 'Meta': {'unique_together': "(('widget', 'name'), ('widget', 'order'))", 'object_name': 'WidgetDataSource', 'index_together': '()'}, 'data': 
('sentry.db.models.fields.jsonfield.JSONField', [], {'default': '{}'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}), 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'widget': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Widget']"}) } } complete_apps = ['sentry']
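# Editorial note on the frozen ORM snapshot above (not part of the original
# migration): South stores every field as a triple of
# (dotted field class, positional args, keyword args serialized as strings).
# For example, the triple
#     ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'})
# corresponds roughly to the ordinary declaration
#     models.CharField(max_length=64, null=True)
# and South rebuilds this fake ORM (accessed via keys such as orm['sentry.Project'])
# so the migration runs against historical models rather than the current code.
# The mapping shown here is a readability sketch, not code from the repository.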
bsd-3-clause
6,805,013,253,530,824,000
94.341226
234
0.580177
false
3.845138
false
false
false
AlphaSmartDog/DeepLearningNotes
Note-6 A3CNet/Note-6.4 HS300指数增强/agent/agent.py
1
6362
import numpy as np import tensorflow as tf from agent.forward import ActorCriticNet from params import * def batch_choice(a, p): action_list = [np.random.choice(a, p=i) for i in p] return np.array(action_list) # local network for advantage actor-critic which are also know as A2C class Agent(object): def __init__(self, name, access, inputs_shape, action_size): self.Access = access self.action_size = action_size batch_size = inputs_shape[0] self.batch_size = batch_size with tf.variable_scope(name): # placeholder # [Time, Batch, Rows, Columns, Channels] self.inputs = tf.placeholder( tf.float32, [None] + inputs_shape, 'inputs') # fix inputs = tf.expand_dims(self.inputs, axis=-1) # [T_MAX, Batch] self.actions = tf.placeholder( tf.int32, [None, batch_size], "actions") # [T_MAX] self.targets = tf.placeholder( tf.float32, [None], "discounted_rewards") self.gathers = tf.placeholder( tf.int32, [None], 'gather_list') # build network and adjust output probability self.net = ActorCriticNet('AC-' + name) policy, value = self.net(inputs, action_size) policy = tf.clip_by_value( policy, CLIP_MIN, CLIP_MAX, 'constraint') # interface gather and step # [Time, Batch, action_size] -> [T_MAX, Batch, action_size] self.policy = tf.gather(policy, self.gathers) self.value = tf.gather(value, self.gathers) # [T_MAX] self.value = tf.squeeze(self.value, axis=1) self.policy_step = policy[-1] # [Batch, action_size] self.value_step = value[-1] # 1 # build other function self._build_losses() self._build_async_swap() self._build_interface() print('graph %s' % (str(name))) def _build_losses(self): # value loss self.advantage = self.targets - self.value # [T_MAX] value_loss = 0.5 * tf.square(self.advantage) # policy loss # [T_MAX, Batch, action_size] -> [T_MAX, Batch] policy_action = tf.reduce_sum( self.policy * tf.one_hot(self.actions, self.action_size), axis=2) # [T_MAX, Batch] policy_loss = -tf.log(policy_action) * tf.stop_gradient( tf.expand_dims(self.advantage, axis=1)) # entropy loss [T_MAX, Batch] entropy_loss = tf.reduce_sum(self.policy * tf.log(self.policy), axis=2) # total loss self.critic_loss = tf.reduce_mean(value_loss) self.actor_loss = tf.reduce_mean(policy_loss + entropy_loss * ENTROPY_BETA) self.total_loss = self.critic_loss + self.actor_loss # interface self.a_total_loss = self.total_loss self.a_entropy_loss = tf.reduce_mean(entropy_loss) self.a_policy_loss = tf.reduce_mean(policy_loss) self.a_value_loss = tf.reduce_mean(value_loss) self.a_critic_loss = self.critic_loss self.a_actor_loss = self.actor_loss self.a_advantage = tf.reduce_mean(self.advantage) self.a_value_mean = tf.reduce_mean(self.value) self.a_policy_mean = tf.reduce_mean(self.policy) def _build_async_swap(self): # Get gradients from local network using local losses local_vars = self.get_trainable() self.gradients = tf.gradients(self.total_loss, local_vars) # Clip gradients grads, self.global_norm = tf.clip_by_global_norm( self.gradients, MAX_GRAD_NORM) # Update global network # Apply local gradients to global network global_vars = self.Access.get_trainable() self.update_global = self.Access.optimizer.apply_gradients( zip(grads, global_vars)) # Update local network assign_list = [] for gv, lv in zip(global_vars, local_vars): assign_list.append(tf.assign(lv, gv)) self.update_local = assign_list def _build_interface(self): self.a_interface = [self.a_total_loss, self.a_entropy_loss, self.a_policy_loss, self.a_value_loss, self.a_actor_loss, self.a_critic_loss, self.a_advantage, self.a_value_mean, self.a_policy_mean, self.a_advantage] def 
get_trainable(self): return list(self.net.get_variables()) def init_or_update_local(self, sess): sess.run(self.update_local) def get_step_policy(self, sess, inputs): return sess.run(self.policy_step, {self.inputs: inputs}) def get_step_value(self, sess, inputs): return sess.run(self.value_step, {self.inputs: inputs}) def get_losses(self, sess, inputs, actions, targets, gather_list): """ get all loss functions of network :param sess: :param inputs: :param actions: :param targets: :return: """ feed_dict = {self.inputs: inputs, self.actions: actions, self.targets: targets, self.gathers: gather_list} return sess.run(self.a_interface, feed_dict) def train_step(self, sess, inputs, actions, targets, gathers): feed_dict = {self.inputs: inputs, self.actions: actions, self.targets: targets, self.gathers: gathers} sess.run(self.update_global, feed_dict) # get stochastic action for train def get_stochastic_action(self, sess, inputs, epsilon=0.9): if np.random.uniform() < epsilon: policy = sess.run(self.policy_step, {self.inputs: inputs}) return batch_choice(self.action_size, policy) else: return np.random.randint(self.action_size, size=self.batch_size) # get deterministic action for test def get_deterministic_policy_action(self, sess, inputs): policy_step = sess.run(self.policy_step, {self.inputs: inputs}) return np.argmax(policy_step, axis=1)
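# --- Editorial usage sketch (not part of the original file) ---
# The Agent above feeds `targets` (a sequence of discounted rewards) and
# `gathers` (the time indices to train on) into train_step(). The helper and
# the call order below are assumptions added for clarity: the gamma value,
# the buffer names and the surrounding worker loop are not taken from this
# repository.
def _discounted_returns_sketch(rewards, bootstrap_value, gamma=0.99):
    """Return [R_0, R_1, ...] where R_t = r_t + gamma * R_{t+1} (illustrative)."""
    returns = []
    running = bootstrap_value
    for r in reversed(rewards):
        running = r + gamma * running
        returns.append(running)
    returns.reverse()
    return returns
# Rough call order for one rollout (sketch only):
#   agent.init_or_update_local(sess)                 # copy global weights into the local net
#   action = agent.get_stochastic_action(sess, obs)  # act and buffer (obs, action, reward)
#   targets = _discounted_returns_sketch(reward_buffer, bootstrap_value)
#   agent.train_step(sess, obs_buffer, action_buffer, targets, gather_list)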
mit
-5,111,055,656,513,035,000
37.095808
83
0.568375
false
3.709621
false
false
false
saghul/shline
segments/hg.py
1
1578
def add_hg_segment():
    import os
    import subprocess

    env = {"LANG": "C", "HOME": os.getenv("HOME")}

    def get_hg_status():
        has_modified_files = False
        has_untracked_files = False
        has_missing_files = False
        try:
            output = subprocess.check_output(['hg', 'status'], env=env)
        except subprocess.CalledProcessError:
            pass
        else:
            for line in output.split('\n'):
                if line == '':
                    continue
                elif line[0] == '?':
                    has_untracked_files = True
                elif line[0] == '!':
                    has_missing_files = True
                else:
                    has_modified_files = True
        return has_modified_files, has_untracked_files, has_missing_files

    try:
        output = subprocess.check_output(['hg', 'branch'], env=env)
    except (subprocess.CalledProcessError, OSError):
        return

    branch = output.rstrip()
    if not branch:
        return

    bg = Color.REPO_CLEAN_BG
    fg = Color.REPO_CLEAN_FG

    has_modified_files, has_untracked_files, has_missing_files = get_hg_status()
    if has_modified_files or has_untracked_files or has_missing_files:
        bg = Color.REPO_DIRTY_BG
        fg = Color.REPO_DIRTY_FG

    extra = ''
    if has_untracked_files:
        extra += '+'
    if has_missing_files:
        extra += '!'
    branch += (' ' + extra if extra != '' else '')

    return shline.append(' %s %s ' % (shline.branch, branch), fg, bg)

add_hg_segment()
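# Editorial note (not part of the original segment): get_hg_status() keys off
# the first character of each `hg status` line. Typical output looks like:
#
#   M src/app.py               (modified: any code other than '?' or '!')
#   ? notes.txt                (untracked)
#   ! tracked_but_deleted.py   (missing)
#
# so any non-empty line that does not start with '?' or '!' (e.g. 'M', 'A',
# 'R') sets has_modified_files and flips the segment to the dirty colours.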
mit
-5,181,100,140,301,123,000
29.346154
80
0.529151
false
3.954887
false
false
false
tpflueger/CSCI4900
scripts/main.py
1
1140
# SPDX-License-Identifier: MIT
'''Usage:
    {0} scan (FILE)
    {0} dependencies (JARNAME)
    {0} (--help | --version)

Arguments:
    scan            Scan pom file for dependencies
    dependencies    Show dependency tree for jarFile
'''
import shutil
import sys
import os

from dependency_reader import DependencyReader
from docopt import docopt

__version__ = '1.0.0'


def main():
    argv = docopt(
        doc=__doc__.format(os.path.basename(sys.argv[0])),
        argv=sys.argv[1:],
        version=__version__
    )
    dependencyReader = DependencyReader()

    if argv['scan']:
        dependencyReader.getPom(os.path.abspath(argv['FILE']))
        dependencyReader.getDependencies()
        dependencyReader.relateDependencies()
        dependencyReader.scanDependencies()
        dependencyReader.createRelationships()
        dependencyReader.retrieve_dependencies(None)
        shutil.rmtree(dependencyReader.tempDirectoryPath)
    elif argv['dependencies']:
        dependencyReader.retrieve_dependencies(argv['JARNAME'])
        shutil.rmtree(dependencyReader.tempDirectoryPath)


if __name__ == "__main__":
    sys.exit(main())
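# Editorial usage sketch (not part of the original script): with the docopt
# usage string above, the two sub-commands are invoked along these lines;
# pom.xml and example.jar are placeholder names, not files from this project.
#
#   python main.py scan pom.xml               # scan a pom file for dependencies
#   python main.py dependencies example.jar   # show the dependency tree for a jar
#   python main.py --version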
mit
-7,091,659,358,233,557,000
27.5
63
0.670175
false
4.05694
false
false
false
NoBodyCam/TftpPxeBootBareMetal
nova/api/openstack/compute/contrib/floating_ip_dns.py
1
10842
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2011 Andrew Bogott for the Wikimedia Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License import urllib import webob from nova.api.openstack import extensions from nova.api.openstack import wsgi from nova.api.openstack import xmlutil from nova import exception from nova import network from nova.openstack.common import log as logging LOG = logging.getLogger(__name__) authorize = extensions.extension_authorizer('compute', 'floating_ip_dns') def make_dns_entry(elem): elem.set('id') elem.set('ip') elem.set('type') elem.set('domain') elem.set('name') def make_domain_entry(elem): elem.set('domain') elem.set('scope') elem.set('project') elem.set('availability_zone') class FloatingIPDNSTemplate(xmlutil.TemplateBuilder): def construct(self): root = xmlutil.TemplateElement('dns_entry', selector='dns_entry') make_dns_entry(root) return xmlutil.MasterTemplate(root, 1) class FloatingIPDNSsTemplate(xmlutil.TemplateBuilder): def construct(self): root = xmlutil.TemplateElement('dns_entries') elem = xmlutil.SubTemplateElement(root, 'dns_entry', selector='dns_entries') make_dns_entry(elem) return xmlutil.MasterTemplate(root, 1) class DomainTemplate(xmlutil.TemplateBuilder): def construct(self): root = xmlutil.TemplateElement('domain_entry', selector='domain_entry') make_domain_entry(root) return xmlutil.MasterTemplate(root, 1) class DomainsTemplate(xmlutil.TemplateBuilder): def construct(self): root = xmlutil.TemplateElement('domain_entries') elem = xmlutil.SubTemplateElement(root, 'domain_entry', selector='domain_entries') make_domain_entry(elem) return xmlutil.MasterTemplate(root, 1) def _translate_dns_entry_view(dns_entry): result = {} result['ip'] = dns_entry.get('ip') result['id'] = dns_entry.get('id') result['type'] = dns_entry.get('type') result['domain'] = dns_entry.get('domain') result['name'] = dns_entry.get('name') return {'dns_entry': result} def _translate_dns_entries_view(dns_entries): return {'dns_entries': [_translate_dns_entry_view(entry)['dns_entry'] for entry in dns_entries]} def _translate_domain_entry_view(domain_entry): result = {} result['domain'] = domain_entry.get('domain') result['scope'] = domain_entry.get('scope') result['project'] = domain_entry.get('project') result['availability_zone'] = domain_entry.get('availability_zone') return {'domain_entry': result} def _translate_domain_entries_view(domain_entries): return {'domain_entries': [_translate_domain_entry_view(entry)['domain_entry'] for entry in domain_entries]} def _unquote_domain(domain): """Unquoting function for receiving a domain name in a URL. Domain names tend to have .'s in them. Urllib doesn't quote dots, but Routes tends to choke on them, so we need an extra level of by-hand quoting here. 
""" return urllib.unquote(domain).replace('%2E', '.') def _create_dns_entry(ip, name, domain): return {'ip': ip, 'name': name, 'domain': domain} def _create_domain_entry(domain, scope=None, project=None, av_zone=None): return {'domain': domain, 'scope': scope, 'project': project, 'availability_zone': av_zone} class FloatingIPDNSDomainController(object): """DNS domain controller for OpenStack API""" def __init__(self): self.network_api = network.API() super(FloatingIPDNSDomainController, self).__init__() @wsgi.serializers(xml=DomainsTemplate) def index(self, req): """Return a list of available DNS domains.""" context = req.environ['nova.context'] authorize(context) domains = self.network_api.get_dns_domains(context) domainlist = [_create_domain_entry(domain['domain'], domain.get('scope'), domain.get('project'), domain.get('availability_zone')) for domain in domains] return _translate_domain_entries_view(domainlist) @wsgi.serializers(xml=DomainTemplate) def update(self, req, id, body): """Add or modify domain entry""" context = req.environ['nova.context'] authorize(context) fqdomain = _unquote_domain(id) try: entry = body['domain_entry'] scope = entry['scope'] except (TypeError, KeyError): raise webob.exc.HTTPUnprocessableEntity() project = entry.get('project', None) av_zone = entry.get('availability_zone', None) if (scope not in ('private', 'public') or project and av_zone or scope == 'private' and project or scope == 'public' and av_zone): raise webob.exc.HTTPUnprocessableEntity() if scope == 'private': create_dns_domain = self.network_api.create_private_dns_domain area_name, area = 'availability_zone', av_zone else: create_dns_domain = self.network_api.create_public_dns_domain area_name, area = 'project', project create_dns_domain(context, fqdomain, area) return _translate_domain_entry_view({'domain': fqdomain, 'scope': scope, area_name: area}) def delete(self, req, id): """Delete the domain identified by id. 
""" context = req.environ['nova.context'] authorize(context) domain = _unquote_domain(id) # Delete the whole domain try: self.network_api.delete_dns_domain(context, domain) except exception.NotFound as e: raise webob.exc.HTTPNotFound(explanation=unicode(e)) return webob.Response(status_int=202) class FloatingIPDNSEntryController(object): """DNS Entry controller for OpenStack API""" def __init__(self): self.network_api = network.API() super(FloatingIPDNSEntryController, self).__init__() @wsgi.serializers(xml=FloatingIPDNSTemplate) def show(self, req, domain_id, id): """Return the DNS entry that corresponds to domain_id and id.""" context = req.environ['nova.context'] authorize(context) domain = _unquote_domain(domain_id) name = id entries = self.network_api.get_dns_entries_by_name(context, name, domain) entry = _create_dns_entry(entries[0], name, domain) return _translate_dns_entry_view(entry) @wsgi.serializers(xml=FloatingIPDNSsTemplate) def index(self, req, domain_id): """Return a list of dns entries for the specified domain and ip.""" context = req.environ['nova.context'] authorize(context) params = req.GET floating_ip = params.get('ip') domain = _unquote_domain(domain_id) if not floating_ip: raise webob.exc.HTTPUnprocessableEntity() entries = self.network_api.get_dns_entries_by_address(context, floating_ip, domain) entrylist = [_create_dns_entry(floating_ip, entry, domain) for entry in entries] return _translate_dns_entries_view(entrylist) @wsgi.serializers(xml=FloatingIPDNSTemplate) def update(self, req, domain_id, id, body): """Add or modify dns entry""" context = req.environ['nova.context'] authorize(context) domain = _unquote_domain(domain_id) name = id try: entry = body['dns_entry'] address = entry['ip'] dns_type = entry['dns_type'] except (TypeError, KeyError): raise webob.exc.HTTPUnprocessableEntity() entries = self.network_api.get_dns_entries_by_name(context, name, domain) if not entries: # create! self.network_api.add_dns_entry(context, address, name, dns_type, domain) else: # modify! self.network_api.modify_dns_entry(context, name, address, domain) return _translate_dns_entry_view({'ip': address, 'name': name, 'type': dns_type, 'domain': domain}) def delete(self, req, domain_id, id): """Delete the entry identified by req and id. """ context = req.environ['nova.context'] authorize(context) domain = _unquote_domain(domain_id) name = id try: self.network_api.delete_dns_entry(context, name, domain) except exception.NotFound as e: raise webob.exc.HTTPNotFound(explanation=unicode(e)) return webob.Response(status_int=202) class Floating_ip_dns(extensions.ExtensionDescriptor): """Floating IP DNS support""" name = "Floating_ip_dns" alias = "os-floating-ip-dns" namespace = "http://docs.openstack.org/ext/floating_ip_dns/api/v1.1" updated = "2011-12-23T00:00:00+00:00" def __init__(self, ext_mgr): self.network_api = network.API() super(Floating_ip_dns, self).__init__(ext_mgr) def get_resources(self): resources = [] res = extensions.ResourceExtension('os-floating-ip-dns', FloatingIPDNSDomainController()) resources.append(res) res = extensions.ResourceExtension('entries', FloatingIPDNSEntryController(), parent={'member_name': 'domain', 'collection_name': 'os-floating-ip-dns'}) resources.append(res) return resources
apache-2.0
3,639,136,114,536,705,000
34.547541
78
0.590481
false
4.19582
false
false
false
tensorflow/datasets
tensorflow_datasets/structured/movielens.py
1
17907
# coding=utf-8 # Copyright 2021 The TensorFlow Datasets Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """MovieLens dataset.""" import os import textwrap from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple from absl import logging import tensorflow.compat.v2 as tf import tensorflow_datasets.public_api as tfds from tensorflow_datasets.structured import movielens_parsing _CITATION = """ @article{10.1145/2827872, author = {Harper, F. Maxwell and Konstan, Joseph A.}, title = {The MovieLens Datasets: History and Context}, year = {2015}, issue_date = {January 2016}, publisher = {Association for Computing Machinery}, address = {New York, NY, USA}, volume = {5}, number = {4}, issn = {2160-6455}, url = {https://doi.org/10.1145/2827872}, doi = {10.1145/2827872}, journal = {ACM Trans. Interact. Intell. Syst.}, month = dec, articleno = {19}, numpages = {19}, keywords = {Datasets, recommendations, ratings, MovieLens} } """ _DESCRIPTION = """ This dataset contains a set of movie ratings from the MovieLens website, a movie recommendation service. This dataset was collected and maintained by [GroupLens] (https://grouplens.org/), a research group at the University of Minnesota. There are 5 versions included: "25m", "latest-small", "100k", "1m", "20m". In all datasets, the movies data and ratings data are joined on "movieId". The 25m dataset, latest-small dataset, and 20m dataset contain only movie data and rating data. The 1m dataset and 100k dataset contain demographic data in addition to movie and rating data. - "25m": This is the latest stable version of the MovieLens dataset. It is recommended for research purposes. - "latest-small": This is a small subset of the latest version of the MovieLens dataset. It is changed and updated over time by GroupLens. - "100k": This is the oldest version of the MovieLens datasets. It is a small dataset with demographic data. - "1m": This is the largest MovieLens dataset that contains demographic data. - "20m": This is one of the most used MovieLens datasets in academic papers along with the 1m dataset. For each version, users can view either only the movies data by adding the "-movies" suffix (e.g. "25m-movies") or the ratings data joined with the movies data (and users data in the 1m and 100k datasets) by adding the "-ratings" suffix (e.g. "25m-ratings"). The features below are included in all versions with the "-ratings" suffix. - "movie_id": a unique identifier of the rated movie - "movie_title": the title of the rated movie with the release year in parentheses - "movie_genres": a sequence of genres to which the rated movie belongs - "user_id": a unique identifier of the user who made the rating - "user_rating": the score of the rating on a five-star scale - "timestamp": the timestamp of the ratings, represented in seconds since midnight Coordinated Universal Time (UTC) of January 1, 1970 The "100k-ratings" and "1m-ratings" versions in addition include the following demographic features. 
- "user_gender": gender of the user who made the rating; a true value corresponds to male - "bucketized_user_age": bucketized age values of the user who made the rating, the values and the corresponding ranges are: - 1: "Under 18" - 18: "18-24" - 25: "25-34" - 35: "35-44" - 45: "45-49" - 50: "50-55" - 56: "56+" - "user_occupation_label": the occupation of the user who made the rating represented by an integer-encoded label; labels are preprocessed to be consistent across different versions - "user_occupation_text": the occupation of the user who made the rating in the original string; different versions can have different set of raw text labels - "user_zip_code": the zip code of the user who made the rating In addition, the "100k-ratings" dataset would also have a feature "raw_user_age" which is the exact ages of the users who made the rating Datasets with the "-movies" suffix contain only "movie_id", "movie_title", and "movie_genres" features. """ _FORMAT_VERSIONS = ['25m', 'latest-small', '20m', '100k', '1m'] _TABLE_OPTIONS = ['movies', 'ratings'] class MovieLensConfig(tfds.core.BuilderConfig): """BuilderConfig for MovieLens dataset.""" def __init__(self, format_version: Optional[str] = None, table_option: Optional[str] = None, download_url: Optional[str] = None, parsing_fn: Optional[Callable[[str], Iterator[Tuple[int, Dict[ str, Any]]],]] = None, **kwargs) -> None: """Constructs a MovieLensConfig. Args: format_version: a string to identify the format of the dataset, one of '_FORMAT_VERSIONS'. table_option: a string to identify the table to expose, one of '_TABLE_OPTIONS'. download_url: a string url for downloading the dataset. parsing_fn: a callable for parsing the data. **kwargs: keyword arguments forwarded to super. Raises: ValueError: if format_version is not one of '_FORMAT_VERSIONS' or if table_option is not one of '_TABLE_OPTIONS'. """ if format_version not in _FORMAT_VERSIONS: raise ValueError('format_version must be one of %s.' % _FORMAT_VERSIONS) if table_option not in _TABLE_OPTIONS: raise ValueError('table_option must be one of %s.' % _TABLE_OPTIONS) super(MovieLensConfig, self).__init__(**kwargs) self._format_version = format_version self._table_option = table_option self._download_url = download_url self._parsing_fn = parsing_fn @property def format_version(self) -> str: return self._format_version @property def table_option(self) -> str: return self._table_option @property def download_url(self) -> str: return self._download_url @property def parsing_fn( self) -> Optional[Callable[[str], Iterator[Tuple[int, Dict[str, Any]]],]]: return self._parsing_fn class Movielens(tfds.core.GeneratorBasedBuilder): """MovieLens rating dataset.""" BUILDER_CONFIGS = [ MovieLensConfig( name='25m-ratings', description=textwrap.dedent("""\ This dataset contains 25,000,095 ratings across 62,423 movies, created by 162,541 users between January 09, 1995 and November 21, 2019. This dataset is the latest stable version of the MovieLens dataset, generated on November 21, 2019. Each user has rated at least 20 movies. The ratings are in half-star increments. 
              This dataset does not include demographic data."""),
          version='0.1.0',
          format_version='25m',
          table_option='ratings',
          download_url=('http://files.grouplens.org/datasets/movielens/'
                        'ml-25m.zip'),
          parsing_fn=movielens_parsing.parse_current_ratings_data,
      ),
      MovieLensConfig(
          name='25m-movies',
          description=textwrap.dedent("""\
              This dataset contains data of 62,423 movies rated in the 25m
              dataset."""),
          version='0.1.0',
          format_version='25m',
          table_option='movies',
          download_url=('http://files.grouplens.org/datasets/movielens/'
                        'ml-25m.zip'),
          parsing_fn=movielens_parsing.parse_current_movies_data,
      ),
      # The latest-small dataset is changed over time. Its checksum might need
      # updating in the future.
      MovieLensConfig(
          name='latest-small-ratings',
          description=textwrap.dedent("""\
              This dataset contains 100,836 ratings across 9,742 movies,
              created by 610 users between March 29, 1996 and September 24,
              2018. This dataset was generated on September 26, 2018 and is a
              subset of the full latest version of the MovieLens dataset.
              This dataset is changed and updated over time.

              Each user has rated at least 20 movies. The ratings are in
              half-star increments.

              This dataset does not include demographic data."""),
          version='0.1.0',
          format_version='latest-small',
          table_option='ratings',
          download_url=('http://files.grouplens.org/datasets/movielens/'
                        'ml-latest-small.zip'),
          parsing_fn=movielens_parsing.parse_current_ratings_data,
      ),
      MovieLensConfig(
          name='latest-small-movies',
          description=textwrap.dedent("""\
              This dataset contains data of 9,742 movies rated in the
              latest-small dataset."""),
          version='0.1.0',
          format_version='latest-small',
          table_option='movies',
          download_url=('http://files.grouplens.org/datasets/movielens/'
                        'ml-latest-small.zip'),
          parsing_fn=movielens_parsing.parse_current_movies_data,
      ),
      MovieLensConfig(
          name='100k-ratings',
          description=textwrap.dedent("""\
              This dataset contains 100,000 ratings from 943 users on 1,682
              movies. This dataset is the oldest version of the MovieLens
              dataset.

              Each user has rated at least 20 movies. Ratings are in
              whole-star increments. This dataset contains demographic data
              of users in addition to data on movies and ratings."""),
          version='0.1.0',
          format_version='100k',
          table_option='ratings',
          download_url=('http://files.grouplens.org/datasets/movielens/'
                        'ml-100k.zip'),
          parsing_fn=movielens_parsing.parse_100k_ratings_data,
      ),
      MovieLensConfig(
          name='100k-movies',
          description=textwrap.dedent("""\
              This dataset contains data of 1,682 movies rated in the 100k
              dataset."""),
          version='0.1.0',
          format_version='100k',
          table_option='movies',
          download_url=('http://files.grouplens.org/datasets/movielens/'
                        'ml-100k.zip'),
          parsing_fn=movielens_parsing.parse_100k_movies_data,
      ),
      MovieLensConfig(
          name='1m-ratings',
          description=textwrap.dedent("""\
              This dataset contains 1,000,209 anonymous ratings of
              approximately 3,900 movies made by 6,040 MovieLens users who
              joined MovieLens in 2000. This dataset is the largest dataset
              that includes demographic data.

              Each user has rated at least 20 movies. Ratings are in
              whole-star increments.
In demographic data, age values are divided into ranges and the lowest age value for each range is used in the data instead of the actual values."""), version='0.1.0', format_version='1m', table_option='ratings', download_url=('http://files.grouplens.org/datasets/movielens/' 'ml-1m.zip'), parsing_fn=movielens_parsing.parse_1m_ratings_data, ), MovieLensConfig( name='1m-movies', description=textwrap.dedent("""\ This dataset contains data of approximately 3,900 movies rated in the 1m dataset."""), version='0.1.0', format_version='1m', table_option='movies', download_url=('http://files.grouplens.org/datasets/movielens/' 'ml-1m.zip'), parsing_fn=movielens_parsing.parse_1m_movies_data, ), MovieLensConfig( name='20m-ratings', description=textwrap.dedent("""\ This dataset contains 20,000,263 ratings across 27,278 movies, created by 138,493 users between January 09, 1995 and March 31, 2015. This dataset was generated on October 17, 2016. Each user has rated at least 20 movies. Ratings are in half-star increments. This dataset does not contain demographic data."""), version='0.1.0', format_version='20m', table_option='ratings', download_url=('http://files.grouplens.org/datasets/movielens/' 'ml-20m.zip'), parsing_fn=movielens_parsing.parse_current_ratings_data, ), MovieLensConfig( name='20m-movies', description=textwrap.dedent("""\ This dataset contains data of 27,278 movies rated in the 20m dataset"""), version='0.1.0', format_version='20m', table_option='movies', download_url=('http://files.grouplens.org/datasets/movielens/' 'ml-20m.zip'), parsing_fn=movielens_parsing.parse_current_movies_data, ), ] VERSION = tfds.core.Version('0.1.0') def _info(self) -> tfds.core.DatasetInfo: """Returns DatasetInfo according to self.builder_config.""" movie_features_dict = { 'movie_id': tf.string, 'movie_title': tf.string, 'movie_genres': tfds.features.Sequence( tfds.features.ClassLabel(names=[ 'Action', 'Adventure', 'Animation', 'Children', 'Comedy', 'Crime', 'Documentary', 'Drama', 'Fantasy', 'Film-Noir', 'Horror', 'IMAX', 'Musical', 'Mystery', 'Romance', 'Sci-Fi', 'Thriller', 'Unknown', 'War', 'Western', '(no genres listed)', ]),), } rating_features_dict = { 'user_id': tf.string, 'user_rating': tf.float32, # Using int64 since tfds currently does not support float64. 'timestamp': tf.int64, } demographic_features_dict = { 'user_gender': tf.bool, 'bucketized_user_age': tf.float32, 'user_occupation_label': tfds.features.ClassLabel(names=[ 'academic/educator', 'artist', 'clerical/admin', 'customer service', 'doctor/health care', 'entertainment', 'executive/managerial', 'farmer', 'homemaker', 'lawyer', 'librarian', 'other/not specified', 'programmer', 'retired', 'sales/marketing', 'scientist', 'self-employed', 'student', 'technician/engineer', 'tradesman/craftsman', 'unemployed', 'writer', ]), 'user_occupation_text': tf.string, 'user_zip_code': tf.string, } features_dict = {} if self.builder_config.table_option == 'movies': features_dict.update(movie_features_dict) # For the other cases, self.builder_config.table_option == 'ratings'. # Older versions of MovieLens (1m, 100k) have demographic features. elif self.builder_config.format_version == '1m': features_dict.update(movie_features_dict) features_dict.update(rating_features_dict) features_dict.update(demographic_features_dict) elif self.builder_config.format_version == '100k': # Only the 100k dataset contains exact user ages. The 1m dataset # contains only bucketized age values. 
features_dict.update(movie_features_dict) features_dict.update(rating_features_dict) features_dict.update(demographic_features_dict) features_dict.update(raw_user_age=tf.float32) else: features_dict.update(movie_features_dict) features_dict.update(rating_features_dict) return tfds.core.DatasetInfo( builder=self, description=_DESCRIPTION, features=tfds.features.FeaturesDict(features_dict), supervised_keys=None, homepage='https://grouplens.org/datasets/movielens/', citation=_CITATION, ) def _split_generators( self, dl_manager: tfds.download.DownloadManager ) -> List[tfds.core.SplitGenerator]: """Returns SplitGenerators.""" extracted_path = dl_manager.download_and_extract( self.builder_config.download_url,) dir_path = os.path.join( extracted_path, 'ml-%s' % self.builder_config.format_version, ) return [ tfds.core.SplitGenerator( name=tfds.Split.TRAIN, gen_kwargs={'dir_path': dir_path}, ), ] def _generate_examples( self, dir_path: Optional[str] = None) -> Iterator[Tuple[int, Dict[str, Any]]]: """Yields examples by calling the corresponding parsing function.""" for ex in self.builder_config.parsing_fn(dir_path): yield ex class MovieLens(Movielens): """MovieLens rating dataset (deprecated handle version).""" def __init__(self, **kwargs): logging.warning( 'The handle "movie_lens" for the MovieLens dataset is deprecated. ' 'Prefer using "movielens" instead.') super(MovieLens, self).__init__(**kwargs)
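A minimal usage sketch for the builder above. The dataset name "movielens" and the config name "100k-ratings" follow from the builder and config definitions in this file, the "train" split comes from its _split_generators, and the feature keys come from its _info; the snippet assumes TFDS can reach the GroupLens download URL configured above and is illustrative, not part of the original file.

import tensorflow_datasets as tfds

# Load the 100k ratings table registered by the Movielens builder above.
# The first call downloads and prepares the data from the GroupLens URL.
ds = tfds.load('movielens/100k-ratings', split='train')

# Each example is a flat dict joining movie, rating and demographic features.
for example in ds.take(3):
  print(example['movie_title'].numpy(), example['user_rating'].numpy())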
apache-2.0
1,772,383,642,943,722,500
37.592672
80
0.612833
false
3.964357
true
false
false
brianwc/juriscraper
opinions/united_states/state/cal.py
1
1498
from juriscraper.OpinionSite import OpinionSite
import re
import time
from datetime import date


class Site(OpinionSite):
    def __init__(self):
        super(Site, self).__init__()
        self.url = 'http://www.courtinfo.ca.gov/cgi-bin/opinions-blank.cgi?Courts=S'
        self.court_id = self.__module__

    def _get_case_names(self):
        case_names = []
        for name in self.html.xpath('//table/tr/td[3]/text()'):
            date_regex = re.compile(r' \d\d?\/\d\d?\/\d\d| filed')
            if 'P. v. ' in date_regex.split(name)[0]:
                case_names.append(date_regex.split(name)[0].replace("P. ", "People "))
            else:
                case_names.append(date_regex.split(name)[0])
        return case_names

    def _get_download_urls(self):
        return [t for t in self.html.xpath("//table/tr/td[2]/a/@href[contains(.,'PDF')]")]

    def _get_case_dates(self):
        dates = []
        for s in self.html.xpath('//table/tr/td[1]/text()'):
            s = s.strip()
            date_formats = ['%b %d %Y', '%b %d, %Y']
            for format in date_formats:
                try:
                    dates.append(date.fromtimestamp(time.mktime(time.strptime(s, format))))
                except ValueError:
                    pass
        return dates

    def _get_docket_numbers(self):
        return [t for t in self.html.xpath('//table/tr/td[2]/text()[1]')]

    def _get_precedential_statuses(self):
        return ['Published'] * len(self.case_names)
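The _get_case_dates method above tries several strptime formats until one matches. A standalone sketch of that fallback pattern, using only the standard library; the helper name parse_opinion_date and the sample date strings are made up for illustration, while the format strings are the ones used in the scraper above.

import time
from datetime import date


def parse_opinion_date(raw, formats=('%b %d %Y', '%b %d, %Y')):
    # Try each known court date format; return None if nothing matches.
    raw = raw.strip()
    for fmt in formats:
        try:
            return date.fromtimestamp(time.mktime(time.strptime(raw, fmt)))
        except ValueError:
            continue
    return None


print(parse_opinion_date('Jan 5, 2014'))  # 2014-01-05
print(parse_opinion_date('Jan 5 2014'))   # 2014-01-05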
bsd-2-clause
7,258,131,139,515,015,000
34.666667
91
0.548732
false
3.314159
false
false
false
openstack/horizon
openstack_dashboard/dashboards/project/volumes/tests.py
1
96001
# Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from unittest import mock from django.conf import settings from django.forms import widgets from django.template.defaultfilters import slugify from django.test.utils import override_settings from django.urls import reverse from django.utils.http import urlunquote from openstack_dashboard import api from openstack_dashboard.api import cinder from openstack_dashboard.dashboards.project.volumes \ import tables as volume_tables from openstack_dashboard.dashboards.project.volumes import tabs from openstack_dashboard.test import helpers as test from openstack_dashboard.usage import quotas DETAIL_URL = ('horizon:project:volumes:detail') INDEX_URL = reverse('horizon:project:volumes:index') SEARCH_OPTS = dict(status=api.cinder.VOLUME_STATE_AVAILABLE) ATTACHMENT_ID = '6061364b-6612-48a9-8fee-1a38fe072547' class VolumeIndexViewTests(test.ResetImageAPIVersionMixin, test.TestCase): @test.create_mocks({ api.nova: ['server_get', 'server_list'], api.cinder: ['volume_backup_supported', 'volume_snapshot_list', 'volume_list_paged', 'tenant_absolute_limits', 'group_list'], }) def _test_index(self, with_attachments=False, with_groups=False): vol_snaps = self.cinder_volume_snapshots.list() volumes = self.cinder_volumes.list() if with_attachments: server = self.servers.first() else: for volume in volumes: volume.attachments = [] self.mock_volume_backup_supported.return_value = False if with_groups: self.mock_group_list.return_value = self.cinder_groups.list() volumes = self.cinder_group_volumes.list() self.mock_volume_list_paged.return_value = [volumes, False, False] if with_attachments: self.mock_server_get.return_value = server self.mock_server_list.return_value = [self.servers.list(), False] self.mock_volume_snapshot_list.return_value = vol_snaps self.mock_tenant_absolute_limits.return_value = \ self.cinder_limits['absolute'] res = self.client.get(INDEX_URL) if with_attachments: self.mock_server_list.assert_called_once_with(test.IsHttpRequest(), search_opts=None) self.mock_volume_snapshot_list.assert_called_once() if with_groups: self.mock_group_list.assert_called_once_with(test.IsHttpRequest(), search_opts=None) self.mock_volume_backup_supported.assert_called_with( test.IsHttpRequest()) self.mock_volume_list_paged.assert_called_once_with( test.IsHttpRequest(), marker=None, search_opts=None, sort_dir='desc', paginate=True) self.mock_tenant_absolute_limits.assert_called_with( test.IsHttpRequest()) self.assertEqual(res.status_code, 200) self.assertTemplateUsed(res, 'horizon/common/_data_table_view.html') def test_index_with_volume_attachments(self): self._test_index(True) def test_index_no_volume_attachments(self): self._test_index(False) def test_index_with_volume_groups(self): self._test_index(with_groups=True) @test.create_mocks({ api.nova: ['server_get', 'server_list'], cinder: ['tenant_absolute_limits', 'volume_list_paged', 'volume_backup_supported', 'volume_snapshot_list'], }) def _test_index_paginated(self, marker, sort_dir, 
volumes, url, has_more, has_prev): backup_supported = True vol_snaps = self.cinder_volume_snapshots.list() server = self.servers.first() self.mock_volume_backup_supported.return_value = backup_supported self.mock_volume_list_paged.return_value = [volumes, has_more, has_prev] self.mock_volume_snapshot_list.return_value = vol_snaps self.mock_server_list.return_value = [self.servers.list(), False] self.mock_server_get.return_value = server self.mock_tenant_absolute_limits.return_value = \ self.cinder_limits['absolute'] res = self.client.get(urlunquote(url)) self.assertEqual(2, self.mock_volume_backup_supported.call_count) self.mock_volume_list_paged.assert_called_once_with( test.IsHttpRequest(), marker=marker, sort_dir=sort_dir, search_opts=None, paginate=True) self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts=None) self.mock_tenant_absolute_limits.assert_called_with( test.IsHttpRequest()) self.mock_server_list.assert_called_once_with(test.IsHttpRequest(), search_opts=None) self.assertEqual(res.status_code, 200) self.assertTemplateUsed(res, 'horizon/common/_data_table_view.html') return res def ensure_attachments_exist(self, volumes): volumes = copy.copy(volumes) for volume in volumes: if not volume.attachments: volume.attachments.append({ "id": "1", "server_id": '1', "device": "/dev/hda", "attachment_id": ATTACHMENT_ID}) return volumes @override_settings(API_RESULT_PAGE_SIZE=2) def test_index_paginated(self): volumes = self.ensure_attachments_exist(self.cinder_volumes.list()) size = settings.API_RESULT_PAGE_SIZE # get first page expected_volumes = volumes[:size] url = INDEX_URL res = self._test_index_paginated(None, "desc", expected_volumes, url, True, False) result = res.context['volumes_table'].data self.assertCountEqual(result, expected_volumes) # get second page expected_volumes = volumes[size:2 * size] marker = expected_volumes[0].id next = volume_tables.VolumesTable._meta.pagination_param url = "?".join([INDEX_URL, "=".join([next, marker])]) res = self._test_index_paginated(marker, "desc", expected_volumes, url, True, True) result = res.context['volumes_table'].data self.assertCountEqual(result, expected_volumes) # get last page expected_volumes = volumes[-size:] marker = expected_volumes[0].id next = volume_tables.VolumesTable._meta.pagination_param url = "?".join([INDEX_URL, "=".join([next, marker])]) res = self._test_index_paginated(marker, "desc", expected_volumes, url, False, True) result = res.context['volumes_table'].data self.assertCountEqual(result, expected_volumes) @override_settings(API_RESULT_PAGE_SIZE=2) def test_index_paginated_prev_page(self): volumes = self.ensure_attachments_exist(self.cinder_volumes.list()) size = settings.API_RESULT_PAGE_SIZE # prev from some page expected_volumes = volumes[size:2 * size] marker = expected_volumes[0].id prev = volume_tables.VolumesTable._meta.prev_pagination_param url = "?".join([INDEX_URL, "=".join([prev, marker])]) res = self._test_index_paginated(marker, "asc", expected_volumes, url, True, True) result = res.context['volumes_table'].data self.assertCountEqual(result, expected_volumes) # back to first page expected_volumes = volumes[:size] marker = expected_volumes[0].id prev = volume_tables.VolumesTable._meta.prev_pagination_param url = "?".join([INDEX_URL, "=".join([prev, marker])]) res = self._test_index_paginated(marker, "asc", expected_volumes, url, True, False) result = res.context['volumes_table'].data self.assertCountEqual(result, expected_volumes) class 
VolumeViewTests(test.ResetImageAPIVersionMixin, test.TestCase): def tearDown(self): for volume in self.cinder_volumes.list(): # VolumeTableMixIn._set_volume_attributes mutates data # and cinder_volumes.list() doesn't deep copy for att in volume.attachments: if 'instance' in att: del att['instance'] super().tearDown() @test.create_mocks({ cinder: ['volume_create', 'volume_snapshot_list', 'volume_type_list', 'volume_type_default', 'volume_list', 'availability_zone_list', 'extension_supported', 'group_list'], quotas: ['tenant_quota_usages'], api.glance: ['image_list_detailed'], }) def test_create_volume(self): volume = self.cinder_volumes.first() volume_type = self.cinder_volume_types.first() az = self.cinder_availability_zones.first().zoneName formData = {'name': 'A Volume I Am Making', 'description': 'This is a volume I am making for a test.', 'method': 'CreateForm', 'type': volume_type.name, 'size': 50, 'snapshot_source': '', 'availability_zone': az} self.mock_volume_type_default.return_value = \ self.cinder_volume_types.first() self.mock_volume_type_list.return_value = \ self.cinder_volume_types.list() self.mock_tenant_quota_usages.return_value = \ self.cinder_quota_usages.first() self.mock_volume_snapshot_list.return_value = \ self.cinder_volume_snapshots.list() self.mock_image_list_detailed.return_value = [[], False, False] self.mock_availability_zone_list.return_value = \ self.cinder_availability_zones.list() self.mock_extension_supported.return_value = True self.mock_volume_list.return_value = self.cinder_volumes.list() self.mock_volume_create.return_value = volume self.mock_group_list.return_value = [] url = reverse('horizon:project:volumes:create') res = self.client.post(url, formData) self.assertNoFormErrors(res) redirect_url = INDEX_URL self.assertRedirectsNoFollow(res, redirect_url) self.mock_volume_type_default.assert_called_once() self.mock_volume_type_list.assert_called_once() self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_availability_zone_list.assert_called_once() self.mock_extension_supported.assert_called_once_with( test.IsHttpRequest(), 'AvailabilityZones') self.mock_volume_list.assert_called_once_with(test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_volume_create.assert_called_once_with( test.IsHttpRequest(), formData['size'], formData['name'], formData['description'], formData['type'], metadata={}, snapshot_id=None, group_id=None, image_id=None, availability_zone=formData['availability_zone'], source_volid=None) self.mock_image_list_detailed.assert_called_with( test.IsHttpRequest(), filters={'visibility': 'shared', 'status': 'active'}) self.mock_tenant_quota_usages.assert_called_once_with( test.IsHttpRequest(), targets=('volumes', 'gigabytes')) self.mock_group_list.assert_called_with(test.IsHttpRequest()) @test.create_mocks({ quotas: ['tenant_quota_usages'], api.glance: ['image_list_detailed'], cinder: ['extension_supported', 'availability_zone_list', 'volume_list', 'volume_type_default', 'volume_type_list', 'volume_snapshot_list', 'volume_create', 'group_list'], }) def test_create_volume_without_name(self): volume = self.cinder_volumes.first() volume_type = self.cinder_volume_types.first() az = self.cinder_availability_zones.first().zoneName formData = {'name': '', 'description': 'This is a volume I am making for a test.', 'method': 'CreateForm', 'type': volume_type.name, 'size': 50, 'snapshot_source': '', 'availability_zone': az} self.mock_volume_type_list.return_value = \ 
self.cinder_volume_types.list() self.mock_tenant_quota_usages.return_value = \ self.cinder_quota_usages.first() self.mock_volume_snapshot_list.return_value = \ self.cinder_volume_snapshots.list() self.mock_image_list_detailed.return_value = [self.images.list(), False, False] self.mock_availability_zone_list.return_value = \ self.cinder_availability_zones.list() self.mock_extension_supported.return_value = True self.mock_volume_type_default.return_value = \ self.cinder_volume_types.first() self.mock_volume_list.return_value = self.cinder_volumes.list() self.mock_volume_create.return_value = volume self.mock_group_list.return_value = [] url = reverse('horizon:project:volumes:create') res = self.client.post(url, formData) redirect_url = INDEX_URL self.assertRedirectsNoFollow(res, redirect_url) self.mock_volume_type_list.assert_called_once() self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_image_list_detailed.assert_called_with( test.IsHttpRequest(), filters={'visibility': 'shared', 'status': 'active'}) self.mock_availability_zone_list.assert_called_once() self.mock_extension_supported.assert_called_once_with( test.IsHttpRequest(), 'AvailabilityZones') self.mock_volume_type_default.assert_called_once() self.mock_volume_list.assert_called_once() self.mock_volume_create.assert_called_once_with( test.IsHttpRequest(), formData['size'], formData['name'], formData['description'], formData['type'], metadata={}, snapshot_id=None, group_id=None, image_id=None, availability_zone=formData['availability_zone'], source_volid=None) self.mock_group_list.assert_called_once_with(test.IsHttpRequest()) @test.create_mocks({ quotas: ['tenant_quota_usages'], api.glance: ['image_list_detailed'], cinder: ['extension_supported', 'availability_zone_list', 'volume_list', 'volume_type_default', 'volume_type_list', 'volume_snapshot_list', 'volume_create', 'group_list'], }) def test_create_volume_dropdown(self): volume = self.cinder_volumes.first() formData = {'name': 'A Volume I Am Making', 'description': 'This is a volume I am making for a test.', 'method': 'CreateForm', 'size': 50, 'type': '', 'volume_source_type': 'no_source_type', 'snapshot_source': self.cinder_volume_snapshots.first().id, 'image_source': self.images.first().id} self.mock_volume_type_default.return_value = \ self.cinder_volume_types.first() self.mock_volume_type_list.return_value = \ self.cinder_volume_types.list() self.mock_volume_snapshot_list.return_value = \ self.cinder_volume_snapshots.list() self.mock_image_list_detailed.return_value = \ [self.images.list(), False, False] self.mock_volume_list.return_value = self.cinder_volumes.list() self.mock_tenant_quota_usages.return_value = \ self.cinder_quota_usages.first() self.mock_extension_supported.return_value = True self.mock_availability_zone_list.return_value = \ self.cinder_availability_zones.list() self.mock_group_list.return_value = [] self.mock_volume_create.return_value = volume url = reverse('horizon:project:volumes:create') res = self.client.post(url, formData) redirect_url = INDEX_URL self.assertRedirectsNoFollow(res, redirect_url) self.mock_volume_type_default.assert_called_once() self.mock_volume_type_list.assert_called_once() self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_image_list_detailed.assert_called_with( test.IsHttpRequest(), filters={'visibility': 'shared', 'status': 'active'}) self.mock_volume_list.assert_called_once_with(test.IsHttpRequest(), 
search_opts=SEARCH_OPTS) self.mock_tenant_quota_usages.assert_called_once() self.mock_extension_supported.assert_called_once_with( test.IsHttpRequest(), 'AvailabilityZones') self.mock_availability_zone_list.assert_called_once() self.mock_volume_create.assert_called_once_with( test.IsHttpRequest(), formData['size'], formData['name'], formData['description'], '', metadata={}, snapshot_id=None, group_id=None, image_id=None, availability_zone=None, source_volid=None) self.mock_group_list.assert_called_with(test.IsHttpRequest()) @test.create_mocks({ quotas: ['tenant_quota_usages'], cinder: ['volume_type_list', 'volume_type_default', 'volume_get', 'volume_snapshot_get', 'volume_create', 'group_list'], }) def test_create_volume_from_snapshot(self): volume = self.cinder_volumes.first() snapshot = self.cinder_volume_snapshots.first() formData = {'name': 'A Volume I Am Making', 'description': 'This is a volume I am making for a test.', 'method': 'CreateForm', 'size': 50, 'type': '', 'snapshot_source': snapshot.id} self.mock_volume_type_default.return_value = \ self.cinder_volume_types.first() self.mock_volume_type_list.return_value = \ self.cinder_volume_types.list() self.mock_tenant_quota_usages.return_value = \ self.cinder_quota_usages.first() self.mock_volume_snapshot_get.return_value = snapshot self.mock_volume_get.return_value = self.cinder_volumes.first() self.mock_volume_create.return_value = volume self.mock_group_list.return_value = [] # get snapshot from url url = reverse('horizon:project:volumes:create') res = self.client.post("?".join([url, "snapshot_id=" + str(snapshot.id)]), formData) redirect_url = INDEX_URL self.assertRedirectsNoFollow(res, redirect_url) self.mock_volume_type_default.assert_called_once() self.mock_volume_type_list.assert_called_once() self.mock_tenant_quota_usages.assert_called_once() self.mock_volume_snapshot_get.assert_called_once_with( test.IsHttpRequest(), str(snapshot.id)) self.mock_volume_get.assert_called_once_with(test.IsHttpRequest(), snapshot.volume_id) self.mock_volume_create.assert_called_once_with( test.IsHttpRequest(), formData['size'], formData['name'], formData['description'], '', metadata={}, snapshot_id=snapshot.id, group_id=None, image_id=None, availability_zone=None, source_volid=None) self.mock_group_list.assert_called_once_with(test.IsHttpRequest()) @test.create_mocks({ quotas: ['tenant_quota_usages'], api.glance: ['image_list_detailed'], cinder: ['extension_supported', 'volume_snapshot_list', 'volume_snapshot_get', 'availability_zone_list', 'volume_type_list', 'volume_list', 'volume_type_default', 'volume_get', 'volume_create', 'group_list'], }) def test_create_volume_from_volume(self): volume = self.cinder_volumes.first() formData = {'name': 'A copy of a volume', 'description': 'This is a volume I am making for a test.', 'method': 'CreateForm', 'size': 50, 'type': '', 'volume_source_type': 'volume_source', 'volume_source': volume.id} self.mock_volume_type_default.return_value = \ self.cinder_volume_types.first() self.mock_volume_list.return_value = self.cinder_volumes.list() self.mock_volume_type_list.return_value = \ self.cinder_volume_types.list() self.mock_volume_snapshot_list.return_value = \ self.cinder_volume_snapshots.list() self.mock_tenant_quota_usages.return_value = \ self.cinder_quota_usages.first() self.mock_volume_get.return_value = self.cinder_volumes.first() self.mock_extension_supported.return_value = True self.mock_availability_zone_list.return_value = \ self.cinder_availability_zones.list() 
self.mock_image_list_detailed.return_value = \ [self.images.list(), False, False] self.mock_volume_create.return_value = volume self.mock_group_list.return_value = [] url = reverse('horizon:project:volumes:create') redirect_url = INDEX_URL res = self.client.post(url, formData) self.assertNoFormErrors(res) self.assertMessageCount(info=1) self.assertRedirectsNoFollow(res, redirect_url) self.mock_volume_type_default.assert_called_once() self.mock_volume_list.assert_called_once_with(test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_volume_type_list.assert_called_once() self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_tenant_quota_usages.assert_called_once() self.mock_volume_get.assert_called_once_with(test.IsHttpRequest(), volume.id) self.mock_extension_supported.assert_called_once_with( test.IsHttpRequest(), 'AvailabilityZones') self.mock_availability_zone_list.assert_called_once() self.mock_image_list_detailed.assert_called_with( test.IsHttpRequest(), filters={'visibility': 'shared', 'status': 'active'}) self.mock_volume_create.assert_called_once_with( test.IsHttpRequest(), formData['size'], formData['name'], formData['description'], None, metadata={}, snapshot_id=None, group_id=None, image_id=None, availability_zone=None, source_volid=volume.id) self.mock_group_list.assert_called_once_with(test.IsHttpRequest()) @test.create_mocks({ quotas: ['tenant_quota_usages'], api.glance: ['image_list_detailed'], cinder: ['extension_supported', 'availability_zone_list', 'volume_type_list', 'volume_list', 'volume_type_default', 'volume_get', 'volume_snapshot_get', 'volume_snapshot_list', 'volume_create', 'group_list'], }) def test_create_volume_from_snapshot_dropdown(self): volume = self.cinder_volumes.first() snapshot = self.cinder_volume_snapshots.first() formData = {'name': 'A Volume I Am Making', 'description': 'This is a volume I am making for a test.', 'method': 'CreateForm', 'size': 50, 'type': '', 'volume_source_type': 'snapshot_source', 'snapshot_source': snapshot.id} self.mock_volume_type_list.return_value = \ self.cinder_volume_types.list() self.mock_volume_snapshot_list.return_value = \ self.cinder_volume_snapshots.list() self.mock_image_list_detailed.return_value = [self.images.list(), False, False] self.mock_volume_type_default.return_value = \ self.cinder_volume_types.first() self.mock_volume_list.return_value = self.cinder_volumes.list() self.mock_tenant_quota_usages.return_value = \ self.cinder_quota_usages.first() self.mock_volume_snapshot_get.return_value = snapshot self.mock_extension_supported.return_value = True self.mock_availability_zone_list.return_value = \ self.cinder_availability_zones.list() self.mock_volume_create.return_value = volume self.mock_group_list.return_value = [] # get snapshot from dropdown list url = reverse('horizon:project:volumes:create') res = self.client.post(url, formData) redirect_url = INDEX_URL self.assertRedirectsNoFollow(res, redirect_url) self.mock_volume_type_list.assert_called_once() self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_image_list_detailed.assert_called_with( test.IsHttpRequest(), filters={'visibility': 'shared', 'status': 'active'}) self.mock_volume_type_default.assert_called_once() self.mock_volume_list.assert_called_once_with(test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_tenant_quota_usages.assert_called_once() self.mock_volume_snapshot_get.assert_called_once_with( test.IsHttpRequest(), 
str(snapshot.id)) self.mock_extension_supported.assert_called_once_with( test.IsHttpRequest(), 'AvailabilityZones') self.mock_availability_zone_list.assert_called_once() self.mock_volume_create.assert_called_once_with( test.IsHttpRequest(), formData['size'], formData['name'], formData['description'], '', metadata={}, snapshot_id=snapshot.id, group_id=None, image_id=None, availability_zone=None, source_volid=None) self.mock_group_list.assert_called_once_with(test.IsHttpRequest()) @test.create_mocks({ quotas: ['tenant_quota_usages'], api.glance: ['image_list_detailed'], cinder: ['volume_snapshot_get', 'volume_type_list', 'volume_type_default', 'volume_get', 'group_list'], }) def test_create_volume_from_snapshot_invalid_size(self): snapshot = self.cinder_volume_snapshots.first() formData = {'name': 'A Volume I Am Making', 'description': 'This is a volume I am making for a test.', 'method': 'CreateForm', 'size': 20, 'snapshot_source': snapshot.id} self.mock_volume_type_list.return_value = \ self.cinder_volume_types.list() self.mock_volume_type_default.return_value = \ self.cinder_volume_types.first() self.mock_tenant_quota_usages.return_value = \ self.cinder_quota_usages.first() self.mock_volume_snapshot_get.return_value = snapshot self.mock_volume_get.return_value = self.cinder_volumes.first() self.mock_group_list.return_value = [] url = reverse('horizon:project:volumes:create') res = self.client.post("?".join([url, "snapshot_id=" + str(snapshot.id)]), formData, follow=True) self.assertEqual(res.redirect_chain, []) self.assertFormError(res, 'form', None, "The volume size cannot be less than the " "snapshot size (40GiB)") self.assertEqual(3, self.mock_volume_type_list.call_count) self.assertEqual(2, self.mock_volume_type_default.call_count) self.mock_volume_snapshot_get.assert_called_with(test.IsHttpRequest(), str(snapshot.id)) self.mock_volume_get.assert_called_with(test.IsHttpRequest(), snapshot.volume_id) self.mock_group_list.assert_called_with(test.IsHttpRequest()) @test.create_mocks({ quotas: ['tenant_quota_usages'], api.glance: ['image_get'], cinder: ['extension_supported', 'availability_zone_list', 'volume_type_default', 'volume_type_list', 'volume_create', 'group_list'], }) def test_create_volume_from_image(self): volume = self.cinder_volumes.first() image = self.images.first() formData = {'name': 'A Volume I Am Making', 'description': 'This is a volume I am making for a test.', 'method': 'CreateForm', 'size': 40, 'type': '', 'image_source': image.id} self.mock_volume_type_default.return_value = \ self.cinder_volume_types.first() self.mock_volume_type_list.ret = self.cinder_volume_types.list() self.mock_tenant_quota_usages.return_value = \ self.cinder_quota_usages.first() self.mock_image_get.return_value = image self.mock_extension_supported.return_value = True self.mock_availability_zone_list.return_value = \ self.cinder_availability_zones.list() self.mock_group_list.return_value = [] self.mock_volume_create.return_value = volume # get image from url url = reverse('horizon:project:volumes:create') res = self.client.post("?".join([url, "image_id=" + str(image.id)]), formData) redirect_url = INDEX_URL self.assertRedirectsNoFollow(res, redirect_url) self.mock_volume_type_default.assert_called_once() self.mock_volume_type_list.assert_called_once() self.mock_tenant_quota_usages.assert_called_once() self.mock_image_get.assert_called_once_with(test.IsHttpRequest(), str(image.id)) self.mock_extension_supported.assert_called_once_with( test.IsHttpRequest(), 'AvailabilityZones') 
self.mock_availability_zone_list.assert_called_once() self.mock_volume_create.assert_called_once_with( test.IsHttpRequest(), formData['size'], formData['name'], formData['description'], '', metadata={}, snapshot_id=None, group_id=None, image_id=image.id, availability_zone=None, source_volid=None) self.mock_group_list.assert_called_with(test.IsHttpRequest()) @test.create_mocks({ quotas: ['tenant_quota_usages'], api.glance: ['image_list_detailed', 'image_get'], cinder: ['extension_supported', 'availability_zone_list', 'volume_snapshot_list', 'volume_list', 'volume_type_list', 'volume_type_default', 'volume_create', 'group_list'], }) def test_create_volume_from_image_dropdown(self): volume = self.cinder_volumes.first() image = self.images.first() formData = {'name': 'A Volume I Am Making', 'description': 'This is a volume I am making for a test.', 'method': 'CreateForm', 'size': 30, 'type': '', 'volume_source_type': 'image_source', 'snapshot_source': self.cinder_volume_snapshots.first().id, 'image_source': image.id} self.mock_volume_type_list.return_value = \ self.cinder_volume_types.list() self.mock_volume_snapshot_list.return_value = \ self.cinder_volume_snapshots.list() self.mock_image_list_detailed.return_value = [self.images.list(), False, False] self.mock_volume_type_default.return_value = \ self.cinder_volume_types.first() self.mock_volume_list.return_value = self.cinder_volumes.list() self.mock_tenant_quota_usages.return_value = \ self.cinder_quota_usages.first() self.mock_image_get.return_value = image self.mock_extension_supported.return_value = True self.mock_availability_zone_list.return_value = \ self.cinder_availability_zones.list() self.mock_group_list.return_value = [] self.mock_volume_create.return_value = volume # get image from dropdown list url = reverse('horizon:project:volumes:create') res = self.client.post(url, formData) redirect_url = INDEX_URL self.assertRedirectsNoFollow(res, redirect_url) self.mock_volume_type_list.assert_called_once() self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_image_list_detailed.assert_called_with( test.IsHttpRequest(), filters={'visibility': 'shared', 'status': 'active'}) self.mock_volume_type_default.assert_called_once() self.mock_volume_list.assert_called_once_with(test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_tenant_quota_usages.assert_called_once() self.mock_image_get.assert_called_with(test.IsHttpRequest(), str(image.id)) self.mock_extension_supported.assert_called_once_with( test.IsHttpRequest(), 'AvailabilityZones') self.mock_availability_zone_list.assert_called_once() self.mock_volume_create.assert_called_once_with( test.IsHttpRequest(), formData['size'], formData['name'], formData['description'], '', metadata={}, snapshot_id=None, group_id=None, image_id=image.id, availability_zone=None, source_volid=None) self.mock_group_list.assert_called_with(test.IsHttpRequest()) @test.create_mocks({ quotas: ['tenant_quota_usages'], api.glance: ['image_get'], cinder: ['extension_supported', 'availability_zone_list', 'volume_type_list', 'volume_type_default', 'group_list'], }) def test_create_volume_from_image_under_image_size(self): image = self.images.first() formData = {'name': 'A Volume I Am Making', 'description': 'This is a volume I am making for a test.', 'method': 'CreateForm', 'size': 1, 'image_source': image.id} self.mock_volume_type_list.return_value = \ self.cinder_volume_types.list() self.mock_volume_type_default.return_value = \ 
self.cinder_volume_types.first() self.mock_tenant_quota_usages.return_value = \ self.cinder_quota_usages.first() self.mock_image_get.return_value = image self.mock_extension_supported.return_value = True self.mock_group_list.return_value = [] url = reverse('horizon:project:volumes:create') res = self.client.post("?".join([url, "image_id=" + str(image.id)]), formData, follow=True) self.assertEqual(res.redirect_chain, []) msg = ("The volume size cannot be less than the " "image size (20.0\xa0GB)") self.assertFormError(res, 'form', None, msg) self.assertEqual(3, self.mock_volume_type_list.call_count) self.assertEqual(2, self.mock_volume_type_default.call_count) self.assertEqual(2, self.mock_tenant_quota_usages.call_count) self.mock_image_get.assert_called_with(test.IsHttpRequest(), str(image.id)) self.mock_extension_supported.assert_called_with(test.IsHttpRequest(), 'AvailabilityZones') self.mock_group_list.assert_called_with(test.IsHttpRequest()) @test.create_mocks({ quotas: ['tenant_quota_usages'], api.glance: ['image_get'], cinder: ['extension_supported', 'availability_zone_list', 'volume_type_list', 'volume_type_default', 'group_list'], }) def _test_create_volume_from_image_under_image_min_disk_size(self, image): formData = {'name': 'A Volume I Am Making', 'description': 'This is a volume I am making for a test.', 'method': 'CreateForm', 'size': 5, 'image_source': image.id} self.mock_volume_type_list.return_value = \ self.cinder_volume_types.list() self.mock_volume_type_default.return_value = \ self.cinder_volume_types.first() self.mock_tenant_quota_usages.return_value = \ self.cinder_quota_usages.first() self.mock_image_get.return_value = image self.mock_extension_supported.return_value = True self.mock_availability_zone_list.return_value = \ self.cinder_availability_zones.list() self.mock_group_list.return_value = [] url = reverse('horizon:project:volumes:create') res = self.client.post("?".join([url, "image_id=" + str(image.id)]), formData, follow=True) self.assertEqual(res.redirect_chain, []) self.assertFormError(res, 'form', None, "The volume size cannot be less than the " "image minimum disk size (30GiB)") self.assertEqual(3, self.mock_volume_type_list.call_count) self.assertEqual(2, self.mock_volume_type_default.call_count) self.assertEqual(2, self.mock_availability_zone_list.call_count) self.mock_image_get.assert_called_with(test.IsHttpRequest(), str(image.id)) self.mock_extension_supported.assert_called_with(test.IsHttpRequest(), 'AvailabilityZones') self.mock_group_list.assert_called_with(test.IsHttpRequest()) def test_create_volume_from_image_under_image_min_disk_size(self): image = self.images.get(name="protected_images") image.min_disk = 30 self._test_create_volume_from_image_under_image_min_disk_size(image) def test_create_volume_from_image_under_image_prop_min_disk_size_v2(self): image = self.imagesV2.get(name="protected_images") self._test_create_volume_from_image_under_image_min_disk_size(image) @test.create_mocks({ quotas: ['tenant_quota_usages'], api.glance: ['image_list_detailed'], cinder: ['extension_supported', 'availability_zone_list', 'volume_list', 'volume_type_list', 'volume_type_default', 'volume_snapshot_list', 'group_list'], }) def test_create_volume_gb_used_over_alloted_quota(self): formData = {'name': 'This Volume Is Huge!', 'description': 'This is a volume that is just too big!', 'method': 'CreateForm', 'size': 5000} usage_limit = self.cinder_quota_usages.first() usage_limit.add_quota(api.base.Quota('volumes', 6)) usage_limit.tally('volumes', 
len(self.cinder_volumes.list())) usage_limit.add_quota(api.base.Quota('gigabytes', 100)) usage_limit.tally('gigabytes', 80) self.mock_volume_type_list.return_value = \ self.cinder_volume_types.list() self.mock_volume_type_default.return_value = \ self.cinder_volume_types.first() self.mock_tenant_quota_usages.return_value = usage_limit self.mock_volume_snapshot_list.return_value = \ self.cinder_volume_snapshots.list() self.mock_image_list_detailed.return_value = [self.images.list(), False, False] self.mock_volume_list.return_value = self.cinder_volumes.list() self.mock_extension_supported.return_value = True self.mock_availability_zone_list.return_value = \ self.cinder_availability_zones.list() self.mock_group_list.return_value = [] url = reverse('horizon:project:volumes:create') res = self.client.post(url, formData) expected_error = ['A volume of 5000GiB cannot be created as you only' ' have 20GiB of your quota available.'] self.assertEqual(res.context['form'].errors['__all__'], expected_error) self.assertEqual(3, self.mock_volume_type_list.call_count) self.assertEqual(2, self.mock_volume_type_default.call_count) self.assertEqual(2, self.mock_volume_list.call_count) self.assertEqual(2, self.mock_availability_zone_list.call_count) self.assertEqual(2, self.mock_tenant_quota_usages.call_count) self.mock_volume_snapshot_list.assert_called_with( test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_image_list_detailed.assert_called_with( test.IsHttpRequest(), filters={'visibility': 'shared', 'status': 'active'}) self.mock_extension_supported.assert_called_with(test.IsHttpRequest(), 'AvailabilityZones') self.mock_group_list.assert_called_with(test.IsHttpRequest()) @test.create_mocks({ quotas: ['tenant_quota_usages'], api.glance: ['image_list_detailed'], cinder: ['extension_supported', 'availability_zone_list', 'volume_list', 'volume_type_list', 'volume_type_default', 'volume_snapshot_list', 'group_list'], }) def test_create_volume_number_over_alloted_quota(self): formData = {'name': 'Too Many...', 'description': 'We have no volumes left!', 'method': 'CreateForm', 'size': 10} usage_limit = self.cinder_quota_usages.first() usage_limit.add_quota(api.base.Quota('volumes', len(self.cinder_volumes.list()))) usage_limit.tally('volumes', len(self.cinder_volumes.list())) usage_limit.add_quota(api.base.Quota('gigabytes', 100)) usage_limit.tally('gigabytes', 20) self.mock_volume_type_list.return_value = \ self.cinder_volume_types.list() self.mock_volume_type_default.return_value = \ self.cinder_volume_types.first() self.mock_tenant_quota_usages.return_value = usage_limit self.mock_volume_snapshot_list.return_value = \ self.cinder_volume_snapshots.list() self.mock_image_list_detailed.return_value = [self.images.list(), False, False] self.mock_volume_list.return_value = self.cinder_volumes.list() self.mock_extension_supported.return_value = True self.mock_availability_zone_list.return_value = \ self.cinder_availability_zones.list() self.mock_group_list.return_value = [] url = reverse('horizon:project:volumes:create') res = self.client.post(url, formData) expected_error = ['You are already using all of your available' ' volumes.'] self.assertEqual(res.context['form'].errors['__all__'], expected_error) self.assertEqual(3, self.mock_volume_type_list.call_count) self.assertEqual(2, self.mock_volume_type_default.call_count) self.assertEqual(2, self.mock_availability_zone_list.call_count) self.mock_volume_snapshot_list.assert_called_with( test.IsHttpRequest(), search_opts=SEARCH_OPTS) 
self.mock_image_list_detailed.assert_called_with( test.IsHttpRequest(), filters={'visibility': 'shared', 'status': 'active'}) self.mock_volume_list.assert_called_with(test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_extension_supported.assert_called_with(test.IsHttpRequest(), 'AvailabilityZones') self.mock_group_list.assert_called_with(test.IsHttpRequest()) @test.create_mocks({ cinder: ['volume_create', 'volume_snapshot_list', 'volume_type_list', 'volume_type_default', 'volume_list', 'availability_zone_list', 'extension_supported', 'group_list'], quotas: ['tenant_quota_usages'], api.glance: ['image_list_detailed'], }) def test_create_volume_with_group(self): volume = self.cinder_volumes.first() volume_type = self.cinder_volume_types.first() az = self.cinder_availability_zones.first().zoneName volume_group = self.cinder_groups.list()[0] formData = {'name': 'A Volume I Am Making', 'description': 'This is a volume I am making for a test.', 'method': 'CreateForm', 'type': volume_type.name, 'size': 50, 'snapshot_source': '', 'availability_zone': az, 'group': volume_group.id} self.mock_volume_type_default.return_value = \ self.cinder_volume_types.first() self.mock_volume_type_list.return_value = \ self.cinder_volume_types.list() self.mock_tenant_quota_usages.return_value = \ self.cinder_quota_usages.first() self.mock_volume_snapshot_list.return_value = \ self.cinder_volume_snapshots.list() self.mock_image_list_detailed.return_value = [[], False, False] self.mock_availability_zone_list.return_value = \ self.cinder_availability_zones.list() self.mock_extension_supported.return_value = True self.mock_volume_list.return_value = self.cinder_volumes.list() self.mock_volume_create.return_value = volume self.mock_group_list.return_value = self.cinder_groups.list() url = reverse('horizon:project:volumes:create') res = self.client.post(url, formData) self.assertNoFormErrors(res) redirect_url = INDEX_URL self.assertRedirectsNoFollow(res, redirect_url) self.mock_volume_type_default.assert_called_once() self.mock_volume_type_list.assert_called_once() self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_availability_zone_list.assert_called_once() self.mock_extension_supported.assert_called_once_with( test.IsHttpRequest(), 'AvailabilityZones') self.mock_volume_list.assert_called_once_with(test.IsHttpRequest(), search_opts=SEARCH_OPTS) self.mock_volume_create.assert_called_once_with( test.IsHttpRequest(), formData['size'], formData['name'], formData['description'], formData['type'], metadata={}, snapshot_id=None, group_id=volume_group.id, image_id=None, availability_zone=formData['availability_zone'], source_volid=None) self.mock_image_list_detailed.assert_called_with( test.IsHttpRequest(), filters={'visibility': 'shared', 'status': 'active'}) self.mock_tenant_quota_usages.assert_called_once_with( test.IsHttpRequest(), targets=('volumes', 'gigabytes')) self.mock_group_list.assert_called_with(test.IsHttpRequest()) @test.create_mocks({ api.nova: ['server_list'], cinder: ['volume_delete', 'volume_snapshot_list', 'volume_list_paged', 'tenant_absolute_limits'], }) def test_delete_volume(self): volumes = self.cinder_volumes.list() volume = self.cinder_volumes.first() formData = {'action': 'volumes__delete__%s' % volume.id} self.mock_volume_list_paged.return_value = [volumes, False, False] self.mock_volume_snapshot_list.return_value = [] self.mock_server_list.return_value = [self.servers.list(), False] self.mock_volume_list_paged.return_value = 
[volumes, False, False] self.mock_tenant_absolute_limits.return_value = \ self.cinder_limits['absolute'] url = INDEX_URL res = self.client.post(url, formData, follow=True) self.assertIn("Scheduled deletion of Volume: Volume name", [m.message for m in res.context['messages']]) self.mock_volume_list_paged.assert_called_with( test.IsHttpRequest(), marker=None, paginate=True, sort_dir='desc', search_opts=None) self.assertEqual(2, self.mock_volume_snapshot_list.call_count) self.mock_volume_delete.assert_called_once_with(test.IsHttpRequest(), volume.id) self.mock_server_list.assert_called_with(test.IsHttpRequest(), search_opts=None) self.assertEqual(8, self.mock_tenant_absolute_limits.call_count) @mock.patch.object(quotas, 'tenant_quota_usages') @mock.patch.object(cinder, 'tenant_absolute_limits') @mock.patch.object(cinder, 'volume_get') def test_delete_volume_with_snap_no_action_item(self, mock_get, mock_limits, mock_quotas): volume = self.cinder_volumes.get(name='Volume name') setattr(volume, 'has_snapshot', True) limits = self.cinder_limits['absolute'] mock_get.return_value = volume mock_limits.return_value = limits mock_quotas.return_value = self.cinder_quota_usages.first() url = (INDEX_URL + "?action=row_update&table=volumes&obj_id=" + volume.id) res = self.client.get(url, {}, HTTP_X_REQUESTED_WITH='XMLHttpRequest') self.assertEqual(res.status_code, 200) mock_quotas.assert_called_once_with(test.IsHttpRequest(), targets=('volumes', 'gigabytes')) self.assert_mock_multiple_calls_with_same_arguments( mock_limits, 2, mock.call(test.IsHttpRequest())) self.assertNotContains(res, 'Delete Volume') self.assertNotContains(res, 'delete') @mock.patch.object(api.nova, 'server_list') @mock.patch.object(cinder, 'volume_get') @override_settings(OPENSTACK_HYPERVISOR_FEATURES={'can_set_mount_point': True}) def test_edit_attachments(self, mock_get, mock_server_list): volume = self.cinder_volumes.first() servers = [s for s in self.servers.list() if s.tenant_id == self.request.user.tenant_id] volume.attachments = [{'id': volume.id, 'volume_id': volume.id, 'volume_name': volume.name, "attachment_id": ATTACHMENT_ID, 'instance': servers[0], 'device': '/dev/vdb', 'server_id': servers[0].id}] mock_get.return_value = volume mock_server_list.return_value = [servers, False] url = reverse('horizon:project:volumes:attach', args=[volume.id]) res = self.client.get(url) msg = 'Volume %s on instance %s' % (volume.name, servers[0].name) self.assertContains(res, msg) # Asserting length of 2 accounts for the one instance option, # and the one 'Choose Instance' option. 
form = res.context['form'] self.assertEqual(len(form.fields['instance']._choices), 1) self.assertEqual(res.status_code, 200) self.assertIsInstance(form.fields['device'].widget, widgets.TextInput) self.assertFalse(form.fields['device'].required) mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) mock_server_list.assert_called_once() @mock.patch.object(api.nova, 'server_list') @mock.patch.object(cinder, 'volume_get') @override_settings(OPENSTACK_HYPERVISOR_FEATURES={'can_set_mount_point': True}) def test_edit_attachments_auto_device_name(self, mock_get, mock_server_list): volume = self.cinder_volumes.first() servers = [s for s in self.servers.list() if s.tenant_id == self.request.user.tenant_id] volume.attachments = [{'id': volume.id, 'volume_id': volume.id, 'volume_name': volume.name, "attachment_id": ATTACHMENT_ID, 'instance': servers[0], 'device': '', 'server_id': servers[0].id}] mock_get.return_value = volume mock_server_list.return_value = [servers, False] url = reverse('horizon:project:volumes:attach', args=[volume.id]) res = self.client.get(url) form = res.context['form'] self.assertIsInstance(form.fields['device'].widget, widgets.TextInput) self.assertFalse(form.fields['device'].required) mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) mock_server_list.assert_called_once() @mock.patch.object(api.nova, 'server_list') @mock.patch.object(cinder, 'volume_get') def test_edit_attachments_cannot_set_mount_point(self, mock_get, mock_server_list): volume = self.cinder_volumes.first() url = reverse('horizon:project:volumes:attach', args=[volume.id]) res = self.client.get(url) # Assert the device field is hidden. form = res.context['form'] self.assertIsInstance(form.fields['device'].widget, widgets.HiddenInput) mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) mock_server_list.assert_called_once() @mock.patch.object(api.nova, 'server_list') @mock.patch.object(cinder, 'volume_get') def test_edit_attachments_attached_volume(self, mock_get, mock_server_list): servers = [s for s in self.servers.list() if s.tenant_id == self.request.user.tenant_id] server = servers[0] volume = self.cinder_volumes.list()[0] mock_get.return_value = volume mock_server_list.return_value = [servers, False] url = reverse('horizon:project:volumes:attach', args=[volume.id]) res = self.client.get(url) self.assertEqual(res.context['form'].fields['instance']._choices[0][1], "Select an instance") self.assertEqual(len(res.context['form'].fields['instance'].choices), 2) self.assertEqual(res.context['form'].fields['instance']._choices[1][0], server.id) self.assertEqual(res.status_code, 200) mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) mock_server_list.assert_called_once() @mock.patch.object(quotas, 'tenant_quota_usages') @mock.patch.object(cinder, 'tenant_absolute_limits') @mock.patch.object(cinder, 'volume_get') def test_create_snapshot_button_attributes(self, mock_get, mock_limits, mock_quotas): limits = {'maxTotalSnapshots': 2} limits['totalSnapshotsUsed'] = 1 volume = self.cinder_volumes.first() mock_get.return_value = volume mock_limits.return_value = limits mock_quotas.return_value = self.cinder_quota_usages.first() res_url = (INDEX_URL + "?action=row_update&table=volumes&obj_id=" + volume.id) res = self.client.get(res_url, {}, HTTP_X_REQUESTED_WITH='XMLHttpRequest') action_name = ('%(table)s__row_%(id)s__action_%(action)s' % {'table': 'volumes', 'id': volume.id, 'action': 'snapshots'}) content = res.content.decode('utf-8') self.assertIn(action_name, 
content) self.assertIn('Create Snapshot', content) self.assertIn(reverse('horizon:project:volumes:create_snapshot', args=[volume.id]), content) self.assertNotIn('disabled', content) mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) mock_quotas.assert_called_once_with(test.IsHttpRequest(), targets=('volumes', 'gigabytes')) self.assert_mock_multiple_calls_with_same_arguments( mock_limits, 2, mock.call(test.IsHttpRequest())) @mock.patch.object(quotas, 'tenant_quota_usages') @mock.patch.object(cinder, 'tenant_absolute_limits') @mock.patch.object(cinder, 'volume_get') def test_create_snapshot_button_disabled_when_quota_exceeded( self, mock_get, mock_limits, mock_quotas): limits = {'maxTotalSnapshots': 1} limits['totalSnapshotsUsed'] = limits['maxTotalSnapshots'] volume = self.cinder_volumes.first() mock_get.return_value = volume mock_limits.return_value = limits mock_quotas.return_value = self.cinder_quota_usages.first() res_url = (INDEX_URL + "?action=row_update&table=volumes&obj_id=" + volume.id) res = self.client.get(res_url, {}, HTTP_X_REQUESTED_WITH='XMLHttpRequest') action_name = ('%(table)s__row_%(id)s__action_%(action)s' % {'table': 'volumes', 'id': volume.id, 'action': 'snapshots'}) content = res.content.decode('utf-8') self.assertIn(action_name, content) self.assertIn('Create Snapshot (Quota exceeded)', content) self.assertIn(reverse('horizon:project:volumes:create_snapshot', args=[volume.id]), content) self.assertIn('disabled', content, 'The create snapshot button should be disabled') mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) mock_quotas.assert_called_once_with(test.IsHttpRequest(), targets=('volumes', 'gigabytes')) self.assert_mock_multiple_calls_with_same_arguments( mock_limits, 2, mock.call(test.IsHttpRequest())) @test.create_mocks({ api.nova: ['server_list'], cinder: ['volume_backup_supported', 'volume_snapshot_list', 'volume_list_paged', 'tenant_absolute_limits'], }) def test_create_button_attributes(self): limits = self.cinder_limits['absolute'] limits['maxTotalVolumes'] = 10 limits['totalVolumesUsed'] = 1 volumes = self.cinder_volumes.list() self.mock_volume_backup_supported.return_value = True self.mock_volume_list_paged.return_value = [volumes, False, False] self.mock_volume_snapshot_list.return_value = [] self.mock_server_list.return_value = [self.servers.list(), False] self.mock_tenant_absolute_limits.return_value = limits res = self.client.get(INDEX_URL) self.assertTemplateUsed(res, 'horizon/common/_data_table_view.html') volumes = res.context['volumes_table'].data self.assertCountEqual(volumes, self.cinder_volumes.list()) create_action = self.getAndAssertTableAction(res, 'volumes', 'create') self.assertEqual(set(['ajax-modal', 'ajax-update', 'btn-create']), set(create_action.classes)) self.assertEqual('Create Volume', create_action.verbose_name) self.assertEqual('horizon:project:volumes:create', create_action.url) self.assertEqual((('volume', 'volume:create'),), create_action.policy_rules) self.assertEqual(5, self.mock_volume_backup_supported.call_count) self.mock_volume_list_paged.assert_called_once_with( test.IsHttpRequest(), sort_dir='desc', marker=None, paginate=True, search_opts=None) self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts=None) self.mock_server_list.assert_called_once_with(test.IsHttpRequest(), search_opts=None) self.assertEqual(9, self.mock_tenant_absolute_limits.call_count) @test.create_mocks({ api.nova: ['server_list'], cinder: ['volume_backup_supported', 
'volume_snapshot_list', 'volume_list_paged', 'tenant_absolute_limits'], }) def test_create_button_disabled_when_quota_exceeded(self): limits = self.cinder_limits['absolute'] limits['totalVolumesUsed'] = limits['maxTotalVolumes'] volumes = self.cinder_volumes.list() self.mock_volume_backup_supported.return_value = True self.mock_volume_list_paged.return_value = [volumes, False, False] self.mock_volume_snapshot_list.return_value = [] self.mock_server_list.return_value = [self.servers.list(), False] self.mock_tenant_absolute_limits.return_value = limits res = self.client.get(INDEX_URL) self.assertTemplateUsed(res, 'horizon/common/_data_table_view.html') volumes = res.context['volumes_table'].data self.assertCountEqual(volumes, self.cinder_volumes.list()) create_action = self.getAndAssertTableAction(res, 'volumes', 'create') self.assertIn('disabled', create_action.classes, 'The create button should be disabled') self.assertEqual(5, self.mock_volume_backup_supported.call_count) self.mock_volume_list_paged.assert_called_once_with( test.IsHttpRequest(), marker=None, paginate=True, sort_dir='desc', search_opts=None) self.mock_server_list.assert_called_once_with(test.IsHttpRequest(), search_opts=None) self.assertEqual(9, self.mock_tenant_absolute_limits.call_count) @test.create_mocks({ api.nova: ['server_get'], cinder: ['volume_snapshot_list', 'volume_get', 'tenant_absolute_limits'], }) def test_detail_view(self): volume = self.cinder_volumes.first() server = self.servers.first() snapshots = self.cinder_volume_snapshots.list() volume.attachments = [{"server_id": server.id, "attachment_id": ATTACHMENT_ID}] self.mock_volume_get.return_value = volume self.mock_volume_snapshot_list.return_value = snapshots self.mock_server_get.return_value = server self.mock_tenant_absolute_limits.return_value = \ self.cinder_limits['absolute'] url = reverse('horizon:project:volumes:detail', args=[volume.id]) res = self.client.get(url) self.assertTemplateUsed(res, 'horizon/common/_detail.html') self.assertEqual(res.context['volume'].id, volume.id) self.assertNoMessages() self.mock_volume_get.assert_called_once_with(test.IsHttpRequest(), volume.id) self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts={'volume_id': volume.id}) self.mock_server_get.assert_called_once_with(test.IsHttpRequest(), server.id) self.mock_tenant_absolute_limits.assert_called_once() @mock.patch.object(cinder, 'volume_get_encryption_metadata') @mock.patch.object(cinder, 'volume_get') def test_encryption_detail_view_encrypted(self, mock_get, mock_encryption): enc_meta = self.cinder_volume_encryption.first() volume = self.cinder_volumes.get(name='my_volume2') mock_encryption.return_value = enc_meta mock_get.return_value = volume url = reverse('horizon:project:volumes:encryption_detail', args=[volume.id]) res = self.client.get(url) self.assertContains(res, "Volume Encryption Details: %s" % volume.name, 2, 200) self.assertContains(res, "<dd>%s</dd>" % volume.volume_type, 1, 200) self.assertContains(res, "<dd>%s</dd>" % enc_meta.provider, 1, 200) self.assertContains(res, "<dd>%s</dd>" % enc_meta.control_location, 1, 200) self.assertContains(res, "<dd>%s</dd>" % enc_meta.cipher, 1, 200) self.assertContains(res, "<dd>%s</dd>" % enc_meta.key_size, 1, 200) self.assertNoMessages() mock_encryption.assert_called_once_with(test.IsHttpRequest(), volume.id) mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) @mock.patch.object(cinder, 'volume_get_encryption_metadata') @mock.patch.object(cinder, 'volume_get') def 
test_encryption_detail_view_unencrypted(self, mock_get, mock_encryption): enc_meta = self.cinder_volume_encryption.list()[1] volume = self.cinder_volumes.get(name='my_volume2') mock_encryption.return_value = enc_meta mock_get.return_value = volume url = reverse('horizon:project:volumes:encryption_detail', args=[volume.id]) res = self.client.get(url) self.assertContains(res, "Volume Encryption Details: %s" % volume.name, 2, 200) self.assertContains(res, "<h3>Volume is Unencrypted</h3>", 1, 200) self.assertNoMessages() mock_encryption.assert_called_once_with(test.IsHttpRequest(), volume.id) mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) @mock.patch.object(quotas, 'tenant_quota_usages') @mock.patch.object(cinder, 'tenant_absolute_limits') @mock.patch.object(cinder, 'volume_get') def test_get_data(self, mock_get, mock_limits, mock_quotas): volume = self.cinder_volumes.get(name='v2_volume') volume._apiresource.name = "" mock_get.return_value = volume mock_limits.return_value = self.cinder_limits['absolute'] mock_quotas.return_value = self.cinder_quota_usages.first() url = (INDEX_URL + "?action=row_update&table=volumes&obj_id=" + volume.id) res = self.client.get(url, {}, HTTP_X_REQUESTED_WITH='XMLHttpRequest') self.assertEqual(res.status_code, 200) self.assertEqual(volume.name, volume.id) mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) mock_quotas.assert_called_once_with(test.IsHttpRequest(), targets=('volumes', 'gigabytes')) self.assert_mock_multiple_calls_with_same_arguments( mock_limits, 2, mock.call(test.IsHttpRequest())) @test.create_mocks({ api.nova: ['server_get'], cinder: ['tenant_absolute_limits', 'volume_get', 'volume_snapshot_list'], }) def test_detail_view_snapshot_tab(self): volume = self.cinder_volumes.first() server = self.servers.first() snapshots = self.cinder_volume_snapshots.list() this_volume_snapshots = [snapshot for snapshot in snapshots if snapshot.volume_id == volume.id] volume.attachments = [{"server_id": server.id, "attachment_id": ATTACHMENT_ID}] self.mock_volume_get.return_value = volume self.mock_server_get.return_value = server self.mock_tenant_absolute_limits.return_value = \ self.cinder_limits['absolute'] self.mock_volume_snapshot_list.return_value = this_volume_snapshots url = '?'.join([reverse(DETAIL_URL, args=[volume.id]), '='.join(['tab', 'volume_details__snapshots_tab'])]) res = self.client.get(url) self.assertTemplateUsed(res, 'horizon/common/_detail.html') self.assertEqual(res.context['volume'].id, volume.id) self.assertEqual(len(res.context['table'].data), len(this_volume_snapshots)) self.assertNoMessages() self.mock_volume_get.assert_called_once_with(test.IsHttpRequest(), volume.id) self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts={'volume_id': volume.id}) self.mock_tenant_absolute_limits.assert_called_once() @test.create_mocks({cinder: ['volume_get', 'message_list', 'volume_snapshot_list', 'tenant_absolute_limits']}) def test_detail_view_with_messages_tab(self): volume = self.cinder_volumes.first() messages = [msg for msg in self.cinder_messages.list() if msg.resource_type == 'VOLUME'] snapshots = self.cinder_volume_snapshots.list() self.mock_volume_get.return_value = volume self.mock_message_list.return_value = messages self.mock_volume_snapshot_list.return_value = snapshots self.mock_tenant_absolute_limits.return_value = \ self.cinder_limits['absolute'] url = reverse(DETAIL_URL, args=[volume.id]) detail_view = tabs.VolumeDetailTabs(self.request) messages_tab_link = "?%s=%s" % 
( detail_view.param_name, detail_view.get_tab("messages_tab").get_id()) url += messages_tab_link res = self.client.get(url) self.assertTemplateUsed(res, 'horizon/common/_detail.html') self.assertContains(res, messages[0].user_message) self.assertContains(res, messages[1].user_message) self.assertNoMessages() self.mock_volume_get.assert_called_once_with(test.IsHttpRequest(), volume.id) self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts={'volume_id': volume.id}) self.mock_tenant_absolute_limits.assert_called_once_with( test.IsHttpRequest()) search_opts = {'resource_type': 'volume', 'resource_uuid': volume.id} self.mock_message_list.assert_called_once_with( test.IsHttpRequest(), search_opts=search_opts) @mock.patch.object(cinder, 'volume_get') def test_detail_view_with_exception(self, mock_get): volume = self.cinder_volumes.first() server = self.servers.first() volume.attachments = [{"server_id": server.id, "attachment_id": ATTACHMENT_ID}] mock_get.side_effect = self.exceptions.cinder url = reverse('horizon:project:volumes:detail', args=[volume.id]) res = self.client.get(url) self.assertRedirectsNoFollow(res, INDEX_URL) mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) @test.create_mocks({cinder: ['volume_update', 'volume_set_bootable', 'volume_get']}) def test_update_volume(self): volume = self.cinder_volumes.get(name="my_volume") self.mock_volume_get.return_value = volume formData = {'method': 'UpdateForm', 'name': volume.name, 'description': volume.description, 'bootable': False} url = reverse('horizon:project:volumes:update', args=[volume.id]) res = self.client.post(url, formData) self.assertRedirectsNoFollow(res, INDEX_URL) self.mock_volume_get.assert_called_once_with( test.IsHttpRequest(), volume.id) self.mock_volume_update.assert_called_once_with( test.IsHttpRequest(), volume.id, volume.name, volume.description) self.mock_volume_set_bootable.assert_called_once_with( test.IsHttpRequest(), volume.id, False) @test.create_mocks({cinder: ['volume_update', 'volume_set_bootable', 'volume_get']}) def test_update_volume_without_name(self): volume = self.cinder_volumes.get(name="my_volume") self.mock_volume_get.return_value = volume formData = {'method': 'UpdateForm', 'name': '', 'description': volume.description, 'bootable': False} url = reverse('horizon:project:volumes:update', args=[volume.id]) res = self.client.post(url, formData) self.assertRedirectsNoFollow(res, INDEX_URL) self.mock_volume_get.assert_called_once_with(test.IsHttpRequest(), volume.id) self.mock_volume_update.assert_called_once_with( test.IsHttpRequest(), volume.id, '', volume.description) self.mock_volume_set_bootable.assert_called_once_with( test.IsHttpRequest(), volume.id, False) @test.create_mocks({cinder: ['volume_update', 'volume_set_bootable', 'volume_get']}) def test_update_volume_bootable_flag(self): volume = self.cinder_bootable_volumes.get(name="my_volume") self.mock_volume_get.return_value = volume formData = {'method': 'UpdateForm', 'name': volume.name, 'description': 'update bootable flag', 'bootable': True} url = reverse('horizon:project:volumes:update', args=[volume.id]) res = self.client.post(url, formData) self.assertRedirectsNoFollow(res, INDEX_URL) self.mock_volume_get.assert_called_once_with(test.IsHttpRequest(), volume.id) self.mock_volume_update.assert_called_once_with( test.IsHttpRequest(), volume.id, volume.name, 'update bootable flag') self.mock_volume_set_bootable.assert_called_once_with( test.IsHttpRequest(), volume.id, True) 
@mock.patch.object(api.glance, 'get_image_schemas') @mock.patch.object(cinder, 'volume_upload_to_image') @mock.patch.object(cinder, 'volume_get') def test_upload_to_image(self, mock_get, mock_upload, mock_schemas_list): volume = self.cinder_volumes.get(name='v2_volume') loaded_resp = {'container_format': 'bare', 'disk_format': 'raw', 'id': '741fe2ac-aa2f-4cec-82a9-4994896b43fb', 'image_id': '2faa080b-dd56-4bf0-8f0a-0d4627d8f306', 'image_name': 'test', 'size': '2', 'status': 'uploading'} form_data = {'id': volume.id, 'name': volume.name, 'image_name': 'testimage', 'force': True, 'container_format': 'bare', 'disk_format': 'raw'} mock_schemas_list.return_value = self.image_schemas.first() mock_get.return_value = volume mock_upload.return_value = loaded_resp url = reverse('horizon:project:volumes:upload_to_image', args=[volume.id]) res = self.client.post(url, form_data) self.assertNoFormErrors(res) self.assertMessageCount(info=1) redirect_url = INDEX_URL self.assertRedirectsNoFollow(res, redirect_url) mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) mock_upload.assert_called_once_with(test.IsHttpRequest(), form_data['id'], form_data['force'], form_data['image_name'], form_data['container_format'], form_data['disk_format']) @mock.patch.object(quotas, 'tenant_quota_usages') @mock.patch.object(cinder, 'volume_extend') @mock.patch.object(cinder, 'volume_get') def test_extend_volume(self, mock_get, mock_extend, mock_quotas): volume = self.cinder_volumes.first() formData = {'name': 'A Volume I Am Making', 'orig_size': volume.size, 'new_size': 120} mock_get.return_value = volume mock_quotas.return_value = self.cinder_quota_usages.first() mock_extend.return_value = volume url = reverse('horizon:project:volumes:extend', args=[volume.id]) res = self.client.post(url, formData) redirect_url = INDEX_URL self.assertRedirectsNoFollow(res, redirect_url) mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) mock_quotas.assert_called_once() mock_extend.assert_called_once_with(test.IsHttpRequest(), volume.id, formData['new_size']) @mock.patch.object(quotas, 'tenant_quota_usages') @mock.patch.object(cinder, 'volume_get') def test_extend_volume_with_wrong_size(self, mock_get, mock_quotas): volume = self.cinder_volumes.first() formData = {'name': 'A Volume I Am Making', 'orig_size': volume.size, 'new_size': 10} mock_get.return_value = volume mock_quotas.return_value = self.cinder_quota_usages.first() url = reverse('horizon:project:volumes:extend', args=[volume.id]) res = self.client.post(url, formData) self.assertFormErrors(res, 1, "New size must be greater than " "current size.") mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) mock_quotas.assert_called_once() @mock.patch.object(quotas, 'tenant_quota_usages') @mock.patch.object(cinder, 'tenant_absolute_limits') @mock.patch.object(cinder, 'volume_get') def test_retype_volume_supported_action_item(self, mock_get, mock_limits, mock_quotas): volume = self.cinder_volumes.get(name='v2_volume') limits = self.cinder_limits['absolute'] mock_get.return_value = volume mock_limits.return_value = limits mock_quotas.return_value = self.cinder_quota_usages.first() url = (INDEX_URL + "?action=row_update&table=volumes&obj_id=" + volume.id) res = self.client.get(url, {}, HTTP_X_REQUESTED_WITH='XMLHttpRequest') self.assertEqual(res.status_code, 200) self.assertContains(res, 'Change Volume Type') self.assertContains(res, 'retype') mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) 
mock_quotas.assert_called_once_with(test.IsHttpRequest(), targets=('volumes', 'gigabytes')) self.assert_mock_multiple_calls_with_same_arguments( mock_limits, 2, mock.call(test.IsHttpRequest())) @test.create_mocks({ cinder: ['volume_type_list', 'volume_retype', 'volume_get'] }) def test_retype_volume(self): volume = self.cinder_volumes.get(name='my_volume2') volume_type = self.cinder_volume_types.get(name='vol_type_1') form_data = {'id': volume.id, 'name': volume.name, 'volume_type': volume_type.name, 'migration_policy': 'on-demand'} self.mock_volume_get.return_value = volume self.mock_volume_type_list.return_value = \ self.cinder_volume_types.list() self.mock_volume_retype.return_value = True url = reverse('horizon:project:volumes:retype', args=[volume.id]) res = self.client.post(url, form_data) self.assertNoFormErrors(res) redirect_url = INDEX_URL self.assertRedirectsNoFollow(res, redirect_url) self.mock_volume_get.assert_called_once_with(test.IsHttpRequest(), volume.id) self.mock_volume_type_list.assert_called_once() self.mock_volume_retype.assert_called_once_with( test.IsHttpRequest(), volume.id, form_data['volume_type'], form_data['migration_policy']) def test_encryption_false(self): self._test_encryption(False) def test_encryption_true(self): self._test_encryption(True) @test.create_mocks({ api.nova: ['server_list'], cinder: ['volume_backup_supported', 'volume_list_paged', 'volume_snapshot_list', 'tenant_absolute_limits'], }) def _test_encryption(self, encryption): volumes = self.cinder_volumes.list() for volume in volumes: volume.encrypted = encryption limits = self.cinder_limits['absolute'] self.mock_volume_backup_supported.return_value = False self.mock_volume_list_paged.return_value = [self.cinder_volumes.list(), False, False] self.mock_volume_snapshot_list.return_value = \ self.cinder_volume_snapshots.list() self.mock_server_list.return_value = [self.servers.list(), False] self.mock_tenant_absolute_limits.return_value = limits res = self.client.get(INDEX_URL) rows = res.context['volumes_table'].get_rows() column_value = 'Yes' if encryption else 'No' for row in rows: self.assertEqual(row.cells['encryption'].data, column_value) self.assertEqual(10, self.mock_volume_backup_supported.call_count) self.mock_volume_list_paged.assert_called_once_with( test.IsHttpRequest(), marker=None, sort_dir='desc', search_opts=None, paginate=True) self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts=None) self.assertEqual(13, self.mock_tenant_absolute_limits.call_count) @mock.patch.object(quotas, 'tenant_quota_usages') @mock.patch.object(cinder, 'volume_get') def test_extend_volume_with_size_out_of_quota(self, mock_get, mock_quotas): volume = self.cinder_volumes.first() usage_limit = self.cinder_quota_usages.first() usage_limit.add_quota(api.base.Quota('gigabytes', 100)) usage_limit.tally('gigabytes', 20) usage_limit.tally('volumes', len(self.cinder_volumes.list())) formData = {'name': 'A Volume I Am Making', 'orig_size': volume.size, 'new_size': 1000} mock_quotas.return_value = usage_limit mock_get.return_value = volume url = reverse('horizon:project:volumes:extend', args=[volume.id]) res = self.client.post(url, formData) self.assertFormError(res, "form", "new_size", "Volume cannot be extended to 1000GiB as " "the maximum size it can be extended to is " "120GiB.") mock_get.assert_called_once_with(test.IsHttpRequest(), volume.id) self.assertEqual(2, mock_quotas.call_count) @test.create_mocks({ api.nova: ['server_list'], cinder: ['volume_backup_supported', 
'volume_list_paged', 'volume_snapshot_list', 'tenant_absolute_limits'], }) def test_create_transfer_availability(self): limits = self.cinder_limits['absolute'] self.mock_volume_backup_supported.return_value = False self.mock_volume_list_paged.return_value = [self.cinder_volumes.list(), False, False] self.mock_volume_snapshot_list.return_value = [] self.mock_server_list.return_value = [self.servers.list(), False] self.mock_tenant_absolute_limits.return_value = limits res = self.client.get(INDEX_URL) table = res.context['volumes_table'] # Verify that the create transfer action is present if and only if # the volume is available for vol in table.data: actions = [a.name for a in table.get_row_actions(vol)] self.assertEqual('create_transfer' in actions, vol.status == 'available') self.assertEqual(10, self.mock_volume_backup_supported.call_count) self.mock_volume_list_paged.assert_called_once_with( test.IsHttpRequest(), marker=None, sort_dir='desc', search_opts=None, paginate=True) self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts=None) self.mock_server_list.assert_called_once_with(test.IsHttpRequest(), search_opts=None) self.assertEqual(13, self.mock_tenant_absolute_limits.call_count) @mock.patch.object(cinder, 'transfer_get') @mock.patch.object(cinder, 'transfer_create') def test_create_transfer(self, mock_transfer_create, mock_transfer_get): volumes = self.cinder_volumes.list() volToTransfer = [v for v in volumes if v.status == 'available'][0] formData = {'volume_id': volToTransfer.id, 'name': 'any transfer name'} transfer = self.cinder_volume_transfers.first() mock_transfer_create.return_value = transfer mock_transfer_get.return_value = transfer # Create a transfer for the first available volume url = reverse('horizon:project:volumes:create_transfer', args=[volToTransfer.id]) res = self.client.post(url, formData) self.assertNoFormErrors(res) mock_transfer_create.assert_called_once_with(test.IsHttpRequest(), formData['volume_id'], formData['name']) mock_transfer_get.assert_called_once_with(test.IsHttpRequest(), transfer.id) @test.create_mocks({ api.nova: ['server_list'], cinder: ['volume_backup_supported', 'volume_list_paged', 'volume_snapshot_list', 'transfer_delete', 'tenant_absolute_limits'], }) def test_delete_transfer(self): transfer = self.cinder_volume_transfers.first() volumes = [] # Attach the volume transfer to the relevant volume for v in self.cinder_volumes.list(): if v.id == transfer.volume_id: v.status = 'awaiting-transfer' v.transfer = transfer volumes.append(v) formData = {'action': 'volumes__delete_transfer__%s' % transfer.volume_id} self.mock_volume_backup_supported.return_value = False self.mock_volume_list_paged.return_value = [volumes, False, False] self.mock_volume_snapshot_list.return_value = [] self.mock_server_list.return_value = [self.servers.list(), False] self.mock_tenant_absolute_limits.return_value = \ self.cinder_limits['absolute'] url = INDEX_URL res = self.client.post(url, formData, follow=True) self.assertNoFormErrors(res) self.assertIn('Successfully deleted volume transfer "test transfer"', [m.message for m in res.context['messages']]) self.assertEqual(5, self.mock_volume_backup_supported.call_count) self.mock_volume_list_paged.assert_called_once_with( test.IsHttpRequest(), marker=None, search_opts=None, sort_dir='desc', paginate=True) self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts=None) self.mock_transfer_delete.assert_called_once_with(test.IsHttpRequest(), transfer.id) 
self.mock_server_list.assert_called_once_with(test.IsHttpRequest(), search_opts=None) self.assertEqual(8, self.mock_tenant_absolute_limits.call_count) @test.create_mocks({ api.nova: ['server_list'], cinder: ['volume_list_paged', 'volume_snapshot_list', 'tenant_absolute_limits', 'transfer_accept'] }) def test_accept_transfer(self): transfer = self.cinder_volume_transfers.first() self.mock_tenant_absolute_limits.return_value = \ self.cinder_limits['absolute'] formData = {'transfer_id': transfer.id, 'auth_key': transfer.auth_key} url = reverse('horizon:project:volumes:accept_transfer') res = self.client.post(url, formData, follow=True) self.assertNoFormErrors(res) self.mock_transfer_accept.assert_called_once_with(test.IsHttpRequest(), transfer.id, transfer.auth_key) self.assertEqual(3, self.mock_tenant_absolute_limits.call_count) self.mock_server_list.assert_called_once() self.mock_volume_list_paged.assert_called_once() self.mock_volume_snapshot_list.assert_called_once() self.mock_transfer_accept.assert_called_once() @mock.patch.object(cinder, 'transfer_get') def test_download_transfer_credentials(self, mock_transfer): transfer = self.cinder_volume_transfers.first() filename = "{}.txt".format(slugify(transfer.id)) url = reverse('horizon:project:volumes:' 'download_transfer_creds', kwargs={'transfer_id': transfer.id, 'auth_key': transfer.auth_key}) res = self.client.get(url) self.assertTrue(res.has_header('content-disposition')) self.assertTrue(res.has_header('content-type')) self.assertEqual(res.get('content-disposition'), 'attachment; filename={}'.format(filename)) self.assertEqual(res.get('content-type'), 'application/text') self.assertIn(transfer.id, res.content.decode('utf-8')) self.assertIn(transfer.auth_key, res.content.decode('utf-8')) mock_transfer.assert_called_once_with(test.IsHttpRequest(), transfer.id) @test.create_mocks({ api.nova: ['server_list'], cinder: ['volume_backup_supported', 'volume_list_paged', 'volume_snapshot_list', 'tenant_absolute_limits', 'volume_get'], }) def test_create_backup_availability(self): limits = self.cinder_limits['absolute'] self.mock_volume_backup_supported.return_value = True self.mock_volume_list_paged.return_value = [self.cinder_volumes.list(), False, False] self.mock_volume_snapshot_list.return_value = [] self.mock_server_list.return_value = [self.servers.list(), False] self.mock_tenant_absolute_limits.return_value = limits res = self.client.get(INDEX_URL) table = res.context['volumes_table'] # Verify that the create backup action is present if and only if # the volume is available or in-use for vol in table.data: actions = [a.name for a in table.get_row_actions(vol)] self.assertEqual('backups' in actions, vol.status in ('available', 'in-use')) self.assertEqual(10, self.mock_volume_backup_supported.call_count) self.mock_volume_list_paged.assert_called_once_with( test.IsHttpRequest(), marker=None, sort_dir='desc', search_opts=None, paginate=True) self.mock_volume_snapshot_list.assert_called_once_with( test.IsHttpRequest(), search_opts=None) self.mock_server_list.assert_called_once_with(test.IsHttpRequest(), search_opts=None) self.assertEqual(13, self.mock_tenant_absolute_limits.call_count)
apache-2.0
5897069910118493000
44.455019
79
0.577588
false
4.073708
true
false
false
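The volume tests in the record above rely on stacked mock.patch.object decorators, where the decorator written closest to the test method supplies the first mock argument. A minimal, self-contained sketch of that ordering convention, using an invented stand-in Api class rather than the real Horizon/Cinder modules (illustrative only, not part of the original file):

from unittest import mock


class Api:
    @staticmethod
    def volume_get():
        return "real volume"

    @staticmethod
    def server_list():
        return ["real server"]


# Decorators apply bottom-up, so the patch nearest the function (volume_get)
# arrives as the first mock argument, mirroring the test signatures above.
@mock.patch.object(Api, "server_list")
@mock.patch.object(Api, "volume_get")
def check(mock_get, mock_server_list):
    Api.volume_get()
    Api.server_list()
    mock_get.assert_called_once_with()
    mock_server_list.assert_called_once_with()


check()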
efimlosev/corpcolo
noc-ps/add_server.py
1
2912
from json_p_n import sendRecieve import pexpect,argparse from sys import path import subprocess path.append('/home/efim/Dropbox') from ipcalc_flask import calculateSubnet as calc def Main(): parser = argparse.ArgumentParser() parser.add_argument('subnet', help='Give me a subnet', type=str) #optinal argument parser.add_argument('vlan', help='We need a vlan here', type=str) #the same parser.add_argument('desc', help='We need a description here', type=str) #the same parser.add_argument('hostname', nargs='?', help='We need a hostname here', type=str) #the same parser.add_argument('-i', help='We need an Ip here', type=str) #the same args = parser.parse_args() temp = addUrl(args.subnet,args.vlan,args.desc) temp1 = getAllinformationWeWantiToUpdate(temp,{'hostname': args.hostname},args.i) updateHost(temp1,args.vlan,args.desc) def addUrl(subnet,vlan,desc): tmp = calc(subnet) sub = str(tmp[0]) gw = str(tmp[1]) ip = str(tmp[2]).split(' - ')[0] nm = str(tmp[3]) servername, descrend = desc.split(' ') tmp = None tmp = sendRecieve('addSubnet',{'subnet': sub, 'gateway': gw, 'netmask': nm, 'vlan' : vlan, 'description': desc}) print tmp['result']['success'] ipSub = { 'ip':ip, 'subnet': sub, 'descrend' : descrend, 'servername' : servername } return ipSub def getAllinformationWeWantiToUpdate(ipsub,hostname,ip=None): ipsub.update(hostname) if ip != None: ipsub['ip'] = ip # print ipsub return ipsub def updateHost(whatWeWantToUpdate,vlan,desc ): hosts = sendRecieve("searchHosts", {'start': 0, 'limit': 100, 'query': whatWeWantToUpdate['servername'] })['result']['data'] exactHost = [ host for host in hosts if host['descr'].split('(')[0] == whatWeWantToUpdate['servername']] #print exactHost[0]['descr'] for k,v in exactHost[0].iteritems(): if k in whatWeWantToUpdate: exactHost[0][k] = whatWeWantToUpdate[k] exactHost[0]['descr'] = str(exactHost[0]['descr'].split(')')[0] + ')' + whatWeWantToUpdate['descrend']) print exactHost[0]['pool'] connection = sendRecieve("getConnectionsByHost", exactHost[0]['mac'])['result']['data'] switchName = connection[0]['devname'] switchPort = connection[0]['portdescr'].split(' ')[1].split('[')[1].split(']')[0] devices = sendRecieve("getDevices", 0, 1000)['result']['data'] switchIp = [device['ip'] for device in devices if device['name'] == switchName ][0] if exactHost[0]['pool'] != 16: print 'Something went wrong, exitting!' exit() print sendRecieve("getConnectionsByHost", exactHost[0]['mac']) print exactHost[0]['ip'] print sendRecieve("updateHost", exactHost[0]) subprocess.check_call(['/home/efim/Dropbox/sshs_rem.sh', switchIp, switchPort, vlan, desc]) if __name__ == '__main__': Main() #updateHost('710A6R22', {'descr': 'test'})
gpl-2.0
-1418926991163073500
40.6
127
0.650755
false
3.228381
false
false
false
Dylan-halls/Network-Exploitation-Toolkit
PacketBlocker/ARP_UDP.py
1
2931
import logging logging.getLogger("scapy.runtime").setLevel(logging.ERROR) from scapy.all import * import threading from termcolor import colored os.system("clear") print(""" ____ ___________ __________ | | \______ ______ ) | | /| | \| ___) | | / | ` \ | |______/ /_______ /____| \/ """) os.system('echo 0 > /proc/sys/net/ipv4/ip_forward') VIP = input("\nVictim: ") GW = input("Gateway: ") IFACE = input("Interface: ") str(GW) str(VIP) str(IFACE) def pkthandler(pkt): try: ip = pkt[IP] except IndexError: pass try: src = ip.src dst = ip.dst except UnboundLocalError: pass if pkt.haslayer(UDP): udp = pkt[UDP] print("--------------------------------------------------------\n\n") print(" .:{}:. ".format(colored('UDP','red'))) print(" ") print(" \033[1;36mSource IP:\033[00m {} \033[1;36mDestination IP:\033[00m {}".format(src, dst)) print(" \033[1;36mSource Port:\033[00m {} \033[1;36mDestination Port:\033[00m {}".format(udp.sport, udp.dport)) print(" \033[1;36mLength:\033[00m {} ".format(udp.len)) print(" \033[1;36mChecksum:\033[00m {} ".format(udp.chksum)) rawLoad = pkt.getlayer(Raw) if rawLoad == None: pass else: print(" \033[1;36mRaw:\n\n\033[00m {} ".format(rawLoad)) print(" ") print(" ") hexdump(pkt) def v_poison(): v = ARP(pdst=VIP, psrc=GW,) while True: try: send(v,verbose=0,inter=1,loop=1) except KeyboardInterupt: # Functions constructing and sending the ARP packets sys.exit(1) def gw_poison(): gw = ARP(pdst=GW, psrc=VIP) while True: try: send(gw,verbose=0,inter=1,loop=1) except KeyboardInterupt: sys.exit(1) def format_muti_lines(prefix, string, size=80): size -= len(prefix) if isinstance(string, bytes): string = ''.join(r'\x{:02x}'.format(byte) for byte in string) if size % 2: size -= 1 return '\n'.join([prefix + line for line in textwrap.wrap(string, size)]) vthread = [] gwthread = [] while True: # Threads vpoison = threading.Thread(target=v_poison) vpoison.setDaemon(True) vthread.append(vpoison) vpoison.start() gwpoison = threading.Thread(target=gw_poison) gwpoison.setDaemon(True) gwthread.append(gwpoison) gwpoison.start() try: pkt = sniff(iface=str(IFACE),filter='udp port 53',prn=pkthandler) except KeyboardInterrupt: os.system("{ cd ..; python3 net.py; }") exit(0) if __name__ == "__main__": UDP()
mit
-8585580230848259000
26.92381
118
0.493347
false
3.293258
false
false
false
rgayon/plaso
plaso/parsers/czip.py
1
2615
# -*- coding: utf-8 -*- """This file contains a parser for compound ZIP files.""" from __future__ import unicode_literals import struct import zipfile from plaso.lib import errors from plaso.parsers import interface from plaso.parsers import logger from plaso.parsers import manager class CompoundZIPParser(interface.FileObjectParser): """Shared functionality for parsing compound zip files. Compound zip files are zip files used as containers to create another file format, as opposed to archives of unrelated files. """ NAME = 'czip' DATA_FORMAT = 'Compound ZIP file' _plugin_classes = {} def ParseFileObject(self, parser_mediator, file_object): """Parses a compound ZIP file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): a file-like object. Raises: UnableToParseFile: when the file cannot be parsed. """ display_name = parser_mediator.GetDisplayName() if not zipfile.is_zipfile(file_object): raise errors.UnableToParseFile( '[{0:s}] unable to parse file: {1:s} with error: {2:s}'.format( self.NAME, display_name, 'Not a Zip file.')) try: zip_file = zipfile.ZipFile(file_object, 'r', allowZip64=True) self._ProcessZipFileWithPlugins(parser_mediator, zip_file) zip_file.close() # Some non-ZIP files return true for is_zipfile but will fail with a # negative seek (IOError) or another error. except (zipfile.BadZipfile, struct.error) as exception: raise errors.UnableToParseFile( '[{0:s}] unable to parse file: {1:s} with error: {2!s}'.format( self.NAME, display_name, exception)) def _ProcessZipFileWithPlugins(self, parser_mediator, zip_file): """Processes a zip file using all compound zip files. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. zip_file (zipfile.ZipFile): the zip file. It should not be closed in this method, but will be closed in ParseFileObject(). """ archive_members = zip_file.namelist() for plugin in self._plugins: try: plugin.UpdateChainAndProcess( parser_mediator, zip_file=zip_file, archive_members=archive_members) except errors.WrongCompoundZIPPlugin as exception: logger.debug('[{0:s}] wrong plugin: {1!s}'.format( self.NAME, exception)) manager.ParsersManager.RegisterParser(CompoundZIPParser)
apache-2.0
3504794705852098000
33.407895
80
0.690631
false
3.926426
false
false
false
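The compound-ZIP parser above screens files with zipfile.is_zipfile() and still wraps the actual open in a try/except, because some non-ZIP files pass the screen and later raise BadZipfile or struct.error. A small standalone sketch of that detection pattern using only the standard library (not plaso's parser/mediator API):

import io
import struct
import zipfile


def looks_like_zip(data):
    """Return True only if the payload both claims to be and opens as a ZIP."""
    file_object = io.BytesIO(data)
    if not zipfile.is_zipfile(file_object):
        return False
    try:
        with zipfile.ZipFile(file_object, 'r', allowZip64=True) as zip_file:
            zip_file.namelist()
    except (zipfile.BadZipfile, struct.error):
        # Some files satisfy is_zipfile() but still fail to parse.
        return False
    return True


if __name__ == '__main__':
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, 'w') as archive:
        archive.writestr('word/document.xml', '<w:document/>')
    print(looks_like_zip(buffer.getvalue()))    # True
    print(looks_like_zip(b'not a zip at all'))  # False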
DolphinDream/sverchok
nodes/generators_extended/spiral_mk2.py
1
22191
# ##### BEGIN GPL LICENSE BLOCK ##### # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # ##### END GPL LICENSE BLOCK ##### import bpy from bpy.props import IntProperty, FloatProperty, BoolProperty, EnumProperty from math import sin, cos, pi, sqrt, exp, atan, log import re from sverchok.node_tree import SverchCustomTreeNode from sverchok.data_structure import updateNode, match_long_repeat, get_edge_list from sverchok.utils.sv_easing_functions import * from sverchok.utils.sv_transform_helper import AngleUnits, SvAngleHelper PHI = (sqrt(5) + 1) / 2 # the golden ratio PHIPI = 2 * log(PHI) / pi # exponent for the Fibonacci (golden) spiral spiral_type_items = [ ("ARCHIMEDEAN", "Archimedean", "Generate an archimedean spiral.", 0), ("LOGARITHMIC", "Logarithmic", "Generate a logarithmic spiral.", 1), ("SPHERICAL", "Spherical", "Generate a spherical spiral.", 2), ("OVOIDAL", "Ovoidal", "Generate an ovoidal spiral.", 3), ("CORNU", "Cornu", "Generate a cornu spiral.", 4), ("EXO", "Exo", "Generate an exo spiral.", 5), ("SPIRANGLE", "Spirangle", "Generate a spirangle spiral.", 6) ] # name : [ preset index, type, eR, iR, exponent, turns, resolution, scale, height ] spiral_presets = { " ": (0, "", 0.0, 0.0, 0.0, 0, 0, 0.0, 0.0), # archimedean spirals "ARCHIMEDEAN": (10, "ARCHIMEDEAN", 1.0, 0.0, 1.0, 7, 100, 1.0, 0.0), "PARABOLIC": (11, "ARCHIMEDEAN", 1.0, 0.0, 2.0, 5, 100, 1.0, 0.0), "HYPERBOLIC": (12, "ARCHIMEDEAN", 1.0, 0.0, -1.0, 11, 100, 1.0, 0.0), "LITUUS": (13, "ARCHIMEDEAN", 1.0, 0.0, -2.0, 11, 100, 1.0, 0.0), # logarithmic spirals "FIBONACCI": (20, "LOGARITHMIC", 1.0, 0.5, PHIPI, 3, 100, 1.0, 0.0), # 3D spirals (mix type) "CONICAL": (30, "ARCHIMEDEAN", 1.0, 0.0, 1.0, 7, 100, 1.0, 3.0), "HELIX": (31, "LOGARITHMIC", 1.0, 0.0, 0.0, 7, 100, 1.0, 4.0), "SPHERICAL": (32, "SPHERICAL", 1.0, 0.0, 0.0, 11, 55, 1.0, 0.0), "OVOIDAL": (33, "OVOIDAL", 5.0, 1.0, 0.0, 7, 55, 1.0, 6.0), # spiral odities "CORNU": (40, "CORNU", 1.0, 1.0, 1.0, 5, 55, 1.0, 0.0), "EXO": (41, "EXO", 1.0, 0.1, PHI, 11, 101, 1.0, 0.0), # choppy spirals "SPIRANGLE SC": (50, "SPIRANGLE", 1.0, 0.0, 0.0, 8, 4, 1.0, 0.0), "SPIRANGLE HX": (51, "SPIRANGLE", 1.0, 0.0, 0.5, 7, 6, 1.0, 0.) } normalize_items = [ ("ER", "eR", "Normalize spiral to the external radius.", 0), ("IR", "iR", "Normalize spiral to the internal radius.", 1) ] def make_archimedean_spiral(settings): ''' eR : exterior radius (end radius) iR : interior radius (start radius) exponent : rate of growth (between iR and eR) turns : number of turns in the spiral N : curve resolution per turn scale : overall scale of the curve height : the height of the spiral along z phase : phase the spiral around its center flip : flip the spiral direction (default is CLOCKWISE) ''' eR, iR, exponent, turns, N, scale, height, phase, flip = settings sign = -1 if flip else 1 # flip direction ? 
max_phi = 2 * pi * turns * sign epsilon = 1e-5 if exponent < 0 else 0 # to avoid raising zero to negative power exponent = 1e-2 if exponent == 0 else exponent # to avoid division by zero dR = eR - iR # radius range : cached for performance ex = 1 / exponent # inverse exponent : cached for performance N = N * turns # total number of points in the spiral verts = [] norms = [] add_vert = verts.append add_norm = norms.append for n in range(N + 1): t = n / N # t : [0, 1] phi = max_phi * t + phase r = (iR + dR * (t + epsilon) ** ex) * scale # essentially: r = a * t ^ (1/b) x = r * cos(phi) y = r * sin(phi) z = height * t add_vert([x, y, z]) edges = get_edge_list(N) return verts, edges, norms def make_logarithmic_spiral(settings): ''' eR : exterior radius iR : interior radius exponent : rate of growth turns : number of turns in the spiral N : curve resolution per turn scale : overall scale of the curve height : the height of the spiral along z phase : phase the spiral around its center flip : flip the spiral direction (default is CLOCKWISE) ''' eR, iR, exponent, turns, N, scale, height, phase, flip = settings sign = -1 if flip else 1 # flip direction ? max_phi = 2 * pi * turns N = N * turns # total number of points in the spiral verts = [] norms = [] add_vert = verts.append add_norm = norms.append for n in range(N + 1): t = n / N # t : [0, 1] phi = max_phi * t r = eR * exp(exponent * phi) * scale # essentially: r = a * e ^ (b*t) pho = phi * sign + phase # final angle : cached for performance x = r * sin(pho) y = r * cos(pho) z = height * t add_vert([x, y, z]) edges = get_edge_list(N) return verts, edges, norms def make_spherical_spiral(settings): ''' This is the approximate sperical spiral that has a finite length, where the phi & theta angles sweep their ranges at constant rates. eR : exterior radius iR : interior radius (UNUSED) exponent : rate of growth (sigmoid in & out) turns : number of turns in the spiral N : the curve resolution of one turn scale : overall scale of the curve height : the height of the spiral along z (UNUSED) phase : phase the spiral around its center flip : flip the spiral direction (default is CLOCKWISE) ''' eR, iR, exponent, turns, N, scale, height, phase, flip = settings sign = -1 if flip else 1 # flip direction ? max_phi = 2 * pi * turns * sign N = N * turns # total number of points in the spiral es = prepareExponentialSettings(2, exponent + 1e-5) # used for easing verts = [] norms = [] add_vert = verts.append add_norm = norms.append for n in range(N + 1): t = n / N # t : [0, 1] phi = max_phi * t + phase a = ExponentialEaseInOut(t, es) # ease theta variation theta = -pi / 2 + pi * a RxCosTheta = (iR + eR * cos(theta)) * scale # cached for performance x = cos(phi) * RxCosTheta y = sin(phi) * RxCosTheta z = eR * sin(theta) add_vert([x, y, z]) edges = get_edge_list(N) return verts, edges, norms def make_ovoidal_spiral(settings): ''' eR : exterior radius (vertical cross section circles) iR : interior radius (horizontal cross section circle) exponent : rate of growth (sigmoid in & out) turns : number of turns in the spiral N : the curve resolution of one turn scale : overall scale of the curve height : the height of the spiral along z phase : phase the spiral around its center flip : flip the spiral direction (default is CLOCKWISE) ''' eR, iR, exponent, turns, N, scale, height, phase, flip = settings sign = -1 if flip else 1 # flip direction ? 
max_phi = 2 * pi * turns * sign # derive eR based on iR and height (the main parameters) # eR = [iR - (H/2)^2/iR]/2 ::: H = 2 * sqrt(2*iR*eR - iR*iR) eR = 0.5 * (iR + 0.25 * height * height / iR) eR2 = eR * eR # cached for performance dR = eR - iR # cached for performance N = N * turns # total number of points in the spiral es = prepareExponentialSettings(2, exponent + 1e-5) # used for easing verts = [] norms = [] add_vert = verts.append add_norm = norms.append for n in range(N + 1): t = n / N # t : [0, 1] phi = max_phi * t + phase a = ExponentialEaseInOut(t, es) # ease theta variation theta = -pi / 2 + pi * a h = 0.5 * height * sin(theta) # [-H/2, +H/2] r = sqrt(eR2 - h * h) - dR # [0 -> iR -> 0] x = r * cos(phi) * scale y = r * sin(phi) * scale z = h * scale add_vert([x, y, z]) edges = get_edge_list(N) return verts, edges, norms def make_cornu_spiral(settings): ''' L : length N : resolution S : scale M : x(t) = s * Integral(0,t) { cos(pi*u*u/2) du } y(t) = s * Integral(0,t) { sin(pi*u*u/2) du } TODO : refine the math (smoother curve, adaptive res, faster computation) ''' eR, iR, exponent, turns, N, scale, height, phase, flip = settings sign = -1 if flip else 1 # flip direction ? N = N * turns # total number of points in the spiral L = iR * turns # length S = eR * scale # overall scale es = prepareExponentialSettings(2, exponent + 1e-5) # used for easing verts1 = [] # pozitive spiral verts verts2 = [] # nagative spiral verts norms = [] add_vert1 = verts1.append add_vert2 = verts2.append add_norm = norms.append l1 = 0 x = 0 y = 0 for n in range(N + 1): t = n / N # t = [0,1] a = QuadraticEaseOut(t) # a = ExponentialEaseOut(t, es) l = L * a # l = [0, +L] r = x * x + y * y # print("r=", r) # M = 100 + int(300 * pow(r, exponent)) # integral steps M = 100 + int(100 * a) # integral steps l2 = l # integral from l1 to l2 u = l1 du = (l2 - l1) / M for m in range(M + 1): u = u + du # u = [l1, l2] phi = u * u * pi / 2 x = x + cos(phi) * du y = y + sin(phi) * du l1 = l2 # scale and flip xx = x * S yy = y * S * sign # rotate by phase amount px = xx * cos(phase) - yy * sin(phase) py = xx * sin(phase) + yy * cos(phase) pz = height * t add_vert1([px, py, pz]) # positive spiral verts add_vert2([-px, -py, -pz]) # netative spiral verts verts = verts2[::-1] + verts1 edges = get_edge_list(N) return verts, edges, norms def make_exo_spiral(settings): ''' This is an exponential in & out between two circles eR : exterior radius iR : interior radius exponent : rate of growth (SIGMOID : exponential in & out) turns : number of turns in the spiral N : the curve resolution of one turn scale : overall scale of the curve height : the height of the spiral along z phase : phase the spiral around its center flip : flip the spiral direction (default is CLOCKWISE) ''' eR, iR, exponent, turns, N, scale, height, phase, flip = settings sign = 1 if flip else -1 # flip direction ? 
max_phi = 2 * pi * turns * sign N = N * turns # total number of points in the spiral es = prepareExponentialSettings(11, exponent + 1e-5) # used for easing verts = [] norms = [] add_vert = verts.append add_norm = norms.append for n in range(N + 1): t = n / N # t : [0, 1] a = ExponentialEaseInOut(t, es) # ease radius variation (SIGMOID) r = (iR + (eR - iR) * a) * scale phi = max_phi * t + phase x = r * cos(phi) y = r * sin(phi) z = height * t add_vert([x, y, z]) edges = get_edge_list(N) return verts, edges, norms def make_spirangle_spiral(settings): ''' eR : exterior radius (end radius) iR : interior radius (start radius) exponent : rate of growth turns : number of turns in the spiral N : curve resolution per turn scale : overall scale of the curve height : the height of the spiral along z phase : phase the spiral around its center flip : flip the spiral direction (default is CLOCKWISE) ''' eR, iR, exponent, turns, N, scale, height, phase, flip = settings sign = -1 if flip else 1 # flip direction ? deltaA = 2 * pi / N * sign # angle increment deltaE = exponent / N # exponent increment deltaR = (eR + iR) # radius increment deltaZ = height / (N * turns) # z increment e = 0 r = iR phi = phase x, y, z = [0, 0, -deltaZ] N = N * turns # total number of points in the spiral verts = [] norms = [] add_vert = verts.append add_norm = norms.append for n in range(N + 1): x = x + r * cos(phi) * scale y = y + r * sin(phi) * scale z = z + deltaZ e = e + deltaE r = r + deltaR * exp(e) phi = phi + deltaA add_vert([x, y, z]) edges = get_edge_list(N) return verts, edges, norms def normalize_spiral(verts, normalize_eR, eR, iR, scale): ''' Normalize the spiral (XY) to either exterior or interior radius ''' if normalize_eR: # normalize to exterior radius (ending radius) psx = verts[-1][0] # x coordinate of the last point in the spiral psy = verts[-1][1] # y coordinate of the last point in the spiral r = sqrt(psx * psx + psy * psy) ss = eR / r * scale if eR != 0 else 1 else: # normalize to interior radius (starting radius) psx = verts[0][0] # x coordinate of the first point in the spiral psy = verts[0][1] # y coordinate of the first point in the spiral r = sqrt(psx * psx + psy * psy) ss = iR / r * scale if iR != 0 else 1 for n in range(len(verts)): verts[n][0] *= ss verts[n][1] *= ss return verts class SvSpiralNodeMK2(bpy.types.Node, SverchCustomTreeNode, SvAngleHelper): """ Triggers: Spiral Tooltip: Generate spiral curves """ bl_idname = 'SvSpiralNodeMK2' bl_label = 'Spiral' sv_icon = "SV_SPIRAL" def update_angles(self, context, au): ''' Update all the angles to preserve their values in the new units ''' self.phase = self.phase * au def update_spiral(self, context): if self.updating: return self.presets = " " updateNode(self, context) def preset_items(self, context): return [(k, k.title(), "", "", s[0]) for k, s in sorted(spiral_presets.items(), key=lambda k: k[1][0])] def update_presets(self, context): self.updating = True if self.presets == " ": self.updating = False return _, sT, eR, iR, e, t, N, s, h = spiral_presets[self.presets] self.sType = sT self.eRadius = eR self.iRadius = iR self.exponent = e self.turns = t self.resolution = N self.scale = s self.height = h self.phase = 0.0 self.arms = 1 self.flip = False self.separate = False self.updating = False updateNode(self, context) presets: EnumProperty( name="Presets", items=preset_items, update=update_presets) sType: EnumProperty( name="Type", items=spiral_type_items, default="ARCHIMEDEAN", update=update_spiral) normalize: EnumProperty( name="Normalize 
Radius", items=normalize_items, default="ER", update=update_spiral) iRadius: FloatProperty( name="Interior Radius", description="Interior radius", default=1.0, min=0.0, update=update_spiral) eRadius: FloatProperty( name="Exterior Radius", description="Exterior radius", default=2.0, min=0.0, update=update_spiral) turns: IntProperty( name="Turns", description="Number of turns", default=11, min=1, update=update_spiral) arms: IntProperty( name="Arms", description="Number of spiral arms", default=1, min=1, update=update_spiral) flip: BoolProperty( name="Flip Direction", description="Flip spiral direction", default=False, update=update_spiral) scale: FloatProperty( name="Scale", description="Scale spiral vertices", default=1.0, update=update_spiral) height: FloatProperty( name="Height", description="Height of the spiral along z", default=0.0, update=update_spiral) phase: FloatProperty( name="Phase", description="Phase amount around spiral center", default=0.0, update=SvAngleHelper.update_angle) exponent: FloatProperty( name="Exponent", description="Exponent attenuator", default=2.0, update=update_spiral) resolution: IntProperty( name="Turn Resolution", description="Number of vertices in one turn in the spiral", default=100, min=3, update=update_spiral) separate: BoolProperty( name="Separate arms", description="Separate the spiral arms", default=False, update=update_spiral) updating: BoolProperty(default=False) # used for disabling update callback def migrate_from(self, old_node): ''' Migration from old nodes ''' if old_node.bl_idname == "SvSpiralNode": self.sType = old_node.stype self.last_angle_units = AngleUnits.RADIANS self.angle_units = AngleUnits.RADIANS def sv_init(self, context): self.width = 170 self.inputs.new('SvStringsSocket', "R").prop_name = 'eRadius' self.inputs.new('SvStringsSocket', "r").prop_name = 'iRadius' self.inputs.new('SvStringsSocket', "e").prop_name = 'exponent' self.inputs.new('SvStringsSocket', "t").prop_name = 'turns' self.inputs.new('SvStringsSocket', "n").prop_name = 'resolution' self.inputs.new('SvStringsSocket', "s").prop_name = 'scale' self.inputs.new('SvStringsSocket', "h").prop_name = 'height' self.inputs.new('SvStringsSocket', "p").prop_name = 'phase' self.inputs.new('SvStringsSocket', "a").prop_name = 'arms' self.outputs.new('SvVerticesSocket', "Vertices") self.outputs.new('SvStringsSocket', "Edges") self.presets = "ARCHIMEDEAN" def draw_buttons(self, context, layout): layout.prop(self, 'presets') layout.prop(self, 'sType', text="") col = layout.column(align=True) if self.sType in ("LOGARITHMIC", "ARCHIMEDEAN", "SPIRANGLE"): row = col.row(align=True) row.prop(self, 'normalize', expand=True) row = col.row(align=True) row.prop(self, 'flip', text="Flip", toggle=True) row.prop(self, 'separate', text="Separate", toggle=True) def draw_buttons_ext(self, context, layout): self.draw_angle_units_buttons(context, layout) def process(self): outputs = self.outputs # return if no outputs are connected if not any(s.is_linked for s in outputs): return # input values lists (single or multi value) inputs = self.inputs input_R = inputs["R"].sv_get()[0] # list of exterior radii input_r = inputs["r"].sv_get()[0] # list of interior radii input_e = inputs["e"].sv_get()[0] # list of exponents input_t = inputs["t"].sv_get()[0] # list of turns input_n = inputs["n"].sv_get()[0] # list of curve resolutions input_s = inputs["s"].sv_get()[0] # list of scales input_h = inputs["h"].sv_get()[0] # list of heights (z) input_p = inputs["p"].sv_get()[0] # list of phases input_a = 
inputs["a"].sv_get()[0] # list of arms # sanitize the input input_R = list(map(lambda x: max(0.0, x), input_R)) input_r = list(map(lambda x: max(0.0, x), input_r)) input_t = list(map(lambda x: max(1, int(x)), input_t)) input_n = list(map(lambda x: max(3, int(x)), input_n)) input_a = list(map(lambda x: max(1, int(x)), input_a)) # extra parameters f = self.flip # flip direction parameters = match_long_repeat([input_R, input_r, input_e, input_t, input_n, input_s, input_h, input_p, input_a]) # conversion factor from the current angle units to radians au = self.radians_conversion_factor() make_spiral = eval("make_" + self.sType.lower() + "_spiral") verts_list = [] edges_list = [] for R, r, e, t, n, s, h, p, a in zip(*parameters): p = p * au arm_verts = [] arm_edges = [] for i in range(a): # generate each arm pa = p + 2 * pi / a * i settings = [R, r, e, t, n, s, h, pa, f] # spiral settings verts, edges, norms = make_spiral(settings) if self.sType in ("LOGARITHMIC", "ARCHIMEDEAN", "SPIRANGLE"): normalize_spiral(verts, self.normalize == "ER", R, r, s) if self.separate: arm_verts.append(verts) arm_edges.append(edges) else: # join the arms o = len(arm_verts) edges = [[i1 + o, i2 + o] for (i1, i2) in edges] arm_verts.extend(verts) arm_edges.extend(edges) verts_list.append(arm_verts) edges_list.append(arm_edges) self.outputs['Vertices'].sv_set(verts_list) self.outputs['Edges'].sv_set(edges_list) def register(): bpy.utils.register_class(SvSpiralNodeMK2) def unregister(): bpy.utils.unregister_class(SvSpiralNodeMK2)
gpl-3.0
-9002781532965027000
32.879389
111
0.567437
false
3.223094
false
false
false
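Stripped of the Sverchok node machinery, the Archimedean branch above reduces to r = radius * t**(1/exponent) swept over a number of turns. A simplified standalone version for experimenting with that formula (exterior radius only, positive exponents only; the node itself adds an epsilon, an interior radius, easing, arms and normalization on top of this):

from math import cos, sin, pi


def archimedean_spiral(radius=1.0, exponent=1.0, turns=7, resolution=100):
    """Vertices and edges for r = radius * t**(1/exponent), t in [0, 1]."""
    total = resolution * turns
    verts = []
    for n in range(total + 1):
        t = n / total
        phi = 2 * pi * turns * t
        r = radius * t ** (1.0 / exponent)
        verts.append((r * cos(phi), r * sin(phi), 0.0))
    edges = [(i, i + 1) for i in range(total)]
    return verts, edges


verts, edges = archimedean_spiral(turns=2, resolution=8)
print(len(verts), len(edges))  # 17 points (z=0), 16 connecting segments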
EBI-Metagenomics/emgapi
emgapi/migrations/0028_auto_20200706_1823.py
1
1069
# -*- coding: utf-8 -*- # Generated by Django 1.11.25 on 2020-06-16 12:02 from __future__ import unicode_literals from django.db import migrations def add_checksum_algorithms(apps, schema_editor): """ Add hash algorithms: - SHA1 - SHA256 - MD5 """ ChecksumAlgorithms = apps.get_model("emgapi", "ChecksumAlgorithm") for alg in ["SHA1", "SHA256", "MD5"]: ChecksumAlgorithms.objects.get_or_create(name=alg) def remove_checksum_algorithms(apps, schema_editor): """ Remove hash algorithms: - SHA1 - SHA256 - MD5 """ ChecksumAlgorithms = apps.get_model("emgapi", "ChecksumAlgorithm") for alg in ["SHA1", "SHA256", "MD5"]: try: ChecksumAlgorithms.objects.get(name=alg).delete() except ChecksumAlgorithms.DoesNotExist: pass class Migration(migrations.Migration): dependencies = [ ('emgapi', '0027_auto_20200706_1823'), ] operations = [ migrations.RunPython(add_checksum_algorithms, reverse_code=remove_checksum_algorithms) ]
apache-2.0
3405595739773955600
24.452381
94
0.637979
false
3.587248
false
false
false
itoijala/pyfeyner
examples/pyfeyn-test3.py
1
2416
#!/usr/bin/env python2 # # pyfeyner - a simple Python interface for making Feynman diagrams. # Copyright (C) 2005-2010 Andy Buckley, Georg von Hippel # Copyright (C) 2013 Ismo Toijala # # pyfeyner is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # pyfeyner is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with pyfeyner; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # ## A B-meson colour-suppressed penguin decay diagram # _ # in1 ------(_)------() out1a # \ ____() out1b # \(____ # () out2a # in2 ---------------() out2b # from pyfeyner.user import * import pyx fd = FeynDiagram() in1 = Point(1, 7) loop_in = Vertex(4, 7) loop_out = Vertex(7, 7) out1a = Point(11, 7) out1b = Point(11, 5) in2 = Point(1, 0) out2a = Point(11, 2) out2b = Point(11, 0) out1c = Vertex(out1b.x() - 2, out1b.y()) out1d = Vertex(out2a.x() - 2, out2a.y()) vtx = Vertex(out1c.midpoint(out1d).x() - 1.5, out1c.midpoint(out1d).y()) fd.add(Fermion(out2b, in2).addArrow().addLabel(r"\APdown")) fd.add(Fermion(in1, loop_in).addArrow().addLabel(r"\Pbottom")) fd.add(Fermion(loop_out, out1a).addArrow().addLabel(r"\Pstrange")) fd.add(Photon(loop_in, loop_out).bend(-1.5).addLabel(r"\PWplus")) f_loop, = fd.add(Fermion(loop_in, loop_out).bend(+1.5).addArrow() \ .addLabel(r"\Pup,\,\Pcharm,\,\Ptop")) fd.add(Photon(f_loop.fracpoint(0.6), vtx).addLabel(r"\Pphoton/\PZ", displace=0.5).bend(0.5)) fd.add(Fermion(out1b, out1c).addArrow(0.8).addLabel(r"\APup")) fd.add(Fermion(out1c, out1d).arcThru(vtx)) fd.add(Fermion(out1d, out2a).addArrow(0.2).addLabel(r"\Pup")) fd.add(Ellipse(x=1, y=3.5, xradius=1, yradius=3.5).setFillStyle(pyx.pattern.crosshatched(0.1, 45))) fd.add(Ellipse(x=11, y=6, xradius=0.6, yradius=1).setFillStyle(pyx.pattern.hatched135)) fd.add(Ellipse(x=11, y=1, xradius=0.6, yradius=1).setFillStyle(pyx.pattern.hatched135)) fd.draw("pyfeyn-test3.pdf")
gpl-2.0
8639295022185520000
36.75
99
0.678394
false
2.511435
false
false
false
johnloucaides/chipsec
chipsec/module_common.py
1
3593
#!/usr/bin/python #CHIPSEC: Platform Security Assessment Framework #Copyright (c) 2010-2015, Intel Corporation # #This program is free software; you can redistribute it and/or #modify it under the terms of the GNU General Public License #as published by the Free Software Foundation; Version 2. # #This program is distributed in the hope that it will be useful, #but WITHOUT ANY WARRANTY; without even the implied warranty of #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #GNU General Public License for more details. # #You should have received a copy of the GNU General Public License #along with this program; if not, write to the Free Software #Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # #Contact information: #[email protected] # # ------------------------------------------------------------------------------- # # CHIPSEC: Platform Hardware Security Assessment Framework # (c) 2010-2012 Intel Corporation # # ------------------------------------------------------------------------------- """ Common include file for modules """ import platform import string import sys import os from time import localtime, strftime import chipsec.logger import chipsec.chipset import chipsec.defines class ModuleResult: FAILED = 0 PASSED = 1 WARNING = 2 SKIPPED = 3 DEPRECATED = 4 INFORMATION = 5 ERROR = -1 ModuleResultName = { ModuleResult.FAILED: "Failed", ModuleResult.PASSED: "Passed", ModuleResult.WARNING: "Warning", ModuleResult.SKIPPED: "Skipped", ModuleResult.DEPRECATED: "Deprecated", ModuleResult.INFORMATION: "Information", ModuleResult.ERROR: "Error" } def getModuleResultName(res): return ModuleResultName[res] if res in ModuleResultName else ModuleResultName[ModuleResult.ERROR] class BaseModule(object): def __init__(self): self.cs = chipsec.chipset.cs() self.logger = chipsec.logger.logger() self.res = ModuleResult.PASSED def is_supported(self): """ This method should be overwritten by the module returning True or False depending whether or not this module is supported in the currently running platform. To access the currently running platform use >>> self.cs.get_chipset_id() """ return True def update_res(self, value): if self.res == ModuleResult.WARNING: if value == ModuleResult.FAILED \ or value == ModuleResult.ERROR: self.res = value elif self.res == ModuleResult.FAILED: if value == ModuleResult.ERROR: self.res = value elif self.res == ModuleResult.INFORMATION: self.res = value else: # PASSED or SKIPPED or DEPRECATED self.res = value def run(self, module_argv): raise NotImplementedError('sub class should overwrite the run() method') MTAG_BIOS = "BIOS" MTAG_SMM = "SMM" MTAG_SECUREBOOT = "SECUREBOOT" MTAG_HWCONFIG = "HWCONFIG" MTAG_CPU = "CPU" ##! [Available Tags] MTAG_METAS = { MTAG_BIOS: "System Firmware (BIOS/UEFI) Modules", MTAG_SMM: "System Management Mode (SMM) Modules", MTAG_SECUREBOOT: "Secure Boot Modules", MTAG_HWCONFIG: "Hardware Configuration Modules", MTAG_CPU: "CPU Modules", } ##! [Available Tags] MODULE_TAGS = dict( [(_tag, []) for _tag in MTAG_METAS]) # # Common module command line options # OPT_MODIFY = 'modify'
gpl-2.0
2,244,424,559,796,795,000
27.515873
101
0.629001
false
4.069083
false
false
false
mbollmann/perceptron
mmb_perceptron/feature_extractor/generator/generative_extractor.py
1
3342
# -*- coding: utf-8 -*- import numpy as np from .. import FeatureExtractor class GenerativeExtractor(FeatureExtractor): """Abstract base class for a generative feature extractor. Compared to simple feature extractors, generators perform the additional task of generating class label candidates. This means that they don't return a single feature vector, but a dictionary mapping candidate classes (for the classifier) to their respective feature vectors. In terms of the perceptron algorithm, they combine the GEN() and Phi() functions in a single object for ease of implementation. """ def _rebind_methods(self, status): super(GenerativeExtractor, self)._rebind_methods(status) if status: self.generate = self._generate_sequenced self.generate_vector = self._generate_vector_sequenced else: self.generate = self._generate_independent self.generate_vector = self._generate_vector_independent def _generate_independent(self, x, truth=None): """Return candidates and their feature representations. Should return a tuple (F, C), where F is a list of feature representations, and C is a list of class labels so that C[i] is the class label belonging to the feature representation F[i]. During training, the **first element in these lists** is considered by the perceptron to be the **correct class label** for this data point. If the parameter 'truth' is supplied, it indicates the gold-standard best candidate according to the training data; however, it is up to the generator function whether to include this value as the first element of the feature representations (thereby making the **gold standard** the correct class label for the perceptron learner) or generate the candidates independently and select an **oracle-best** class label from those. """ raise NotImplementedError("function not implemented") def _generate_sequenced(self, seq, pos, history=None, truth=None): raise NotImplementedError("function not implemented") def _generate_vector_independent(self, x, truth=None, grow=True): """Return candidates and their feature representations. Identical to _generate_independent(), except that F is now a matrix of numerical feature vectors. """ (features, labels) = self._generate_independent(x, truth=truth) if grow: for f in features: self._label_mapper.extend(f) vectors = np.array([self._label_mapper.map_to_vector(f) for f in features]) else: vectors = np.array([self._label_mapper.get_vector(f) for f in features]) return (vectors, labels) def _generate_vector_sequenced(self, seq, pos, history=None, truth=None, grow=True): (features, labels) = \ self._generate_sequenced(seq, pos, history=history, truth=truth) if grow: for f in features: self._label_mapper.extend(f) vectors = np.array([self._label_mapper.map_to_vector(f) for f in features]) else: vectors = np.array([self._label_mapper.get_vector(f) for f in features]) return (vectors, labels)
mit
-3,316,705,741,891,611,600
44.162162
88
0.666966
false
4.479893
false
false
false
openstack/cliff
cliff/help.py
1
4846
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import argparse import inspect import traceback from . import command class HelpExit(SystemExit): """Special exception type to trigger quick exit from the application We subclass from SystemExit to preserve API compatibility for anything that used to catch SystemExit, but use a different class so that cliff's Application can tell the difference between something trying to hard-exit and help saying it's done. """ class HelpAction(argparse.Action): """Provide a custom action so the -h and --help options to the main app will print a list of the commands. The commands are determined by checking the CommandManager instance, passed in as the "default" value for the action. """ def __call__(self, parser, namespace, values, option_string=None): app = self.default parser.print_help(app.stdout) app.stdout.write('\nCommands:\n') dists_by_module = command._get_distributions_by_modules() def dist_for_obj(obj): name = inspect.getmodule(obj).__name__.partition('.')[0] return dists_by_module.get(name) app_dist = dist_for_obj(app) command_manager = app.command_manager for name, ep in sorted(command_manager): try: factory = ep.load() except Exception: app.stdout.write('Could not load %r\n' % ep) if namespace.debug: traceback.print_exc(file=app.stdout) continue try: kwargs = {} if 'cmd_name' in inspect.getfullargspec(factory.__init__).args: kwargs['cmd_name'] = name cmd = factory(app, None, **kwargs) if cmd.deprecated: continue except Exception as err: app.stdout.write('Could not instantiate %r: %s\n' % (ep, err)) if namespace.debug: traceback.print_exc(file=app.stdout) continue one_liner = cmd.get_description().split('\n')[0] dist_name = dist_for_obj(factory) if dist_name and dist_name != app_dist: dist_info = ' (' + dist_name + ')' else: dist_info = '' app.stdout.write(' %-13s %s%s\n' % (name, one_liner, dist_info)) raise HelpExit() class HelpCommand(command.Command): """print detailed help for another command """ def get_parser(self, prog_name): parser = super(HelpCommand, self).get_parser(prog_name) parser.add_argument('cmd', nargs='*', help='name of the command', ) return parser def take_action(self, parsed_args): if parsed_args.cmd: try: the_cmd = self.app.command_manager.find_command( parsed_args.cmd, ) cmd_factory, cmd_name, search_args = the_cmd except ValueError: # Did not find an exact match cmd = parsed_args.cmd[0] fuzzy_matches = [k[0] for k in self.app.command_manager if k[0].startswith(cmd) ] if not fuzzy_matches: raise self.app.stdout.write('Command "%s" matches:\n' % cmd) for fm in sorted(fuzzy_matches): self.app.stdout.write(' %s\n' % fm) return self.app_args.cmd = search_args kwargs = {} if 'cmd_name' in inspect.getfullargspec(cmd_factory.__init__).args: kwargs['cmd_name'] = cmd_name cmd = cmd_factory(self.app, self.app_args, **kwargs) full_name = (cmd_name if self.app.interactive_mode else ' '.join([self.app.NAME, cmd_name]) ) cmd_parser = cmd.get_parser(full_name) cmd_parser.print_help(self.app.stdout) else: action = HelpAction(None, None, default=self.app) 
action(self.app.parser, self.app.options, None, None) return 0
apache-2.0
-4,748,475,090,079,885,000
37.768
79
0.554065
false
4.326786
false
false
false
ApproxEng/approxeng.input
src/python/approxeng/input/selectbinder.py
1
6065
from functools import reduce from select import select from threading import Thread import approxeng.input.sys as sys from approxeng.input.controllers import * EV_KEY = 1 EV_REL = 2 EV_ABS = 3 class ControllerResource: """ General resource which binds one or more controllers on entry and unbinds the event listening thread on exit. """ def __init__(self, *requirements, print_events=False, **kwargs): """ Create a new resource to bind and access one or more controllers. If no additional arguments are supplied this will find the first controller of any kind enabled by the library. Otherwise the requirements must be provided as a list of ControllerRequirement :param ControllerRequirement requirements: ControllerRequirement instances used, in order, to find and bind controllers. If empty this will be equivalent to supplying a single unfiltered requirement and will match the first specified controller. :param bool print_events: Defaults to False, if set to True then all events picked up by the binder will be printed to stdout. Use this when you're trying to figure out what events correspond to what axes and buttons! :param kwargs: Any addition keyword arguments are passed to the constructors for the controller classes. This is useful particularly to specify e.g. dead and hot zone ranges on discovery. :raises ControllerNotFoundError: If the requirement can't be satisfied, or no requirements are specified but there aren't any controllers. """ self.discoveries = find_matching_controllers(*requirements, **kwargs) self.unbind = None self.print_events = print_events def __enter__(self): """ Called on entering the resource block, returns the controller passed into the constructor. """ self.unbind = bind_controllers(*self.discoveries, print_events=self.print_events) if len(self.discoveries) == 1: return self.discoveries[0].controller else: return tuple(discovery.controller for discovery in self.discoveries) def __exit__(self, exc_type, exc_value, traceback): """ Called on resource exit, unbinds the controller, removing the listening thread. """ self.unbind() def bind_controllers(*discoveries, print_events=False): """ Bind a controller or controllers to a set of evdev InputDevice instances, starting a thread to keep those controllers in sync with the state of the hardware. 
:param ControllerDiscovery discoveries: ControllerDiscovery instances specifying the controllers and their associated input devices :param bool print_events: Defaults to False, if set to True then all events picked up by this binder will be printed to stdout :return: A function which can be used to stop the event reading thread and unbind from the device """ discoveries = list(discoveries) class SelectThread(Thread): def __init__(self): Thread.__init__(self, name='evdev select thread') self.daemon = True self.running = True self.device_to_controller_discovery = {} for discovery in discoveries: for d in discovery.devices: self.device_to_controller_discovery[d.fn] = discovery self.all_devices = reduce(lambda x, y: x + y, [discovery.devices for discovery in discoveries]) def run(self): for discovery in discoveries: discovery.controller.device_unique_name = discovery.name while self.running: try: r, w, x = select(self.all_devices, [], [], 0.5) for fd in r: active_device = fd controller_discovery = self.device_to_controller_discovery[active_device.fn] controller = controller_discovery.controller controller_devices = controller_discovery.devices prefix = None if controller.node_mappings is not None and len(controller_devices) > 1: try: prefix = controller.node_mappings[active_device.name] except KeyError: pass for event in active_device.read(): if print_events: print(event) if event.type == EV_ABS or event.type == EV_REL: controller.axes.axis_updated(event, prefix=prefix) elif event.type == EV_KEY: # Button event if event.value == 1: # Button down controller.buttons.button_pressed(event.code, prefix=prefix) elif event.value == 0: # Button up controller.buttons.button_released(event.code, prefix=prefix) except Exception as e: self.stop(e) def stop(self, exception=None): for discovery in discoveries: discovery.controller.device_unique_name = None discovery.controller.exception = exception self.running = False polling_thread = SelectThread() # Force an update of the LED and battery system cache sys.scan_cache(force_update=True) for device in polling_thread.all_devices: device.grab() def unbind(): polling_thread.stop() for dev in polling_thread.all_devices: try: dev.ungrab() except IOError: pass polling_thread.start() return unbind
apache-2.0
-2,654,246,842,321,006,600
40.541096
118
0.591096
false
5.131134
false
false
false
akshmakov/Dolfin-Fijee-Fork
test/unit/book/python/chapter_1_files/stationary/poisson/d2_p2D.py
1
1457
""" FEniCS tutorial demo program: Poisson equation with Dirichlet conditions. As d1_p2D.py, but chosing linear solver and preconditioner is demonstrated. -Laplace(u) = f on the unit square. u = u0 on the boundary. u0 = u = 1 + x^2 + 2y^2, f = -6. """ from dolfin import * # Create mesh and define function space mesh = UnitSquareMesh(60, 40) V = FunctionSpace(mesh, 'Lagrange', 1) # Define boundary conditions u0 = Expression('1 + x[0]*x[0] + 2*x[1]*x[1]') def u0_boundary(x, on_boundary): return on_boundary bc = DirichletBC(V, u0, u0_boundary) # Define variational problem u = TrialFunction(V) v = TestFunction(V) f = Constant(-6.0) a = inner(nabla_grad(u), nabla_grad(v))*dx L = f*v*dx # Compute solution u = Function(V) info(parameters, True) prm = parameters['krylov_solver'] # short form prm['absolute_tolerance'] = 1E-5 prm['relative_tolerance'] = 1E-3 prm['maximum_iterations'] = 1000 #prm['preconditioner']['ilu']['fill_level'] = 0 print parameters['linear_algebra_backend'] #set_log_level(PROGRESS) set_log_level(DEBUG) solve(a == L, u, bc, solver_parameters={'linear_solver': 'cg', 'preconditioner': 'ilu'}) # Alternative syntax solve(a == L, u, bc, solver_parameters=dict(linear_solver='cg', preconditioner='ilu')) # Plot solution and mesh #plot(u) #plot(mesh) # Dump solution to file in VTK format file = File('poisson.pvd') file << u # Hold plot interactive()
gpl-3.0
-481,046,481,958,258,050
22.885246
75
0.663693
false
2.840156
false
false
false
bruno-briner/plugin.video.brplay
resources/lib/hlsproxy/decrypter.py
1
16632
#!/usr/bin/env python # -*- coding: utf-8 -*- """Simple AES cipher implementation in pure Python following PEP-272 API Based on: https://bitbucket.org/intgr/pyaes/ to compatible with PEP-8. The goal of this module is to be as fast as reasonable in Python while still being Pythonic and readable/understandable. It is licensed under the permissive MIT license. Hopefully the code is readable and commented enough that it can serve as an introduction to the AES cipher for Python coders. In fact, it should go along well with the Stick Figure Guide to AES: http://www.moserware.com/2009/09/stick-figure-guide-to-advanced.html Contrary to intuition, this implementation numbers the 4x4 matrices from top to bottom for efficiency reasons:: 0 4 8 12 1 5 9 13 2 6 10 14 3 7 11 15 Effectively it's the transposition of what you'd expect. This actually makes the code simpler -- except the ShiftRows step, but hopefully the explanation there clears it up. """ #### # Copyright (c) 2010 Marti Raudsepp <[email protected]> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. #### from array import array # Globals mandated by PEP 272: # http://www.python.org/dev/peps/pep-0272/ MODE_ECB = 1 MODE_CBC = 2 #MODE_CTR = 6 block_size = 16 # variable length key: 16, 24 or 32 bytes key_size = None class AESDecrypter(): MODE_CBC=2 def new(self, key, mode, IV=None): if mode == MODE_ECB: return ECBMode(AES(key)) elif mode == MODE_CBC: if IV is None: raise ValueError("CBC mode needs an IV value!") return CBCMode(AES(key), IV) else: raise NotImplementedError #### AES cipher implementation class AES(object): block_size = 16 def __init__(self, key): self.setkey(key) def setkey(self, key): """Sets the key and performs key expansion.""" self.key = key self.key_size = len(key) if self.key_size == 16: self.rounds = 10 elif self.key_size == 24: self.rounds = 12 elif self.key_size == 32: self.rounds = 14 else: raise ValueError("Key length must be 16, 24 or 32 bytes") self.expand_key() def expand_key(self): """Performs AES key expansion on self.key and stores in self.exkey""" # The key schedule specifies how parts of the key are fed into the # cipher's round functions. "Key expansion" means performing this # schedule in advance. Almost all implementations do this. 
# # Here's a description of AES key schedule: # http://en.wikipedia.org/wiki/Rijndael_key_schedule # The expanded key starts with the actual key itself exkey = array('B', self.key) # extra key expansion steps if self.key_size == 16: extra_cnt = 0 elif self.key_size == 24: extra_cnt = 2 else: extra_cnt = 3 # 4-byte temporary variable for key expansion word = exkey[-4:] # Each expansion cycle uses 'i' once for Rcon table lookup for i in xrange(1, 11): #### key schedule core: # left-rotate by 1 byte word = word[1:4] + word[0:1] # apply S-box to all bytes for j in xrange(4): word[j] = aes_sbox[word[j]] # apply the Rcon table to the leftmost byte word[0] ^= aes_Rcon[i] #### end key schedule core for z in xrange(4): for j in xrange(4): # mix in bytes from the last subkey word[j] ^= exkey[-self.key_size + j] exkey.extend(word) # Last key expansion cycle always finishes here if len(exkey) >= (self.rounds + 1) * self.block_size: break # Special substitution step for 256-bit key if self.key_size == 32: for j in xrange(4): # mix in bytes from the last subkey XORed with S-box of # current word bytes word[j] = aes_sbox[word[j]] ^ exkey[-self.key_size + j] exkey.extend(word) # Twice for 192-bit key, thrice for 256-bit key for z in xrange(extra_cnt): for j in xrange(4): # mix in bytes from the last subkey word[j] ^= exkey[-self.key_size + j] exkey.extend(word) self.exkey = exkey def add_round_key(self, block, round): """AddRoundKey step. This is where the key is mixed into plaintext""" offset = round * 16 exkey = self.exkey for i in xrange(16): block[i] ^= exkey[offset + i] #print 'AddRoundKey:', block def sub_bytes(self, block, sbox): """ SubBytes step, apply S-box to all bytes Depending on whether encrypting or decrypting, a different sbox array is passed in. """ for i in xrange(16): block[i] = sbox[block[i]] #print 'SubBytes :', block def shift_rows(self, b): """ ShiftRows step in AES. Shifts 2nd row to left by 1, 3rd row by 2, 4th row by 3 Since we're performing this on a transposed matrix, cells are numbered from top to bottom first:: 0 4 8 12 -> 0 4 8 12 -- 1st row doesn't change 1 5 9 13 -> 5 9 13 1 -- row shifted to left by 1 (wraps around) 2 6 10 14 -> 10 14 2 6 -- shifted by 2 3 7 11 15 -> 15 3 7 11 -- shifted by 3 """ b[1], b[5], b[9], b[13] = b[5], b[9], b[13], b[1] b[2], b[6], b[10], b[14] = b[10], b[14], b[2], b[6] b[3], b[7], b[11], b[15] = b[15], b[3], b[7], b[11] #print 'ShiftRows :', b def shift_rows_inv(self, b): """ Similar to shift_rows above, but performed in inverse for decryption. """ b[5], b[9], b[13], b[1] = b[1], b[5], b[9], b[13] b[10], b[14], b[2], b[6] = b[2], b[6], b[10], b[14] b[15], b[3], b[7], b[11] = b[3], b[7], b[11], b[15] #print 'ShiftRows :', b def mix_columns(self, block): """MixColumns step. Mixes the values in each column""" # Cache global multiplication tables (see below) mul_by_2 = gf_mul_by_2 mul_by_3 = gf_mul_by_3 # Since we're dealing with a transposed matrix, columns are already # sequential for col in xrange(0, 16, 4): v0, v1, v2, v3 = block[col:col + 4] block[col] = mul_by_2[v0] ^ v3 ^ v2 ^ mul_by_3[v1] block[col + 1] = mul_by_2[v1] ^ v0 ^ v3 ^ mul_by_3[v2] block[col + 2] = mul_by_2[v2] ^ v1 ^ v0 ^ mul_by_3[v3] block[col + 3] = mul_by_2[v3] ^ v2 ^ v1 ^ mul_by_3[v0] #print 'MixColumns :', block def mix_columns_inv(self, block): """ Similar to mix_columns above, but performed in inverse for decryption. 
""" # Cache global multiplication tables (see below) mul_9 = gf_mul_by_9 mul_11 = gf_mul_by_11 mul_13 = gf_mul_by_13 mul_14 = gf_mul_by_14 # Since we're dealing with a transposed matrix, columns are already # sequential for col in xrange(0, 16, 4): v0, v1, v2, v3 = block[col:col + 4] block[col] = mul_14[v0] ^ mul_9[v3] ^ mul_13[v2] ^ mul_11[v1] block[col + 1] = mul_14[v1] ^ mul_9[v0] ^ mul_13[v3] ^ mul_11[v2] block[col + 2] = mul_14[v2] ^ mul_9[v1] ^ mul_13[v0] ^ mul_11[v3] block[col + 3] = mul_14[v3] ^ mul_9[v2] ^ mul_13[v1] ^ mul_11[v0] #print 'MixColumns :', block def encrypt_block(self, block): """Encrypts a single block. This is the main AES function""" # For efficiency reasons, the state between steps is transmitted via a # mutable array, not returned self.add_round_key(block, 0) for round in xrange(1, self.rounds): self.sub_bytes(block, aes_sbox) self.shift_rows(block) self.mix_columns(block) self.add_round_key(block, round) self.sub_bytes(block, aes_sbox) self.shift_rows(block) # no mix_columns step in the last round self.add_round_key(block, self.rounds) def decrypt_block(self, block): """Decrypts a single block. This is the main AES decryption function""" # For efficiency reasons, the state between steps is transmitted via a # mutable array, not returned self.add_round_key(block, self.rounds) # count rounds down from (self.rounds) ... 1 for round in xrange(self.rounds - 1, 0, -1): self.shift_rows_inv(block) self.sub_bytes(block, aes_inv_sbox) self.add_round_key(block, round) self.mix_columns_inv(block) self.shift_rows_inv(block) self.sub_bytes(block, aes_inv_sbox) self.add_round_key(block, 0) # no mix_columns step in the last round #### ECB mode implementation class ECBMode(object): """Electronic CodeBook (ECB) mode encryption. Basically this mode applies the cipher function to each block individually; no feedback is done. NB! This is insecure for almost all purposes """ def __init__(self, cipher): self.cipher = cipher self.block_size = cipher.block_size def ecb(self, data, block_func): """Perform ECB mode with the given function""" if len(data) % self.block_size != 0: raise ValueError("Input length must be multiple of 16") block_size = self.block_size data = array('B', data) for offset in xrange(0, len(data), block_size): block = data[offset:offset + block_size] block_func(block) data[offset:offset + block_size] = block return data.tostring() def encrypt(self, data): """Encrypt data in ECB mode""" return self.ecb(data, self.cipher.encrypt_block) def decrypt(self, data): """Decrypt data in ECB mode""" return self.ecb(data, self.cipher.decrypt_block) #### CBC mode class CBCMode(object): """ Cipher Block Chaining(CBC) mode encryption. This mode avoids content leaks. In CBC encryption, each plaintext block is XORed with the ciphertext block preceding it; decryption is simply the inverse. 
""" # A better explanation of CBC can be found here: # http://en.wikipedia.org/wiki/Block_cipher_modes_of_operation#- # Cipher-block_chaining_.28CBC.29 def __init__(self, cipher, IV): self.cipher = cipher self.block_size = cipher.block_size self.IV = array('B', IV) def encrypt(self, data): """Encrypt data in CBC mode""" block_size = self.block_size if len(data) % block_size != 0: raise ValueError("Plaintext length must be multiple of 16") data = array('B', data) IV = self.IV for offset in xrange(0, len(data), block_size): block = data[offset:offset + block_size] # Perform CBC chaining for i in xrange(block_size): block[i] ^= IV[i] self.cipher.encrypt_block(block) data[offset:offset + block_size] = block IV = block self.IV = IV return data.tostring() def decrypt(self, data): """Decrypt data in CBC mode""" block_size = self.block_size if len(data) % block_size != 0: raise ValueError("Ciphertext length must be multiple of 16") data = array('B', data) IV = self.IV for offset in xrange(0, len(data), block_size): ctext = data[offset:offset + block_size] block = ctext[:] self.cipher.decrypt_block(block) # Perform CBC chaining #for i in xrange(block_size): # data[offset + i] ^= IV[i] for i in xrange(block_size): block[i] ^= IV[i] data[offset:offset + block_size] = block IV = ctext #data[offset : offset+block_size] = block self.IV = IV return data.tostring() def galois_multiply(a, b): """Galois Field multiplicaiton for AES""" p = 0 while b: if b & 1: p ^= a a <<= 1 if a & 0x100: a ^= 0x1b b >>= 1 return p & 0xff # Precompute the multiplication tables for encryption gf_mul_by_2 = array('B', [galois_multiply(x, 2) for x in range(256)]) gf_mul_by_3 = array('B', [galois_multiply(x, 3) for x in range(256)]) # ... for decryption gf_mul_by_9 = array('B', [galois_multiply(x, 9) for x in range(256)]) gf_mul_by_11 = array('B', [galois_multiply(x, 11) for x in range(256)]) gf_mul_by_13 = array('B', [galois_multiply(x, 13) for x in range(256)]) gf_mul_by_14 = array('B', [galois_multiply(x, 14) for x in range(256)]) #### # The S-box is a 256-element array, that maps a single byte value to another # byte value. Since it's designed to be reversible, each value occurs only once # in the S-box # # More information: http://en.wikipedia.org/wiki/Rijndael_S-box aes_sbox = array( 'B', '637c777bf26b6fc53001672bfed7ab76' 'ca82c97dfa5947f0add4a2af9ca472c0' 'b7fd9326363ff7cc34a5e5f171d83115' '04c723c31896059a071280e2eb27b275' '09832c1a1b6e5aa0523bd6b329e32f84' '53d100ed20fcb15b6acbbe394a4c58cf' 'd0efaafb434d338545f9027f503c9fa8' '51a3408f929d38f5bcb6da2110fff3d2' 'cd0c13ec5f974417c4a77e3d645d1973' '60814fdc222a908846eeb814de5e0bdb' 'e0323a0a4906245cc2d3ac629195e479' 'e7c8376d8dd54ea96c56f4ea657aae08' 'ba78252e1ca6b4c6e8dd741f4bbd8b8a' '703eb5664803f60e613557b986c11d9e' 'e1f8981169d98e949b1e87e9ce5528df' '8ca1890dbfe6426841992d0fb054bb16'.decode('hex') ) # This is the inverse of the above. 
In other words: # aes_inv_sbox[aes_sbox[val]] == val aes_inv_sbox = array( 'B', '52096ad53036a538bf40a39e81f3d7fb' '7ce339829b2fff87348e4344c4dee9cb' '547b9432a6c2233dee4c950b42fac34e' '082ea16628d924b2765ba2496d8bd125' '72f8f66486689816d4a45ccc5d65b692' '6c704850fdedb9da5e154657a78d9d84' '90d8ab008cbcd30af7e45805b8b34506' 'd02c1e8fca3f0f02c1afbd0301138a6b' '3a9111414f67dcea97f2cfcef0b4e673' '96ac7422e7ad3585e2f937e81c75df6e' '47f11a711d29c5896fb7620eaa18be1b' 'fc563e4bc6d279209adbc0fe78cd5af4' '1fdda8338807c731b11210592780ec5f' '60517fa919b54a0d2de57a9f93c99cef' 'a0e03b4dae2af5b0c8ebbb3c83539961' '172b047eba77d626e169146355210c7d'.decode('hex') ) # The Rcon table is used in AES's key schedule (key expansion) # It's a pre-computed table of exponentation of 2 in AES's finite field # # More information: http://en.wikipedia.org/wiki/Rijndael_key_schedule aes_Rcon = array( 'B', '8d01020408102040801b366cd8ab4d9a' '2f5ebc63c697356ad4b37dfaefc59139' '72e4d3bd61c29f254a943366cc831d3a' '74e8cb8d01020408102040801b366cd8' 'ab4d9a2f5ebc63c697356ad4b37dfaef' 'c5913972e4d3bd61c29f254a943366cc' '831d3a74e8cb8d01020408102040801b' '366cd8ab4d9a2f5ebc63c697356ad4b3' '7dfaefc5913972e4d3bd61c29f254a94' '3366cc831d3a74e8cb8d010204081020' '40801b366cd8ab4d9a2f5ebc63c69735' '6ad4b37dfaefc5913972e4d3bd61c29f' '254a943366cc831d3a74e8cb8d010204' '08102040801b366cd8ab4d9a2f5ebc63' 'c697356ad4b37dfaefc5913972e4d3bd' '61c29f254a943366cc831d3a74e8cb'.decode('hex') )
gpl-3.0
-5,658,260,328,799,054,000
31.6778
79
0.61045
false
3.159574
false
false
false
SaTa999/pyPanair
pyPanair/postprocess/agps_converter.py
1
10619
#!/usr/bin/env python import numpy as np import os def read_column(file, firstline): """read a column from first line (e.g. n01c001) to *eof""" column = list() line = firstline # register methods for faster evaluation f_readline = file.readline column_append = column.append # read each line until *eof while line: line = f_readline().split() if line[0] == "*eof": break column_append(line) return column def read_network(file, header): """read a network""" network_n = int(header[0][1:3]) # get network number from the first header (e.g. 01 from n01c001) # print("loading network no.", network_n) network = list() line = header # register methods for faster evaluation network_append = network.append # read each line until next header while line: col = read_column(file, line) network_append(col) line = file.readline().split() # break at the end of agps file if not line: break # break when reaching the header for the next network (e.g. n02c001) if not int(line[0][1:3]) == network_n: break network = np.array(network, dtype=float) return network, line def read_agps(inputfile="agps"): # read the agps file and return a list of arrays containing data for each network with open(inputfile, "r") as f: # skip the header of the agps file for _ in range(6): f.readline() line = f.readline().split() f.readline() # skip the header of first network ("icol, x, y, z, cp1, cp2, cp3, cp4") dat = [] while line: net, line = read_network(f, line) dat.append(net) return dat def write_vtk(n_wake=0, outputname="agps", inputfile="agps"): """Write agps in the legacy paraview format (vtk) All networks will be merged into one block Therefore, user are advised to omit 'wakes' by specifying the 'n_wakes'""" data = read_agps(inputfile) # read agps file & specify the number of networks to omit print("n_wake = ", n_wake) # write the header of the vtk file vtk = "# vtk DataFile Version 2.0\n" vtk += "scalar\n" vtk += "ASCII\n" vtk += "DATASET UNSTRUCTURED_GRID\n" n_points = 0 # number of points in vtk file n_cells = 0 # number of quadrilateral cells formed by the points n_cp = data[0].shape[2] - 4 points = str() # coordinate of each point (x, y, z) point_data = [str()] * n_cp # cp at each point (cp1, cp2, cp3, cp4) cells = str() # ids of each quadrilateral cell (e.g. 
(0, n_col, n_col + 1, 1) for first cell) for i in range(len(data) - n_wake): net = data[i] n_row = int(net.shape[0]) n_col = int(net.shape[1]) print("network {} shape: ".format(i + 1), net.shape) base_square = np.array((0, n_col, n_col + 1, 1)) for j in range(n_row): for k in range(n_col): point = net[j, k] # add coordinate of a point points += "{0} {1} {2}\n".format(point[1], point[2], point[3]) # add cp data of a point for l in range(n_cp): point_data[l] += "{}\n".format(point[4 + l]) # add ids of a cell if not j == n_row - 1 and not k == n_col - 1: square = base_square + (j * n_col + k) + n_points square = (str(p) for p in square) cells += "4 " + " ".join(square) + "\n" # add the number of points / cells n_points += n_row * n_col n_cells += (n_row - 1) * (n_col - 1) # write the header of each block (POINTS, CELLS, CELLTYPES, POINT_DATA) points = "POINTS {} float\n".format(n_points) + points cells = "CELLS {0} {1}\n".format(n_cells, n_cells * 5) + cells cell_types = "CELL_TYPES {}\n".format(n_cells) + "9\n" * n_cells vtk += points + cells + cell_types + "POINT_DATA {}\n".format(n_points) for l in range(n_cp): vtk += "SCALARS cp{} float\nLOOKUP_TABLE default\n".format(l + 1) + point_data[l] with open("{}.vtk".format(outputname), "w") as f: f.write(vtk) def write_vtm(n_wake=0, outputname="agps", inputfile="agps"): """convert agps networks to paraview unstructured grid each network will become a different vtu file to open all vtu files at the same time, open the vtm file with paraview""" data = read_agps(inputfile) # read agps file & specify the number of networks to omit print("n_wake = ", n_wake) # write header of vtm file vtm = "<?xml version=\"1.0\"?>\n" vtm += "<VTKFile type=\"vtkMultiBlockDataSet\" version=\"1.0\" byte_order=\"LittleEndian\">\n" vtm += " <vtkMultiBlockDataSet>\n" for i in range(len(data) - n_wake): # add dataset to vtm file vtu_dir = "{}_vtu".format(outputname) try: os.mkdir(vtu_dir) except OSError: if not os.path.exists(vtu_dir): raise vtu_path = "{0}/{1}{2}.vtu".format(vtu_dir, outputname, i + 1) vtm += " <DataSet index=\"network{0}\" file=\"{1}\"/>\n".format(i + 1, vtu_path) # write header of vtu file vtu = "<?xml version=\"1.0\"?>\n" vtu += "<VTKFile type=\"UnstructuredGrid\" version=\"1.0\" byte_order=\"LittleEndian\">\n" vtu += " <UnstructuredGrid>\n" # write the header of the piece net = data[i] n_cp = net.shape[2] - 4 n_row = int(net.shape[0]) n_col = int(net.shape[1]) print("network {} shape: ".format(i), net.shape) n_points = n_row * n_col n_cells = (n_row - 1) * (n_col - 1) vtu += " <Piece NumberOfPoints=\"{}\" NumberOfCells=\"{}\">\n".format(n_points, n_cells) # format the agps data points = str() # coordinate of each point (x, y, z) cells = str() # ids of each quadrilateral cell (e.g. 
(0, n_col, n_col + 1, 1) for first cell) base_square = np.array((0, n_col, n_col + 1, 1), dtype=int) for j in range(n_row): for k in range(n_col): point = net[j, k] # add coordinate of a point points += "{0} {1} {2}\n".format(point[1], point[2], point[3]) # add ids of a cell if not j == n_row - 1 and not k == n_col - 1: square = base_square + (j * n_col + k) square = (str(p) for p in square) cells += " ".join(square) + "\n" # add formatted agps data to vtu vtu += " <PointData Scalars=\"scalars\">\n" # add point_data to vtu for l in range(n_cp): vtu += " <DataArray type=\"Float32\" Name=\"cp{}\" format=\"ascii\">\n".format(l + 1) vtu += " ".join(str(cp) for cp in net[:, :, 4 + l].ravel()) + "\n" vtu += " </DataArray>\n" vtu += " </PointData>\n" # add points to vtu vtu += " <Points>\n" vtu += " <DataArray type=\"Float32\" Name=\"network{}\" NumberOfComponents=\"3\" " \ "format=\"ascii\">\n".format(i + 1) vtu += points vtu += " </DataArray>\n" vtu += " </Points>\n" # add cells to vtu vtu += " <Cells>\n" vtu += " <DataArray type=\"Int32\" Name=\"connectivity\" format=\"ascii\">\n" vtu += cells vtu += " </DataArray>\n" vtu += " <DataArray type=\"Int32\" Name=\"offsets\" format=\"ascii\">\n" vtu += " ".join(str(4 * (icell + 1)) for icell in range(n_cells)) + "\n" vtu += " </DataArray>\n" vtu += " <DataArray type=\"Int32\" Name=\"types\" format=\"ascii\">\n" vtu += " ".join(str(9) for _ in range(n_cells)) + "\n" vtu += " </DataArray>\n" vtu += " </Cells>\n" vtu += " </Piece>\n" vtu += " </UnstructuredGrid>\n</VTKFile>\n" with open(vtu_path, "w") as f: f.write(vtu) vtm += " </vtkMultiBlockDataSet>\n</VTKFile>" with open("{}.vtm".format(outputname), "w") as f: f.write(vtm) def write_tec(n_wake=0, outputname="agps", inputfile="agps"): """convert agps networks to tecplot finite element quadrilaterals""" data = read_agps(inputfile) # read agps file & specify the number of networks to omit print("n_wake = ", n_wake) # write header n_headers = data[0].shape[2] # number of headers (e.g. 8 for "irow, x, y, z, cp1, cp2, cp3, cp4") n_cp = n_headers - 4 # number of different cps in agps file tec = "TITLE = \"AGPS 3D Finite Element Data\"\n" tec += "VARIABLES = \"x\", \"y\", \"z\"" for i in range(n_cp): tec += ", \"cp{}\"".format(i + 1) tec += "\n" # write each network as a block for i in range(len(data) - n_wake): # write the header of the block net = data[i] n_row = int(net.shape[0]) n_col = int(net.shape[1]) print("network {} shape: ".format(i + 1), net.shape) n_points = n_row * n_col n_elements = (n_row - 1) * (n_col - 1) tec += "ZONE T=\"MIXED\", N={}, E={}, DATAPACKING=BLOCK," \ " ZONETYPE=FEQUADRILATERAL\n".format(n_points, n_elements) # write coordinates (x, y, z) and cps (cp1, cp2, cp3, cp4) in each row for l in range(1, n_headers): element = net[:, :, l] tec += " ".join(map(str, element.ravel())) + "\n" # write the ids of each quadrilateral (e.g. (0, n_col, n_col + 1, 1) for first quadrilateral) base_square = np.array((0, n_col, n_col + 1, 1)) + 1 # quads = str() # for j in range(n_row-1): # for k in range(n_col-1): # square = base_square + (j * n_col + k) # square = (str(p) for p in square) # quads += " ".join(square) + "\n" # same as the above code, but faster evaluation quads = "\n".join("\n".join((" ".join((str(p) for p in (base_square + j * n_col + k)))) for k in range(n_col - 1)) for j in range(n_row - 1)) tec += quads with open("{}.dat".format(outputname), "w") as f: f.write(tec)
mit
7,905,887,637,318,669,000
41.880165
104
0.512289
false
3.272419
false
false
false
jarod-w/ocsetup
ocsetup/plugins/storage_tab.py
1
2968
#!/usr/bin/python # storage_tab.py - Copyright (C) 2012 CloudTimes, Inc. # Written by Jarod.W <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; version 2 of the License. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. A copy of the GNU General Public License is # also available at http://www.gnu.org/copyleft/gpl.html. import traceback from ovirtnode.ovirtfunctions import log from ovirtnode.iscsi import get_current_iscsi_initiator_name, \ set_iscsi_initiator from ocsetup.wrapper_ovirtfunctions import PluginBase from ocsetup.ocsetup_ui_widgets import ButtonList from ocsetup.ocsetup_ui import WidgetBase, _ from ocsetup.datautil import refresh_window class Plugin(PluginBase): """ Plugin for license information of IVNH. """ def __init__(self): PluginBase.__init__(self, "Storage") self.iscsi_initiator_label = None self.iscsi_initiator_name_value = None self.iscsi_button = None def storage_apply(self, obj): from ocsetup.ocsetup import ocs log("enter storage apply") set_iscsi_initiator( ocs.page_Storage.iscsi_initiator_name_value_Entry.get_text()) def storage_reset(self, obj): log("enter storage reset") refresh_window(obj) def form(self): log("enter storage form function....") try: self.iscsi_initiator_label = WidgetBase( "iscsi_initiator_label", "Label", "iSCSI Initiator Name:", title=True) self.iscsi_initiator_name_value = WidgetBase( "iscsi_initiator_name_value", "Entry", "", "", get_conf=get_current_iscsi_initiator_name) self.iscsi_button = WidgetBase( 'iscsi_button', ButtonList, '', params={'labels': [_('Apply'), _('Reset')], 'callback': [self.storage_apply, self.storage_reset]}) except: log("Here some error happened.format ext: %s " % traceback.format_exc()) return [ "Storage", "Storage", [ (self.iscsi_initiator_label, self.iscsi_initiator_name_value), (WidgetBase('__', 'Label', vhelp=140),), (self.iscsi_button,), ]] def action(self): pass def get_plugin(): p = Plugin() return p.form()
gpl-2.0
-7,529,631,513,802,328,000
33.114943
78
0.627695
false
4.082531
false
false
false
cqychen/quants
quants/loaddata/skyeye_ods_invest_refer_sh_margins_detail.py
1
2670
#coding=utf8 import tushare as ts; import pymysql; import time as dt from datashape.coretypes import string from pandas.io.sql import SQLDatabase import sqlalchemy import datetime from sqlalchemy import create_engine from pandas.io import sql import threading import pandas as pd; import sys sys.path.append('../') #添加配置文件 from common_function import * def create_table(table_name): cmd=''' create table if not exists %s ( opDate VARCHAR (63) comment '信用交易日期' ,stockCode varchar (63) comment '股票代码' ,securityAbbr varchar (63) comment '标的证券简称' ,rzye BIGINT comment '本日融资余额(元)' ,rzmre BIGINT comment '本日融资买入额(元)' ,rzche BIGINT comment '本日融资偿还额(元)' ,rqyl BIGINT comment '本日融券余量' ,rqmcl BIGINT comment '本日融券卖出量' ,rqchl BIGINT comment '本日融券偿还量' ,PRIMARY KEY(stockCode,`opDate`) ,index(stockCode) )DEFAULT CHARSET=utf8 '''%table_name print (cmd) run_mysql_cmd(cmd,conn) def load_data_stock(stock_code): ''' :param stock_code:传递股票代码,将其装载进入mysql :return: ''' start_date = get_date_add_days(get_max_date_sh_margins_detail(stock_code), 1) #获取股票最大日期 rs = ts.sh_margin_details(start=start_date, end=end_date, symbol=stock_code)#获取数据 pd.DataFrame.to_sql(rs, table_name, con=conn, flavor='mysql', if_exists='append', index=False) def load_data(): stock_code = get_stock_info().index total_num = len(stock_code); tempnum = 1; for tmp_stock_code in stock_code: tempnum = tempnum + 1 print(tempnum,tmp_stock_code) load_data_stock(tmp_stock_code) if __name__ == '__main__': #--------------------设置基本信息--------------------------------- print("--------------加载股票日k线-----------------------------") startTime=dt.time() iphost,user,passwd=get_mysql_conn() db='ods_data' charset='utf8' table_name='ods_invest_refer_sh_margins_detail' conn = pymysql.connect(user=user, passwd=passwd,host=iphost, db=db,charset=charset) end_date= dt.strftime('%Y-%m-%d',dt.localtime(dt.time())) #--------------------脚本运行开始-------------------------------- create_table(table_name=table_name) load_data() endTime=dt.time() print("---------------脚本运行完毕,共计耗费时间%sS------------------"%(endTime-startTime))
epl-1.0
-4,613,820,654,172,870,000
34.217391
98
0.577778
false
2.825581
false
false
false
pfjel7/housing-insights
python/housinginsights/sources/cama.py
1
8713
# Script is deprecated, as of September 18, 2017. # zoneUnitCount now calculated with LoadData's _get_residential_units() # from pprint import pprint import os import sys import requests from collections import OrderedDict import csv import datetime PYTHON_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) sys.path.append(PYTHON_PATH) from housinginsights.sources.base import BaseApiConn from housinginsights.tools.logger import HILogger logger = HILogger(name=__file__, logfile="sources.log") class MarApiConn_2(BaseApiConn): """ API Interface to the Master Address Record (MAR) database. Use public methods to retrieve data. """ BASEURL = 'http://citizenatlas.dc.gov/newwebservices/locationverifier.asmx' def __init__(self, baseurl=None,proxies=None,database_choice=None, debug=False): super().__init__(MarApiConn_2.BASEURL) def get_data(self, square, lot, suffix): """ Get information on a location based on a simple query string. :param square: SSL first part :type location: String. :param lot: SSL second part :type location: String. :param output_type: Output type specified by user. :type output_type: String. :param output_file: Output file specified by user. :type output_file: String :returns: Json output from the api. :rtype: String """ params = { 'f': 'json', 'Square': square, 'Lot': lot, 'Suffix': suffix } result = self.get('/findAddFromSSL2', params=params) if result.status_code != 200: err = "An error occurred during request: status {0}" logger.exception(err.format(result.status_code)) raise mar_data = result.json() if mar_data['returnDataset'] == {}: mar_returns = {'Warning': 'No MAR data availble - property under construction - see AYB year'} else: entry = mar_data['returnDataset']['Table1'][0] mar_returns = {'anc': entry['ANC'], 'census_tract': entry['CENSUS_TRACT'], 'neighborhood_cluster': entry['CLUSTER_'], 'ward': entry['WARD'], 'zip': entry['ZIPCODE'] } return mar_returns class CamaApiConn(BaseApiConn): """ API Interface to the Computer Assisted Mass Appraisal - Residential (CAMA) API, to obtain SSL numbers to use as input for the MarApiConn_2 and get the corresponding housing and bedroom units. """ BASEURL = 'https://opendata.arcgis.com/datasets' def __init__(self): super().__init__(CamaApiConn.BASEURL) def get_data(self): """ Grabs data from CAMA. Individual CAMA property retrieves zone_type data from MAR api. Count number of housing units and bedroom units per zone. Return the count data (in dictionary form) to be processed into csv by get_csv() method. """ logger.info("Starting CAMA") mar_api = MarApiConn_2() result = self.get(urlpath='/c5fb3fbe4c694a59a6eef7bf5f8bc49a_25.geojson', params=None) if result.status_code != 200: err = "An error occurred during request: status {0}" raise Exception(err.format(result.status_code)) cama_data = result.json() logger.info(" Got cama_data. Length:{}".format(len(cama_data['features']))) """ Example of: anc_count = [OrderedDict([('zone_type', 'anc'), ('zone', 'ANC 2B'), ('housing_unit_count', 10), ('bedroom_unit_count', 10)], etc)] """ zone_types = ['anc', 'census_tract', 'neighborhood_cluster', 'ward', 'zip'] anc_count = [] census_count = [] cluster_count = [] ward_count = [] zipcode_count = [] """ Take each CAMA property data and retrieve the MAR data. """ """ Certain square values have four digits + a letter. (ex. 8888E) Square would be the first four digits and suffix would be the letter. SSL sometimes comes as 8 digit string without spacing in the middle. 
""" """ CAMA data includes bldgs under construction. CAMA's data includes AYB of 2018 as of June 2017. We eliminate all data points that are under construction and don't provide any housing units and bedrm at this time. """ for index, row in enumerate(cama_data['features']): if (index % 1000 == 0): print(" currently at index {}".format(index)) try: current_year = int(datetime.date.today().strftime('%Y')) #Skipping none values for units under construction if row['properties']['AYB'] is not None and int(row['properties']['AYB']) > current_year: continue objectid = row['properties']['OBJECTID'] if len(row['properties']['SSL']) == 8: square = row['properties']['SSL'][:4] lot = row['properties']['SSL'][4:] else: square, lot = row['properties']['SSL'].split() suffix = ' ' if len(square) > 4: square = square[:4] suffix = square[-1] mar_return = mar_api.get_data(square, lot, suffix) ''' Count the housing units and bedrooms ''' num_units = 0 if row['properties']['NUM_UNITS']: num_units = row['properties']['NUM_UNITS'] if num_units == 0: num_units = 1 bedrm = row['properties']['BEDRM'] if bedrm == 0: bedrm = 1 if bedrm == None: bedrm = 0 for zone in zone_types: if zone == 'anc': zone_count = anc_count elif zone == 'census_tract': zone_count = census_count elif zone == 'neighborhood_cluster': zone_count = cluster_count elif zone == 'ward': zone_count = ward_count elif zone == 'zip': zone_count = zipcode_count if 'Warning' not in mar_return.keys(): flag = False for dictionary in zone_count: #dictionary is {'zone_type': 'ANC', 'zone': 'ANC 8A', etc.} if dictionary['zone'] == mar_return[zone]: #mar_return[ANC] is 'ANC 8A' dictionary['housing_unit_count'] += num_units dictionary['bedroom_unit_count'] += bedrm flag = True break if not flag: zone_count.append( OrderedDict([('zone_type', zone), ('zone', mar_return[zone]), ('housing_unit_count', num_units), ('bedroom_unit_count', bedrm)]) ) except Exception as e: exc_type, exc_obj, exc_tb = sys.exc_info() print(exc_type, "line", exc_tb.tb_lineno) print("Error! SSL: ", row['properties']['SSL'], row['properties']['AYB']) continue return {'anc': anc_count, 'census_tract': census_count, 'neighborhood_cluster': cluster_count, 'ward': ward_count, 'zip': zipcode_count} def get_csv(self): """ Takes the returned dictionary from get_data() and convert the information into csv file and then save the csv file in housing-insights/data/processed/zoneUnitCount as zoneUnitCount_2017-05-30.csv. """ if not os.path.exists('../../../data/processed/zoneUnitCount'): os.makedirs('../../../data/processed/zoneUnitCount') data_processed_zoneUnitCount = os.path.join(PYTHON_PATH, os.pardir, 'data', 'processed', 'zoneUnitCount') zone_data = self.get_data() toCSV = [] date = datetime.date.today().strftime('%Y-%m-%d') filename = os.path.join(data_processed_zoneUnitCount, 'zoneUnitCount_'+date+'.csv') for key, value in zone_data.items(): toCSV.extend(value) keys = toCSV[0].keys() with open(filename, 'w') as output_file: dict_writer = csv.DictWriter(output_file, keys) dict_writer.writeheader() dict_writer.writerows(toCSV) if __name__ == '__main__': # Pushes everything from the logger to the command line output as well. my_api = CamaApiConn() csvfile = my_api.get_csv()
mit
-8,037,650,862,094,938,000
38.425339
177
0.553655
false
4.050674
false
false
false
pmalczuk/python_scripts
disk.py
1
7360
#!/usr/bin/python2 import os, sys import getopt, datetime import platform #automatyczne przeliczenie wartosci od Bajtow w gore def GetHumanReadable(size,precision=2): suffixes=['B','KB','MB','GB','TB'] suffixIndex = 0 while size > 1024 and suffixIndex < 3: suffixIndex += 1 #increment the index of the suffix size = size/1024.0 #apply the division return "%.*f%s"%(precision,size,suffixes[suffixIndex]) #sprawdzenie punktu montowania def getmount(path): path = os.path.realpath(os.path.abspath(path)) if path == '/boot/efi': return path while path != os.path.sep: if os.path.ismount(path): return path path = os.path.abspath(os.path.join(path, os.pardir)) return path #parametry uzycia dyskow zwracane w formie dictionary def disk_usage(path): st = os.statvfs(path) free = st.f_bavail * st.f_frsize total = st.f_blocks * st.f_frsize used = (st.f_blocks - st.f_bfree) * st.f_frsize used_percent=float(used)/total*100 itotal = st.f_files ifree = st.f_ffree iused = st.f_files - st.f_ffree try: iused_percent=float(iused)/itotal*100 except: iused_percent=1 return {path: {'total': total,'used': used,'free': free,'used_percent': used_percent, 'itotal': itotal,'ifree': ifree,'iused_percent': iused_percent, 'iused': iused}} #usuwanie duplikatow w liscie def make_unique(original_list): unique_list = [] [unique_list.append(obj) for obj in original_list if obj not in unique_list] return unique_list def usage(): print """python check_disk.py -p [--partition=] -w [--warning=] -c [--critical] -C [--icritical] -W [--iwarning] -m [--megabytes] --gwarning --gcritical Przyklad: ./check_disk.py -p / -w 10 -c 20 -p /boot -w 11 -c 21 -p /tmp -w 11 -c 22 -p /opt -p /var -p /var/log -p /var/log/audit -W 10 -C 5 Trzeba pamietac zeby progi dawac od razu przy danym fs od lewej do prawej tzn. 
./check_disk.py -p / -w 10 -c 20 -p /boot -w 11 -c 21 -p /tmp -W 10 -C 5 --gwarning 10 --gcritical 20 >>>> to jest poprawne ./check_disk.py -p / -w 10 -c 20 -p /boot -p /tmp -W 10 -C 5 --gwarning 10 --gcritical 20 >>> to jest nie poprawne """ sys.exit() def main(): partitions_args=[] warnings_args=[] criticals_args=[] mega=0 try: opts, args = getopt.getopt(sys.argv[1:], 'hw:c:p:W:C:m', ['help','warning=','critical=','partition=','iwarning=','icritical=','megabytes','gwarning=','gcritical=']) except getopt.GetoptError: usage() sys.exit(2) for opt, arg in opts: if opt == '-h': usage() elif opt in ("-w", "--warning"): warnings_args.append(int(arg)) elif opt in ("-c", "--critical"): criticals_args.append(int(arg)) elif opt in ("-p", "--partition"): partitions_args.append(arg) elif opt in ("-W", "--iwarning"): iwarning=int(arg) elif opt in ("-C", "--icritical"): icritical=int(arg) elif opt in ("-m", '--megabytes'): mega=1 elif opt in ('--gwarning'): gwarn=int(arg) elif opt in ('--gcritical'): gcrit=int(arg) else: usage() part_result=[] new_warning=[] new_critical=[] part_not_distinct=[] for partition in partitions_args: part_not_distinct.append(getmount(partition)) # sprawdzenie punktow montowania podanych partycji part_distinct=make_unique(part_not_distinct) #usuwanie duplikatow punktow montowania for mountpoint in part_distinct: part_result.append(disk_usage(mountpoint)) #sprawdzenie zajetosci per istniejacy punkt montowania #print warnings_args[partitions_args.index(mountpoint)] try: new_warning.append(warnings_args[part_distinct.index(mountpoint)]) # ustalenie progow warning new_critical.append(criticals_args[part_distinct.index(mountpoint)]) # ustalenie progow critical except IndexError: new_warning.append(gwarn) # ustalenie progow global warning new_critical.append(gcrit) # ustalenie progow global critical perfdata="" outputOK="" outputWARN="" outputCRIT="" outputINODE="" i=0 crit=0 warn=0 try: gwarn,gcrit except NameError as e: pass #wyswietlanie i wyliczanie if mega == 0: #procent used for element in part_result: for tag,value in element.items(): p_used=value['used_percent'] if p_used > float(new_critical[i]): outputCRIT+=tag+"="+GetHumanReadable(value['free'])+" " crit=1 elif p_used > float(new_warning[i]): outputWARN+=tag+"="+GetHumanReadable(value['free'])+" " warn=1 else: outputOK+=tag+"="+GetHumanReadable(value['free'])+" " #sprawdzenie inode used if value['iused_percent'] > float(icritical): outputINODE+=tag+" InodeCRIT "+format(value['iused_percent'],'.2f')+" " crit=1 elif value['iused_percent'] > float(iwarning): outputINODE+=tag+" InodeWARN "+format(value['iused_percent'],'.2f')+" " warn=1 warning=float(new_warning[i])/100*value['total']/1024 critical=float(new_critical[i])/100*value['total']/1024 perfdata+=tag+"="+str(value['used']/1024)+"KB;"+format(warning,'.0f')+";"+format(critical,'.0f')+";0;"+str(value['total']/1024)+"; " #output+=tag+"="+GetHumanReadable(value['used'])+" " i+=1 elif mega == 1: #megabajty used for element in part_result: for tag,value in element.items(): used=value['used']/1024/1024 if used < new_critical[i]: outputCRIT+=tag+"="+GetHumanReadable(value['free'])+" " crit=1 elif used < new_warning[i]: outputWARN+=tag+"="+GetHumanReadable(value['free'])+" " warn=1 else: outputOK+=tag+"="+GetHumanReadable(value['free'])+" " #sprawdzenie inode used if value['iused_percent'] > float(icritical): outputINODE+=tag+" InodeCRIT "+format(value['iused_percent'],'.2f')+" " crit=1 elif value['iused_percent'] > float(iwarning): outputINODE+=tag+" InodeWARN 
"+format(value['iused_percent'],'.2f')+" " warn=1 perfdata+=tag+"="+str(value['used']/1024)+"KB;"+str(new_warning[i]*1024)+";"+str(new_critical[i]*1024)+";0;"+str(value['total']/1024)+"; " #output+=tag+"="+GetHumanReadable(value['used'])+" " i+=1 if crit==1: print "DISK CRITICAL Free Space "+outputCRIT+" "+outputINODE+"| "+perfdata sys.exit(2) elif warn==1: print "DISK WARNING Free Space "+outputWARN+" "+outputINODE+"| "+perfdata sys.exit(1) else: print "DISK OK Free Space "+outputOK+"| "+perfdata sys.exit(0) if __name__ == '__main__': main()
gpl-3.0
-220,261,891,489,609,950
37.736842
170
0.56019
false
3.44086
false
false
false
tokatikato/OIPA
OIPA/api/v3/resources/activity_view_resources.py
1
12434
# Tastypie specific from tastypie import fields from tastypie.constants import ALL, ALL_WITH_RELATIONS from tastypie.resources import ModelResource # Data specific from api.cache import NoTransformCache from iati.models import ContactInfo, Activity, Organisation, AidType, FlowType, Sector, CollaborationType, \ TiedStatus, Transaction, ActivityStatus, Currency, OrganisationRole, ActivityScope, \ ActivityParticipatingOrganisation, Location, Result from api.v3.resources.helper_resources import TitleResource, DescriptionResource, FinanceTypeResource, \ ActivityBudgetResource, DocumentResource, WebsiteResource, PolicyMarkerResource, OtherIdentifierResource from api.v3.resources.advanced_resources import OnlyCountryResource, OnlyRegionResource # cache specific from django.http import HttpResponse from cache.validator import Validator from api.v3.resources.csv_serializer import CsvSerializer from api.api_tools import comma_separated_parameter_to_list from api.paginator import NoCountPaginator class ActivityViewAidTypeResource(ModelResource): class Meta: queryset = AidType.objects.all() include_resource_uri = False excludes = ['description'] class ActivityViewFlowTypeResource(ModelResource): class Meta: queryset = FlowType.objects.all() include_resource_uri = False excludes = ['description'] class ActivityViewSectorResource(ModelResource): class Meta: queryset = Sector.objects.all() include_resource_uri = False excludes = ['description'] class ActivityViewCollaborationTypeResource(ModelResource): class Meta: queryset = CollaborationType.objects.all() include_resource_uri = False excludes = ['description', 'language'] class ActivityViewTiedStatusResource(ModelResource): class Meta: queryset = TiedStatus.objects.all() include_resource_uri = False excludes = ['description'] class ActivityViewOrganisationRoleResource(ModelResource): class Meta: queryset = OrganisationRole.objects.all() include_resource_uri = False class ActivityViewOrganisationResource(ModelResource): organisation_role = fields.ForeignKey(ActivityViewOrganisationRoleResource, 'organisation_role', full=True, null=True) class Meta: queryset = Organisation.objects.all() include_resource_uri = False excludes = ['abbreviation', 'reported_by_organisation'] filtering = { 'iati_identifier': 'exact', 'code': ALL_WITH_RELATIONS } class ActivityViewTransactionResource(ModelResource): provider_organisation = fields.ForeignKey(ActivityViewOrganisationResource, 'provider_organisation', full=True, null=True) receiver_organisation = fields.ForeignKey(ActivityViewOrganisationResource, 'receiver_organisation', full=True, null=True) class Meta: queryset = Transaction.objects.all() include_resource_uri = False excludes = ['id', 'ref', 'description', 'provider_activity'] allowed_methods = ['get'] def dehydrate(self, bundle): bundle.data['disbursement_channel'] = bundle.obj.disbursement_channel_id bundle.data['currency'] = bundle.obj.currency_id bundle.data['tied_status'] = bundle.obj.tied_status_id bundle.data['transaction_type'] = bundle.obj.transaction_type_id return bundle class ActivityViewParticipatingOrganisationResource(ModelResource): organisation = fields.ToOneField(ActivityViewOrganisationResource, 'organisation', full=True, null=True) class Meta: queryset = ActivityParticipatingOrganisation.objects.all() include_resource_uri = False excludes = ['id'] filtering = { 'organisation': ALL_WITH_RELATIONS } def dehydrate(self, bundle): bundle.data['role_id'] = bundle.obj.role_id bundle.data['code'] = bundle.obj.organisation_id return bundle class 
ActivityViewActivityStatusResource(ModelResource): class Meta: queryset = ActivityStatus.objects.all() include_resource_uri = False excludes = ['language'] class ActivityViewActivityScopeResource(ModelResource): class Meta: queryset = ActivityScope.objects.all() include_resource_uri = False class ActivityViewCurrencyResource(ModelResource): class Meta: queryset = Currency.objects.all() include_resource_uri = False excludes = ['language'] class ActivityViewContactInfoResource(ModelResource): class Meta: queryset = ContactInfo.objects.all() include_resource_uri = False excludes = ['id'] class ActivityLocationResource(ModelResource): class Meta: queryset = Location.objects.all() include_resource_uri = False excludes = ['id', 'activity_description', 'adm_code', 'adm_country_adm1', 'adm_country_adm2', 'adm_country_name', 'adm_level', 'gazetteer_entry', 'location_id_code', 'point_srs_name', 'ref', 'type_description', 'point_pos'] class ActivityResultResource(ModelResource): class Meta: queryset = Result.objects.all() include_resource_uri = False excludes = ['id'] class ActivityResource(ModelResource): countries = fields.ToManyField(OnlyCountryResource, 'recipient_country', full=True, null=True, use_in='all') regions = fields.ToManyField(OnlyRegionResource, 'recipient_region', full=True, null=True, use_in='all') sectors = fields.ToManyField(ActivityViewSectorResource, 'sector', full=True, null=True, use_in='all') titles = fields.ToManyField(TitleResource, 'title_set', full=True, null=True, use_in='all') descriptions = fields.ToManyField(DescriptionResource, 'description_set', full=True, null=True, use_in='all') participating_organisations = fields.ToManyField(ActivityViewOrganisationResource, 'participating_organisation', full=True, null=True, use_in='all') reporting_organisation = fields.ForeignKey(ActivityViewOrganisationResource, 'reporting_organisation', full=True, null=True, use_in='detail' ) activity_status = fields.ForeignKey(ActivityViewActivityStatusResource, 'activity_status', full=True, null=True, use_in='detail') websites = fields.ToManyField(WebsiteResource, 'activity_website_set', full=True, null=True, use_in='detail') policy_markers = fields.ToManyField(PolicyMarkerResource, 'policy_marker', full=True, null=True, use_in='detail') collaboration_type = fields.ForeignKey(ActivityViewCollaborationTypeResource, attribute='collaboration_type', full=True, null=True, use_in='detail') default_flow_type = fields.ForeignKey(ActivityViewFlowTypeResource, attribute='default_flow_type', full=True, null=True, use_in='detail') default_finance_type = fields.ForeignKey(FinanceTypeResource, attribute='default_finance_type', full=True, null=True, use_in='detail') default_aid_type = fields.ForeignKey(ActivityViewAidTypeResource, attribute='default_aid_type', full=True, null=True, use_in='detail') default_tied_status = fields.ForeignKey(ActivityViewTiedStatusResource, attribute='default_tied_status', full=True, null=True, use_in='detail') activity_scope = fields.ForeignKey(ActivityViewActivityScopeResource, attribute='scope', full=True, null=True, use_in='detail') default_currency = fields.ForeignKey(ActivityViewCurrencyResource, attribute='default_currency', full=True, null=True, use_in='detail') budget = fields.ToManyField(ActivityBudgetResource, 'budget_set', full=True, null=True, use_in='detail') transactions = fields.ToManyField(ActivityViewTransactionResource, 'transaction_set', full=True, null=True, use_in='detail') documents = fields.ToManyField(DocumentResource, 'documentlink_set', full=True, 
null=True, use_in='detail') other_identifier = fields.ToManyField(OtherIdentifierResource, 'otheridentifier_set', full=True, null=True, use_in='detail') locations = fields.ToManyField(ActivityLocationResource, 'location_set', full=True, null=True, use_in='all') results = fields.ToManyField(ActivityResultResource, 'result_set', full=True, null=True, use_in='detail') # to add: # conditions # contact # country-budget? # crsadd # disbursement channel? # ffs # ffs forecast? # planned disbursement # related activity # verification status # vocabulary? class Meta: queryset = Activity.objects.all() resource_name = 'activities' max_limit = 1000 serializer = CsvSerializer() excludes = ['date_created'] ordering = ['start_actual', 'start_planned', 'end_actual', 'end_planned', 'sectors', 'total_budget'] filtering = { 'iati_identifier': 'exact', 'start_planned': ALL, 'start_actual': ALL, 'end_planned': ALL, 'end_actual': ALL, 'total_budget': ALL, 'sectors': ('exact', 'in'), 'regions': ('exact', 'in'), 'countries': ('exact', 'in'), 'reporting_organisation': ('exact', 'in'), 'documents': ALL_WITH_RELATIONS } cache = NoTransformCache() paginator_class = NoCountPaginator def apply_filters(self, request, applicable_filters): activity_list = super(ActivityResource, self).apply_filters(request, applicable_filters).prefetch_related('title_set').prefetch_related('description_set') query = request.GET.get('query', None) filter_year_param = request.GET.get('start_year_planned__in', None) if query: search_fields = comma_separated_parameter_to_list(request.GET.get('search_fields', None)) activity_list = activity_list.search(query, search_fields) if filter_year_param: years = comma_separated_parameter_to_list(filter_year_param) activity_list = activity_list.filter_years(years) return activity_list.distinct_if_necessary(applicable_filters) def full_dehydrate(self, bundle, for_list=False): #If the select_fields param is found, run this overwritten method. #Otherwise run the default Tastypie method select_fields_param = bundle.request.GET.get('select_fields', None) if select_fields_param: select_fields = comma_separated_parameter_to_list(select_fields_param) for field_name, field_object in self.fields.items(): #If the field_name is in the list of requested fields dehydrate it if (field_name) in select_fields: # A touch leaky but it makes URI resolution work. if getattr(field_object, 'dehydrated_type', None) == 'related': field_object.api_name = self._meta.api_name field_object.resource_name = self._meta.resource_name bundle.data[field_name] = field_object.dehydrate(bundle, for_list=for_list) # Check for an optional method to do further dehydration. method = getattr(self, "dehydrate_%s" % field_name, None) if method: bundle.data[field_name] = method(bundle) bundle = self.dehydrate(bundle) return bundle else: return super(ActivityResource, self).full_dehydrate(bundle, for_list) def get_list(self, request, **kwargs): # check if call is cached using validator.is_cached # check if call contains flush, if it does the call comes from the cache updater and shouldn't return cached results validator = Validator() cururl = request.META['PATH_INFO'] + "?" 
+ request.META['QUERY_STRING'] if not 'flush' in cururl and validator.is_cached(cururl): return HttpResponse(validator.get_cached_call(cururl), mimetype='application/json') else: return super(ActivityResource, self).get_list(request, **kwargs) def alter_list_data_to_serialize(self, request, data): select_fields_param = request.GET.get('select_fields', None) if select_fields_param: select_fields = comma_separated_parameter_to_list(select_fields_param) data['meta']['selectable_fields'] = {f[0] for f in self.fields.items()} - {f for f in select_fields} return data
agpl-3.0
9,049,514,736,130,169,000
43.24911
162
0.687309
false
4.066056
false
false
false
PyGotham/pygotham
pygotham/manage/events.py
1
1920
"""Event-related management commands.""" import sys import arrow from flask import current_app from flask_script import Command, prompt, prompt_bool from werkzeug.datastructures import MultiDict from pygotham.core import db from pygotham.forms import EventForm from pygotham.models import Event class CreateEvent(Command): """Management command to create an :class:`~pygotham.models.Event`. In addition to asking for certain values, the event can also be activated. """ def run(self): """Run the command.""" # Get the information. name = prompt('Name') slug = prompt('Slug (optional)') begins = prompt('Event start date (YYYY-MM-DD)') ends = prompt('Event end date (YYYY-MM-DD)') proposals_begin = prompt('CFP start date (YYYY-MM-DD HH:MM:SS)') active = prompt_bool('Activate the event') data = MultiDict({ 'name': name, 'slug': slug, 'begins': begins, 'ends': ends, 'proposals_begin': proposals_begin, 'active': active, }) # Validate the form. form = EventForm(data, csrf_enabled=False) if form.validate(): # Save the new event. event = Event() form.populate_obj(event) if event.active: now = arrow.utcnow().to(current_app.config['TIME_ZONE']).naive event.activity_begins = now db.session.add(event) db.session.commit() print('\nEvent created successfully.') print('Event(id={} slug={} name={})'.format( event.id, event.slug, event.name)) return event # If something went wrong, report it and exit out. print('\nError creating event:') for errors in form.errors.values(): print('\n'.join(errors)) sys.exit(1)
bsd-3-clause
-2,552,559,807,039,151,000
28.538462
78
0.578125
false
4.201313
false
false
false
SahilTikale/haas
examples/dbinit.py
2
1653
#!/usr/bin/python
"""
Register nodes with HIL.

This is intended to be used as a template for either creating a mock HIL setup
for development or to be modified to register real-life nodes that follow a
particular pattern.

In the example environment for which this module is written, there are 10
nodes which have IPMI interfaces that are sequentially numbered starting with
10.0.0.0, have a username of "ADMIN_USER" and password of "ADMIN_PASSWORD".
The ports are also numbered sequentially and are named following a dell switch
scheme, which have ports that look like "gi1/0/5"

It could be used in an environment similar to the one which ``hil.cfg``
corresponds, though could also be used for development with the
``hil.cfg.dev*``
"""

from subprocess import check_call

N_NODES = 6

ipmi_user = "ADMIN_USER"
ipmi_pass = "ADMIN_PASSWORD"
switch = "mock01"

obmd_base_uri = 'http://obmd.example.com/nodes/'
obmd_admin_token = 'secret'


def hil(*args):
    """Convenience function that calls the hil command line tool with
    the given arguments.
    """
    args = map(str, args)
    print args
    check_call(['hil'] + args)


hil('switch', 'register', switch, 'mock', 'ip', 'user', 'pass')

for node in range(N_NODES):
    ipmi_ip = "10.0.0." + str(node + 1)
    nic_port = "gi1/0/%d" % (node)
    nic_name = 'nic1'

    hil('node', 'register', node,
        obmd_base_uri + str(node),
        obmd_admin_token,
        "mock", ipmi_ip, ipmi_user, ipmi_pass)
    hil('node', 'nic', 'register', node, nic_name, 'FillThisInLater')
    hil('port', 'register', switch, nic_port)
    hil('port', 'nic', 'add', switch, nic_port, node, nic_name)
apache-2.0
1,030,338,861,592,609,900
29.611111
78
0.678766
false
3.19112
false
false
false
tobast/sysres-pikern
snake/snake_common.py
1
1643
SERVER_PORT = 31412

PSIZE = 20
WIDTH = 30
HEIGHT = 30
PERIOD = 100

def p2add(u, v):
    return (u[0] + v[0], u[1] + v[1])

DIRS = [(0, 1), (1, 0), (-1, 0), (0, -1)]

NB_APPLES = 3

class Packet:
    def __init__(self, data = b''):
        self.start_index = 0
        self.data = data

    def add_position(self, p):
        self.data += bytes((p[0], p[1]))

    def add_uint16(self, n):
        self.data += bytes(((n >> 8) & 0xff, n & 0xff))

    def add_uint8(self, n):
        self.data += bytes((n,))

    def add_color(self, c):
        self.add_uint16(c[0])
        self.add_uint16(c[1])
        self.add_uint16(c[2])

    def add_position_list(self, l):
        self.add_uint16(len(l))
        for p in l:
            self.add_position(p)

    def read_position(self):
        r = self.data[self.start_index]
        s = self.data[self.start_index + 1]
        self.start_index += 2
        return (r, s)

    def read_uint16(self):
        r = self.data[self.start_index]
        s = self.data[self.start_index + 1]
        self.start_index += 2
        return (r << 8) | s

    def read_uint8(self):
        r = self.data[self.start_index]
        self.start_index += 1
        return r

    def read_position_list(self):
        l = []
        n = self.read_uint16()
        for i in range(n):
            l.append(self.read_position())
        return l

    def read_color(self):
        r = self.read_uint16()
        g = self.read_uint16()
        b = self.read_uint16()
        return (r, g, b)

TOSERVER_INIT = 0
TOCLIENT_INIT = 1
SET_SNAKE = 2
SET_APPLES = 3
SET_DIRECTION = 4
SET_SNAKE_COLOR = 5
TOCLIENT_ACCESS_DENIED = 6
gpl-3.0
71,077,579,929,375,590
20.906667
55
0.522215
false
2.872378
false
false
false
jolyonb/edx-platform
lms/djangoapps/course_api/blocks/tests/test_forms.py
1
8460
""" Tests for Course Blocks forms """ from urllib import urlencode import ddt from django.http import Http404, QueryDict from opaque_keys.edx.locator import CourseLocator from rest_framework.exceptions import PermissionDenied from openedx.core.djangoapps.util.test_forms import FormTestMixin from student.models import CourseEnrollment from student.tests.factories import CourseEnrollmentFactory, UserFactory from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory from ..forms import BlockListGetForm @ddt.ddt class TestBlockListGetForm(FormTestMixin, SharedModuleStoreTestCase): """ Tests for BlockListGetForm """ FORM_CLASS = BlockListGetForm @classmethod def setUpClass(cls): super(TestBlockListGetForm, cls).setUpClass() cls.course = CourseFactory.create() def setUp(self): super(TestBlockListGetForm, self).setUp() self.student = UserFactory.create() self.student2 = UserFactory.create() self.staff = UserFactory.create(is_staff=True) CourseEnrollmentFactory.create(user=self.student, course_id=self.course.id) CourseEnrollmentFactory.create(user=self.student2, course_id=self.course.id) usage_key = self.course.location self.initial = {'requesting_user': self.student} self.form_data = QueryDict( urlencode({ 'username': self.student.username, 'usage_key': unicode(usage_key), }), mutable=True, ) self.cleaned_data = { 'all_blocks': None, 'block_counts': set(), 'depth': 0, 'nav_depth': None, 'return_type': 'dict', 'requested_fields': {'display_name', 'type'}, 'student_view_data': set(), 'usage_key': usage_key, 'username': self.student.username, 'user': self.student, 'block_types_filter': set(), } def assert_raises_permission_denied(self): """ Fail unless permission is denied to the form """ with self.assertRaises(PermissionDenied): self.get_form(expected_valid=False) def assert_raises_not_found(self): """ Fail unless a 404 occurs """ with self.assertRaises(Http404): self.get_form(expected_valid=False) def assert_equals_cleaned_data(self): """ Check that the form returns the expected data """ form = self.get_form(expected_valid=True) self.assertDictEqual(form.cleaned_data, self.cleaned_data) def test_basic(self): self.assert_equals_cleaned_data() #-- usage key def test_no_usage_key_param(self): self.form_data.pop('usage_key') self.assert_error('usage_key', "This field is required.") def test_invalid_usage_key(self): self.form_data['usage_key'] = 'invalid_usage_key' self.assert_error('usage_key', "'invalid_usage_key' is not a valid usage key.") def test_non_existent_usage_key(self): self.form_data['usage_key'] = self.store.make_course_usage_key(CourseLocator('non', 'existent', 'course')) self.assert_raises_permission_denied() #-- user @ddt.data("True", "true", True) def test_no_user_all_blocks_true(self, all_blocks_value): self.initial = {'requesting_user': self.staff} self.form_data.pop('username') self.form_data['all_blocks'] = all_blocks_value self.get_form(expected_valid=True) @ddt.data("False", "false", False) def test_no_user_all_blocks_false(self, all_blocks_value): self.initial = {'requesting_user': self.staff} self.form_data.pop('username') self.form_data['all_blocks'] = all_blocks_value self.assert_error('username', "This field is required unless all_blocks is requested.") def test_no_user_all_blocks_none(self): self.initial = {'requesting_user': self.staff} self.form_data.pop('username') self.assert_error('username', "This field is required unless all_blocks is requested.") def test_no_user_non_staff(self): 
self.form_data.pop('username') self.form_data['all_blocks'] = True self.assert_raises_permission_denied() def test_nonexistent_user_by_student(self): self.form_data['username'] = 'non_existent_user' self.assert_raises_permission_denied() def test_nonexistent_user_by_staff(self): self.initial = {'requesting_user': self.staff} self.form_data['username'] = 'non_existent_user' self.assert_raises_not_found() def test_other_user_by_student(self): self.form_data['username'] = self.student2.username self.assert_raises_permission_denied() def test_other_user_by_staff(self): self.initial = {'requesting_user': self.staff} self.get_form(expected_valid=True) def test_unenrolled_student(self): CourseEnrollment.unenroll(self.student, self.course.id) self.assert_raises_permission_denied() def test_unenrolled_staff(self): CourseEnrollment.unenroll(self.staff, self.course.id) self.initial = {'requesting_user': self.staff} self.form_data['username'] = self.staff.username self.get_form(expected_valid=True) def test_unenrolled_student_by_staff(self): CourseEnrollment.unenroll(self.student, self.course.id) self.initial = {'requesting_user': self.staff} self.get_form(expected_valid=True) #-- depth def test_depth_integer(self): self.form_data['depth'] = 3 self.cleaned_data['depth'] = 3 self.assert_equals_cleaned_data() def test_depth_all(self): self.form_data['depth'] = 'all' self.cleaned_data['depth'] = None self.assert_equals_cleaned_data() def test_depth_invalid(self): self.form_data['depth'] = 'not_an_integer' self.assert_error('depth', "'not_an_integer' is not a valid depth value.") #-- nav depth def test_nav_depth(self): self.form_data['nav_depth'] = 3 self.cleaned_data['nav_depth'] = 3 self.cleaned_data['requested_fields'] |= {'nav_depth'} self.assert_equals_cleaned_data() def test_nav_depth_invalid(self): self.form_data['nav_depth'] = 'not_an_integer' self.assert_error('nav_depth', "Enter a whole number.") def test_nav_depth_negative(self): self.form_data['nav_depth'] = -1 self.assert_error('nav_depth', "Ensure this value is greater than or equal to 0.") #-- return_type def test_return_type(self): self.form_data['return_type'] = 'list' self.cleaned_data['return_type'] = 'list' self.assert_equals_cleaned_data() def test_return_type_invalid(self): self.form_data['return_type'] = 'invalid_return_type' self.assert_error( 'return_type', "Select a valid choice. invalid_return_type is not one of the available choices." 
) #-- requested fields def test_requested_fields(self): self.form_data.setlist('requested_fields', ['graded', 'nav_depth', 'some_other_field']) self.cleaned_data['requested_fields'] |= {'graded', 'nav_depth', 'some_other_field'} self.assert_equals_cleaned_data() @ddt.data('block_counts', 'student_view_data') def test_higher_order_field(self, field_name): field_value = {'block_type1', 'block_type2'} self.form_data.setlist(field_name, field_value) self.cleaned_data[field_name] = field_value self.cleaned_data['requested_fields'].add(field_name) self.assert_equals_cleaned_data() def test_combined_fields(self): # add requested fields self.form_data.setlist('requested_fields', ['field1', 'field2']) # add higher order fields block_types_list = {'block_type1', 'block_type2'} for field_name in ['block_counts', 'student_view_data']: self.form_data.setlist(field_name, block_types_list) self.cleaned_data[field_name] = block_types_list # verify the requested_fields in cleaned_data includes all fields self.cleaned_data['requested_fields'] |= {'field1', 'field2', 'student_view_data', 'block_counts'} self.assert_equals_cleaned_data()
agpl-3.0
6,443,629,654,869,371,000
34.39749
114
0.635697
false
3.748339
true
false
false
famish99/pyvisa-sim
pyvisa_sim/tcpip.py
1
2280
# -*- coding: utf-8 -*-
"""
    pyvisa-sim.tcpip
    ~~~~~~~~~~~~~~~~

    TCPIP simulated session class.

    :copyright: 2014 by PyVISA-sim Authors, see AUTHORS for more details.
    :license: MIT, see LICENSE for more details.
"""
import time

from pyvisa import constants

from . import sessions


class BaseTCPIPSession(sessions.Session):
    """Base class for TCPIP sessions."""

    def read(self, count):
        end_char, _ = self.get_attribute(constants.VI_ATTR_TERMCHAR)
        enabled, _ = self.get_attribute(constants.VI_ATTR_TERMCHAR_EN)
        timeout, _ = self.get_attribute(constants.VI_ATTR_TMO_VALUE)
        timeout /= 1000

        start = time.time()

        out = b""

        while time.time() - start <= timeout:
            last = self.device.read()

            if not last:
                time.sleep(0.01)
                continue

            out += last

            if enabled:
                if len(out) > 0 and out[-1] == end_char:
                    return out, constants.StatusCode.success_termination_character_read

            if len(out) == count:
                return out, constants.StatusCode.success_max_count_read
        else:
            return out, constants.StatusCode.error_timeout

    def write(self, data):
        send_end = self.get_attribute(constants.VI_ATTR_SEND_END_EN)

        for i in range(len(data)):
            self.device.write(data[i : i + 1])

        if send_end:
            # EOM 4882
            pass

        return len(data), constants.StatusCode.success


@sessions.Session.register(constants.InterfaceType.tcpip, "INSTR")
class TCPIPInstrumentSession(BaseTCPIPSession):
    def after_parsing(self):
        self.attrs[constants.VI_ATTR_INTF_NUM] = int(self.parsed.board)
        self.attrs[constants.VI_ATTR_TCPIP_ADDR] = self.parsed.host_address
        self.attrs[constants.VI_ATTR_TCPIP_DEVICE_NAME] = self.parsed.lan_device_name


@sessions.Session.register(constants.InterfaceType.tcpip, "SOCKET")
class TCPIPSocketSession(BaseTCPIPSession):
    def after_parsing(self):
        self.attrs[constants.VI_ATTR_INTF_NUM] = int(self.parsed.board)
        self.attrs[constants.VI_ATTR_TCPIP_ADDR] = self.parsed.host_address
        self.attrs[constants.VI_ATTR_TCPIP_PORT] = int(self.parsed.port)
mit
-8,203,132,260,625,480,000
29.4
87
0.623684
false
3.68932
false
false
false
waynewolf/abucket
from-tf-web/quickstart/1-get-started-tf-contrib-learn-customize.py
1
1494
import numpy as np
import tensorflow as tf

# Declare list of features, we only have one real-valued feature
def model(features, labels, mode):
  # Build a linear model and predict values
  W = tf.get_variable("W", [1], dtype=tf.float64)
  b = tf.get_variable("b", [1], dtype=tf.float64)
  y = W*features['x'] + b
  # Loss sub-graph
  loss = tf.reduce_sum(tf.square(y - labels))
  # Training sub-graph
  global_step = tf.train.get_global_step()
  optimizer = tf.train.GradientDescentOptimizer(0.01)
  train = tf.group(optimizer.minimize(loss),
                   tf.assign_add(global_step, 1))
  # ModelFnOps connects subgraphs we built to the
  # appropriate functionality.
  return tf.contrib.learn.ModelFnOps(
      mode=mode, predictions=y,
      loss=loss,
      train_op=train)

estimator = tf.contrib.learn.Estimator(model_fn=model)

# define our data sets
x_train = np.array([1., 2., 3., 4.])
y_train = np.array([0., -1., -2., -3.])
x_eval = np.array([2., 5., 8., 1.])
y_eval = np.array([-1.01, -4.1, -7, 0.])
input_fn = tf.contrib.learn.io.numpy_input_fn({"x": x_train}, y_train, 4, num_epochs=1000)
eval_input_fn = tf.contrib.learn.io.numpy_input_fn(
    {"x":x_eval}, y_eval, batch_size=4, num_epochs=1000)

# train
estimator.fit(input_fn=input_fn, steps=1000)

# Here we evaluate how well our model did.
train_loss = estimator.evaluate(input_fn=input_fn)
eval_loss = estimator.evaluate(input_fn=eval_input_fn)
print("train loss: %r"% train_loss)
print("eval loss: %r"% eval_loss)
mit
-8,651,862,507,716,371,000
37.307692
90
0.670683
false
2.912281
false
false
false
MadsJensen/agency_connectivity
tf_functions.py
1
5293
""" Functions for TF analysis. @author: mje @email: mads [] cnru.dk """ import mne from mne.time_frequency import (psd_multitaper, tfr_multitaper, tfr_morlet, cwt_morlet) from mne.viz import iter_topography import matplotlib.pyplot as plt import numpy as np def calc_psd_epochs(epochs, plot=False): """Calculate PSD for epoch. Parameters ---------- epochs : list of epochs plot : bool To show plot of the psds. It will be average for each condition that is shown. Returns ------- psds_vol : numpy array The psds for the voluntary condition. psds_invol : numpy array The psds for the involuntary condition. """ tmin, tmax = -0.5, 0.5 fmin, fmax = 2, 90 # n_fft = 2048 # the FFT size (n_fft). Ideally a power of 2 psds_vol, freqs = psd_multitaper(epochs["voluntary"], tmin=tmin, tmax=tmax, fmin=fmin, fmax=fmax) psds_inv, freqs = psd_multitaper(epochs["involuntary"], tmin=tmin, tmax=tmax, fmin=fmin, fmax=fmax) psds_vol = 20 * np.log10(psds_vol) # scale to dB psds_inv = 20 * np.log10(psds_inv) # scale to dB if plot: def my_callback(ax, ch_idx): """Executed once you click on one of the channels in the plot.""" ax.plot(freqs, psds_vol_plot[ch_idx], color='red', label="voluntary") ax.plot(freqs, psds_inv_plot[ch_idx], color='blue', label="involuntary") ax.set_xlabel = 'Frequency (Hz)' ax.set_ylabel = 'Power (dB)' ax.legend() psds_vol_plot = psds_vol.copy().mean(axis=0) psds_inv_plot = psds_inv.copy().mean(axis=0) for ax, idx in iter_topography(epochs.info, fig_facecolor='k', axis_facecolor='k', axis_spinecolor='k', on_pick=my_callback): ax.plot(psds_vol_plot[idx], color='red', label="voluntary") ax.plot(psds_inv_plot[idx], color='blue', label="involuntary") plt.legend() plt.gcf().suptitle('Power spectral densities') plt.show() return psds_vol, psds_inv, freqs def multitaper_analysis(epochs): """ Parameters ---------- epochs : list of epochs Returns ------- result : numpy array The result of the multitaper analysis. """ frequencies = np.arange(6., 90., 2.) n_cycles = frequencies / 2. time_bandwidth = 4 # Same time-smoothing as (1), 7 tapers. power, plv = tfr_multitaper(epochs, freqs=frequencies, n_cycles=n_cycles, time_bandwidth=time_bandwidth, return_itc=True) return power, plv def morlet_analysis(epochs, n_cycles=4): """ Parameters ---------- epochs : list of epochs Returns ------- result : numpy array The result of the multitaper analysis. """ frequencies = np.arange(6., 30., 2.) # n_cycles = frequencies / 2. power, plv = tfr_morlet(epochs, freqs=frequencies, n_cycles=n_cycles, return_itc=True, verbose=True) return power, plv def single_trial_tf(epochs, frequencies, n_cycles=4.): """ Parameters ---------- epochs : Epochs object The epochs to calculate TF analysis on. frequencies : numpy array n_cycles : int The number of cycles for the Morlet wavelets. Returns ------- results : numpy array """ results = [] for j in range(len(epochs)): tfr = cwt_morlet(epochs.get_data()[j], sfreq=epochs.info["sfreq"], freqs=frequencies, use_fft=True, n_cycles=n_cycles, # decim=2, zero_mean=False) results.append(tfr) return results def calc_spatial_resolution(freqs, n_cycles): """Calculate the spatial resolution for a Morlet wavelet. The formula is: (freqs * cycles)*2. Parameters ---------- freqs : numpy array The frequencies to be calculated. n_cycles : int or numpy array The number of cycles used. Can be integer for the same cycle for all frequencies, or a numpy array for individual cycles per frequency. 
Returns ------- result : numpy array The results """ return (freqs / float(n_cycles)) * 2 def calc_wavelet_duration(freqs, n_cycles): """Calculate the wavelet duration for a Morlet wavelet in ms. The formula is: (cycle / frequencies / pi)*1000 Parameters ---------- freqs : numpy array The frequencies to be calculated. n_cycles : int or numpy array The number of cycles used. Can be integer for the same cycle for all frequencies, or a numpy array for individual cycles per frequency. Returns ------- result : numpy array The results """ return (float(n_cycles) / freqs / np.pi) * 1000
bsd-3-clause
6,559,354,453,207,024,000
27.005291
79
0.543737
false
3.871982
false
false
false
termNinja/Conversion-of-Regex-into-Automatons
pyscripts/classes/resources.py
1
18835
import re, os, sys from Queue import Queue # ----------------------------------------------------------------------------- class term_colors: """ Usage: print term_colors.WARNING + "This is a msg" + term_colors.ENDC """ HEADER = '\033[95m' OKBLUE = '\033[94m' OKGREEN = '\033[92m' WARNING = '\033[93m' FAIL = '\033[91m' ENDC = '\033[0m' BOLD = '\033[1m' UNDERLINE = '\033[4m' # ----------------------------------------------------------------------------- class xlogger: @staticmethod def dbg(msg): """ Prints a debugging msg onto stderr """ print >> sys.stderr, term_colors.FAIL + str(msg) + term_colors.ENDC @staticmethod def warn(msg): """ Prints a warning msg onto stderr """ print >> sys.stderr, term_colors.WARNING + str(msg) + term_colors.ENDC @staticmethod def info(msg): """ Prints an info msg onto stderr """ print >> sys.stderr, term_colors.OKBLUE + str(msg) + term_colors.ENDC @staticmethod def fine(msg): """ Prints an ok msg onto stderr """ print >> sys.stderr, term_colors.OKGREEN + str(msg) + term_colors.ENDC # ----------------------------------------------------------------------------- # handy macro class algo_step: thompson = "_01_thompson" elimeps = "_02_elimeps" determ = "_03_determ" minim = "_04_minim" # ----------------------------------------------------------------------------- # VERY IMPORTANT: # ----------------------------------------------------------------------------- # I changed type of end_node into STRING type, if error occurs BEFORE determinisation, # make sure to check it wasn't caused by this # ----------------------------------------------------------------------------- class Edge: def __init__(self, end_node, weight): """ Initializes edge object. end_node -> string weight -> string """ self.end_node = str(end_node) self.weight = str(weight) def __str__(self): return "(" + str(self.end_node) + ", " + str(self.weight) + ")" def __eq__(self, other): if self.end_node == other.end_node and self.weight == other.weight: return True else: return False def __hash__(self): return hash(self.end_node) ^ hash(self.weight) # ----------------------------------------------------------------------------- class Node: def __init__(self, node_val, is_ending): self.node_val = int(node_val) self.is_ending = bool(is_ending) def __str__(self): if self.is_ending: return "(" + str(self.node_val) + ")" else: return str(self.node_val) # When reading thomhpson's graph from .gv file, we KNOW that # node 1 is ENDING state, because that's how Thompson's algorithm was implemented # for this particular project. # ----------------------------------------------------------------------------- class Graph: # ------------------------------------------------------------------------- def __init__(self, graph_map, graph_name): self.graph_map = {} self.graph_name = graph_name self.ending_nodes = [int(1)] for ending_node in self.ending_nodes: self.graph_map[ending_node] = [] # ------------------------------------------------------------------------- def __str__(self): output = str(self.graph_name) + "\n-----------------------------\n" output += str(self.graph_map) return output # ------------------------------------------------------------------------- def form_graph_from_gv(self): """ Reads the .gv file that represent the graph and maps it onto Graph object. 
""" print "reading graph: " + self.graph_name # algo_step.thompson because python continues where C stopped with work # => Thompson algorithm has been performed f = open("../graphs/" + self.graph_name + algo_step.thompson + ".gv", "r") data = f.read() f.close() print "Graph data:" print data print # ----------------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------------- # Forming graph regex = r"\"([a-zA-Z0-9]+)\"\s*->\s*\"([a-zA-Z0-9]+)\"\s*" regex += r"(\[label\s*[=]\s*\"([a-zA-Z0-9]+)\"\])?" regex = re.compile(regex) for iter in regex.finditer(data): node_val = iter.group(1) into_node = iter.group(2) if iter.group(4) == None: graph_weight = "eps" else: graph_weight = iter.group(4) # Creating node # NOTICE TODO: Node objects aren't actually needed. It can be removed...later though if int(node_val) in self.ending_nodes: node = Node(node_val, True) print "making " + str(node_val) + "into ending node!" else: node = Node(int(node_val), False) # Creating edge edge = Edge(into_node, graph_weight) print node, edge if node.node_val in self.graph_map.keys(): self.graph_map[node.node_val].append(edge) else: self.graph_map[node.node_val] = [] self.graph_map[node.node_val].append(edge) ## TODO remove this, i've put it for testing purposes self.elim_eps() self.determinize() # ------------------------------------------------------------------------- def export_as_gv(self, algstep): """ Maps Graph object as gv file. """ output_text = [] output_text.append("digraph finite_state_machine {\n") output_text.append("graph [fontname = \"lmroman12\"];\n") output_text.append("node [fontname = \"lmroman12\"];\n") output_text.append("edge [fontname = \"lmroman12\"];\n") output_text.append("\trankdir=LR;\n") output_text.append("\tsize=\"8,5\"\n") output_text.append("\tnode [shape = doublecircle]; ") for node in self.ending_nodes: output_text.append("\"") output_text.append(str(node)) output_text.append("\"") output_text.append(",") output_text[-1] = ";\n" output_text.append("\tnode [shape = circle];\n") # lets fill in the elements nodes = self.graph_map.keys() for node in nodes: edges = self.graph_map[node] for edge in edges: output_text.append("\t\"" + str(node) + "\" -> \"" + str(edge.end_node) + "\"") # check if it was epsilon if edge.weight != "eps": output_text.append(" [label=\"" + str(edge.weight) + "\"]") output_text.append("\n") output_text.append("}") # writing into file f = open(self.graph_name + str(algstep) + ".gv", "w") # f = open("tester.gv", "w") f.write("".join(output_text)) f.close() # ------------------------------------------------------------------------- # Export graph structure as pdf # command is: # dot -Tpdf ../../graphs/source_file.gv -o ../../graphs/output.pdf def export_as_pdf(self, algstep): """ Draw a vector image of graph that it reads from gv file (make sure you have it created). Uses dot from graphviz to acomplish this amazing task. """ graph_id = self.graph_name.split("_")[0] output_name = self.graph_name + str(algstep) os.system("dot -Tpdf " + output_name + ".gv -o " + output_name + ".pdf") return 1 # ------------------------------------------------------------------------- def elim_eps(self): """ Performs algorithm that eliminates epsilon edges in graph. Wrapper for solve_eps_prob. 
""" new_map = {0: []} new_ending_nodes = [] visited_nodes = {0: False} visited = {} for node in self.graph_map.keys(): visited[node] = {} for tmp_node in self.graph_map.keys(): visited[node][tmp_node] = False self.solve_eps_prob(0, 0, new_map, visited, new_ending_nodes) self.graph_map = new_map self.ending_nodes = new_ending_nodes self.export_as_gv(algo_step.elimeps) self.export_as_pdf(algo_step.elimeps) xlogger.fine("Exported: " + self.graph_name + algo_step.elimeps + ".gv") xlogger.fine("Exported: " + self.graph_name + algo_step.elimeps + ".pdf") # ------------------------------------------------------------------------- def solve_eps_prob(self, root_node, current_node, new_map, visited, ending_nodes): """ Recursive method that peforms a DFS search and eliminates epsilon edges. """ visited[root_node][current_node] = True if current_node in self.ending_nodes: ending_nodes.append(root_node) return for adj in self.graph_map[current_node]: if adj.weight == "eps" and not visited[root_node][int(adj.end_node)]: self.solve_eps_prob(root_node, int(adj.end_node), new_map, visited, ending_nodes) elif adj.weight == "eps": return else: if not root_node in new_map.keys(): new_map[root_node] = [] new_map[root_node].append(adj) if not visited[root_node][int(adj.end_node)]: self.solve_eps_prob(int(adj.end_node), int(adj.end_node), new_map, visited, ending_nodes) # ------------------------------------------------------------------------- # ------------------------------------------------------------------------- def determinize(self): """ Performs the determinisation algorithm. """ # we switch to string keys because of new states queue = Queue() # queue.get() queue.put(item) queue.put("0") # 0 is always the starting node new_map = {} new_map["0"] = set() while queue.qsize() > 0: print print "----------------------------------------------------------" xlogger.info("Queue state: " + str([item for item in queue.queue])) print "----------------------------------------------------------" current_node = queue.get() xlogger.info("Took " + str(current_node) + " from queue.") # find all adjacent vertices # gives something like: "1,2,3" # gives a hash map like: # str(a) -> set(int(1), ...) str(b) -> set(int(5), int(6), int(7)) xlogger.info("Calling find_adjacent_nodes with " + str(current_node)) adjacent_nodes = self.find_adjacent_nodes(current_node) xlogger.info("Adjacent nodes: " + str(adjacent_nodes)) # update a map row if required for new deterministic nodes self.update_new_map_row(current_node, adjacent_nodes, new_map, queue) xlogger.fine("Determinized graph:") for key in new_map.keys(): print str(key) + "->" for elem in new_map[key]: print "---->" + str(elem) self.convert_into_object_map(new_map) self.export_as_gv(algo_step.determ) self.export_as_pdf(algo_step.determ) # ---------------------------------------------------------------------- # Used by method: determinize # ---------------------------------------------------------------------- def update_new_map_row(self, current_node, adjacent_nodes, new_map, queue): """ Used as a helper function in determinsation algorithm. It initialises and transforms some things in main graph object. 
""" # For each weight in array for weight in adjacent_nodes.keys(): # -------------------------------------------------------------- # We iterate over set of ints and form a string # -------------------------------------------------------------- new_node = [] new_edges = [] for elem in adjacent_nodes[weight]: # forming a string new_node.append(str(elem)) new_node.append(",") new_node = "".join(new_node)[0:-1] # cut , at the end xlogger.info("formed string: " + new_node) # -------------------------------------------------------------- elem = self.list_to_string(adjacent_nodes[weight]) xlogger.info("result from [a] -> str: " + str(elem)) xlogger.info("type(" + str(elem) + " is " + str(type(elem))) # new_map[current_node] = elem if not current_node in new_map: new_map[current_node] = set() new_map[current_node].add((weight, elem)) ## now we check if new_node is in new_map.keys(), ## if so, we ignore it, if not, we add it into queue and update ## it's adjacent nodes print type(new_node) if not new_node in new_map.keys(): ## adding into queue xlogger.info("adding into queue: " + str(new_node)) queue.put(new_node) ## updating # new_map[new_node] = [] # ---------------------------------------------------------------------- def list_to_string(self, nodelist): """ Converts a list of elements onto string with character ',' as separator [1, 2, 3] => "1,2,3" """ print res = [] for elem in nodelist: res.append(str(elem)) res.append(",") res = "".join(res)[0:-1] # cut , at the end xlogger.dbg("Done conversion: " + str(res)) print return res # ---------------------------------------------------------------------- def string_to_list(self, nodestr): """ Converts a , separated string into a list of strings. It also sorts the list. "1,2,3" => [1, 2, 3] "ab,cd" => ["ab", "cd"] """ if nodestr[-1] == ",": nodestr = nodestr.split(",")[0:-1] else: nodestr = nodestr.split(",") tmp = [] xlogger.dbg("string_to_list: ") xlogger.dbg("nodestr: " + str(nodestr)) for elem in nodestr: tmp.append(int(elem)) tmp.sort() nodestr = [] for elem in tmp: nodestr.append(str(elem)) xlogger.dbg("nodestr: " + str(nodestr)) return nodestr # ---------------------------------------------------------------------- # Used by method: determinize # ---------------------------------------------------------------------- def find_adjacent_nodes(self, current_node): """ Used as a helper function in determinsation algorithm. It finds adjacent nodes for a given node. """ xlogger.info("Entered find_adjacent_nodes with current_node = " + str(current_node)) # current node can be something like: "0,3,5" adjacent_nodes = {} # example: a -> "1,2,3" b -> "3,4,5" # [1, 2, 3] -> "1,2,3" xlogger.dbg("calling conversion for: " + str(current_node)) current_node = self.string_to_list(current_node) xlogger.info("updated current_node, current_node = " + str(current_node)) # ['0', '3', '5] -> '0', '3', '5' xlogger.dbg("current node: " + str(current_node)) for node in current_node: xlogger.dbg("node: " + str(node)) if int(node) in self.graph_map.keys(): for edge in self.graph_map[int(node)]: if edge.weight not in adjacent_nodes: adjacent_nodes[edge.weight] = set() adjacent_nodes[edge.weight].add(int(edge.end_node)) return adjacent_nodes # ---------------------------------------------------------------------- def convert_into_object_map(self, new_map): """ Converts a temp hash map created during determinisation algorithm onto a main graph map used for storing a graph. It also sets ending nodes. 
""" ending_nodes = [] self.graph_map.clear() graph_nodes = new_map.keys() for node in graph_nodes: self.graph_map[node] = [] for edge in new_map[node]: # ('1,2,3', 'a') self.graph_map[node].append(Edge(edge[1], edge[0])) if not edge[1] in graph_nodes: self.graph_map[edge[1]] = [] # finding ending nodes # node => "11,3" for example for node in self.graph_map.keys(): nodez = self.string_to_list(node) for elem in nodez: xlogger.dbg("elem: " + str(elem)) if int(elem) in self.ending_nodes: ending_nodes.append(str(node)) break xlogger.info("old ending nodes: " + str(self.ending_nodes)) xlogger.info("new ending nodes: " + str(ending_nodes)) # adding nodes that don't have an output edge # currently, they are implicitly given in our graph structure # they appear only in edges in map (example: 3 has no output edge) # For example, "1,2" -> ("ab", "3") # Lets find nodes like this and add them into main map for node in graph_nodes: for edge in new_map[node]: if not edge[1] in graph_nodes: self.graph_map[edge[1]] = [] # Finally, we form the ending nodes in Graph object self.ending_nodes = ending_nodes print self.show_graph() # ---------------------------------------------------------------------- # ---------------------------------------------------------------------- def show_graph(self): """ Prints graph to stdout. """ for node in self.graph_map.keys(): print node for edge in self.graph_map[node]: print " -> " + str(edge) # ---------------------------------------------------------------------- # TODO: Nexto to implement # ---------------------------------------------------------------------- def minimize(): """ Performs minimization algorithm. """ return 1 # -----------------------------------------------------------------------------
gpl-3.0
-8,287,676,522,294,573,000
38.239583
109
0.45219
false
4.267105
false
false
false
saurabh6790/frappe
frappe/core/doctype/user/user.py
1
39931
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals, print_function from bs4 import BeautifulSoup import frappe import frappe.share import frappe.defaults import frappe.permissions from frappe.model.document import Document from frappe.utils import (cint, flt, has_gravatar, escape_html, format_datetime, now_datetime, get_formatted_email, today) from frappe import throw, msgprint, _ from frappe.utils.password import update_password as _update_password, check_password, get_password_reset_limit from frappe.desk.notifications import clear_notifications from frappe.desk.doctype.notification_settings.notification_settings import create_notification_settings, toggle_notifications from frappe.utils.user import get_system_managers from frappe.website.utils import is_signup_enabled from frappe.rate_limiter import rate_limit from frappe.utils.background_jobs import enqueue from frappe.core.doctype.user_type.user_type import user_linked_with_permission_on_doctype STANDARD_USERS = ("Guest", "Administrator") class MaxUsersReachedError(frappe.ValidationError): pass class User(Document): __new_password = None def __setup__(self): # because it is handled separately self.flags.ignore_save_passwords = ['new_password'] def autoname(self): """set name as Email Address""" if self.get("is_admin") or self.get("is_guest"): self.name = self.first_name else: self.email = self.email.strip().lower() self.name = self.email def onload(self): from frappe.config import get_modules_from_all_apps self.set_onload('all_modules', [m.get("module_name") for m in get_modules_from_all_apps()]) def before_insert(self): self.flags.in_insert = True throttle_user_creation() def after_insert(self): create_notification_settings(self.name) frappe.cache().delete_key('users_for_mentions') def validate(self): self.check_demo() # clear new password self.__new_password = self.new_password self.new_password = "" if not frappe.flags.in_test: self.password_strength_test() if self.name not in STANDARD_USERS: self.validate_email_type(self.email) self.validate_email_type(self.name) self.add_system_manager_role() self.set_system_user() self.set_full_name() self.check_enable_disable() self.ensure_unique_roles() self.remove_all_roles_for_guest() self.validate_username() self.remove_disabled_roles() self.validate_user_email_inbox() ask_pass_update() self.validate_roles() self.validate_allowed_modules() self.validate_user_image() if self.language == "Loading...": self.language = None if (self.name not in ["Administrator", "Guest"]) and (not self.get_social_login_userid("frappe")): self.set_social_login_userid("frappe", frappe.generate_hash(length=39)) def validate_roles(self): if self.role_profile_name: role_profile = frappe.get_doc('Role Profile', self.role_profile_name) self.set('roles', []) self.append_roles(*[role.role for role in role_profile.roles]) def validate_allowed_modules(self): if self.module_profile: module_profile = frappe.get_doc('Module Profile', self.module_profile) self.set('block_modules', []) for d in module_profile.get('block_modules'): self.append('block_modules', { 'module': d.module }) def validate_user_image(self): if self.user_image and len(self.user_image) > 2000: frappe.throw(_("Not a valid User Image.")) def on_update(self): # clear new password self.share_with_self() clear_notifications(user=self.name) frappe.clear_cache(user=self.name) now=frappe.flags.in_test or frappe.flags.in_install 
self.send_password_notification(self.__new_password) frappe.enqueue( 'frappe.core.doctype.user.user.create_contact', user=self, ignore_mandatory=True, now=now ) if self.name not in ('Administrator', 'Guest') and not self.user_image: frappe.enqueue('frappe.core.doctype.user.user.update_gravatar', name=self.name, now=now) # Set user selected timezone if self.time_zone: frappe.defaults.set_default("time_zone", self.time_zone, self.name) if self.has_value_changed('allow_in_mentions') or self.has_value_changed('user_type'): frappe.cache().delete_key('users_for_mentions') def has_website_permission(self, ptype, user, verbose=False): """Returns true if current user is the session user""" return self.name == frappe.session.user def check_demo(self): if frappe.session.user == '[email protected]': frappe.throw(_('Cannot change user details in demo. Please signup for a new account at https://erpnext.com'), title=_('Not Allowed')) def set_full_name(self): self.full_name = " ".join(filter(None, [self.first_name, self.last_name])) def check_enable_disable(self): # do not allow disabling administrator/guest if not cint(self.enabled) and self.name in STANDARD_USERS: frappe.throw(_("User {0} cannot be disabled").format(self.name)) if not cint(self.enabled): self.a_system_manager_should_exist() # clear sessions if disabled if not cint(self.enabled) and getattr(frappe.local, "login_manager", None): frappe.local.login_manager.logout(user=self.name) # toggle notifications based on the user's status toggle_notifications(self.name, enable=cint(self.enabled)) def add_system_manager_role(self): # if adding system manager, do nothing if not cint(self.enabled) or ("System Manager" in [user_role.role for user_role in self.get("roles")]): return if (self.name not in STANDARD_USERS and self.user_type == "System User" and not self.get_other_system_managers() and cint(frappe.db.get_single_value('System Settings', 'setup_complete'))): msgprint(_("Adding System Manager to this User as there must be atleast one System Manager")) self.append("roles", { "doctype": "Has Role", "role": "System Manager" }) if self.name == 'Administrator': # Administrator should always have System Manager Role self.extend("roles", [ { "doctype": "Has Role", "role": "System Manager" }, { "doctype": "Has Role", "role": "Administrator" } ]) def email_new_password(self, new_password=None): if new_password and not self.flags.in_insert: _update_password(user=self.name, pwd=new_password, logout_all_sessions=self.logout_all_sessions) def set_system_user(self): '''For the standard users like admin and guest, the user type is fixed.''' user_type_mapper = { 'Administrator': 'System User', 'Guest': 'Website User' } if self.user_type and not frappe.get_cached_value('User Type', self.user_type, 'is_standard'): if user_type_mapper.get(self.name): self.user_type = user_type_mapper.get(self.name) else: self.set_roles_and_modules_based_on_user_type() else: '''Set as System User if any of the given roles has desk_access''' self.user_type = 'System User' if self.has_desk_access() else 'Website User' def set_roles_and_modules_based_on_user_type(self): user_type_doc = frappe.get_cached_doc('User Type', self.user_type) if user_type_doc.role: self.roles = [] # Check whether User has linked with the 'Apply User Permission On' doctype or not if user_linked_with_permission_on_doctype(user_type_doc, self.name): self.append('roles', { 'role': user_type_doc.role }) frappe.msgprint(_('Role has been set as per the user type {0}') .format(self.user_type), alert=True) 
user_type_doc.update_modules_in_user(self) def has_desk_access(self): '''Return true if any of the set roles has desk access''' if not self.roles: return False return len(frappe.db.sql("""select name from `tabRole` where desk_access=1 and name in ({0}) limit 1""".format(', '.join(['%s'] * len(self.roles))), [d.role for d in self.roles])) def share_with_self(self): frappe.share.add(self.doctype, self.name, self.name, write=1, share=1, flags={"ignore_share_permission": True}) def validate_share(self, docshare): pass # if docshare.user == self.name: # if self.user_type=="System User": # if docshare.share != 1: # frappe.throw(_("Sorry! User should have complete access to their own record.")) # else: # frappe.throw(_("Sorry! Sharing with Website User is prohibited.")) def send_password_notification(self, new_password): try: if self.flags.in_insert: if self.name not in STANDARD_USERS: if new_password: # new password given, no email required _update_password(user=self.name, pwd=new_password, logout_all_sessions=self.logout_all_sessions) if not self.flags.no_welcome_mail and cint(self.send_welcome_email): self.send_welcome_mail_to_user() self.flags.email_sent = 1 if frappe.session.user != 'Guest': msgprint(_("Welcome email sent")) return else: self.email_new_password(new_password) except frappe.OutgoingEmailError: print(frappe.get_traceback()) pass # email server not set, don't send email @Document.hook def validate_reset_password(self): pass def reset_password(self, send_email=False, password_expired=False): from frappe.utils import random_string, get_url key = random_string(32) self.db_set("reset_password_key", key) url = "/update-password?key=" + key if password_expired: url = "/update-password?key=" + key + '&password_expired=true' link = get_url(url) if send_email: self.password_reset_mail(link) return link def get_other_system_managers(self): return frappe.db.sql("""select distinct `user`.`name` from `tabHas Role` as `user_role`, `tabUser` as `user` where user_role.role='System Manager' and `user`.docstatus<2 and `user`.enabled=1 and `user_role`.parent = `user`.name and `user_role`.parent not in ('Administrator', %s) limit 1""", (self.name,)) def get_fullname(self): """get first_name space last_name""" return (self.first_name or '') + \ (self.first_name and " " or '') + (self.last_name or '') def password_reset_mail(self, link): self.send_login_mail(_("Password Reset"), "password_reset", {"link": link}, now=True) def send_welcome_mail_to_user(self): from frappe.utils import get_url link = self.reset_password() subject = None method = frappe.get_hooks("welcome_email") if method: subject = frappe.get_attr(method[-1])() if not subject: site_name = frappe.db.get_default('site_name') or frappe.get_conf().get("site_name") if site_name: subject = _("Welcome to {0}").format(site_name) else: subject = _("Complete Registration") self.send_login_mail(subject, "new_user", dict( link=link, site_url=get_url(), )) def send_login_mail(self, subject, template, add_args, now=None): """send mail with login details""" from frappe.utils.user import get_user_fullname from frappe.utils import get_url created_by = get_user_fullname(frappe.session['user']) if created_by == "Guest": created_by = "Administrator" args = { 'first_name': self.first_name or self.last_name or "user", 'user': self.name, 'title': subject, 'login_url': get_url(), 'created_by': created_by } args.update(add_args) sender = frappe.session.user not in STANDARD_USERS and get_formatted_email(frappe.session.user) or None 
frappe.sendmail(recipients=self.email, sender=sender, subject=subject, template=template, args=args, header=[subject, "green"], delayed=(not now) if now!=None else self.flags.delay_emails, retry=3) def a_system_manager_should_exist(self): if not self.get_other_system_managers(): throw(_("There should remain at least one System Manager")) def on_trash(self): frappe.clear_cache(user=self.name) if self.name in STANDARD_USERS: throw(_("User {0} cannot be deleted").format(self.name)) self.a_system_manager_should_exist() # disable the user and log him/her out self.enabled = 0 if getattr(frappe.local, "login_manager", None): frappe.local.login_manager.logout(user=self.name) # delete todos frappe.db.sql("""DELETE FROM `tabToDo` WHERE `owner`=%s""", (self.name,)) frappe.db.sql("""UPDATE `tabToDo` SET `assigned_by`=NULL WHERE `assigned_by`=%s""", (self.name,)) # delete events frappe.db.sql("""delete from `tabEvent` where owner=%s and event_type='Private'""", (self.name,)) # delete shares frappe.db.sql("""delete from `tabDocShare` where user=%s""", self.name) # delete messages frappe.db.sql("""delete from `tabCommunication` where communication_type in ('Chat', 'Notification') and reference_doctype='User' and (reference_name=%s or owner=%s)""", (self.name, self.name)) # unlink contact frappe.db.sql("""update `tabContact` set `user`=null where `user`=%s""", (self.name)) # delete notification settings frappe.delete_doc("Notification Settings", self.name, ignore_permissions=True) if self.get('allow_in_mentions'): frappe.cache().delete_key('users_for_mentions') def before_rename(self, old_name, new_name, merge=False): self.check_demo() frappe.clear_cache(user=old_name) self.validate_rename(old_name, new_name) def validate_rename(self, old_name, new_name): # do not allow renaming administrator and guest if old_name in STANDARD_USERS: throw(_("User {0} cannot be renamed").format(self.name)) self.validate_email_type(new_name) def validate_email_type(self, email): from frappe.utils import validate_email_address validate_email_address(email.strip(), True) def after_rename(self, old_name, new_name, merge=False): tables = frappe.db.get_tables() for tab in tables: desc = frappe.db.get_table_columns_description(tab) has_fields = [] for d in desc: if d.get('name') in ['owner', 'modified_by']: has_fields.append(d.get('name')) for field in has_fields: frappe.db.sql("""UPDATE `%s` SET `%s` = %s WHERE `%s` = %s""" % (tab, field, '%s', field, '%s'), (new_name, old_name)) if frappe.db.exists("Chat Profile", old_name): frappe.rename_doc("Chat Profile", old_name, new_name, force=True, show_alert=False) if frappe.db.exists("Notification Settings", old_name): frappe.rename_doc("Notification Settings", old_name, new_name, force=True, show_alert=False) # set email frappe.db.sql("""UPDATE `tabUser` SET email = %s WHERE name = %s""", (new_name, new_name)) def append_roles(self, *roles): """Add roles to user""" current_roles = [d.role for d in self.get("roles")] for role in roles: if role in current_roles: continue self.append("roles", {"role": role}) def add_roles(self, *roles): """Add roles to user and save""" self.append_roles(*roles) self.save() def remove_roles(self, *roles): existing_roles = dict((d.role, d) for d in self.get("roles")) for role in roles: if role in existing_roles: self.get("roles").remove(existing_roles[role]) self.save() def remove_all_roles_for_guest(self): if self.name == "Guest": self.set("roles", list(set(d for d in self.get("roles") if d.role == "Guest"))) def remove_disabled_roles(self): 
disabled_roles = [d.name for d in frappe.get_all("Role", filters={"disabled":1})] for role in list(self.get('roles')): if role.role in disabled_roles: self.get('roles').remove(role) def ensure_unique_roles(self): exists = [] for i, d in enumerate(self.get("roles")): if (not d.role) or (d.role in exists): self.get("roles").remove(d) else: exists.append(d.role) def validate_username(self): if not self.username and self.is_new() and self.first_name: self.username = frappe.scrub(self.first_name) if not self.username: return # strip space and @ self.username = self.username.strip(" @") if self.username_exists(): if self.user_type == 'System User': frappe.msgprint(_("Username {0} already exists").format(self.username)) self.suggest_username() self.username = "" def password_strength_test(self): """ test password strength """ if self.flags.ignore_password_policy: return if self.__new_password: user_data = (self.first_name, self.middle_name, self.last_name, self.email, self.birth_date) result = test_password_strength(self.__new_password, '', None, user_data) feedback = result.get("feedback", None) if feedback and not feedback.get('password_policy_validation_passed', False): handle_password_test_fail(result) def suggest_username(self): def _check_suggestion(suggestion): if self.username != suggestion and not self.username_exists(suggestion): return suggestion return None # @firstname username = _check_suggestion(frappe.scrub(self.first_name)) if not username: # @firstname_last_name username = _check_suggestion(frappe.scrub("{0} {1}".format(self.first_name, self.last_name or ""))) if username: frappe.msgprint(_("Suggested Username: {0}").format(username)) return username def username_exists(self, username=None): return frappe.db.get_value("User", {"username": username or self.username, "name": ("!=", self.name)}) def get_blocked_modules(self): """Returns list of modules blocked for that user""" return [d.module for d in self.block_modules] if self.block_modules else [] def validate_user_email_inbox(self): """ check if same email account added in User Emails twice """ email_accounts = [ user_email.email_account for user_email in self.user_emails ] if len(email_accounts) != len(set(email_accounts)): frappe.throw(_("Email Account added multiple times")) def get_social_login_userid(self, provider): try: for p in self.social_logins: if p.provider == provider: return p.userid except: return None def set_social_login_userid(self, provider, userid, username=None): social_logins = { "provider": provider, "userid": userid } if username: social_logins["username"] = username self.append("social_logins", social_logins) def get_restricted_ip_list(self): if not self.restrict_ip: return return [i.strip() for i in self.restrict_ip.split(",")] @classmethod def find_by_credentials(cls, user_name: str, password: str, validate_password: bool = True): """Find the user by credentials. This is a login utility that needs to check login related system settings while finding the user. 1. Find user by email ID by default 2. If allow_login_using_mobile_number is set, you can use mobile number while finding the user. 3. If allow_login_using_user_name is set, you can use username while finding the user. 
""" login_with_mobile = cint(frappe.db.get_value("System Settings", "System Settings", "allow_login_using_mobile_number")) login_with_username = cint(frappe.db.get_value("System Settings", "System Settings", "allow_login_using_user_name")) or_filters = [{"name": user_name}] if login_with_mobile: or_filters.append({"mobile_no": user_name}) if login_with_username: or_filters.append({"username": user_name}) users = frappe.db.get_all('User', fields=['name', 'enabled'], or_filters=or_filters, limit=1) if not users: return user = users[0] user['is_authenticated'] = True if validate_password: try: check_password(user['name'], password, delete_tracker_cache=False) except frappe.AuthenticationError: user['is_authenticated'] = False return user @frappe.whitelist() def get_timezones(): import pytz return { "timezones": pytz.all_timezones } @frappe.whitelist() def get_all_roles(arg=None): """return all roles""" active_domains = frappe.get_active_domains() roles = frappe.get_all("Role", filters={ "name": ("not in", "Administrator,Guest,All"), "disabled": 0 }, or_filters={ "ifnull(restrict_to_domain, '')": "", "restrict_to_domain": ("in", active_domains) }, order_by="name") return [ role.get("name") for role in roles ] @frappe.whitelist() def get_roles(arg=None): """get roles for a user""" return frappe.get_roles(frappe.form_dict['uid']) @frappe.whitelist() def get_perm_info(role): """get permission info""" from frappe.permissions import get_all_perms return get_all_perms(role) @frappe.whitelist(allow_guest=True) def update_password(new_password, logout_all_sessions=0, key=None, old_password=None): #validate key to avoid key input like ['like', '%'], '', ['in', ['']] if key and not isinstance(key, str): frappe.throw(_('Invalid key type')) result = test_password_strength(new_password, key, old_password) feedback = result.get("feedback", None) if feedback and not feedback.get('password_policy_validation_passed', False): handle_password_test_fail(result) res = _get_user_for_update_password(key, old_password) if res.get('message'): frappe.local.response.http_status_code = 410 return res['message'] else: user = res['user'] logout_all_sessions = cint(logout_all_sessions) or frappe.db.get_single_value("System Settings", "logout_on_password_reset") _update_password(user, new_password, logout_all_sessions=cint(logout_all_sessions)) user_doc, redirect_url = reset_user_data(user) # get redirect url from cache redirect_to = frappe.cache().hget('redirect_after_login', user) if redirect_to: redirect_url = redirect_to frappe.cache().hdel('redirect_after_login', user) frappe.local.login_manager.login_as(user) frappe.db.set_value("User", user, "last_password_reset_date", today()) frappe.db.set_value("User", user, "reset_password_key", "") if user_doc.user_type == "System User": return "/app" else: return redirect_url if redirect_url else "/" @frappe.whitelist(allow_guest=True) def test_password_strength(new_password, key=None, old_password=None, user_data=None): from frappe.utils.password_strength import test_password_strength as _test_password_strength password_policy = frappe.db.get_value("System Settings", None, ["enable_password_policy", "minimum_password_score"], as_dict=True) or {} enable_password_policy = cint(password_policy.get("enable_password_policy", 0)) minimum_password_score = cint(password_policy.get("minimum_password_score", 0)) if not enable_password_policy: return {} if not user_data: user_data = frappe.db.get_value('User', frappe.session.user, ['first_name', 'middle_name', 'last_name', 'email', 
'birth_date']) if new_password: result = _test_password_strength(new_password, user_inputs=user_data) password_policy_validation_passed = False # score should be greater than 0 and minimum_password_score if result.get('score') and result.get('score') >= minimum_password_score: password_policy_validation_passed = True result['feedback']['password_policy_validation_passed'] = password_policy_validation_passed return result #for login @frappe.whitelist() def has_email_account(email): return frappe.get_list("Email Account", filters={"email_id": email}) @frappe.whitelist(allow_guest=False) def get_email_awaiting(user): waiting = frappe.db.sql("""select email_account,email_id from `tabUser Email` where awaiting_password = 1 and parent = %(user)s""", {"user":user}, as_dict=1) if waiting: return waiting else: frappe.db.sql("""update `tabUser Email` set awaiting_password =0 where parent = %(user)s""",{"user":user}) return False @frappe.whitelist(allow_guest=False) def set_email_password(email_account, user, password): account = frappe.get_doc("Email Account", email_account) if account.awaiting_password: account.awaiting_password = 0 account.password = password try: account.save(ignore_permissions=True) except Exception: frappe.db.rollback() return False return True def setup_user_email_inbox(email_account, awaiting_password, email_id, enable_outgoing): """ setup email inbox for user """ def add_user_email(user): user = frappe.get_doc("User", user) row = user.append("user_emails", {}) row.email_id = email_id row.email_account = email_account row.awaiting_password = awaiting_password or 0 row.enable_outgoing = enable_outgoing or 0 user.save(ignore_permissions=True) udpate_user_email_settings = False if not all([email_account, email_id]): return user_names = frappe.db.get_values("User", { "email": email_id }, as_dict=True) if not user_names: return for user in user_names: user_name = user.get("name") # check if inbox is alreay configured user_inbox = frappe.db.get_value("User Email", { "email_account": email_account, "parent": user_name }, ["name"]) or None if not user_inbox: add_user_email(user_name) else: # update awaiting password for email account udpate_user_email_settings = True if udpate_user_email_settings: frappe.db.sql("""UPDATE `tabUser Email` SET awaiting_password = %(awaiting_password)s, enable_outgoing = %(enable_outgoing)s WHERE email_account = %(email_account)s""", { "email_account": email_account, "enable_outgoing": enable_outgoing, "awaiting_password": awaiting_password or 0 }) else: users = " and ".join([frappe.bold(user.get("name")) for user in user_names]) frappe.msgprint(_("Enabled email inbox for user {0}").format(users)) ask_pass_update() def remove_user_email_inbox(email_account): """ remove user email inbox settings if email account is deleted """ if not email_account: return users = frappe.get_all("User Email", filters={ "email_account": email_account }, fields=["parent as name"]) for user in users: doc = frappe.get_doc("User", user.get("name")) to_remove = [ row for row in doc.user_emails if row.email_account == email_account ] [ doc.remove(row) for row in to_remove ] doc.save(ignore_permissions=True) def ask_pass_update(): # update the sys defaults as to awaiting users from frappe.utils import set_default users = frappe.db.sql("""SELECT DISTINCT(parent) as user FROM `tabUser Email` WHERE awaiting_password = 1""", as_dict=True) password_list = [ user.get("user") for user in users ] set_default("email_user_password", u','.join(password_list)) def 
_get_user_for_update_password(key, old_password): # verify old password if key: user = frappe.db.get_value("User", {"reset_password_key": key}) if not user: return { 'message': _("The Link specified has either been used before or Invalid") } elif old_password: # verify old password frappe.local.login_manager.check_password(frappe.session.user, old_password) user = frappe.session.user else: return return { 'user': user } def reset_user_data(user): user_doc = frappe.get_doc("User", user) redirect_url = user_doc.redirect_url user_doc.reset_password_key = '' user_doc.redirect_url = '' user_doc.save(ignore_permissions=True) return user_doc, redirect_url @frappe.whitelist() def verify_password(password): frappe.local.login_manager.check_password(frappe.session.user, password) @frappe.whitelist(allow_guest=True) def sign_up(email, full_name, redirect_to): if not is_signup_enabled(): frappe.throw(_('Sign Up is disabled'), title='Not Allowed') user = frappe.db.get("User", {"email": email}) if user: if user.disabled: return 0, _("Registered but disabled") else: return 0, _("Already Registered") else: if frappe.db.sql("""select count(*) from tabUser where HOUR(TIMEDIFF(CURRENT_TIMESTAMP, TIMESTAMP(modified)))=1""")[0][0] > 300: frappe.respond_as_web_page(_('Temporarily Disabled'), _('Too many users signed up recently, so the registration is disabled. Please try back in an hour'), http_status_code=429) from frappe.utils import random_string user = frappe.get_doc({ "doctype":"User", "email": email, "first_name": escape_html(full_name), "enabled": 1, "new_password": random_string(10), "user_type": "Website User" }) user.flags.ignore_permissions = True user.flags.ignore_password_policy = True user.insert() # set default signup role as per Portal Settings default_role = frappe.db.get_value("Portal Settings", None, "default_role") if default_role: user.add_roles(default_role) if redirect_to: frappe.cache().hset('redirect_after_login', user.name, redirect_to) if user.flags.email_sent: return 1, _("Please check your email for verification") else: return 2, _("Please ask your administrator to verify your sign-up") @frappe.whitelist(allow_guest=True) @rate_limit(key='user', limit=get_password_reset_limit, seconds = 24*60*60, methods=['POST']) def reset_password(user): if user=="Administrator": return 'not allowed' try: user = frappe.get_doc("User", user) if not user.enabled: return 'disabled' user.validate_reset_password() user.reset_password(send_email=True) return frappe.msgprint(_("Password reset instructions have been sent to your email")) except frappe.DoesNotExistError: frappe.clear_messages() return 'not found' @frappe.whitelist() @frappe.validate_and_sanitize_search_inputs def user_query(doctype, txt, searchfield, start, page_len, filters): from frappe.desk.reportview import get_match_cond, get_filters_cond conditions=[] user_type_condition = "and user_type != 'Website User'" if filters and filters.get('ignore_user_type'): user_type_condition = '' filters.pop('ignore_user_type') txt = "%{}%".format(txt) return frappe.db.sql("""SELECT `name`, CONCAT_WS(' ', first_name, middle_name, last_name) FROM `tabUser` WHERE `enabled`=1 {user_type_condition} AND `docstatus` < 2 AND `name` NOT IN ({standard_users}) AND ({key} LIKE %(txt)s OR CONCAT_WS(' ', first_name, middle_name, last_name) LIKE %(txt)s) {fcond} {mcond} ORDER BY CASE WHEN `name` LIKE %(txt)s THEN 0 ELSE 1 END, CASE WHEN concat_ws(' ', first_name, middle_name, last_name) LIKE %(txt)s THEN 0 ELSE 1 END, NAME asc LIMIT %(page_len)s OFFSET %(start)s 
""".format( user_type_condition = user_type_condition, standard_users=", ".join([frappe.db.escape(u) for u in STANDARD_USERS]), key=searchfield, fcond=get_filters_cond(doctype, filters, conditions), mcond=get_match_cond(doctype) ), dict(start=start, page_len=page_len, txt=txt) ) def get_total_users(): """Returns total no. of system users""" return flt(frappe.db.sql('''SELECT SUM(`simultaneous_sessions`) FROM `tabUser` WHERE `enabled` = 1 AND `user_type` = 'System User' AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0]) def get_system_users(exclude_users=None, limit=None): if not exclude_users: exclude_users = [] elif not isinstance(exclude_users, (list, tuple)): exclude_users = [exclude_users] limit_cond = '' if limit: limit_cond = 'limit {0}'.format(limit) exclude_users += list(STANDARD_USERS) system_users = frappe.db.sql_list("""select name from `tabUser` where enabled=1 and user_type != 'Website User' and name not in ({}) {}""".format(", ".join(["%s"]*len(exclude_users)), limit_cond), exclude_users) return system_users def get_active_users(): """Returns No. of system users who logged in, in the last 3 days""" return frappe.db.sql("""select count(*) from `tabUser` where enabled = 1 and user_type != 'Website User' and name not in ({}) and hour(timediff(now(), last_active)) < 72""".format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0] def get_website_users(): """Returns total no. of website users""" return frappe.db.sql("""select count(*) from `tabUser` where enabled = 1 and user_type = 'Website User'""")[0][0] def get_active_website_users(): """Returns No. of website users who logged in, in the last 3 days""" return frappe.db.sql("""select count(*) from `tabUser` where enabled = 1 and user_type = 'Website User' and hour(timediff(now(), last_active)) < 72""")[0][0] def get_permission_query_conditions(user): if user=="Administrator": return "" else: return """(`tabUser`.name not in ({standard_users}))""".format( standard_users = ", ".join(frappe.db.escape(user) for user in STANDARD_USERS)) def has_permission(doc, user): if (user != "Administrator") and (doc.name in STANDARD_USERS): # dont allow non Administrator user to view / edit Administrator user return False def notify_admin_access_to_system_manager(login_manager=None): if (login_manager and login_manager.user == "Administrator" and frappe.local.conf.notify_admin_access_to_system_manager): site = '<a href="{0}" target="_blank">{0}</a>'.format(frappe.local.request.host_url) date_and_time = '<b>{0}</b>'.format(format_datetime(now_datetime(), format_string="medium")) ip_address = frappe.local.request_ip access_message = _('Administrator accessed {0} on {1} via IP Address {2}.').format( site, date_and_time, ip_address) frappe.sendmail( recipients=get_system_managers(), subject=_("Administrator Logged In"), template="administrator_logged_in", args={'access_message': access_message}, header=['Access Notification', 'orange'] ) def extract_mentions(txt): """Find all instances of @mentions in the html.""" soup = BeautifulSoup(txt, 'html.parser') emails = [] for mention in soup.find_all(class_='mention'): if mention.get('data-is-group') == 'true': try: user_group = frappe.get_cached_doc('User Group', mention['data-id']) emails += [d.user for d in user_group.user_group_members] except frappe.DoesNotExistError: pass continue email = mention['data-id'] emails.append(email) return emails def handle_password_test_fail(result): suggestions = result['feedback']['suggestions'][0] if 
result['feedback']['suggestions'] else '' warning = result['feedback']['warning'] if 'warning' in result['feedback'] else '' suggestions += "<br>" + _("Hint: Include symbols, numbers and capital letters in the password") + '<br>' frappe.throw(' '.join([_('Invalid Password:'), warning, suggestions])) def update_gravatar(name): gravatar = has_gravatar(name) if gravatar: frappe.db.set_value('User', name, 'user_image', gravatar) @frappe.whitelist(allow_guest=True) def send_token_via_sms(tmp_id,phone_no=None,user=None): try: from frappe.core.doctype.sms_settings.sms_settings import send_request except: return False if not frappe.cache().ttl(tmp_id + '_token'): return False ss = frappe.get_doc('SMS Settings', 'SMS Settings') if not ss.sms_gateway_url: return False token = frappe.cache().get(tmp_id + '_token') args = {ss.message_parameter: 'verification code is {}'.format(token)} for d in ss.get("parameters"): args[d.parameter] = d.value if user: user_phone = frappe.db.get_value('User', user, ['phone','mobile_no'], as_dict=1) usr_phone = user_phone.mobile_no or user_phone.phone if not usr_phone: return False else: if phone_no: usr_phone = phone_no else: return False args[ss.receiver_parameter] = usr_phone status = send_request(ss.sms_gateway_url, args, use_post=ss.use_post) if 200 <= status < 300: frappe.cache().delete(tmp_id + '_token') return True else: return False @frappe.whitelist(allow_guest=True) def send_token_via_email(tmp_id,token=None): import pyotp user = frappe.cache().get(tmp_id + '_user') count = token or frappe.cache().get(tmp_id + '_token') if ((not user) or (user == 'None') or (not count)): return False user_email = frappe.db.get_value('User',user, 'email') if not user_email: return False otpsecret = frappe.cache().get(tmp_id + '_otp_secret') hotp = pyotp.HOTP(otpsecret) frappe.sendmail( recipients=user_email, sender=None, subject="Verification Code", template="verification_code", args=dict(code=hotp.at(int(count))), delayed=False, retry=3 ) return True @frappe.whitelist(allow_guest=True) def reset_otp_secret(user): otp_issuer = frappe.db.get_value('System Settings', 'System Settings', 'otp_issuer_name') user_email = frappe.db.get_value('User',user, 'email') if frappe.session.user in ["Administrator", user] : frappe.defaults.clear_default(user + '_otplogin') frappe.defaults.clear_default(user + '_otpsecret') email_args = { 'recipients':user_email, 'sender':None, 'subject':'OTP Secret Reset - {}'.format(otp_issuer or "Frappe Framework"), 'message':'<p>Your OTP secret on {} has been reset. If you did not perform this reset and did not request it, please contact your System Administrator immediately.</p>'.format(otp_issuer or "Frappe Framework"), 'delayed':False, 'retry':3 } enqueue(method=frappe.sendmail, queue='short', timeout=300, event=None, is_async=True, job_name=None, now=False, **email_args) return frappe.msgprint(_("OTP Secret has been reset. 
Re-registration will be required on next login.")) else: return frappe.throw(_("OTP secret can only be reset by the Administrator.")) def throttle_user_creation(): if frappe.flags.in_import: return if frappe.db.get_creation_count('User', 60) > frappe.local.conf.get("throttle_user_limit", 60): frappe.throw(_('Throttled')) @frappe.whitelist() def get_role_profile(role_profile): roles = frappe.get_doc('Role Profile', {'role_profile': role_profile}) return roles.roles @frappe.whitelist() def get_module_profile(module_profile): module_profile = frappe.get_doc('Module Profile', {'module_profile_name': module_profile}) return module_profile.get('block_modules') def update_roles(role_profile): users = frappe.get_all('User', filters={'role_profile_name': role_profile}) role_profile = frappe.get_doc('Role Profile', role_profile) roles = [role.role for role in role_profile.roles] for d in users: user = frappe.get_doc('User', d) user.set('roles', []) user.add_roles(*roles) def create_contact(user, ignore_links=False, ignore_mandatory=False): from frappe.contacts.doctype.contact.contact import get_contact_name if user.name in ["Administrator", "Guest"]: return contact_name = get_contact_name(user.email) if not contact_name: contact = frappe.get_doc({ "doctype": "Contact", "first_name": user.first_name, "last_name": user.last_name, "user": user.name, "gender": user.gender, }) if user.email: contact.add_email(user.email, is_primary=True) if user.phone: contact.add_phone(user.phone, is_primary_phone=True) if user.mobile_no: contact.add_phone(user.mobile_no, is_primary_mobile_no=True) contact.insert(ignore_permissions=True, ignore_links=ignore_links, ignore_mandatory=ignore_mandatory) else: contact = frappe.get_doc("Contact", contact_name) contact.first_name = user.first_name contact.last_name = user.last_name contact.gender = user.gender # Add mobile number if phone does not exists in contact if user.phone and not any(new_contact.phone == user.phone for new_contact in contact.phone_nos): # Set primary phone if there is no primary phone number contact.add_phone( user.phone, is_primary_phone=not any( new_contact.is_primary_phone == 1 for new_contact in contact.phone_nos ) ) # Add mobile number if mobile does not exists in contact if user.mobile_no and not any(new_contact.phone == user.mobile_no for new_contact in contact.phone_nos): # Set primary mobile if there is no primary mobile number contact.add_phone( user.mobile_no, is_primary_mobile_no=not any( new_contact.is_primary_mobile_no == 1 for new_contact in contact.phone_nos ) ) contact.save(ignore_permissions=True) @frappe.whitelist() def generate_keys(user): """ generate api key and api secret :param user: str """ if "System Manager" in frappe.get_roles(): user_details = frappe.get_doc("User", user) api_secret = frappe.generate_hash(length=15) # if api key is not set generate api key if not user_details.api_key: api_key = frappe.generate_hash(length=15) user_details.api_key = api_key user_details.api_secret = api_secret user_details.save() return {"api_secret": api_secret} frappe.throw(frappe._("Not Permitted"), frappe.PermissionError) @frappe.whitelist() def switch_theme(theme): if theme in ["Dark", "Light"]: frappe.db.set_value("User", frappe.session.user, "desk_theme", theme)
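The record above is the tail of Frappe's User controller. As a rough illustration of how the role helpers and the find_by_credentials classmethod defined in it are normally exercised from a bench console (a minimal sketch; the site name and e-mail address are placeholders, and the import path is assumed from the class shown earlier in this record):

import frappe
from frappe.core.doctype.user.user import User  # module path assumed

frappe.init(site="example.localhost")  # placeholder site name
frappe.connect()

# add_roles() appends any missing roles and saves the document
user = frappe.get_doc("User", "jane@example.com")
user.add_roles("System Manager", "Accounts User")

# find_by_credentials() honours the mobile-number / username login settings
# and returns a dict carrying an is_authenticated flag
match = User.find_by_credentials("jane@example.com", "secret-password")
if match and match["is_authenticated"]:
    print("credentials accepted for", match["name"])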
mit
-1,635,436,983,036,877,300
31.306634
213
0.69565
false
3.128163
false
false
false
jacebrowning/dropthebeat
setup.py
1
2371
#!/usr/bin/env python """Setup script for DropTheBeat.""" import os import sys import setuptools PACKAGE_NAME = 'dtb' MINIMUM_PYTHON_VERSION = 3, 4 def check_python_version(): """Exit when the Python version is too low.""" if sys.version_info < MINIMUM_PYTHON_VERSION: sys.exit("Python {}.{}+ is required.".format(*MINIMUM_PYTHON_VERSION)) def read_package_variable(key): """Read the value of a variable from the package without importing.""" module_path = os.path.join(PACKAGE_NAME, '__init__.py') with open(module_path) as module: for line in module: parts = line.strip().split(' ') if parts and parts[0] == key: return parts[-1].strip("'") assert 0, "'{0}' not found in '{1}'".format(key, module_path) def read_descriptions(): """Build a description for the project from documentation files.""" try: readme = open("README.rst").read() changelog = open("CHANGELOG.rst").read() except IOError: return "<placeholder>" else: return readme + '\n' + changelog check_python_version() setuptools.setup( name=read_package_variable('__project__'), version=read_package_variable('__version__'), description="Music sharing using Dropbox.", url='https://github.com/jacebrowning/dropthebeat', author='Jace Browning', author_email='[email protected]', packages=setuptools.find_packages(), entry_points={'console_scripts': ['dtb = dtb.cli:main', 'DropTheBeat = dtb.gui:main']}, long_description=read_descriptions(), license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Environment :: MacOS X', 'Environment :: Win32 (MS Windows)', 'Environment :: X11 Applications', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Communications :: File Sharing', 'Topic :: Multimedia :: Sound/Audio', ], install_requires=open("requirements.txt").readlines(), )
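read_package_variable() above depends on dtb/__init__.py declaring its metadata as plain single-space assignments, because it only splits each line on ' '. A minimal sketch of what the parser sees (the version string is a placeholder, not the project's real version):

# dtb/__init__.py is expected to contain lines such as:
#     __project__ = 'DropTheBeat'
#     __version__ = '0.8.1'        # placeholder value
line = "__version__ = '0.8.1'"
parts = line.strip().split(' ')      # ['__version__', '=', "'0.8.1'"]
assert parts[0] == '__version__'
print(parts[-1].strip("'"))          # -> 0.8.1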
lgpl-3.0
5,393,324,607,916,161,000
29.397436
78
0.609869
false
4.018644
false
false
false
MeanEYE/Sunflower
sunflower/gui/preferences/toolbar.py
1
8025
import json from gi.repository import Gtk from sunflower.widgets.settings_page import SettingsPage class Column: NAME = 0 DESCRIPTION = 1 TYPE = 2 ICON = 3 CONFIG = 4 class ToolbarOptions(SettingsPage): """Toolbar options extension class""" def __init__(self, parent, application): SettingsPage.__init__(self, parent, application, 'toolbar', _('Toolbar')) self._toolbar_manager = self._application.toolbar_manager # create list box container = Gtk.ScrolledWindow() container.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.ALWAYS) container.set_shadow_type(Gtk.ShadowType.IN) self._store = Gtk.ListStore(str, str, str, str, str) self._list = Gtk.TreeView() self._list.set_model(self._store) cell_icon = Gtk.CellRendererPixbuf() cell_name = Gtk.CellRendererText() cell_name.set_property('editable', True) cell_name.set_property('mode', Gtk.CellRendererMode.EDITABLE) cell_name.connect('edited', self._edited_name, 0) cell_type = Gtk.CellRendererText() # create name column col_name = Gtk.TreeViewColumn(_('Name')) col_name.set_min_width(200) col_name.set_resizable(True) # pack and configure renderes col_name.pack_start(cell_icon, False) col_name.pack_start(cell_name, True) col_name.add_attribute(cell_icon, 'icon-name', Column.ICON) col_name.add_attribute(cell_name, 'text', Column.NAME) # create type column col_type = Gtk.TreeViewColumn(_('Type'), cell_type, markup=Column.DESCRIPTION) col_type.set_resizable(True) col_type.set_expand(True) # add columns to the list self._list.append_column(col_name) self._list.append_column(col_type) container.add(self._list) # create controls button_box = Gtk.HBox(False, 5) button_add = Gtk.Button(stock=Gtk.STOCK_ADD) button_add.connect('clicked', self._add_widget) button_delete = Gtk.Button(stock=Gtk.STOCK_DELETE) button_delete.connect('clicked', self._delete_widget) button_edit = Gtk.Button(stock=Gtk.STOCK_EDIT) button_edit.connect('clicked', self._edit_widget) image_up = Gtk.Image() image_up.set_from_stock(Gtk.STOCK_GO_UP, Gtk.IconSize.BUTTON) button_move_up = Gtk.Button(label=None) button_move_up.add(image_up) button_move_up.set_tooltip_text(_('Move Up')) button_move_up.connect('clicked', self._move_widget, -1) image_down = Gtk.Image() image_down.set_from_stock(Gtk.STOCK_GO_DOWN, Gtk.IconSize.BUTTON) button_move_down = Gtk.Button(label=None) button_move_down.add(image_down) button_move_down.set_tooltip_text(_('Move Down')) button_move_down.connect('clicked', self._move_widget, 1) # pack ui button_box.pack_start(button_add, False, False, 0) button_box.pack_start(button_delete, False, False, 0) button_box.pack_start(button_edit, False, False, 0) button_box.pack_end(button_move_down, False, False, 0) button_box.pack_end(button_move_up, False, False, 0) # toolbar style label_style = Gtk.Label(label=_('Toolbar style:')) list_styles = Gtk.ListStore(str, int) list_styles.append((_('Icons'), Gtk.ToolbarStyle.ICONS)) list_styles.append((_('Text'), Gtk.ToolbarStyle.TEXT)) list_styles.append((_('Both'), Gtk.ToolbarStyle.BOTH)) list_styles.append((_('Both horizontal'), Gtk.ToolbarStyle.BOTH_HORIZ)) renderer = Gtk.CellRendererText() self._combobox_styles = Gtk.ComboBox(model=list_styles) self._combobox_styles.pack_start(renderer, True) self._combobox_styles.add_attribute(renderer, 'text', 0) self._combobox_styles.connect('changed', self._parent.enable_save) # toolbar icon size label_icon_size = Gtk.Label(label=_('Icon size:')) list_icon_size = Gtk.ListStore(str, int) list_icon_size.append((_('Small toolbar icon'), Gtk.IconSize.SMALL_TOOLBAR)) list_icon_size.append((_('Large 
toolbar icon'), Gtk.IconSize.LARGE_TOOLBAR)) list_icon_size.append((_('Same as drag icons'), Gtk.IconSize.DND)) list_icon_size.append((_('Same as dialog'), Gtk.IconSize.DIALOG)) renderer = Gtk.CellRendererText() self._combobox_icon_size = Gtk.ComboBox(model=list_icon_size) self._combobox_icon_size.pack_start(renderer, True) self._combobox_icon_size.add_attribute(renderer, 'text', 0) self._combobox_icon_size.connect('changed', self._parent.enable_save) style_box = Gtk.HBox(False, 5) style_box.pack_start(label_style, False, False, 0) style_box.pack_start(self._combobox_styles, False, False, 0) size_box = Gtk.HBox(False, 5) size_box.pack_start(label_icon_size, False, False, 0) size_box.pack_start(self._combobox_icon_size, False, False, 0) self.pack_start(style_box, False, False, 0) self.pack_start(size_box, False, False, 0) self.pack_start(container, True, True, 0) self.pack_start(button_box, False, False, 0) def _add_widget(self, widget, data=None): """Show dialog for creating toolbar widget""" widget_added = self._toolbar_manager.show_create_widget_dialog(self._parent) if widget_added: self._add_item_to_list(widget_added) # enable save button self._parent.enable_save() def _delete_widget(self, widget, data=None): """Delete selected toolbar widget""" selection = self._list.get_selection() list_, iter_ = selection.get_selected() if iter_ is not None: # remove item from list list_.remove(iter_) # enable save button if item was removed self._parent.enable_save() def _edited_name(self, cell, path, text, column): """Record edited text""" selected_iter = self._store.get_iter(path) if selected_iter is not None: self._store.set_value(selected_iter, column, text) # enable save button self._parent.enable_save() def _edit_widget(self, widget, data=None): """Edit selected toolbar widget""" selection = self._list.get_selection() list_, iter_ = selection.get_selected() if iter_ is not None: name = list_.get_value(iter_, Column.NAME) widget_type = list_.get_value(iter_, Column.TYPE) widget_config = list_.get_value(iter_, Column.CONFIG) edited = self._toolbar_manager.show_configure_widget_dialog( name, widget_type, json.loads(widget_config), self._parent ) # enable save button if edited: self._store.set_value(iter_, Column.CONFIG, json.dumps(edited)) self._parent.enable_save() def _move_widget(self, widget, direction): """Move selected bookmark up""" selection = self._list.get_selection() list_, iter_ = selection.get_selected() if iter_ is not None: # get iter index index = list_.get_path(iter_)[0] # depending on direction, swap iters if (direction == -1 and index > 0) \ or (direction == 1 and index < len(list_) - 1): list_.swap(iter_, list_[index + direction].iter) # enable save button if iters were swapped self._parent.enable_save() def _add_item_to_list(self, item): name = item['name'] widget_type = item['type'] widget_config = item['config'] if 'config' in item else {} data = self._toolbar_manager.get_widget_data(widget_type) if data is not None: icon = data[1] description = data[0] else: # failsafe, display raw widget type icon = '' description = '{0} <small><i>({1})</i></small>'.format(widget_type, _('missing plugin')) self._store.append((name, description, widget_type, icon, json.dumps(widget_config))) def _load_options(self): """Load options from file""" options = self._application.toolbar_options self._combobox_styles.set_active(options.get('style')) self._combobox_icon_size.set_active(options.get('icon_size')) # clear list store self._store.clear() for item in options.get('items'): 
self._add_item_to_list(item) def _save_options(self): """Save settings to config file""" options = self._application.toolbar_options options.set('style', self._combobox_styles.get_active()) options.set('icon_size', self._combobox_icon_size.get_active()) # save toolbar items settings items = [] for data in self._store: items.append({ 'name': data[Column.NAME], 'type': data[Column.TYPE], 'config': json.loads(data[Column.CONFIG]), }) options.set('items', items)
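The preferences page above keeps each toolbar widget's settings as a JSON string in the fifth ListStore column and turns them back into dicts in _save_options(). A minimal sketch of the item shape it round-trips (the widget name, type and config values are illustrative, not taken from Sunflower):

import json

item = {
    'name': 'Home directory',            # illustrative
    'type': 'parent_directory_button',   # illustrative widget type
    'config': {'path': '~'},             # arbitrary per-widget settings
}

# _add_item_to_list() serialises the config before appending the row...
row = (item['name'], '', item['type'], '', json.dumps(item['config']))
# ...and _save_options() parses it back when writing the options file
restored = {'name': row[0], 'type': row[2], 'config': json.loads(row[4])}
assert restored['config'] == item['config']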
gpl-3.0
-8,279,819,229,029,512,000
30.470588
91
0.694704
false
3.016917
true
false
false
raiden-network/raiden
raiden/utils/notifying_queue.py
1
1422
from typing import Generic, Iterable, List, TypeVar from gevent.event import Event from gevent.queue import Queue T = TypeVar("T") class NotifyingQueue(Event, Generic[T]): """This is not the same as a JoinableQueue. Here, instead of waiting for all the work to be processed, the wait is for work to be available. """ def __init__(self, maxsize: int = None, items: Iterable[T] = ()) -> None: super().__init__() self.queue = Queue(maxsize, items) if items: self.set() def put(self, item: T) -> None: """Add new item to the queue.""" self.queue.put(item) self.set() def get(self, block: bool = True, timeout: float = None) -> T: """Removes and returns an item from the queue.""" value = self.queue.get(block, timeout) if self.queue.empty(): self.clear() return value def peek(self, block: bool = True, timeout: float = None) -> T: return self.queue.peek(block, timeout) def __len__(self) -> int: return len(self.queue) def copy(self) -> List[T]: """Copies the current queue items.""" copy = self.queue.copy() result = list() while not copy.empty(): result.append(copy.get_nowait()) return result def __repr__(self) -> str: return f"NotifyingQueue(id={id(self)}, num_items={len(self.queue)})"
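NotifyingQueue pairs a gevent Event with a Queue so a consumer can simply block until work arrives. A minimal usage sketch, assuming the raiden package is installed so the module path above resolves (the payload values are illustrative):

import gevent
from raiden.utils.notifying_queue import NotifyingQueue

queue = NotifyingQueue()

def consumer():
    queue.wait()                      # Event.wait(): returns once put() has set the event
    while len(queue):
        print("got", queue.get(block=False))

worker = gevent.spawn(consumer)
queue.put(1)                          # put() also sets the event, waking the consumer
queue.put(2)
worker.join()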
mit
7,711,695,073,187,145,000
28.020408
77
0.580169
false
3.812332
false
false
false
pwarren/AGDeviceControl
agdevicecontrol/tests/test_aggregator.py
1
9428
# AGDeviceControl # Copyright (C) 2005 The Australian National University # # This file is part of AGDeviceControl. # # AGDeviceControl is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # AGDeviceControl is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with AGDeviceControl; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA import os import random import types import agdevicecontrol from agdevicecontrol.server.aggregator import Aggregator from agdevicecontrol.server.configurator import Configurator from twisted.internet import defer, reactor from twisted.trial import unittest from twisted.spread import pb import agdevicecontrol.server.ports as ports from twisted.test.test_process import SignalMixin from agdevicecontrol.tests.subprocessprotocol import SubProcessProtocol configdata = """ # sample Aggregator.conf [DeviceServer1] host: localhost port: %s password: bkurk """ % ports.deviceserver class TestAggregator(SignalMixin, unittest.TestCase): def setUpClass(self): """Start a DeviceServer in a child process to test against""" self.deviceserverprocess = SubProcessProtocol() self.deviceserverprocess.waitOnStartUp( ['server.py', 'deviceserver.conf', '-n'], \ path=os.path.join(agdevicecontrol.path,'bin') ) if self.deviceserverprocess.running is False: raise unittest.SkipTest, "DeviceServer didn't start correctly, skipping tests" #wait for slow single CPU buildbots to catch up import time time.sleep(1) # use the config above conf = Configurator() conf.fromString(configdata) # can be set by timeout self.failure = False # safety timeout self.timeout = reactor.callLater(10, self.failed, "Aggregator failed to connect to all deviceservers ... failing") self.aggregator = Aggregator(conf) self.done = False while not self.done: print "Waiting for aggregator to connect to deviceservers" reactor.iterate(0.1) if self.aggregator.connected: self.succeeded() if self.failure: raise unittest.SkipTest, "Aggregator didn't connect to all deviceservers ... skipping tests" # FIXME: we really should handle missing and newly appearing deviceservers. # safety timeout self.timeout = reactor.callLater(10, self.failed, "Aggregator failed to map all deviceservers ... 
failing") self.aggregator.notifyOnMapped(self.succeeded) self.done = False while not self.done: print "Waiting for aggregator to map deviceservers" reactor.iterate(0.1) if self.failure: raise unittest.SkipTest, "Aggregator didn't start correctly, skipping tests" def tearDownClass(self): """Stop the DeviceServer running in a child process""" print "*** tearDownClass: ", self.deviceserverprocess.done self.deviceserverprocess.waitOnShutDown() def succeeded(self, *args): """Allow reactor iteration loop in test proper to exit and pass test""" self.done = True if self.timeout is not None: self.timeout.cancel() # safety timeout no longer necessary self.timeout = None self.lastargs = args # make persistent for later checks def failed(self, reason): """Allow reactor iteration loop in test proper to exit and fail test""" self.done = True self.failure = reason self.timeout.cancel() # safety timeout no longer necessary self.timeout = None def setUp(self): """I'm called at the very beginning of each test""" self.done = False self.failure = None self.timeout = None def tearDown(self): """I'm called at the end of each test""" if self.timeout: self.timeout.cancel() def timedOut(self): """I'm called when the safety timer expires indicating test probably won't complete""" print "timedOut callback, test did not complete" self.failed("Safety timeout callback ... test did not complete") reactor.crash() #---------- tests proper ------------------------------------ def test_handled_configurator(self): """Aggregator instantiated with a configurator rather than .conf filename""" assert 'DeviceServer1' in self.aggregator.config def test_password(self): """Aggregator should have random password""" assert type(self.aggregator.getPassword()) == type("") # ensure a second instance has differing password ... conf = Configurator() conf.fromString('') other = Aggregator(conf) assert other.getPassword() != self.aggregator.getPassword() def test_devicelist_as_deferred(self): """Return aggregated device list""" # safety timeout self.timeout = reactor.callLater(10, self.failed, "retrieving devicelist timed out ... failing") d = self.aggregator.getDeviceList() assert isinstance(d, defer.Deferred) d.addCallback(self.succeeded) # idle until code above triggers succeeded or timeout causes failure while not self.done: reactor.iterate(0.1) # will arrive here eventually when either succeeded or failed method has fired if self.failure: self.failed(self.failure) devicelist = self.lastargs[0] assert len(devicelist) == 2 assert 'Device1' in devicelist assert 'Device2' in devicelist def test_devicemap_as_deferred(self): """Return aggregated device map""" # safety timeout self.timeout = reactor.callLater(10, self.failed, "retrieving devicemap timed out ... 
failing") d = self.aggregator.getDeviceMap() assert isinstance(d, defer.Deferred) # caution: as this deferred is ready-to-go, the callback is called *immediately* d.addCallback(self.succeeded) # i.e., self.succeeded has now been called # idle until code above triggers succeeded or timeout causes failure while not self.done: reactor.iterate(0.1) # will arrive here eventually when either succeeded or failed method has fired if self.failure: self.failed(self.failure) devicemap = self.lastargs[0] print devicemap assert type(devicemap) == types.DictType assert len(devicemap) == 1 assert 'PseudoDevice' in devicemap assert len(devicemap['PseudoDevice']) == 2 assert 'Device1' in devicemap['PseudoDevice'] assert 'Device2' in devicemap['PseudoDevice'] def test_device_execute(self): """Proxy forward command to correct DeviceServer""" # safety timeout self.timeout = reactor.callLater(10, self.failed, "executing remote setParameter timed out ... failing") # 3-digit random integer value = int(random.random()*1000) # get a device key for use in next step self.done = False d = self.aggregator.getDeviceList() d.addCallback(self.succeeded) d.addErrback(self.failed) while not self.done: reactor.iterate(0.1) if self.failure: self.fail(self.failure) print print "DEBUG:" device = self.lastargs[0][0] print device.name # store number in 'remote' PseudoDevice d = self.aggregator.deviceExecute(device, 'setParameter', value) assert isinstance(d, defer.Deferred) d.addCallback(self.succeeded) # idle until code above triggers succeeded or timeout causes failure self.done = False while not self.done: reactor.iterate(0.1) # will arrive here eventually when either succeeded or failed method has fired if self.failure: self.failed(self.failure) # safety timeout self.timeout = reactor.callLater(10, self.failed, "executing remote getParameter timed out ... failing") # store number in 'remote' PseudoDevice d = self.aggregator.deviceExecute(device, 'getParameter') assert isinstance(d, defer.Deferred) d.addCallback(self.succeeded) # idle until code above triggers succeeded or timeout causes failure self.done = False while not self.done: reactor.iterate(0.1) # will arrive here eventually when either succeeded or failed method has fired if self.failure: self.failed(self.failure) returnvalue = self.lastargs[0] assert returnvalue == value if False: test_handled_configurator = True test_devicelist_as_deferred = True test_devicemap_as_deferred = True test_device_execute = True test_password = True
gpl-2.0
-369,536,809,628,326,600
30.851351
122
0.655176
false
4.480989
true
false
false
vallemrv/tpvB3
tpv_for_eetop/valle_libs/valleorm/models/relatedfields.py
1
9703
# -*- coding: utf-8 -*- # @Author: Manuel Rodriguez <vallemrv> # @Date: 29-Aug-2017 # @Email: [email protected] # @Last modified by: valle # @Last modified time: 18-Feb-2018 # @License: Apache license vesion 2.0 import sys import inspect import importlib from constant import constant class RelationShip(object): def __init__(self, othermodel, **options): self.tipo_class = constant.TIPO_RELATION self.class_name = "ForeignKey" self.main_module = None self.related_class = None self.main_class = None self.field_related_name = None self.field_related_id = None self.on_delete = constant.CASCADE if type(othermodel) in (str, unicode): self.related_name = othermodel else: self.related_name = othermodel.__name__ self.related_class = othermodel for k, v in options.items(): setattr(self, k, v) def get_id_field_name(self): if self.field_related_name == None: return self.related_name.lower() + "_id" return self.field_related_name def set_id_field_name(self, value): self.field_related_name = value def get(self, **condition): pass field_name_id = property(get_id_field_name, set_id_field_name) class OneToManyField(RelationShip): def __init__(self, main_class, related_name, **kargs): super(OneToManyField, self).__init__(related_name, **kargs) self.class_name = "OneToManyField" self.main_class = main_class self.related_name = related_name if self.main_module == None: self.main_module = self.main_class.__module__ self.related_class = create_class_related(self.main_module, self.related_name) self.tb_name_main = self.main_class.get_db_table() if self.field_related_id == None: self.field_name_id = self.tb_name_main + "_id" def get(self, **condition): query = u"{0}={1}".format(self.field_name_id, self.main_class.id) if 'query' in condition: condition['query'] += " AND " + query else: condition['query'] = query return self.related_class.filter(**condition) def add(self, child): if self.main_class.id == -1: self.main_class.save() setattr(child, self.field_name_id, self.main_class.id) child.save() class ForeignKey(RelationShip): def __init__(self, othermodel, on_delete=constant.CASCADE, **kargs): super(ForeignKey, self).__init__(othermodel, **kargs) self.class_name = "ForeignKey" self.on_delete = on_delete def get_choices(self, **condition): return self.related_class.getAll(**condition) def get_sql_pk(self): sql = u"FOREIGN KEY({0}) REFERENCES {1}(id) %s" % self.on_delete sql = sql.format(self.field_name_id, self.related_name) return sql def get(self): if self.related_class == None: if self.main_module == None: self.main_module = self.main_class.__module__ self.related_class = create_class_related(self.main_module, self.related_name) reg = self.related_class(db_name=self.main_class.db_name) reg.load_by_pk(getattr(self.main_class, self.field_name_id)) return reg class ManyToManyField(RelationShip): def __init__(self, othermodel, db_table_nexo=None, **kargs): super(ManyToManyField, self).__init__(othermodel, **kargs) self.class_name = "ManyToManyField" self.db_table_nexo = db_table_nexo if self.main_class != None: if self.main_module == None: self.main_module = self.main_class.__module__ self.tb_name_main = self.main_class.get_db_table() self.related_class = create_class_related(self.main_module, self.related_name) self.tb_name_related = self.related_class.get_db_table() if self.field_related_id == None: self.field_name_id = self.tb_name_main + "_id" self.field_related_id = self.tb_name_related + "_id" def get_sql_tb_nexo(self): key = "PRIMARY KEY ({0}, {1})".format(self.field_name_id, self.field_related_id) frgKey = 
u"FOREIGN KEY({0}) REFERENCES {1}(id) ON DELETE CASCADE, " frgKey = frgKey.format(self.field_name_id, self.tb_name_main) frgKey += u"FOREIGN KEY({0}) REFERENCES {1}(id) ON DELETE CASCADE" frgKey = frgKey.format(self.field_related_id, self.tb_name_related) sql = u"CREATE TABLE IF NOT EXISTS {0} ({1}, {2} ,{3}, {4});" sql = sql.format(self.db_table_nexo, self.field_name_id+" INTEGER NOT NULL", self.field_related_id+" INTEGER NOT NULL ",key, frgKey) return sql def get(self, **condition): if "tb_nexo" in condition: self.db_table_nexo = condition["tb_nexo"] if "field_related_id" in condition: self.field_related_id = condition["field_related_id"] if "field_name_id" in condition: self.field_name_id = condition["field_name_id"] condition["columns"] = [self.tb_name_related+".*"] condition["joins"] = [(self.db_table_nexo + " ON "+ \ self.db_table_nexo+"."+self.field_related_id+\ "="+self.tb_name_related+".id")] query = self.field_name_id+"="+str(self.main_class.id) if 'query' in condition: condition["query"] += " AND " + query else: condition["query"] = query if self.related_class == None: if self.main_module == None: self.main_module = self.main_class.__module__ self.related_class = create_class_related(self.main_module, self.related_name) return self.related_class.filter(**condition) def add(self, *childs): for child in childs: child.save() cols = [self.field_name_id, self.field_related_id] values = [str(self.main_class.id), str(child.id)] sql = u"INSERT OR REPLACE INTO {0} ({1}) VALUES ({2});".format(self.db_table_nexo, ", ".join(cols), ", ".join(values)); self.main_class.execute(sql) def delete(self, child): sql = u"DELETE FROM {0} WHERE {1}={2} AND {3}={4};".format(self.db_table_nexo, self.field_name_id, child.id, self.field_related_id, self.main_class.id) self.main_class.execute(sql) class ManyToManyChild(RelationShip): def __init__(self, main_class, related_name, **kargs): super(ManyToManyChild, self).__init__(related_name, **kargs) self.class_name = "ManyToManyChild" self.main_class = main_class self.related_name = related_name if self.main_module == None: self.main_module = self.main_class.__module__ self.related_class = create_class_related(self.main_module, self.related_name) self.tb_name_main = self.main_class.get_db_table() self.tb_name_related = self.related_class.get_db_table() self.db_table_nexo = self.tb_name_related + '_' + self.tb_name_main if self.field_related_id == None: self.field_name_id = self.tb_name_main + "_id" self.field_related_id = self.tb_name_related + "_id" def get(self, **condition): if "tb_nexo" in condition: self.db_table_nexo = condition["tb_nexo"] if "field_related_id" in condition: self.field_related_id = condition["field_related_id"] if "field_name_id" in condition: self.field_name_id = condition["field_name_id"] condition["columns"] = [self.tb_name_related+".*"] condition["joins"] = [(self.db_table_nexo + " ON "+ \ self.db_table_nexo+"."+self.field_related_id+\ "="+self.tb_name_related+".id")] query = self.field_name_id+"="+str(self.main_class.id) if 'query' in condition: condition["query"] += " AND " + query else: condition["query"] = query return self.related_class.filter(**condition) def add(self, *childs): for child in childs: child.save() cols = [self.field_name_id, self.field_related_id] values = [str(self.main_class.id), str(child.id)] sql = u"INSERT OR REPLACE INTO {0} ({1}) VALUES ({2});".format(self.db_table_nexo, ", ".join(cols), ", ".join(values)); self.main_class.execute(sql) def delete(self, child): sql = u"DELETE FROM {0} WHERE {1}={2} AND 
{3}={4};".format(self.db_table_nexo, self.field_related_id, child.id, self.field_name_id, self.main_class.id) self.main_class.execute(sql) def create_class_related(module, class_name): module = ".".join(module.split(".")[:-1]) modulo = importlib.import_module(module) nclass = getattr(modulo, str(class_name)) return nclass
apache-2.0
-2,517,989,328,502,603,300
37.351779
99
0.547356
false
3.673987
false
false
false
sassoftware/saspy
saspy/sasiocom.py
1
37140
# # Copyright SAS Institute # # Licensed under the Apache License, Version 2.0 (the License); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import datetime import csv import io import numbers import os import shlex import sys import warnings try: from win32com.client import dynamic except ImportError: pass try: import pandas as pd except ImportError: pass class SASConfigCOM(object): """ This object is not intended to be used directly. Instantiate a SASSession object instead. """ NO_OVERRIDE = ['kernel', 'sb'] def __init__(self, **kwargs): self._kernel = kwargs.get('kernel') session = kwargs['sb'] sascfg = session.sascfg.SAScfg name = session.sascfg.name cfg = getattr(sascfg, name) opts = getattr(sascfg, 'SAS_config_options', {}) outs = getattr(sascfg, 'SAS_output_options', {}) self.host = cfg.get('iomhost') self.port = cfg.get('iomport') self.user = cfg.get('omruser') self.pw = cfg.get('omrpw') self.authkey = cfg.get('authkey') self.class_id = cfg.get('class_id', '440196d4-90f0-11d0-9f41-00a024bb830c') self.provider = cfg.get('provider') self.encoding = cfg.get('encoding', '') self.output = outs.get('output', 'html5') self.verbose = opts.get('verbose', True) self.verbose = kwargs.get('verbose', self.verbose) self._lock = opts.get('lock_down', True) self._prompt = session.sascfg._prompt if self.authkey is not None: self._set_authinfo() for key, value in filter(lambda x: x[0] not in self.NO_OVERRIDE, kwargs.items()): self._try_override(key, value) def _set_authinfo(self): """ Attempt to set the session user's credentials based on provided key to read from ~/.authinfo file. See .authinfo documentation here: https://documentation.sas.com/api/docsets/authinfo/9.4/content/authinfo.pdf. This method supports a subset of the .authinfo spec, in accordance with other IO access methods. This method will only parse `user` and `password` arguments, but does support spaces in values if the value is quoted. Use python's `shlex` library to parse these values. """ if os.name == 'nt': authfile = os.path.expanduser(os.path.join('~', '_authinfo')) else: authfile = os.path.expanduser(os.path.join('~', '.authinfo')) try: with open(authfile, 'r') as f: # Take first matching line found parsed = (shlex.split(x, posix=False) for x in f.readlines()) authline = next(filter(lambda x: x[0] == self.authkey, parsed), None) except OSError: print('Error trying to read {}'.format(authfile)) authline = None if authline is None: print('Key {} not found in authinfo file: {}'.format(self.authkey, authfile)) elif len(authline) < 5: print('Incomplete authinfo credentials in {}; key: {}'.format(authfile, self.authkey)) else: # Override user/pw if previously set # `authline` is in the following format: # AUTHKEY username USERNAME password PASSWORD self.user = authline[2] self.pw = authline[4] def _try_override(self, attr, value): """ Attempt to override a configuration file option if `self._lock` is False. Otherwise, warn the user. :param attr: Configuration attribute. :param value: Configuration value. 
""" if self._lock is False: setattr(self, attr, value) else: err = "Param '{}' was ignored due to configuration restriction".format(attr) print(err, file=sys.stderr) class SASSessionCOM(object): """ Initiate a connection to a SAS server and provide access for Windows clients without the Java dependency. Utilizes available COM objects for client communication with the IOM interface. It may be possible to communicate with local SAS instances as well, although this is functionality is untested. A slight change may be required to the `_startsas` method to support local instances. """ SAS_APP = 'SASApp' HTML_RESULT_FILE = 'saspy_results.html' # SASObjectManager.Protocols Enum values PROTOCOL_COM = 0 PROTOCOL_IOM = 2 # SAS Date/Time/Datetime formats FMT_DEFAULT_DATE_NAME = 'E8601DA' FMT_DEFAULT_DATE_LENGTH = 10 FMT_DEFAULT_DATE_PRECISION = 0 FMT_DEFAULT_TIME_NAME = 'E8601TM' FMT_DEFAULT_TIME_LENGTH = 15 FMT_DEFAULT_TIME_PRECISION = 6 FMT_DEFAULT_DATETIME_NAME = 'E8601DT' FMT_DEFAULT_DATETIME_LENGTH = 26 FMT_DEFAULT_DATETIME_PRECISION = 6 # Pandas data types PD_NUM_TYPE = ('i', 'u', 'f', 'c') PD_STR_TYPE = ('S', 'U', 'V') PD_DT_TYPE = ('M') PD_BOOL_TYPE = ('b') # ADODB RecordSet CursorTypeEnum values CURSOR_UNSPECIFIED = -1 CURSOR_FORWARD = 0 CURSOR_KEYSET = 1 CURSOR_DYNAMIC = 2 CURSOR_STATIC = 3 # ADODB RecordSet LockTypeEnum values LOCK_UNSPECIFIED = -1 LOCK_READONLY = 1 LOCK_PESSIMISTIC = 2 LOCK_OPTIMISTIC = 3 LOCK_BATCH_OPTIMISTIC = 4 # ADODB RecordSet CommandTypeEnum values CMD_UNSPECIFIED = -1 CMD_TEXT = 1 CMD_TABLE = 2 CMD_STORED_PROC = 4 CMD_UNKNOWN = 8 CMD_FILE = 256 CMD_TABLE_DIRECT = 512 # ADODB Connection SchemaEnum values SCHEMA_COLUMNS = 4 SCHEMA_TABLES = 20 # ADODB ObjectStateEnum values STATE_CLOSED = 0 STATE_OPEN = 1 # FileService StreamOpenMode values STREAM_READ = 1 STREAM_WRITE = 2 def __init__(self, **kwargs): self._log = '' self.sascfg = SASConfigCOM(**kwargs) self._sb = kwargs.get('sb') self.pid = self._startsas() def __del__(self): if self.adodb.State == self.STATE_OPEN: self._endsas() def _startsas(self) -> str: """ Create a workspace and open a connection with SAS. :return [str]: """ if getattr(self, 'workspace', None) is not None: # Do not create a new connection return self.workspace.UniqueIdentifier factory = dynamic.Dispatch('SASObjectManager.ObjectFactoryMulti2') server = dynamic.Dispatch('SASObjectManager.ServerDef') self.keeper = dynamic.Dispatch('SASObjectManager.ObjectKeeper') self.adodb = dynamic.Dispatch('ADODB.Connection') if self.sascfg.host is None: # Create a local connection. server.MachineDNSName = '127.0.0.1' server.Port = 0 server.Protocol = self.PROTOCOL_COM user = None password = None else: # Create a remote connection. The following are required: # 1. host # 2. port # 3. 
class_id server.MachineDNSName = self.sascfg.host server.Port = self.sascfg.port server.Protocol = self.PROTOCOL_IOM server.ClassIdentifier = self.sascfg.class_id if self.sascfg.user is not None: user = self.sascfg.user else: user = self.sascfg._prompt('Username: ') if self.sascfg.pw is not None: password = self.sascfg.pw else: password = self.sascfg._prompt('Password: ', pw=True) self.workspace = factory.CreateObjectByServer(self.SAS_APP, True, server, user, password) self.keeper.AddObject(1, 'WorkspaceObject', self.workspace) self.adodb.Open('Provider={}; Data Source=iom-id://{}'.format( self.sascfg.provider, self.workspace.UniqueIdentifier)) ll = self.submit("options svgtitle='svgtitle'; options validvarname=any validmemname=extend pagesize=max nosyntaxcheck; ods graphics on;", "text") if self.sascfg.verbose: print("SAS Connection established. Workspace UniqueIdentifier is "+str(self.workspace.UniqueIdentifier)+"\n") return self.workspace.UniqueIdentifier def _endsas(self): """ Close a connection with SAS. """ self.adodb.Close() self.keeper.RemoveObject(self.workspace) self.workspace.Close() if self.sascfg.verbose: print("SAS Connection terminated. Workspace UniqueIdentifierid was "+str(self.pid)) def _getlst(self, buf: int=2048) -> str: """ Flush listing. :option buf [int]: Download buffer. Default 2048. :return [str]: """ flushed = self.workspace.LanguageService.FlushList(buf) result = flushed while flushed: flushed = self.workspace.LanguageService.FlushList(buf) result += flushed return result def _getlog(self, buf: int=2048) -> str: """ Flush log. :option buf [int]: Download buffer. Default 2048. :return [str]: """ flushed = self.workspace.LanguageService.FlushLog(buf) result = flushed while flushed: flushed = self.workspace.LanguageService.FlushLog(buf) result += flushed # Store flush result in running log self._log += result if result.count('ERROR:') > 0: warnings.warn("Noticed 'ERROR:' in LOG, you ought to take a look and see if there was a problem") self._sb.check_error_log = True return result def _getfile(self, fname: str, buf: int=2048, decode: bool=False) -> str: """ Use object file service to download a file from the provider. :param fname [str]: Filename. :option buf [int]: Download buffer. Default 2048. :option decode [bool]: Decode the byte stream. :return [str]: """ fobj = self.workspace.FileService.AssignFileref('outfile', 'DISK', fname, '', '') # Use binary stream to support text and image transfers. The binary # stream interface does not require a max line length, which allows # support of arbitrarily wide tables. stream = fobj[0].OpenBinaryStream(self.STREAM_READ) flushed = stream.Read(buf) result = bytes(flushed) while flushed: flushed = stream.Read(buf) result += bytes(flushed) stream.Close() self.workspace.FileService.DeassignFileref(fobj[0].FilerefName) if decode is True: result = result.decode(self.sascfg.encoding, errors='replace') return result def _gethtmlfn(self) -> str: """ Return the path of the output HTML file. This is the combination of the `workpath` attribute and `HTML_RESULT_FILE` constant. :return [str]: """ return self._sb.workpath + self.HTML_RESULT_FILE def _reset(self): """ Reset the LanguageService interface to its initial state with respect to token scanning. Use it to release the LanguageService from an error state associated with the execution of invalid syntax or incomplete program source. This primarily occurs when a statement is submitted without a trailing semicolon. 
""" self.workspace.LanguageService.Reset() def _tablepath(self, table: str, libref: str=None) -> str: """ Define a sas dataset path based on a table name and optional libref name. Will return a two-level or one-level path string based on the provided arguments. One-level names are of this form: `table`, while two-level names are of this form: `libref.table`. If libref is not defined, SAS will implicitly define the library to WORK or USER. The USER library needs to have been defined previously in SAS, otherwise WORK is the default option. If the `libref` parameter is any value that evaluates to `False`, the one-level path is returned. :param table [str]: SAS data set name. :option libref [str]: Optional library name. :return [str]: """ if not libref: path = "'{}'n".format(table.strip()) else: path = "{}.'{}'n".format(libref, table.strip()) return path def _schema(self, table: str, libref: str=None) -> dict: """ Request a table schema for a given `libref.table`. :param table [str]: Table name :option libref [str]: Library name. :return [dict]: """ #tablepath = self._tablepath(table, libref=libref) if not libref: tablepath = table else: tablepath = "{}.{}".format(libref, table) criteria = [None, None, tablepath] schema = self.adodb.OpenSchema(self.SCHEMA_COLUMNS, criteria) schema.MoveFirst() metadata = {} while not schema.EOF: col_info = {x.Name: x.Value for x in schema.Fields} if col_info['FORMAT_NAME'] in self._sb.sas_date_fmts: col_info['CONVERT'] = lambda x: self._sb.SAS_EPOCH + datetime.timedelta(days=x) if x else x elif col_info['FORMAT_NAME'] in self._sb.sas_datetime_fmts: col_info['CONVERT'] = lambda x: self._sb.SAS_EPOCH + datetime.timedelta(seconds=x) if x else x # elif FIXME TIME FORMATS else: col_info['CONVERT'] = lambda x: x metadata[col_info['COLUMN_NAME']] = col_info schema.MoveNext() schema.Close() return metadata def _prompt(self, key: str, hide: bool=False) -> tuple: """ Ask the user for input about a given key. :param key [str]: Key name. :option hide [bool]: Hide user keyboard input. :return [tuple]: """ input_ok = False while input_ok is False: val = self.sascfg._prompt('Enter value for macro variable {} '.format(key), pw=hide) if val is None: raise RuntimeError("No value for prompted macro variable provided.") if val: input_ok = True else: print('Input not valid.') return (key, val) def _asubmit(self, code: str, results: str='html'): """ Submit any SAS code. Does not return a result. :param code [str]: SAS statements to execute. """ # Support html ods if results.lower() == 'html': ods_open = """ ods listing close; ods {} (id=saspy_internal) options(bitmap_mode='inline') file="{}" device=svg style={}; ods graphics on / outputfmt=png; """.format(self.sascfg.output, self._gethtmlfn(), self._sb.HTML_Style) ods_close = """ ods {} (id=saspy_internal) close; ods listing; """.format(self.sascfg.output) else: ods_open = '' ods_close = '' # Submit program full_code = ods_open + code + ods_close self.workspace.LanguageService.Submit(full_code) def submit(self, code: str, results: str='html', prompt: dict=None, **kwargs) -> dict: """ Submit any SAS code. Returns log and listing as dictionary with keys LOG and LST. :param code [str]: SAS statements to execute. :option results [str]: Result format. Options: HTML, TEXT. Default HTML. :option prompt [dict]: Create macro variables from prompted keys. 
""" RESET = """;*';*";*/;quit;run;""" prompt = prompt if prompt is not None else {} printto = kwargs.pop('undo', False) macro_declare = '' for key, value in prompt.items(): macro_declare += '%let {} = {};\n'.format(*self._prompt(key, value)) # Submit program self._asubmit(RESET + macro_declare + code + RESET, results) # Retrieve listing and log log = self._getlog() if results.lower() == 'html': # Make the following replacements in HTML listing: # 1. Swap \x0c for \n # 2. Change body class selector # 3. Increase font size listing = self._getfile(self._gethtmlfn(), decode=True) \ .replace(chr(12), chr(10)) \ .replace('<body class="c body">', '<body class="l body">') \ .replace('font-size: x-small;', 'font-size: normal;') else: listing = self._getlst() # Invalid syntax will put the interface in to an error state. Reset # the LanguageService to prevent further errors. # FIXME: In the future, may only want to reset on ERROR. However, this # operation seems pretty lightweight, so calling `_reset()` on all # submits is not a burden. self._reset() if printto: self._asubmit("\nproc printto;run;\n", 'text') log += self._getlog() self._sb._lastlog = log return {'LOG': log, 'LST': listing} def saslog(self) -> str: """ Return the full SAS log. :return [str]: """ return self._log def exist(self, table: str, libref: str=None) -> bool: """ Determine if a `libref.table` exists. :param table [str]: Table name :option libref [str]: Library name. :return [bool]: """ #tablepath = self._tablepath(table, libref=libref) #criteria = [None, None, tablepath] #schema = self.adodb.OpenSchema(self.SCHEMA_COLUMNS, criteria) #exists = not schema.BOF #schema.Close() #return exists code = 'data _null_; e = exist("' if len(libref): code += libref+"." code += "'"+table.strip()+"'n"+'"'+");\n" code += 'v = exist("' if len(libref): code += libref+"." code += "'"+table.strip()+"'n"+'"'+", 'VIEW');\n if e or v then e = 1;\n" code += "te='TABLE_EXISTS='; put te e;run;\n" ll = self.submit(code, "text") l2 = ll['LOG'].rpartition("TABLE_EXISTS= ") l2 = l2[2].partition("\n") exists = int(l2[0]) return bool(exists) def read_sasdata(self, table: str, libref: str=None, dsopts: dict=None) -> tuple: """ Read any SAS dataset and return as a tuple of header, rows :param table [str]: Table name :option libref [str]: Library name. :option dsopts [dict]: Dataset options. :return [tuple]: """ TARGET = '_saspy_sd2df' EXPORT = """ data {tgt}; set {tbl} {dopt}; run; """ dsopts = self._sb._dsopts(dsopts) if dsopts is not None else '' tablepath = self._tablepath(table, libref=libref) recordset = dynamic.Dispatch('ADODB.RecordSet') # Create an intermediate dataset with `dsopts` applied export = EXPORT.format(tgt=TARGET, tbl=tablepath, dopt=dsopts) self.workspace.LanguageService.Submit(export) meta = self._schema(TARGET) # Connect RecordSet object to ADODB connection with params: # Cursor: Forward Only # Lock: Read Only # Command: Table Direct recordset.Open(TARGET, self.adodb, self.CURSOR_FORWARD, self.LOCK_READONLY, self.CMD_TABLE_DIRECT) recordset.MoveFirst() header = [x.Name for x in recordset.Fields] rows = [] while not recordset.EOF: rows.append([meta[x.Name]['CONVERT'](x.Value) for x in recordset.Fields]) recordset.MoveNext() recordset.Close() return (header, rows, meta) def read_csv(self, filepath: str, table: str, libref: str=None, nosub: bool=False, opts: dict=None): """ Submit an import job to the SAS workspace. :param filepath [str]: File URI. :param table [str]: Table name. :option libref [str]: Library name. 
:option nosob [bool]: Return the SAS code instead of executing it. :option opts [dict]: SAS PROC IMPORT options. """ opts = opts if opts is not None else {} filepath = 'url ' + filepath if filepath.lower().startswith('http') else filepath tablepath = self._tablepath(table, libref=libref) proc_code = """ filename csv_file "{}"; proc import datafile=csv_file out={} dbms=csv replace; {} run; """.format(filepath.replace('"', '""'), tablepath, self._sb._impopts(opts)) if nosub is True: return proc_code else: return self.submit(proc_code, 'text') def write_csv(self, filepath: str, table: str, libref: str=None, nosub: bool=True, dsopts: dict=None, opts: dict=None): """ Submit an export job to the SAS workspace. :param filepath [str]: File URI. :param table [str]: Table name. :option libref [str]: Library name. :option nosob [bool]: Return the SAS code instead of executing it. :option opts [dict]: SAS PROC IMPORT options. :option dsopts [dict]: SAS dataset options. """ opts = opts if opts is not None else {} dsopts = dsopts if dsopts is not None else {} tablepath = self._tablepath(table, libref=libref) proc_code = """ filename csv_file "{}"; proc export data={} {} outfile=csv_file dbms=csv replace; {} run; """.format(filepath.replace('"', '""'), tablepath, self._sb._dsopts(dsopts), self._sb._expopts(opts)) if nosub is True: return proc_code else: return self.submit(proc_code, 'text')['LOG'] def dataframe2sasdata(self, df: '<Pandas Data Frame object>', table: str ='a', libref: str ="", keep_outer_quotes: bool=False, embedded_newlines: bool=True, LF: str = '\x01', CR: str = '\x02', colsep: str = '\x03', colrep: str = ' ', datetimes: dict={}, outfmts: dict={}, labels: dict={}, outdsopts: dict={}, encode_errors = None, char_lengths = None, **kwargs): """ Create a SAS dataset from a pandas data frame. :param df [pd.DataFrame]: Pandas data frame containing data to write. :param table [str]: Table name. :option libref [str]: Library name. Default work. None of these options are used by this access method; they are needed for other access methods keep_outer_quotes - for character columns, have SAS keep any outer quotes instead of stripping them off. embedded_newlines - if any char columns have embedded CR or LF, set this to True to get them iported into the SAS data set LF - if embedded_newlines=True, the chacter to use for LF when transferring the data; defaults to '\x01' CR - if embedded_newlines=True, the chacter to use for CR when transferring the data; defaults to '\x02' colsep - the column seperator character used for streaming the delimmited data to SAS defaults to '\x03' colrep - the char to convert to for any embedded colsep, LF, CR chars in the data; defaults to ' ' datetimes - not implemented yet in this access method outfmts - not implemented yet in this access method labels - not implemented yet in this access method outdsopts - not implemented yet in this access method encode_errors - not implemented yet in this access method char_lengths - not implemented yet in this access method """ DATETIME_NAME = 'DATETIME26.6' DATETIME_FMT = '%Y-%m-%dT%H:%M:%S.%f' if self.sascfg.verbose: if keep_outer_quotes != False: print("'keep_outer_quotes=' is not used with this access method. option ignored.") if embedded_newlines != True: print("'embedded_newlines=' is not used with this access method. option ignored.") if LF != '\x01' or CR != '\x02' or colsep != '\x03': print("'LF=, CR= and colsep=' are not used with this access method. 
option(s) ignored.") if datetimes != {}: print("'datetimes=' is not used with this access method. option ignored.") if outfmts != {}: print("'outfmts=' is not used with this access method. option ignored.") if labels != {}: print("'labels=' is not used with this access method. option ignored.") if outdsopts != {}: print("'outdsopts=' is not used with this access method. option ignored.") if encode_errors: print("'encode_errors=' is not used with this access method. option ignored.") if char_lengths: print("'char_lengths=' is not used with this access method. option ignored.") tablepath = self._tablepath(table, libref=libref) if type(df.index) != pd.RangeIndex: warnings.warn("Note that Indexes are not transferred over as columns. Only actual coulmns are transferred") columns = [] formats = {} for i, name in enumerate(df.columns): if df[name].dtypes.kind in self.PD_NUM_TYPE: # Numeric type definition = "'{}'n num".format(name) formats[name] = lambda x: str(x) if pd.isnull(x) is False else 'NULL' elif df[name].dtypes.kind in self.PD_STR_TYPE: # Character type # NOTE: If a character string contains a single `'`, replace # it with `''`. This is the SAS equivalent to `\'`. length = df[name].map(len).max() definition = "'{}'n char({})".format(name, length) formats[name] = lambda x: "'{}'".format(x.replace("'", "''")) if pd.isnull(x) is False else 'NULL' elif df[name].dtypes.kind in self.PD_DT_TYPE: # Datetime type definition = "'{}'n num informat={} format={}".format(name, DATETIME_NAME, DATETIME_NAME) formats[name] = lambda x: "'{:{}}'DT".format(x, DATETIME_FMT) if pd.isnull(x) is False else 'NULL' else: # Default to character type # NOTE: If a character string contains a single `'`, replace # it with `''`. This is the SAS equivalent to `\'`. length = df[name].map(str).map(len).max() definition = "'{}'n char({})".format(name, length) formats[name] = lambda x: "'{}'".format(x.replace("'", "''")) if pd.isnull(x) is False else 'NULL' columns.append(definition) sql_values = [] for index, row in df.iterrows(): vals = [] for i, col in enumerate(row): func = formats[df.columns[i]] vals.append(func(col)) sql_values.append('values({})'.format(', '.join(vals))) sql_create = 'create table {} ({});'.format(tablepath, ', '.join(columns)) sql_insert = 'insert into {} {};'.format(tablepath, '\n'.join(sql_values)) self.adodb.Execute(sql_create) self.adodb.Execute(sql_insert) return None def sasdata2dataframe(self, table: str, libref: str=None, dsopts: dict=None, method: str='', **kwargs) -> 'pd.DataFrame': """ Create a pandas data frame from a SAS dataset. :param table [str]: Table name. :option libref [str]: Library name. :option dsopts [dict]: Dataset options. :option method [str]: Download method. :option tempkeep [bool]: Download the csv file if using the csv method. :option tempfile [str]: File path for the saved output file. :return [pd.DataFrame]: """ # strip off unused by this access method options from kwargs # so they can't be passes to panda later rowsep = kwargs.pop('rowsep', ' ') colsep = kwargs.pop('colsep', ' ') rowrep = kwargs.pop('rowrep', ' ') colrep = kwargs.pop('colrep', ' ') if method.upper() == 'DISK': print("This access method doesn't support the DISK method. Try CSV or MEMORY") return None if method.upper() == 'CSV': df = self.sasdata2dataframeCSV(table, libref, dsopts=dsopts, **kwargs) else: my_fmts = kwargs.pop('my_fmts', False) k_dts = kwargs.pop('dtype', None) if self.sascfg.verbose: if my_fmts != False: print("'my_fmts=' is not supported in this access method. 
option ignored.") if k_dts is not None: print("'dtype=' is only used with the CSV version of this method. option ignored.") header, rows, meta = self.read_sasdata(table, libref, dsopts=dsopts) df = pd.DataFrame.from_records(rows, columns=header, **kwargs) for col in meta.keys(): if meta[col]['FORMAT_NAME'] in self._sb.sas_date_fmts + self._sb.sas_datetime_fmts: df[col] = pd.to_datetime(df[col], errors='coerce') elif meta[col]['DATA_TYPE'] == 5: df[col] = pd.to_numeric(df[col], errors='coerce') return df def sasdata2dataframeCSV(self, table: str, libref: str ='', dsopts: dict = None, tempfile: str=None, tempkeep: bool=False, **kwargs) -> 'pd.DataFrame': """ Create a pandas data frame from a SAS dataset. :param table [str]: Table name. :option libref [str]: Library name. :option dsopts [dict]: Dataset options. :option opts [dict]: dictionary containing any of the following Proc Export options(delimiter, putnames) :option tempkeep [bool]: Download the csv file if using the csv method. :option tempfile [str]: File path for the saved output file. :return [pd.DataFrame]: """ FORMAT_STRING = '{column} {format}{length}.{precision}' EXPORT = """ data _saspy_sd2df; format {fmt}; set {tbl}; run; proc export data=_saspy_sd2df {dopt} outfile="{out}" dbms=csv replace; {exopts} run; """ k_dts = kwargs.get('dtype', None) my_fmts = kwargs.pop('my_fmts', False) if self.sascfg.verbose: if my_fmts != False: print("'my_fmts=' is not supported in this access method. option ignored.") sas_csv = '{}saspy_sd2df.csv'.format(self._sb.workpath) dopts = self._sb._dsopts(dsopts) if dsopts is not None else '' tablepath = self._tablepath(table, libref=libref) expopts = self._sb._expopts(kwargs.pop('opts', {})) # Convert any date format to one pandas can understand (ISO-8601). # Save a reference of the column name in a list so pandas can parse # the column during construction. datecols = [] fmtlist = [] meta = self._schema(table, libref) for name, col in meta.items(): if col['FORMAT_NAME'] in self._sb.sas_date_fmts: datecols.append(name) col_format = self.FMT_DEFAULT_DATE_NAME col_length = self.FMT_DEFAULT_DATE_LENGTH col_precis = self.FMT_DEFAULT_DATE_PRECISION elif col['FORMAT_NAME'] in self._sb.sas_datetime_fmts: datecols.append(name) col_format = self.FMT_DEFAULT_DATETIME_NAME col_length = self.FMT_DEFAULT_DATETIME_LENGTH col_precis = self.FMT_DEFAULT_DATETIME_PRECISION # elif FIXME TIME FORMATS else: col_format = col['FORMAT_NAME'] col_length = col['FORMAT_LENGTH'] col_precis = col['FORMAT_DECIMAL'] if col['FORMAT_NAME']: full_format = FORMAT_STRING.format( column=col['COLUMN_NAME'], format=col_format, length=col_length, precision=col_precis) fmtlist.append(full_format) export = EXPORT.format(fmt=' '.join(fmtlist), tbl=tablepath, dopt=dopts, exopts=expopts, out=sas_csv) # Use `LanguageService.Submit` instead of `submit` for a slight # performance bump. We don't need the log or listing here so skip # the wrapper function. 
self.workspace.LanguageService.Submit(export) outstring = self._getfile(sas_csv, decode=True) # Write temp file if requested by user if kwargs.get('tempkeep') is True and kwargs.get('tempfile') is not None: with open(kwargs['tempfile'], 'w') as f: f.write(outstring) df = pd.read_csv(io.StringIO(outstring), parse_dates=datecols, **kwargs) if k_dts is None: # don't override these if user provided their own dtypes for col in meta.keys(): if meta[col]['FORMAT_NAME'] in self._sb.sas_date_fmts + self._sb.sas_datetime_fmts: df[col] = pd.to_datetime(df[col], errors='coerce') return df def upload(self, local: str, remote: str, overwrite: bool=True, permission: str='', **kwargs): """ Upload a file to the SAS server. :param local [str]: Local filename. :param remote [str]: Local filename. :option overwrite [bool]: Overwrite the file if it exists. :option permission [str]: See SAS filename statement documentation. """ perms = "PERMISSION='{}'".format(permission) if permission else '' valid = self._sb.file_info(remote, quiet=True) if valid == {}: # Parameter `remote` references a directory. Default to using the # filename in `local` path. remote_file = remote + self._sb.hostsep + os.path.basename(local) elif valid is not None and overwrite is False: # Parameter `remote` references a file that exists but we cannot # overwrite it. # TODO: Raise exception here instead of returning dict return {'Success': False, 'LOG': 'File {} exists and overwrite was set to False. Upload was stopped.'.format(remote)} else: remote_file = remote with open(local, 'rb') as f: fobj = self.workspace.FileService.AssignFileref('infile', 'DISK', remote_file, perms, '') stream = fobj[0].OpenBinaryStream(self.STREAM_WRITE) stream.Write(f.read()) stream.Close() self.workspace.FileService.DeassignFileref(fobj[0].FilerefName) return {'Success': True, 'LOG': 'File successfully written using FileService.'} def download(self, local: str, remote: str, overwrite: bool=True, **kwargs): """ Download a file from the SAS server. :param local [str]: Local filename. :param remote [str]: Local filename. :option overwrite [bool]: Overwrite the file if it exists. """ valid = self._sb.file_info(remote, quiet=True) if valid is None: # Parameter `remote` references an invalid file path. # TODO: Raise exception here instead of returning dict return {'Success': False, 'LOG': 'File {} does not exist.'.format(remote)} elif valid == {}: # Parameter `remote` references a directory. # TODO: Raise exception here instead of returning dict return {'Success': False, 'LOG': 'File {} is a directory.'.format(remote)} if os.path.isdir(local) is True: # Parameter `local` references a directory. Default to using the # filename in `remote` path. local_file = os.path.join(local, remote.rpartition(self._sb.hostsep)[2]) else: local_file = local with open(local_file, 'wb') as f: f.write(self._getfile(remote)) return {'Success': True, 'LOG': 'File successfully read using FileService.'}
apache-2.0
6,462,494,287,132,911,000
38.135933
154
0.575229
false
4.042669
false
false
false
CindyvdVries/News_Crawler
Sat2/sat/pipelines.py
1
2575
# -*- coding: utf-8 -*- # Define your item pipelines here # # Don't forget to add your pipeline to the ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html from scrapy.utils.conf import get_config from scrapy.exceptions import DropItem import pika.credentials import pika import json import logging class JsonWriterPipeline(object): def __init__(self): self.file = open('items.jl', 'wb') def process_item(self, item, spider): line = json.dumps(dict(item)) + "\n" self.file.write(line) return item class DuplicatePipeline(object): def __init__(self): self.ids_seen = set() def process_item(self, item, spider): if item['id'] in self.ids_seen: raise DropItem("Duplicate item found: %s" % item) else: self.ids_seen.add(item['id']) return item class RabbitMQPipeline(object): def __init__(self): self.logger = logging.getLogger(self.__class__.__name__) self.logger.info("Constructing rabbitmq logger") username = get_config().get('rabbitmq', 'username') password = get_config().get('rabbitmq', 'password') credentials = pika.credentials.PlainCredentials( username=username, password=password ) host = get_config().get('rabbitmq', 'host') parameters = pika.ConnectionParameters( host=host, port=5672, virtual_host='/', credentials=credentials ) connection = pika.BlockingConnection( parameters=parameters ) channel = connection.channel() # we're publishing to two channels, the download request # so that a download queue can pick it up channel.queue_declare('crisis_download_requests') # and a fanout exchange to notify listeners that we've crawled something channel.exchange_declare( 'crisis_crawl', type='fanout' ) self.channel = channel def process_item(self, item, spider): self.logger.info('sending message') serialized = json.dumps(dict(item)) # send to the work queue self.channel.basic_publish( exchange='', routing_key='crisis_download_requests', body='%s' % (serialized,) ) # and to the channel self.channel.basic_publish( exchange='crisis_crawl', routing_key='', body='%s' % (serialized,) ) return item
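# ---------------------------------------------------------------------------
# Editor's hedged usage sketch (not part of the original pipelines.py): the
# header comment above notes that pipelines only run once they are listed in
# ITEM_PIPELINES. Assuming the project package is "sat" (taken from the file
# path Sat2/sat/pipelines.py) and with illustrative priority numbers, the
# matching settings.py fragment could look like this:
ITEM_PIPELINES = {
    'sat.pipelines.DuplicatePipeline': 100,   # drop items whose id was already seen
    'sat.pipelines.RabbitMQPipeline': 200,    # then publish surviving items to RabbitMQ
}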
gpl-3.0
1,274,635,759,553,769,500
30.024096
80
0.593398
false
4.17342
false
false
false
PieterMostert/Lipgloss
model/serializers/oxideserializer.py
1
2233
import json try: from lipgloss.core_data import Oxide except: from ..lipgloss.core_data import Oxide class OxideSerializer(object): """A class to support serializing/deserializing of a single oxide and dictionaries of oxides. Needs improvement""" @staticmethod def get_serializable_oxide(oxide): """A serializable oxide is one that can be serialized to JSON using the python json encoder.""" serializable_oxide = {} serializable_oxide["molar_mass"] = oxide.molar_mass serializable_oxide["flux"] = oxide.flux serializable_oxide["min_threshhold"] = oxide.min_threshhold return serializable_oxide @staticmethod def serialize(oxide): """Serialize a single Oxide object to JSON.""" return json.dumps(OxideSerializer.get_serializable_oxide(oxide), indent=4) @staticmethod def serialize_dict(oxide_dict): """Convert a dictionary of Oxide objects to serializable dictionary. Use json.dump(output, file) to save output to file""" serializable_dict = {}; for index, oxide in oxide_dict.items(): serializable_dict[index] = OxideSerializer.get_serializable_oxide(oxide) return serializable_dict @staticmethod def get_oxide(serialized_oxide): """Convert a serialized oxide (a dict) returned by the JSON decoder into a Oxide object.""" oxide = Oxide(serialized_oxide["molar_mass"], serialized_oxide["flux"], serialized_oxide["min_threshhold"]) return oxide @staticmethod def deserialize(json_str): """Deserialize a single oxide from JSON to a Oxide object.""" serialized_oxide_dict = json.loads(json_str) return OxideSerializer.get_oxide(serialized_oxide_dict) @staticmethod def deserialize_dict(serialized_oxide_dict): """Deserialize a number of oxides from JSON to a dict containing Oxide objects, indexed by Oxide name.""" oxide_dict = {} for i, serialized_oxide in serialized_oxide_dict.items(): oxide_dict[i] = OxideSerializer.get_oxide(serialized_oxide) return oxide_dict
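# Editor's hedged usage sketch (not part of the original module): a minimal
# round trip through the serializer, mirroring the calls the class itself makes
# (Oxide is constructed positionally from molar_mass, flux, min_threshhold, just
# as get_oxide() does). The numeric values are illustrative placeholders.
if __name__ == '__main__':
    oxide = Oxide(60.08, 0, 0)                         # molar_mass, flux, min_threshhold
    as_json = OxideSerializer.serialize(oxide)         # Oxide -> JSON string
    restored = OxideSerializer.deserialize(as_json)    # JSON string -> Oxide
    assert restored.molar_mass == oxide.molar_mass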
gpl-3.0
1,879,315,643,800,368,000
39.6
119
0.653829
false
3.897033
false
false
false
kulbirsaini/pdfrw-fork
examples/rl1/booklet.py
1
1588
#!/usr/bin/env python ''' usage: booklet.py my.pdf Uses Form XObjects and reportlab to create booklet.my.pdf. Demonstrates use of pdfrw with reportlab. ''' import sys import os from reportlab.pdfgen.canvas import Canvas import find_pdfrw from pdfrw import PdfReader from pdfrw.buildxobj import pagexobj from pdfrw.toreportlab import makerl def read_and_double(inpfn): pages = PdfReader(inpfn, decompress=False).pages pages = [pagexobj(x) for x in pages] if len(pages) & 1: pages.append(pages[0]) # Sentinel -- get same size for back as front xobjs = [] while len(pages) > 2: xobjs.append((pages.pop(), pages.pop(0))) xobjs.append((pages.pop(0), pages.pop())) xobjs += [(x,) for x in pages] return xobjs def make_pdf(outfn, xobjpairs): canvas = Canvas(outfn) for xobjlist in xobjpairs: x = y = 0 for xobj in xobjlist: x += xobj.BBox[2] y = max(y, xobj.BBox[3]) canvas.setPageSize((x,y)) # Handle blank back page if len(xobjlist) > 1 and xobjlist[0] == xobjlist[-1]: xobjlist = xobjlist[:1] x = xobjlist[0].BBox[2] else: x = 0 y = 0 for xobj in xobjlist: canvas.saveState() canvas.translate(x, y) canvas.doForm(makerl(canvas, xobj)) canvas.restoreState() x += xobj.BBox[2] canvas.showPage() canvas.save() inpfn, = sys.argv[1:] outfn = 'booklet.' + os.path.basename(inpfn) make_pdf(outfn, read_and_double(inpfn))
mit
5,298,639,258,305,954,000
22.014493
77
0.595718
false
3.113725
false
false
false
varunarya10/oslo.utils
oslo_utils/tests/test_reflection.py
1
8493
# -*- coding: utf-8 -*- # Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslotest import base as test_base import six import testtools from oslo_utils import reflection if six.PY3: RUNTIME_ERROR_CLASSES = ['RuntimeError', 'Exception', 'BaseException', 'object'] else: RUNTIME_ERROR_CLASSES = ['RuntimeError', 'StandardError', 'Exception', 'BaseException', 'object'] def dummy_decorator(f): @six.wraps(f) def wrapper(*args, **kwargs): return f(*args, **kwargs) return wrapper def mere_function(a, b): pass def function_with_defs(a, b, optional=None): pass def function_with_kwargs(a, b, **kwargs): pass class Class(object): def method(self, c, d): pass @staticmethod def static_method(e, f): pass @classmethod def class_method(cls, g, h): pass class CallableClass(object): def __call__(self, i, j): pass class ClassWithInit(object): def __init__(self, k, l): pass class CallbackEqualityTest(test_base.BaseTestCase): def test_different_simple_callbacks(self): def a(): pass def b(): pass self.assertFalse(reflection.is_same_callback(a, b)) def test_static_instance_callbacks(self): class A(object): @staticmethod def b(a, b, c): pass a = A() b = A() self.assertTrue(reflection.is_same_callback(a.b, b.b)) def test_different_instance_callbacks(self): class A(object): def b(self): pass def __eq__(self, other): return True b = A() c = A() self.assertFalse(reflection.is_same_callback(b.b, c.b)) self.assertTrue(reflection.is_same_callback(b.b, c.b, strict=False)) class GetCallableNameTest(test_base.BaseTestCase): def test_mere_function(self): name = reflection.get_callable_name(mere_function) self.assertEqual('.'.join((__name__, 'mere_function')), name) def test_method(self): name = reflection.get_callable_name(Class.method) self.assertEqual('.'.join((__name__, 'Class', 'method')), name) def test_instance_method(self): name = reflection.get_callable_name(Class().method) self.assertEqual('.'.join((__name__, 'Class', 'method')), name) def test_static_method(self): name = reflection.get_callable_name(Class.static_method) if six.PY3: self.assertEqual('.'.join((__name__, 'Class', 'static_method')), name) else: # NOTE(imelnikov): static method are just functions, class name # is not recorded anywhere in them. 
self.assertEqual('.'.join((__name__, 'static_method')), name) def test_class_method(self): name = reflection.get_callable_name(Class.class_method) self.assertEqual('.'.join((__name__, 'Class', 'class_method')), name) def test_constructor(self): name = reflection.get_callable_name(Class) self.assertEqual('.'.join((__name__, 'Class')), name) def test_callable_class(self): name = reflection.get_callable_name(CallableClass()) self.assertEqual('.'.join((__name__, 'CallableClass')), name) def test_callable_class_call(self): name = reflection.get_callable_name(CallableClass().__call__) self.assertEqual('.'.join((__name__, 'CallableClass', '__call__')), name) # These extended/special case tests only work on python 3, due to python 2 # being broken/incorrect with regard to these special cases... @testtools.skipIf(not six.PY3, 'python 3.x is not currently available') class GetCallableNameTestExtended(test_base.BaseTestCase): # Tests items in http://legacy.python.org/dev/peps/pep-3155/ class InnerCallableClass(object): def __call__(self): pass def test_inner_callable_class(self): obj = self.InnerCallableClass() name = reflection.get_callable_name(obj.__call__) expected_name = '.'.join((__name__, 'GetCallableNameTestExtended', 'InnerCallableClass', '__call__')) self.assertEqual(expected_name, name) def test_inner_callable_function(self): def a(): def b(): pass return b name = reflection.get_callable_name(a()) expected_name = '.'.join((__name__, 'GetCallableNameTestExtended', 'test_inner_callable_function', '<locals>', 'a', '<locals>', 'b')) self.assertEqual(expected_name, name) def test_inner_class(self): obj = self.InnerCallableClass() name = reflection.get_callable_name(obj) expected_name = '.'.join((__name__, 'GetCallableNameTestExtended', 'InnerCallableClass')) self.assertEqual(expected_name, name) class GetCallableArgsTest(test_base.BaseTestCase): def test_mere_function(self): result = reflection.get_callable_args(mere_function) self.assertEqual(['a', 'b'], result) def test_function_with_defaults(self): result = reflection.get_callable_args(function_with_defs) self.assertEqual(['a', 'b', 'optional'], result) def test_required_only(self): result = reflection.get_callable_args(function_with_defs, required_only=True) self.assertEqual(['a', 'b'], result) def test_method(self): result = reflection.get_callable_args(Class.method) self.assertEqual(['self', 'c', 'd'], result) def test_instance_method(self): result = reflection.get_callable_args(Class().method) self.assertEqual(['c', 'd'], result) def test_class_method(self): result = reflection.get_callable_args(Class.class_method) self.assertEqual(['g', 'h'], result) def test_class_constructor(self): result = reflection.get_callable_args(ClassWithInit) self.assertEqual(['k', 'l'], result) def test_class_with_call(self): result = reflection.get_callable_args(CallableClass()) self.assertEqual(['i', 'j'], result) def test_decorators_work(self): @dummy_decorator def special_fun(x, y): pass result = reflection.get_callable_args(special_fun) self.assertEqual(['x', 'y'], result) class AcceptsKwargsTest(test_base.BaseTestCase): def test_no_kwargs(self): self.assertEqual(False, reflection.accepts_kwargs(mere_function)) def test_with_kwargs(self): self.assertEqual(True, reflection.accepts_kwargs(function_with_kwargs)) class GetClassNameTest(test_base.BaseTestCase): def test_std_exception(self): name = reflection.get_class_name(RuntimeError) self.assertEqual('RuntimeError', name) def test_class(self): name = reflection.get_class_name(Class) 
self.assertEqual('.'.join((__name__, 'Class')), name) def test_instance(self): name = reflection.get_class_name(Class()) self.assertEqual('.'.join((__name__, 'Class')), name) def test_int(self): name = reflection.get_class_name(42) self.assertEqual('int', name) class GetAllClassNamesTest(test_base.BaseTestCase): def test_std_class(self): names = list(reflection.get_all_class_names(RuntimeError)) self.assertEqual(RUNTIME_ERROR_CLASSES, names) def test_std_class_up_to(self): names = list(reflection.get_all_class_names(RuntimeError, up_to=Exception)) self.assertEqual(RUNTIME_ERROR_CLASSES[:-2], names)
apache-2.0
-899,403,400,105,379,200
29.44086
79
0.601554
false
3.991071
true
false
false
yukaritan/qtbot3
qtbot3_service/plugins/achievements.py
1
2926
from util import irc from util.garbage import rainbow from util.handler_utils import prehook, get_value, set_value, get_target, cmdhook, fetch_all from qtbot3_common.types.message import Message disconnection_ladder = { 1: "Connection reset by peer", 5: "Connection reset by beer", 10: "Connection reset by queer", 25: "Connection reset by Cher", 50: "Connection reset by ...deer?", 100: "Connection reset by ... enough already. I don't know.. Gears?", 250: "Connection reset 250 times. Seriously?", 500: "You've lost your connection 500 times. Do you even internet?", 1000: "One thousand disconnects. A thousand. One, three zeros. Holy shit." } def get_achievement(message: Message, match: dict, nick: str, count: int) -> str: print("Achievement progress for {user}: {count}".format(count=count, **match)) if count in disconnection_ladder: print("Dealt achievement \"" + disconnection_ladder[count] + "\" to", match['nick']) if not 'target' in match or match['target'] is None: return target = get_target(message, nick) msg = "{nick} has unlocked an achievement: {desc}" msg = rainbow(msg.format(nick=match['nick'], desc=disconnection_ladder[count])) return irc.chat_message(target, msg) return None @prehook(':(?P<nick>[^\s]+)' '!(?P<user>[^\s]+)' ' QUIT' '( :(?P<message>.*))?') @prehook(':(?P<nick>[^\s]+)' '!(?P<user>[^\s]+)' ' PART' ' (?P<target>[^\s]+)' '( :(?P<message>.*))?') def achievement_prehook_part(message: Message, match: dict, nick: str): try: key = 'chiev_partcount_' + match['user'] print("old value:", get_value(key)) count = (get_value(key) or 0) + 1 print("new value:", count) set_value(key, count) return get_achievement(message, match, nick, count) except Exception as ex: print("achievement prehook exception:", ex) @prehook(':(?P<nick>[^\s]+)' '!(?P<user>[^\s]+)' ' JOIN' ' (?P<target>[^\s]+)') def achievement_prehook_join(message: Message, match: dict, nick: str): try: key = 'chiev_partcount_' + match['user'] count = get_value(key) or 0 return get_achievement(message, match, nick, count) except Exception as ex: print("achievement prehook exception:", ex) @cmdhook('aimbot (?P<nick>[^\s]+)') def achievement_cheat_codes(message: Message, match: dict, nick: str) -> str: fetched = fetch_all(keyfilter='user_', valuefilter=match['nick']) target = get_target(message, nick) output = [] for key in fetched: user = key.split('_', 1)[1] key = 'chiev_partcount_' + user count = get_value(key) or 0 msg = rainbow("%s has disconnected %d times" % (user, count)) output.append(irc.chat_message(target, msg)) return output
gpl-3.0
-309,478,902,671,180,200
32.25
92
0.598086
false
3.42623
false
false
false
CobwebOrg/cobweb-django
core/migrations/0014_auto_20181026_1019.py
1
1216
# Generated by Django 2.1.2 on 2018-10-26 17:19 from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ('core', '0013_user_terms_accepted'), ] operations = [ migrations.AddField( model_name='organization', name='created_at', field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( model_name='organization', name='updated_at', field=models.DateTimeField(auto_now=True), ), migrations.AddField( model_name='user', name='created_at', field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( model_name='user', name='updated_at', field=models.DateTimeField(auto_now=True), ), migrations.AlterField( model_name='user', name='terms_accepted', field=models.BooleanField(default=False), ), ]
mit
243,347,793,550,178,400
28.658537
93
0.570724
false
4.405797
false
false
false
lmorchard/django-teamwork
teamwork/templatetags/teamwork_tags.py
1
3741
""" ``django-teamwork`` template tags, loaded like so: {% load teamwork_tags %} """ from __future__ import unicode_literals from django import template from django.contrib.auth import get_user_model from django.contrib.auth.models import Group, AnonymousUser from django.template import get_library from django.template import InvalidTemplateLibrary from django.template.defaulttags import LoadNode from ..shortcuts import build_policy_admin_links register = template.Library() class ObjectPermissionsNode(template.Node): def __init__(self, user_var, obj, context_var): self.user_var = template.Variable(user_var) self.obj = template.Variable(obj) self.context_var = context_var def render(self, context): user_var = self.user_var.resolve(context) if isinstance(user_var, get_user_model()): self.user = user_var elif isinstance(user_var, AnonymousUser): self.user = user_var else: raise Exception("User instance required (got %s)" % user_var.__class__) obj = self.obj.resolve(context) perms = self.user.get_all_permissions(obj) context[self.context_var] = perms return '' @register.tag def get_all_obj_permissions(parser, token): """ Get all of a user's permissions granted by an object. For example: {% get_all_obj_permissions user for obj as "context_var" %} """ bits = token.split_contents() format = '{% get_all_obj_permissions user for obj as "context_var" %}' if len(bits) != 6 or bits[2] != 'for' or bits[4] != 'as': raise template.TemplateSyntaxError("get_all_permissions tag should be in " "format: %s" % format) _, user_var, _, obj, _, context_var = bits if context_var[0] != context_var[-1] or context_var[0] not in ('"', "'"): raise template.TemplateSyntaxError( "get_all_obj_permissions tag's context_var argument should be " "quoted") context_var = context_var[1:-1] return ObjectPermissionsNode(user_var, obj, context_var) class PolicyAdminLinksNode(template.Node): def __init__(self, user_var, obj, context_var): self.user_var = template.Variable(user_var) self.obj = template.Variable(obj) self.context_var = context_var def render(self, context): user_var = self.user_var.resolve(context) if isinstance(user_var, get_user_model()): self.user = user_var elif isinstance(user_var, AnonymousUser): self.user = user_var else: raise Exception("User instance required (got %s)" % user_var.__class__) obj = self.obj.resolve(context) links = build_policy_admin_links(self.user, obj) context[self.context_var] = links return '' @register.tag def get_policy_admin_links(parser, token): """ Get a set of links to admin pages to manage policy for an object by a user {% policy_admin_links user for obj as "context_var" %} """ bits = token.split_contents() format = '{% policy_admin_links user for obj as "context_var" %}' if len(bits) != 6 or bits[2] != 'for' or bits[4] != 'as': raise template.TemplateSyntaxError("get_all_permissions tag should be in " "format: %s" % format) _, user_var, _, obj, _, context_var = bits if context_var[0] != context_var[-1] or context_var[0] not in ('"', "'"): raise template.TemplateSyntaxError( "policy_admin_links tag's context_var argument should be " "quoted") context_var = context_var[1:-1] return PolicyAdminLinksNode(user_var, obj, context_var)
mpl-2.0
7,748,176,117,772,263,000
31.25
82
0.624967
false
3.825153
false
false
false
summychou/TBTracker
src/TBTracker_Gui/TBTracker_Gui_Button.py
1
4941
# -*- coding: utf-8 -*- from PyQt5.QtCore import QCoreApplication from PyQt5.QtGui import QIcon from PyQt5.QtWidgets import QPushButton ''' @author : Zhou Jian @email : [email protected] @version : V1.1 @date : 2018.04.22 ''' class BaseButton(QPushButton): ''' 基类按钮 ''' def __init__(self, name=""): super(BaseButton, self).__init__(name) class SearchButton(BaseButton): ''' 搜素按钮,继承自基类按钮 ''' def __init__(self): super(SearchButton, self).__init__(name="商品搜索") self.function_init() # 功能绑定 - def function_init(self): pass class AddButton(BaseButton): ''' 添加标签按钮,继承自基类按钮 ''' def __init__(self): super(AddButton, self).__init__(name="添加标签") self.function_init() # 功能绑定 - def function_init(self): pass class AttachButton(BaseButton): ''' 标注标签按钮,继承自基类按钮 ''' def __init__(self): super(AttachButton, self).__init__(name="标注标签") self.function_init() # 功能绑定 - def function_init(self): pass class ImportButton(BaseButton): ''' 导入数据按钮,继承自基类按钮 ''' def __init__(self): super(ImportButton, self).__init__(name="导入数据") self.function_init() # 功能绑定 - def function_init(self): pass class ExportButton(BaseButton): ''' 导出数据按钮,继承自基类按钮 ''' def __init__(self): super(ExportButton, self).__init__(name="导出数据") self.function_init() # 功能绑定 - def function_init(self): pass class InsertButton(BaseButton): ''' 添加数据按钮,继承自基类按钮 ''' def __init__(self): super(InsertButton, self).__init__(name="添加数据") self.function_init() # 功能绑定 - def function_init(self): pass class DeleteButton(BaseButton): ''' 删除数据按钮,继承自基类按钮 ''' def __init__(self): super(DeleteButton, self).__init__(name="删除数据") self.function_init() # 功能绑定 - def function_init(self): pass class ConfirmButton(BaseButton): ''' 确定按钮,继承自基类按钮 ''' def __init__(self): super(ConfirmButton, self).__init__(name="确定") self.function_init() # 功能绑定 - def function_init(self): pass class CancelButton(BaseButton): ''' 取消按钮,继承自基类按钮 ''' def __init__(self): super(CancelButton, self).__init__(name="取消") self.function_init() # 功能绑定 - def function_init(self): pass class GlobalSelectButton(BaseButton): ''' 全局按钮,继承自基类按钮 ''' def __init__(self): super(GlobalSelectButton, self).__init__(name="全局选择") self.function_init() # 功能绑定 - def function_init(self): pass class AllSelectButton(BaseButton): ''' 全选按钮,继承自基类按钮 ''' def __init__(self): super(AllSelectButton, self).__init__(name="全部选择") self.function_init() # 功能绑定 - def function_init(self): pass class ChangeConfigButton(BaseButton): ''' 更改配置按钮,继承自基类按钮 ''' def __init__(self): super(ChangeConfigButton, self).__init__(name="更改配置") self.function_init() # 功能绑定 - def function_init(self): pass class ManualUpdateButton(BaseButton): ''' 手动更新按钮,继承自基类按钮 ''' def __init__(self): super(ManualUpdateButton, self).__init__(name="手动更新") self.function_init() # 功能绑定 - def function_init(self): pass class SelectCommodityButton(BaseButton): ''' 选择商品按钮,继承自基类按钮 ''' def __init__(self): super(SelectCommodityButton, self).__init__(name="选择商品") self.function_init() # 功能绑定 - def function_init(self): pass class MonthlyDataButton(BaseButton): ''' 月份数据按钮,继承自基类按钮 ''' def __init__(self): super(MonthlyDataButton, self).__init__(name="月份数据") self.function_init() # 功能绑定 - def function_init(self): pass class YearlyDataButton(BaseButton): ''' 年份数据按钮,继承自基类按钮 ''' def __init__(self): super(YearlyDataButton, self).__init__(name="年份数据") self.function_init() # 功能绑定 - def function_init(self): pass
mit
4,804,963,411,780,969,000
17.836283
64
0.547099
false
2.576877
false
false
false
AlexStarov/Shop
applications/discount/management/commands/processing_actions.py
1
6004
# -*- coding: utf-8 -*- from django.core.management.base import BaseCommand from applications.product.models import Category, Product from applications.discount.models import Action __author__ = 'Alex Starov' class Command(BaseCommand, ): def handle(self, *args, **options): try: action_category = Category.objects.get(url=u'акции', ) except Category.DoesNotExist: action_category = False """ Выключаем продукты из "АКЦИИ" срок действия акции которой уже подощёл к концу """ action_not_active = Action.objects.not_active() if action_not_active: print 'Action - NOT ACTIVE:', action_not_active for action in action_not_active: products_of_action = action.product_in_action.all() print 'All products:', products_of_action """ Если акция с авто окончанием, то заканчиваем еЁ. """ if action.auto_end: products_of_action = action.product_in_action.in_action() if len(products_of_action, ) > 0: print 'Product auto_end:', products_of_action for product in products_of_action: print 'Del product from Action: ', product """ Помечает товар как не учавствующий в акции """ if action_category: product.category.remove(action_category, ) product.in_action = False if action.auto_del_action_from_product: if action_category: product.action.remove(action, ) product.save() if action.auto_del: action.deleted = True action.save() action_active = Action.objects.active() if action_active: print 'Action - ACTIVE:', action_active for action in action_active: products_of_action = action.product_in_action.all() print 'All products:', products_of_action """ Если акция с автостартом, то мы еЁ стартуем. """ if action.auto_start: """ Включаем галочку 'Учавствует в акции' всем продуктам которые внесены в акцию исключая продукты 'отсутсвующие на складе' """ products_of_action = action.product_in_action.exclude(is_availability=4, ) if len(products_of_action, ) > 0: print 'Product auto_start:', products_of_action for product in products_of_action: """ Помечает товар как учавствующий в акции """ product.in_action = True """ Добавляем категорию 'Акция' в товар """ if action_category: product.category.add(action_category, ) product.save() """ Удаляем товары учавствующие в активной акции но при этом 'отсутсвующие на складе' """ products_remove_from_action = action.product_in_action.exclude(is_availability__lt=4, ) if len(products_remove_from_action, ) > 0: print 'Product auto_start remove:', products_remove_from_action for product in products_remove_from_action: """ Помечает товар как не учавствующий в акции """ product.in_action = False """ Удаляем категорию 'Акция' из товара """ if action_category: product.category.remove(action_category, ) product.save() """ Убираем галочку 'участвует в акции' всем продуктам у которых она почемуто установлена, но при этом отсутвует хоть какая то акция """ products = Product.objects.filter(in_action=True, action=None, ).update(in_action=False, ) print 'Товары удаленные из акции по причине вывода их из акции: ', products """ Убираем галочку 'участвует в акции' всем продуктам которые отсутсвуют на складе """ products = Product.objects.filter(in_action=True, is_availability=4, ).update(in_action=False, ) print 'Товары удаленные из акции по причине отсутсвия на складе: ', products """ Делаем активной акционную категорию, если есть хоть один акционный товар """ all_actions_products = action_category.products.all() if len(all_actions_products) != 0 and not action_category.is_active: action_category.is_active = True action_category.save() elif len(all_actions_products) == 0 and action_category.is_active: 
action_category.is_active = False action_category.save()
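# Editor's hedged usage note (not part of the original command module): Django
# discovers this Command class by its file name, so it is normally invoked as
# "python manage.py processing_actions", typically on a schedule such as cron.
# From other Python code or from tests it can be driven through call_command:
def run_processing_actions():
    # assumes a configured Django project with the discount app installed
    from django.core.management import call_command
    call_command('processing_actions')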
apache-2.0
-2,092,252,847,127,148,000
51.525253
109
0.527115
false
3.13253
false
false
false
d120/pyfeedback
src/feedback/migrations/0043_auto_20190618_2221.py
1
8403
# -*- coding: utf-8 -*- # Generated by Django 1.11.21 on 2019-06-18 22:21 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('feedback', '0042_auto_20180608_1423'), ] operations = [ migrations.CreateModel( name='FragebogenUE2016', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('fach', models.CharField(blank=True, choices=[('inf', 'Informatik'), ('math', 'Mathematik'), ('ce', 'Computational Engineering'), ('ist', 'Informationssystemtechnik'), ('etit', 'Elektrotechnik'), ('psyit', 'Psychologie in IT'), ('winf', 'Wirtschaftsinformatik'), ('sonst', 'etwas anderes')], max_length=5)), ('abschluss', models.CharField(blank=True, choices=[('bsc', 'Bachelor'), ('msc', 'Master'), ('dipl', 'Diplom'), ('lehr', 'Lehramt'), ('sonst', 'anderer Abschluss')], max_length=5)), ('semester', models.CharField(blank=True, choices=[('1', '1'), ('2', '2'), ('3', '3'), ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9', '9'), ('10', '>=10')], max_length=4)), ('geschlecht', models.CharField(blank=True, choices=[('w', 'weiblich'), ('m', 'männlich'), ('s', 'sonstiges')], max_length=1)), ('studienberechtigung', models.CharField(blank=True, choices=[('d', 'Deutschland'), ('o', 'anderes Land')], max_length=1)), ('ue_wie_oft_besucht', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_besuch_ueberschneidung', models.CharField(blank=True, choices=[('j', 'ja'), ('n', 'nein')], max_length=1)), ('ue_besuch_qualitaet', models.CharField(blank=True, choices=[('j', 'ja'), ('n', 'nein')], max_length=1)), ('ue_besuch_verhaeltnisse', models.CharField(blank=True, choices=[('j', 'ja'), ('n', 'nein')], max_length=1)), ('ue_besuch_privat', models.CharField(blank=True, choices=[('j', 'ja'), ('n', 'nein')], max_length=1)), ('ue_besuch_elearning', models.CharField(blank=True, choices=[('j', 'ja'), ('n', 'nein')], max_length=1)), ('ue_besuch_zufrueh', models.CharField(blank=True, choices=[('j', 'ja'), ('n', 'nein')], max_length=1)), ('ue_besuch_sonstiges', models.CharField(blank=True, choices=[('j', 'ja'), ('n', 'nein')], max_length=1)), ('ue_3_1', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_3_2', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_3_3', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_3_4', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_3_5', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_3_6', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_3_7', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_3_8', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_4_1', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_4_2', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_4_3', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_4_4', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_4_5', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_4_6', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_4_7', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_4_8', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_4_9', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_4_10', models.CharField(blank=True, max_length=1)), ('ue_4_11', models.CharField(blank=True, max_length=1)), ('kennziffer', 
models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_1', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_2', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_3', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_4', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_5', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_6', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_7', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_8', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_9', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_10', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_11', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_12', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_13', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_14', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_15', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_5_16', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_6_1', models.CharField(blank=True, choices=[('0', '0'), ('1', '0.5'), ('2', '1'), ('3', '2'), ('4', '3'), ('5', '4'), ('6', '5'), ('7', '>=5')], max_length=1)), ('ue_6_2', models.PositiveSmallIntegerField(blank=True, null=True)), ('ue_6_3', models.PositiveSmallIntegerField(blank=True, null=True)), ('veranstaltung', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='feedback.Veranstaltung')), ], options={ 'verbose_name': 'Übungsfragebogen 2016', 'verbose_name_plural': 'Übungfragebögen 2016', 'ordering': ['semester', 'veranstaltung'], }, ), migrations.AddField( model_name='ergebnis2016', name='ue_arbeitsbedingungen', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='ergebnis2016', name='ue_arbeitsbedingungen_count', field=models.PositiveIntegerField(default=0), ), migrations.AddField( model_name='ergebnis2016', name='ue_didaktik', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='ergebnis2016', name='ue_didaktik_count', field=models.PositiveIntegerField(default=0), ), migrations.AddField( model_name='ergebnis2016', name='ue_feedbackpreis', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='ergebnis2016', name='ue_feedbackpreis_count', field=models.PositiveIntegerField(default=0), ), migrations.AddField( model_name='ergebnis2016', name='ue_lernerfolg', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='ergebnis2016', name='ue_lernerfolg_count', field=models.PositiveIntegerField(default=0), ), migrations.AddField( model_name='ergebnis2016', name='ue_organisation', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='ergebnis2016', name='ue_organisation_count', field=models.PositiveIntegerField(default=0), ), migrations.AddField( model_name='ergebnis2016', name='ue_umgang', field=models.FloatField(blank=True, null=True), ), migrations.AddField( model_name='ergebnis2016', name='ue_umgang_count', field=models.PositiveIntegerField(default=0), ), ]
agpl-3.0
591,802,522,073,656,600
58.992857
324
0.57233
false
3.771441
false
false
false
mskala/birdie
birdieapp/utils/media.py
1
4300
# -*- coding: utf-8 -*- # Copyright (C) 2013-2014 Ivo Nunes/Vasco Nunes # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from PIL import Image, ImageDraw from gi.repository import GdkPixbuf from birdieapp.constants import BIRDIE_CACHE_PATH import StringIO import os def resize_and_crop(img, size, crop_type='middle'): """ Resize and crop an image to fit the specified size. """ # Get current and desired ratio for the images img_ratio = img.size[0] / float(img.size[1]) ratio = size[0] / float(size[1]) # The image is scaled/cropped vertically or horizontally depending on the # ratio if ratio > img_ratio: img = img.resize( (size[0], size[0] * img.size[1] / img.size[0]), Image.ANTIALIAS) # Crop in the top, middle or bottom if crop_type == 'top': box = (0, 0, img.size[0], size[1]) elif crop_type == 'middle': box = (0, (img.size[1] - size[1]) / 2, img.size[ 0], (img.size[1] + size[1]) / 2) elif crop_type == 'bottom': box = (0, img.size[1] - size[1], img.size[0], img.size[1]) else: raise ValueError('ERROR: invalid value for crop_type') img = img.crop(box) elif ratio < img_ratio: img = img.resize( (size[1] * img.size[0] / img.size[1], size[1]), Image.ANTIALIAS) # Crop in the top, middle or bottom if crop_type == 'top': box = (0, 0, size[0], img.size[1]) elif crop_type == 'middle': box = ((img.size[0] - size[0]) / 2, 0, ( img.size[0] + size[0]) / 2, img.size[1]) elif crop_type == 'bottom': box = (img.size[0] - size[0], 0, img.size[0], img.size[1]) else: raise ValueError('ERROR: invalid value for crop_type') img = img.crop(box) else: img = img.resize((size[0], size[1]), Image.ANTIALIAS) return img def cropped_thumbnail(img): """Creates a centered cropped thumbnail GdkPixbuf of given image""" # thumbnail and crop try: im = Image.open(img) im = im.convert('RGBA') im = resize_and_crop(im, (318, 120)) # Convert to GdkPixbuf buff = StringIO.StringIO() im.save(buff, 'ppm') contents = buff.getvalue() buff.close() loader = GdkPixbuf.PixbufLoader.new_with_type('pnm') loader.write(contents) pixbuf = loader.get_pixbuf() loader.close() return pixbuf except IOError: print("Invalid image file %s"%img) try: os.remove(img) except IOError: pass return None def fit_image_screen(img, widget): pixbuf = GdkPixbuf.Pixbuf.new_from_file(img) screen_h = widget.get_screen().get_height() screen_w = widget.get_screen().get_width() if pixbuf.get_height() >= screen_h - 100: factor = float(pixbuf.get_width()) / pixbuf.get_height() new_width = factor * (screen_h - 100) pixbuf = pixbuf.scale_simple( new_width, screen_h - 100, GdkPixbuf.InterpType.BILINEAR) return pixbuf if pixbuf.get_width() >= screen_w: factor = float(pixbuf.get_height()) / pixbuf.get_width() new_height = factor * (screen_w - 100) pixbuf.scale_simple( screen_w - 100, new_height, GdkPixbuf.InterType.BILINEAR) return pixbuf return pixbuf def simple_resize(img_path, w, h): try: im = Image.open(img_path) img = im.resize((w, h), Image.ANTIALIAS) dest = BIRDIE_CACHE_PATH + os.path.basename(img_path) + 
".jpg" img.save(dest) return dest except IOError: return None
gpl-3.0
2,320,095,578,948,772,000
33.126984
77
0.594651
false
3.43725
false
false
false
wenxichen/tensorflow_yolo2
src/img_dataset/ilsvrc2017_cls.py
1
7175
"""ILSVRC 2017 Classicifation Dataset. DEPRECATED version. For the purpose of keeping history only. Use ilsvrc2017_cls_multithread.py instead. """ import os import cv2 import numpy as np import random import config as cfg class ilsvrc_cls: def __init__(self, image_set, rebuild=False, data_aug=True): self.name = 'ilsvrc_2017' self.devkit_path = cfg.ILSVRC_PATH self.data_path = self.devkit_path self.cache_path = cfg.CACHE_PATH self.batch_size = cfg.BATCH_SIZE self.image_size = cfg.IMAGE_SIZE self.image_set = image_set self.rebuild = rebuild self.data_aug = data_aug self.cursor = 0 self.load_classes() # self.gt_labels = None assert os.path.exists(self.devkit_path), \ 'VOCdevkit path does not exist: {}'.format(self.devkit_path) assert os.path.exists(self.data_path), \ 'Path does not exist: {}'.format(self.data_path) self.prepare() def prepare(self): """Create a list of ground truth that includes input path and label. """ if (self.image_set == "train"): imgset_fname = "train_cls.txt" else: imgset_fname = self.image_set + ".txt" imgset_file = os.path.join( self.data_path, 'ImageSets', 'CLS-LOC', imgset_fname) print('Processing gt_labels using ' + imgset_file) gt_labels = [] with open(imgset_file, 'r') as f: for line in f.readlines(): img_path = line.strip().split()[0] label = self.class_to_ind[img_path.split("/")[0]] imname = os.path.join( self.data_path, 'Data', 'CLS-LOC', self.image_set, img_path + ".JPEG") gt_labels.append( {'imname': imname, 'label': label}) random.shuffle(gt_labels) self.gt_labels = gt_labels def load_classes(self): """Use the folder name to get labels.""" if (self.image_set == "train"): img_folder = os.path.join( self.data_path, 'Data', 'CLS-LOC', 'train') print('Loading class info from ' + img_folder) self.classes = [item for item in os.listdir(img_folder) if os.path.isdir(os.path.join(img_folder, item))] self.num_class = len(self.classes) assert (self.num_class == 1000), "number of classes is not 1000!" self.class_to_ind = dict( list(zip(self.classes, list(range(self.num_class))))) def get(self): """Get shuffled images and labels according to batchsize. 
Return: images: 4D numpy array labels: 1D numpy array """ images = np.zeros( (self.batch_size, self.image_size, self.image_size, 3)) labels = np.zeros(self.batch_size) count = 0 while count < self.batch_size: imname = self.gt_labels[self.cursor]['imname'] images[count, :, :, :] = self.image_read(imname, data_aug=self.data_aug) labels[count] = self.gt_labels[self.cursor]['label'] count += 1 self.cursor += 1 if self.cursor >= len(self.gt_labels): random.shuffle(self.gt_labels) self.cursor = 0 return images, labels def image_read(self, imname, data_aug=False): image = cv2.imread(imname) ##################### # Data Augmentation # ##################### if data_aug: flip = bool(random.getrandbits(1)) rotate_deg = random.randint(0, 359) # 75% chance to do random crop # another 25% change in maintaining input at 224x224 # this help simplify the input processing for test, val # TODO: can make multiscale test input later random_crop_chance = random.randint(0, 3) too_small = False color_pert = bool(random.getrandbits(1)) if flip: image = image[:, ::-1, :] # assume color image rows, cols, _ = image.shape M = cv2.getRotationMatrix2D((cols / 2, rows / 2), rotate_deg, 1) image = cv2.warpAffine(image, M, (cols, rows)) # color perturbation if color_pert: hue_shift_sign = bool(random.getrandbits(1)) hue_shift = random.randint(0, 10) saturation_shift_sign = bool(random.getrandbits(1)) saturation_shift = random.randint(0, 10) hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) # TODO: currently not sure what cv2 does to values # that are larger than the maximum. # It seems it does not cut at the max # nor normalize the whole by multiplying a factor. # need to expore this in more detail if hue_shift_sign: hsv[:, :, 0] += hue_shift else: hsv[:, :, 0] -= hue_shift if saturation_shift_sign: hsv[:, :, 1] += saturation_shift else: hsv[:, :, 1] -= saturation_shift image = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR) # random crop if random_crop_chance > 0: # current random crop upbound is 292 (1.3 x 224) short_side_len = random.randint( self.image_size, cfg.RAND_CROP_UPBOUND) short_side = min([cols, rows]) if short_side == cols: scaled_cols = short_side_len factor = float(short_side_len) / cols scaled_rows = int(rows * factor) else: scaled_rows = short_side_len factor = float(short_side_len) / rows scaled_cols = int(cols * factor) # print "scaled_cols and rows:", scaled_cols, scaled_rows if scaled_cols < 224 or scaled_rows < 224: too_small = True print "Image is too small,", imname else: image = cv2.resize(image, (scaled_cols, scaled_rows)) col_offset = random.randint(0, scaled_cols - self.image_size) row_offset = random.randint(0, scaled_rows - self.image_size) # print "col_offset and row_offset:", col_offset, row_offset image = image[row_offset:self.image_size + row_offset, col_offset:self.image_size + col_offset] # assuming still using image size 224x224 # print "image shape is", image.shape if random_crop_chance == 0 or too_small: image = cv2.resize(image, (self.image_size, self.image_size)) else: image = cv2.resize(image, (self.image_size, self.image_size)) image = image.astype(np.float32) image = (image / 255.0) * 2.0 - 1.0 return image
mit
-763,985,578,201,332,100
39.767045
90
0.522509
false
3.905825
false
false
false
oss/rutgers-repository-utils
lib/repoclosure.py
1
11619
#!/usr/bin/python -t # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # seth vidal 2005 (c) etc etc #Read in the metadata of a series of repositories and check all the # dependencies in all packages for resolution. Print out the list of # packages with unresolved dependencies import sys import os import logging import yum import yum.Errors from yum.misc import getCacheDir from optparse import OptionParser import rpmUtils.arch import rpmUtils.updates from yum.constants import * from yum.packageSack import ListPackageSack def parseArgs(): usage = """ Read in the metadata of a series of repositories and check all the dependencies in all packages for resolution. Print out the list of packages with unresolved dependencies %s [-c <config file>] [-a <arch>] [-l <lookaside>] [-r <repoid>] [-r <repoid2>] """ % sys.argv[0] parser = OptionParser(usage=usage) parser.add_option("-c", "--config", default='/etc/yum.conf', help='config file to use (defaults to /etc/yum.conf)') parser.add_option("-a", "--arch", default=[], action='append', help='check packages of the given archs, can be specified multiple ' + 'times (default: current arch)') parser.add_option("--basearch", default=None, help="set the basearch for yum to run as") parser.add_option("-b", "--builddeps", default=False, action="store_true", help='check build dependencies only (needs source repos enabled)') parser.add_option("-l", "--lookaside", default=[], action='append', help="specify a lookaside repo id to query, can be specified multiple times") parser.add_option("-r", "--repoid", default=[], action='append', help="specify repo ids to query, can be specified multiple times (default is all enabled)") parser.add_option("-t", "--tempcache", default=False, action="store_true", help="Use a temp dir for storing/accessing yum-cache") parser.add_option("-q", "--quiet", default=0, action="store_true", help="quiet (no output to stderr)") parser.add_option("-n", "--newest", default=0, action="store_true", help="check only the newest packages in the repos") parser.add_option("--repofrompath", action="append", help="specify repoid & paths of additional repositories - unique repoid and path required, can be specified multiple times. Example. --repofrompath=myrepo,/path/to/repo") parser.add_option("-p", "--pkg", action="append", help="check closure for this package only") parser.add_option("-g", "--group", action="append", help="check closure for packages in this group only") (opts, args) = parser.parse_args() return (opts, args) # Note that this is a "real" API, used by spam-o-matic etc. # so we have to do at least some API guarantee stuff. 
class RepoClosure(yum.YumBase): def __init__(self, arch=[], config="/etc/yum.conf", builddeps=False, pkgonly=None, basearch=None, grouponly=None): yum.YumBase.__init__(self) if basearch: self.preconf.arch = basearch self.logger = logging.getLogger("yum.verbose.repoclosure") self.lookaside = [] self.builddeps = builddeps self.pkgonly = pkgonly self.grouponly = grouponly self.doConfigSetup(fn = config,init_plugins=False) self._rc_arches = arch if hasattr(self.repos, 'sqlite'): self.repos.sqlite = False self.repos._selectSackType() def evrTupletoVer(self,tup): """convert an evr tuple to a version string, return None if nothing to convert""" e, v, r = tup if v is None: return None val = v if e is not None: val = '%s:%s' % (e, v) if r is not None: val = '%s-%s' % (val, r) return val def readMetadata(self): self.doRepoSetup() archs = [] if not self._rc_arches: archs.extend(self.arch.archlist) else: for arch in self._rc_arches: archs.extend(self.arch.get_arch_list(arch)) if self.builddeps and 'src' not in archs: archs.append('src') self.doSackSetup(archs) for repo in self.repos.listEnabled(): self.repos.populateSack(which=[repo.id], mdtype='filelists') def getBrokenDeps(self, newest=False): unresolved = {} resolved = {} pkgs = self.pkgSack if newest: pkgs = self.pkgSack.returnNewestByNameArch() mypkgSack = ListPackageSack(pkgs) pkgtuplist = mypkgSack.simplePkgList() # toss out any of the obsoleted pkgs so we can't depsolve with them self.up = rpmUtils.updates.Updates([], pkgtuplist) self.up.rawobsoletes = mypkgSack.returnObsoletes() for pkg in pkgs: fo = self.up.checkForObsolete([pkg.pkgtup]) if fo: # useful debug to make sure the obsoletes is sane #print "ignoring obsolete pkg %s" % pkg #for i in fo[pkg.pkgtup]: # print i self.pkgSack.delPackage(pkg) # we've deleted items so remake the pkgs pkgs = self.pkgSack.returnNewestByNameArch() pkgtuplist = mypkgSack.simplePkgList() if self.builddeps: pkgs = filter(lambda x: x.arch == 'src', pkgs) pkglist = self.pkgonly if self.grouponly: if not pkglist: pkglist = [] for group in self.grouponly: groupobj = self.comps.return_group(group) if not groupobj: continue pkglist.extend(groupobj.packages) if pkglist: pkgs = filter(lambda x: x.name in pkglist, pkgs) for pkg in pkgs: if pkg.repoid in self.lookaside: # don't attempt to resolve dependancy issues for # packages from lookaside repositories continue for (req, flags, (reqe, reqv, reqr)) in pkg.returnPrco('requires'): if req.startswith('rpmlib'): continue # ignore rpmlib deps ver = self.evrTupletoVer((reqe, reqv, reqr)) if (req,flags,ver) in resolved: continue try: resolve_sack = self.whatProvides(req, flags, ver) except yum.Errors.RepoError, e: pass if len(resolve_sack) < 1: if pkg not in unresolved: unresolved[pkg] = [] unresolved[pkg].append((req, flags, ver)) continue if newest: resolved_by_newest = False for po in resolve_sack:# look through and make sure all our answers are newest-only if po.pkgtup in pkgtuplist: resolved_by_newest = True break if resolved_by_newest: resolved[(req,flags,ver)] = 1 else: if pkg not in unresolved: unresolved[pkg] = [] unresolved[pkg].append((req, flags, ver)) return unresolved def main(): (opts, cruft) = parseArgs() my = RepoClosure(arch=opts.arch, config=opts.config, builddeps=opts.builddeps, pkgonly=opts.pkg, grouponly=opts.group, basearch=opts.basearch) if opts.repofrompath: # setup the fake repos for repo in opts.repofrompath: repoid,repopath = tuple(repo.split(',')) if repopath.startswith('http') or repopath.startswith('ftp') or repopath.startswith('file:'): baseurl 
= repopath else: repopath = os.path.abspath(repopath) baseurl = 'file://' + repopath newrepo = yum.yumRepo.YumRepository(repoid) newrepo.name = repopath newrepo.baseurl = baseurl newrepo.basecachedir = my.conf.cachedir newrepo.metadata_expire = 0 newrepo.timestamp_check = False my.repos.add(newrepo) my.repos.enableRepo(newrepo.id) my.logger.info( "Added %s repo from %s" % (repoid,repopath)) if opts.repoid: for repo in my.repos.repos.values(): if ((repo.id not in opts.repoid) and (repo.id not in opts.lookaside)): repo.disable() else: repo.enable() if opts.lookaside: my.lookaside = opts.lookaside if os.geteuid() != 0 or opts.tempcache: cachedir = getCacheDir() if cachedir is None: my.logger.error("Error: Could not make cachedir, exiting") sys.exit(50) my.repos.setCacheDir(cachedir) if not opts.quiet: my.logger.info('Reading in repository metadata - please wait....') try: my.readMetadata() except yum.Errors.RepoError, e: my.logger.info(e) my.logger.info('Some dependencies may not be complete for this repository') my.logger.info('Run as root to get all dependencies or use -t to enable a user temp cache') if not opts.quiet: my.logger.info('Checking Dependencies') baddeps = my.getBrokenDeps(opts.newest) if opts.newest: num = len(my.pkgSack.returnNewestByNameArch()) else: num = len(my.pkgSack) repos = my.repos.listEnabled() if not opts.quiet: my.logger.info('Repos looked at: %s' % len(repos)) for repo in repos: my.logger.info(' %s' % repo) my.logger.info('Num Packages in Repos: %s' % num) pkgs = baddeps.keys() def sortbyname(a,b): return cmp(a.__str__(),b.__str__()) pkgs.sort(sortbyname) for pkg in pkgs: my.logger.info('package: %s from %s\n unresolved deps: ' % (pkg, pkg.repoid)) for (n, f, v) in baddeps[pkg]: req = '%s' % n if f: flag = LETTERFLAGS[f] req = '%s %s'% (req, flag) if v: req = '%s %s' % (req, v) my.logger.info(' %s' % req) if __name__ == "__main__": try: main() except (yum.Errors.YumBaseError, ValueError), e: print >> sys.stderr, str(e) sys.exit(1)
gpl-2.0
6,160,217,929,008,276,000
36.846906
192
0.560117
false
4.096968
true
false
false
dominikgiermala/properties-editor
src/properties_editor.py
1
5314
import os import sublime import sublime_plugin from .lib.pyjavaproperties import Properties class AddEditPropertiesCommand(sublime_plugin.WindowCommand): def run(self, paths = []): # TODO: validate if *.properties file self.paths = paths self.window.show_input_panel("Properties to add/edit:", '', self.on_properties_put, None, None) def on_properties_put(self, properties_string): if properties_string and properties_string.strip() and '=' in properties_string: self.properties = {} for property_string in properties_string.split('\n'): key_value = property_string.split('=', 1) if key_value[0] and key_value[1]: self.properties[key_value[0]] = key_value[1] self.edit_properties(self.properties) def edit_properties(self, properties): files_without_key = {} files_with_key = {} for key in properties: files_with_key[key] = [] files_without_key[key] = [] for file in self.paths: p = Properties() p.load(open(file, encoding='latin-1', mode='r')) for key, value in properties.items(): if p.getProperty(key): files_with_key[key].append(os.path.basename(file)) else: files_without_key[key].append(os.path.basename(file)) p[key] = value p.store(open(file, encoding='latin-1', mode='w')) self.display_confirmation_message(files_without_key, files_with_key) def display_confirmation_message(self, files_without_key, files_with_key): confirmation_message = "" for key, value in self.properties.items(): confirmation_message += "Property " + key + "=" + value + " was: " if files_without_key[key]: confirmation_message += "\nAdded in files:\n" + "\n".join(files_without_key[key]) if files_with_key[key]: confirmation_message += "\n\nEdited in files:\n" + "\n".join(files_with_key[key]) confirmation_message += "\n\n" sublime.message_dialog(confirmation_message) class RemovePropertyCommand(sublime_plugin.WindowCommand): def run(self, paths = []): # TODO: validate if *.properties file self.paths = paths self.window.show_input_panel("Property key to remove:", '', self.on_key_put, None, None) def on_key_put(self, key): if key and key.strip(): self.key = key self.remove_property(key, self.paths) def remove_property(self, key, paths): files_without_key = [] files_with_key = [] for file in self.paths: p = Properties() p.load(open(file)) if p.getProperty(key): p.removeProperty(key) files_with_key.append(os.path.basename(file)) p.store(open(file, 'w')) else: files_without_key.append(os.path.basename(file)) self.display_confirmation_message(files_without_key, files_with_key) def display_confirmation_message(self, files_without_key, files_with_key): confirmation_message = "Property with key " + self.key + " was: " if files_with_key: confirmation_message += "\nRemoved in files:\n" + "\n".join(files_with_key) if files_without_key: confirmation_message += "\n\nNot found in files:\n" + "\n".join(files_without_key) if files_without_key: sublime.error_message(confirmation_message) else: sublime.message_dialog(confirmation_message) class RenameKeyCommand(sublime_plugin.WindowCommand): def run(self, paths = []): # TODO: validate if *.properties file self.paths = paths self.window.show_input_panel("Key to rename:", '', self.on_old_key_put, None, None) def on_old_key_put(self, old_key): if old_key and old_key.strip(): self.old_key = old_key self.window.show_input_panel("New key:", '', self.on_new_key_put, None, None) def on_new_key_put(self, new_key): if new_key and new_key.strip(): self.new_key = new_key self.rename_key(self.old_key, self.new_key) def rename_key(self, old_key, new_key): files_without_old_key = [] files_with_new_key = [] 
files_with_renamed_key = [] for file in self.paths: p = Properties() p.load(open(file)) if p.getProperty(old_key): if not p.getProperty(new_key): p[new_key] = p[old_key] p.removeProperty(old_key) files_with_renamed_key.append(os.path.basename(file)) else: files_with_new_key.append(os.path.basename(file)) else: files_without_old_key.append(os.path.basename(file)) p.store(open(file, 'w')) self.display_confirmation_message(files_without_old_key, files_with_new_key, files_with_renamed_key) def display_confirmation_message(self, files_without_old_key, files_with_new_key, files_with_renamed_key): confirmation_message = "Key " + self.old_key + " was: " if files_with_renamed_key: confirmation_message += "\nRenamed in files:\n" + "\n".join(files_with_renamed_key) if files_without_old_key: confirmation_message += "\n\nNot found in files:\n" + "\n".join(files_without_old_key) if files_with_new_key: confirmation_message += "\n\nKey " + self.new_key + " already exists in files:\n" + "\n".join(files_with_new_key) if files_without_old_key or files_with_new_key: sublime.error_message(confirmation_message) else: sublime.message_dialog(confirmation_message)
mit
-4,607,190,421,713,108,000
38.664179
119
0.649417
false
3.361164
false
false
false
SIPp/pysipp
pysipp/launch.py
1
5708
""" Launchers for invoking SIPp user agents """ import subprocess import os import shlex import select import threading import signal import time from . import utils from pprint import pformat from collections import OrderedDict, namedtuple log = utils.get_logger() Streams = namedtuple("Streams", "stdout stderr") class TimeoutError(Exception): "SIPp process timeout exception" class PopenRunner(object): """Run a sequence of SIPp agents asynchronously. If any process terminates with a non-zero exit code, immediately kill all remaining processes and collect std streams. Adheres to an interface similar to `multiprocessing.pool.AsyncResult`. """ def __init__( self, subprocmod=subprocess, osmod=os, poller=select.epoll, ): # these could optionally be rpyc proxy objs self.spm = subprocmod self.osm = osmod self.poller = poller() # collector thread placeholder self._waiter = None # store proc results self._procs = OrderedDict() def __call__(self, cmds, block=True, rate=300, **kwargs): if self._waiter and self._waiter.is_alive(): raise RuntimeError("Not all processes from a prior run have completed") if self._procs: raise RuntimeError( "Process results have not been cleared from previous run" ) sp = self.spm os = self.osm DEVNULL = open(os.devnull, "wb") fds2procs = OrderedDict() # run agent commands in sequence for cmd in cmds: log.debug('launching cmd:\n"{}"\n'.format(cmd)) proc = sp.Popen(shlex.split(cmd), stdout=DEVNULL, stderr=sp.PIPE) fd = proc.stderr.fileno() log.debug("registering fd '{}' for pid '{}'".format(fd, proc.pid)) fds2procs[fd] = self._procs[cmd] = proc # register for stderr hangup events self.poller.register(proc.stderr.fileno(), select.EPOLLHUP) # limit launch rate time.sleep(1.0 / rate) # launch waiter self._waiter = threading.Thread(target=self._wait, args=(fds2procs,)) self._waiter.daemon = True self._waiter.start() return self.get(**kwargs) if block else self._procs def _wait(self, fds2procs): log.debug("started waiter for procs {}".format(fds2procs)) signalled = None left = len(fds2procs) collected = 0 while collected < left: pairs = self.poller.poll() # wait on hangup events log.debug("received hangup for pairs '{}'".format(pairs)) for fd, status in pairs: collected += 1 proc = fds2procs[fd] # attach streams so they can be read more then once log.debug("collecting streams for {}".format(proc)) proc.streams = Streams(*proc.communicate()) # timeout=2)) if proc.returncode != 0 and not signalled: # stop all other agents if there is a failure signalled = self.stop() log.debug("terminating waiter thread") def get(self, timeout=180): """Block up to `timeout` seconds for all agents to complete. 
Either return (cmd, proc) pairs or raise `TimeoutError` on timeout """ if self._waiter.is_alive(): self._waiter.join(timeout=timeout) if self._waiter.is_alive(): # kill them mfin SIPps signalled = self.stop() self._waiter.join(timeout=10) if self._waiter.is_alive(): # try to stop a few more times for _ in range(3): signalled = self.stop() self._waiter.join(timeout=1) if self._waiter.is_alive(): # some procs failed to terminate via signalling raise RuntimeError("Unable to kill all agents!?") # all procs were killed by SIGUSR1 raise TimeoutError( "pids '{}' failed to complete after '{}' seconds".format( pformat([p.pid for p in signalled.values()]), timeout ) ) return self._procs def stop(self): """Stop all agents with SIGUSR1 as per SIPp's signal handling""" return self._signalall(signal.SIGUSR1) def terminate(self): """Kill all agents with SIGTERM""" return self._signalall(signal.SIGTERM) def _signalall(self, signum): signalled = OrderedDict() for cmd, proc in self.iterprocs(): proc.send_signal(signum) log.warn( "sent signal '{}' to cmd '{}' with pid '{}'".format( signum, cmd, proc.pid ) ) signalled[cmd] = proc return signalled def iterprocs(self): """Iterate all processes which are still alive yielding (cmd, proc) pairs """ return ( (cmd, proc) for cmd, proc in self._procs.items() if proc and proc.poll() is None ) def is_alive(self): """Return bool indicating whether some agents are still alive""" return any(self.iterprocs()) def ready(self): """Return bool indicating whether all agents have completed""" return not self.is_alive() def clear(self): """Clear all processes from the last run""" assert self.ready(), "Not all processes have completed" self._procs.clear()
gpl-2.0
-7,029,735,260,306,038,000
32.576471
83
0.563595
false
4.452418
false
false
false
Khan/pyobjc-framework-FSEvents
setup.py
1
1152
'''
Wrappers for the "FSEvents" API in MacOS X. The functions in this framework
allow you to reliably observe changes to the filesystem, even when your
program is not running al the time.

These wrappers don't include documentation, please check Apple's documention
for information on how to use this framework and PyObjC's documentation for
general tips and tricks regarding the translation between Python and
(Objective-)C frameworks
'''
from pyobjc_setup import setup, Extension

setup(
    min_os_level='10.5',
    name='pyobjc-framework-FSEvents',
    version="2.5.1",
    description = "Wrappers for the framework FSEvents on Mac OS X",
    packages = [ "FSEvents" ],
    # setup_requires doesn't like git links, so we just have to
    # pip install these first:
    #setup_requires = [
    #    'https://github.com/Khan/pyobjc-core/tarball/master',
    #],
    dependency_links = [
        'https://github.com/Khan/pyobjc-core/tarball/master',
        'https://github.com/Khan/pyobjc-framework-Cocoa/tarball/master',
    ],
    ext_modules = [
        Extension("FSEvents._callbacks",
            [ "Modules/_callbacks.m" ],
        ),
    ],
)
mit
-8,843,281,425,080,310,000
33.909091
76
0.684896
false
3.611285
false
false
false
maxmind/GeoIP2-python
tests/webservice_test.py
1
12944
#!/usr/bin/env python # -*- coding: utf-8 -*- import asyncio import copy import ipaddress import json import sys from typing import cast, Dict import unittest sys.path.append("..") # httpretty currently doesn't work, but mocket with the compat interface # does. from mocket import Mocket # type: ignore from mocket.plugins.httpretty import httpretty, httprettified # type: ignore import geoip2 from geoip2.errors import ( AddressNotFoundError, AuthenticationError, GeoIP2Error, HTTPError, InvalidRequestError, OutOfQueriesError, PermissionRequiredError, ) from geoip2.webservice import AsyncClient, Client class TestBaseClient(unittest.TestCase): base_uri = "https://geoip.maxmind.com/geoip/v2.1/" country = { "continent": {"code": "NA", "geoname_id": 42, "names": {"en": "North America"}}, "country": { "geoname_id": 1, "iso_code": "US", "names": {"en": "United States of America"}, }, "maxmind": {"queries_remaining": 11}, "registered_country": { "geoname_id": 2, "is_in_european_union": True, "iso_code": "DE", "names": {"en": "Germany"}, }, "traits": {"ip_address": "1.2.3.4", "network": "1.2.3.0/24"}, } # this is not a comprehensive representation of the # JSON from the server insights = cast(Dict, copy.deepcopy(country)) insights["traits"]["user_count"] = 2 insights["traits"]["static_ip_score"] = 1.3 def _content_type(self, endpoint): return ( "application/vnd.maxmind.com-" + endpoint + "+json; charset=UTF-8; version=1.0" ) @httprettified def test_country_ok(self): httpretty.register_uri( httpretty.GET, self.base_uri + "country/1.2.3.4", body=json.dumps(self.country), status=200, content_type=self._content_type("country"), ) country = self.run_client(self.client.country("1.2.3.4")) self.assertEqual( type(country), geoip2.models.Country, "return value of client.country" ) self.assertEqual(country.continent.geoname_id, 42, "continent geoname_id is 42") self.assertEqual(country.continent.code, "NA", "continent code is NA") self.assertEqual( country.continent.name, "North America", "continent name is North America" ) self.assertEqual(country.country.geoname_id, 1, "country geoname_id is 1") self.assertIs( country.country.is_in_european_union, False, "country is_in_european_union is False", ) self.assertEqual(country.country.iso_code, "US", "country iso_code is US") self.assertEqual( country.country.names, {"en": "United States of America"}, "country names" ) self.assertEqual( country.country.name, "United States of America", "country name is United States of America", ) self.assertEqual( country.maxmind.queries_remaining, 11, "queries_remaining is 11" ) self.assertIs( country.registered_country.is_in_european_union, True, "registered_country is_in_european_union is True", ) self.assertEqual( country.traits.network, ipaddress.ip_network("1.2.3.0/24"), "network" ) self.assertEqual(country.raw, self.country, "raw response is correct") @httprettified def test_me(self): httpretty.register_uri( httpretty.GET, self.base_uri + "country/me", body=json.dumps(self.country), status=200, content_type=self._content_type("country"), ) implicit_me = self.run_client(self.client.country()) self.assertEqual( type(implicit_me), geoip2.models.Country, "country() returns Country object" ) explicit_me = self.run_client(self.client.country()) self.assertEqual( type(explicit_me), geoip2.models.Country, "country('me') returns Country object", ) @httprettified def test_200_error(self): httpretty.register_uri( httpretty.GET, self.base_uri + "country/1.1.1.1", body="", status=200, content_type=self._content_type("country"), ) with 
self.assertRaisesRegex( GeoIP2Error, "could not decode the response as JSON" ): self.run_client(self.client.country("1.1.1.1")) @httprettified def test_bad_ip_address(self): with self.assertRaisesRegex( ValueError, "'1.2.3' does not appear to be an IPv4 " "or IPv6 address" ): self.run_client(self.client.country("1.2.3")) @httprettified def test_no_body_error(self): httpretty.register_uri( httpretty.GET, self.base_uri + "country/" + "1.2.3.7", body="", status=400, content_type=self._content_type("country"), ) with self.assertRaisesRegex( HTTPError, "Received a 400 error for .* with no body" ): self.run_client(self.client.country("1.2.3.7")) @httprettified def test_weird_body_error(self): httpretty.register_uri( httpretty.GET, self.base_uri + "country/" + "1.2.3.8", body='{"wierd": 42}', status=400, content_type=self._content_type("country"), ) with self.assertRaisesRegex( HTTPError, "Response contains JSON but it does not " "specify code or error keys", ): self.run_client(self.client.country("1.2.3.8")) @httprettified def test_bad_body_error(self): httpretty.register_uri( httpretty.GET, self.base_uri + "country/" + "1.2.3.9", body="bad body", status=400, content_type=self._content_type("country"), ) with self.assertRaisesRegex( HTTPError, "it did not include the expected JSON body" ): self.run_client(self.client.country("1.2.3.9")) @httprettified def test_500_error(self): httpretty.register_uri( httpretty.GET, self.base_uri + "country/" + "1.2.3.10", status=500 ) with self.assertRaisesRegex(HTTPError, r"Received a server error \(500\) for"): self.run_client(self.client.country("1.2.3.10")) @httprettified def test_300_error(self): httpretty.register_uri( httpretty.GET, self.base_uri + "country/" + "1.2.3.11", status=300, content_type=self._content_type("country"), ) with self.assertRaisesRegex( HTTPError, r"Received a very surprising HTTP status \(300\) for" ): self.run_client(self.client.country("1.2.3.11")) @httprettified def test_ip_address_required(self): self._test_error(400, "IP_ADDRESS_REQUIRED", InvalidRequestError) @httprettified def test_ip_address_not_found(self): self._test_error(404, "IP_ADDRESS_NOT_FOUND", AddressNotFoundError) @httprettified def test_ip_address_reserved(self): self._test_error(400, "IP_ADDRESS_RESERVED", AddressNotFoundError) @httprettified def test_permission_required(self): self._test_error(403, "PERMISSION_REQUIRED", PermissionRequiredError) @httprettified def test_auth_invalid(self): self._test_error(400, "AUTHORIZATION_INVALID", AuthenticationError) @httprettified def test_license_key_required(self): self._test_error(401, "LICENSE_KEY_REQUIRED", AuthenticationError) @httprettified def test_account_id_required(self): self._test_error(401, "ACCOUNT_ID_REQUIRED", AuthenticationError) @httprettified def test_user_id_required(self): self._test_error(401, "USER_ID_REQUIRED", AuthenticationError) @httprettified def test_account_id_unkown(self): self._test_error(401, "ACCOUNT_ID_UNKNOWN", AuthenticationError) @httprettified def test_user_id_unkown(self): self._test_error(401, "USER_ID_UNKNOWN", AuthenticationError) @httprettified def test_out_of_queries_error(self): self._test_error(402, "OUT_OF_QUERIES", OutOfQueriesError) def _test_error(self, status, error_code, error_class): msg = "Some error message" body = {"error": msg, "code": error_code} httpretty.register_uri( httpretty.GET, self.base_uri + "country/1.2.3.18", body=json.dumps(body), status=status, content_type=self._content_type("country"), ) with self.assertRaisesRegex(error_class, msg): 
self.run_client(self.client.country("1.2.3.18")) @httprettified def test_unknown_error(self): msg = "Unknown error type" ip = "1.2.3.19" body = {"error": msg, "code": "UNKNOWN_TYPE"} httpretty.register_uri( httpretty.GET, self.base_uri + "country/" + ip, body=json.dumps(body), status=400, content_type=self._content_type("country"), ) with self.assertRaisesRegex(InvalidRequestError, msg): self.run_client(self.client.country(ip)) @httprettified def test_request(self): httpretty.register_uri( httpretty.GET, self.base_uri + "country/" + "1.2.3.4", body=json.dumps(self.country), status=200, content_type=self._content_type("country"), ) self.run_client(self.client.country("1.2.3.4")) request = httpretty.last_request self.assertEqual( request.path, "/geoip/v2.1/country/1.2.3.4", "correct URI is used" ) self.assertEqual( request.headers["Accept"], "application/json", "correct Accept header" ) self.assertRegex( request.headers["User-Agent"], "^GeoIP2-Python-Client/", "Correct User-Agent", ) self.assertEqual( request.headers["Authorization"], "Basic NDI6YWJjZGVmMTIzNDU2", "correct auth", ) @httprettified def test_city_ok(self): httpretty.register_uri( httpretty.GET, self.base_uri + "city/" + "1.2.3.4", body=json.dumps(self.country), status=200, content_type=self._content_type("city"), ) city = self.run_client(self.client.city("1.2.3.4")) self.assertEqual(type(city), geoip2.models.City, "return value of client.city") self.assertEqual( city.traits.network, ipaddress.ip_network("1.2.3.0/24"), "network" ) @httprettified def test_insights_ok(self): httpretty.register_uri( httpretty.GET, self.base_uri + "insights/1.2.3.4", body=json.dumps(self.insights), status=200, content_type=self._content_type("country"), ) insights = self.run_client(self.client.insights("1.2.3.4")) self.assertEqual( type(insights), geoip2.models.Insights, "return value of client.insights" ) self.assertEqual( insights.traits.network, ipaddress.ip_network("1.2.3.0/24"), "network" ) self.assertEqual(insights.traits.static_ip_score, 1.3, "static_ip_score is 1.3") self.assertEqual(insights.traits.user_count, 2, "user_count is 2") def test_named_constructor_args(self): id = 47 key = "1234567890ab" client = self.client_class(account_id=id, license_key=key) self.assertEqual(client._account_id, str(id)) self.assertEqual(client._license_key, key) def test_missing_constructor_args(self): with self.assertRaises(TypeError): self.client_class(license_key="1234567890ab") with self.assertRaises(TypeError): self.client_class("47") class TestClient(TestBaseClient): def setUp(self): self.client_class = Client self.client = Client(42, "abcdef123456") def run_client(self, v): return v class TestAsyncClient(TestBaseClient): def setUp(self): self._loop = asyncio.new_event_loop() self.client_class = AsyncClient self.client = AsyncClient(42, "abcdef123456") def tearDown(self): self._loop.run_until_complete(self.client.close()) self._loop.close() def run_client(self, v): return self._loop.run_until_complete(v) del TestBaseClient if __name__ == "__main__": unittest.main()
apache-2.0
-4,090,489,454,764,451,300
32.020408
88
0.584518
false
3.698286
true
false
false
pbl-cloud/paas-manager
paas_manager/app/util/gmail.py
1
1115
import sys
import smtplib
from email.mime.text import MIMEText
from email.utils import formatdate

from ... import config


def create_message(from_addr, to_addr, subject, message, encoding):
    body = MIMEText(message, 'plain', encoding)
    body['Subject'] = subject
    body['From'] = from_addr
    body['To'] = to_addr
    body['Date'] = formatdate()
    return body


def send_via_gmail(from_addr, to_addr, body):
    s = smtplib.SMTP('smtp.gmail.com', 587)
    s.ehlo()
    s.starttls()
    s.ehlo()
    s.login(
        config['gmail']['user'],
        config['gmail']['password'])
    s.sendmail(from_addr, [to_addr], body.as_string())
    s.close()


def gmail(message, to_addr):
    body = create_message(
        config['gmail']['user'], to_addr,
        '[Notification]', message, 'utf8')
    send_via_gmail(config['gmail']['user'], to_addr, body)
    return


if __name__ == '__main__':
    argvs = sys.argv
    argc = len(argvs)
    if (argc < 3):
        print('USAGE: python gmail.py address message')
        raise SystemExit(0)
    else:
        to_addr = argvs[1]
        message = argvs[2]
        gmail(message, to_addr)
mit
-1,493,291,774,116,415,200
24.340909
76
0.612556
false
3.308605
false
false
false
jkomiyama/duelingbanditlib
gather.py
1
1151
#!/usr/bin/env python
# coding:utf-8

#a tool for merging multiple simulation results

import sys,os,re

def avg(elems):
  return sum(elems)/float(len(elems))

def splitavg(splits):
  l = len(splits[0])
  for sp in splits:
    if len(sp) != l:
      print "split size not match"
      sys.exit()
  sums = [0 for i in range(l)]
  for sp in splits:
    for i in range(l):
      sums[i] += float(sp[i])
  return map(lambda i:i/len(splits), sums)

def gather(filenames):
  lines_files = []
  for afile in filenames:
    lines_files.append([line.strip() for line in file(afile, "r").readlines() if len(line)>0])
  l = 0
  for i in range(len(lines_files)-1):
    if len(lines_files[i]) != len(lines_files[i+1]):
      print "line num does not match!"
      sys.exit()
  while l < len(lines_files[0]):
    if len(lines_files[0][l])==0:
      pass
    elif lines_files[0][l][0]=="#":
      print lines_files[0][l]
    else:
      splits = [lines_files[i][l].split(" ") for i in range(len(lines_files))]
      avgs = splitavg(splits)
      avgs[0] = int(avgs[0])
      print " ".join(map(str, avgs))
    l+=1

if __name__ == "__main__":
  gather(sys.argv[1:])
mit
-3,247,382,617,199,091,000
24.577778
94
0.591659
false
2.8775
false
false
false
jadsonjs/DataScience
python/arrays_dimesion.py
1
2664
# Consider the case where you have one sequence of multiple time steps and one feature.

from numpy import array

data = array([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0])

# We can then use the reshape() function on the NumPy array to reshape this one-dimensional array
# into a three-dimensional array with 1 sample, 10 time steps, and 1 feature at each time step.

data = data.reshape((1, 10, 1))

print(data.shape)


# Consider the case where you have multiple parallel series as input for your model.
# For example, this could be two parallel series of 10 values:

#series 1: 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0
#series 2: 1.0, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1

from numpy import array

data = array([
    [0.1, 1.0],
    [0.2, 0.9],
    [0.3, 0.8],
    [0.4, 0.7],
    [0.5, 0.6],
    [0.6, 0.5],
    [0.7, 0.4],
    [0.8, 0.3],
    [0.9, 0.2],
    [1.0, 0.1]])

#This data can be framed as 1 sample with 10 time steps and 2 features.
#It can be reshaped as a 3D array as follows:

#model = Sequential()
#model.add(LSTM(32, input_shape=(10, 2)))
#model.add(Dense(1))

data = data.reshape(1, 10, 2)

print(data.shape)


#Here, we have 25 samples, 200 time steps per sample, and 1 feature

# split into samples (e.g. 5000/200 = 25)
samples = list()
length = 200

# step over the 5,000 in jumps of 200
for i in range(0,n,length):
    # grab from i to i + 200
    sample = data[i:i+length]
    samples.append(sample)

print(len(samples))

data = array(samples)
print(data.shape)

# reshape into [samples, timesteps, features]
# expect [25, 200, 1]
data = data.reshape((len(samples), length, 1))
print(data.shape)


#https://machinelearningmastery.com/reshape-input-data-long-short-term-memory-networks-keras/
#
#For a feed-forward network, your input has the shape (number of samples, number of features). With an LSTM/RNN, you add a time dimension,
#and your input shape becomes (number of samples, number of timesteps, number of features). This is in the documentation.

#So if your feature dimension is 5, and you have 2 timesteps, your input could look like

#[ [
# [1,2,3,4,5],
# [2,3,4,5,6]
# ],
# [
# [2,4,6,8,0],
# [9,8,7,6,5]
# ]
#]

#Your output shape depends on how you configure the net. If your LSTM/RNN has return_sequences=False, you'll have one label
#per sequence;
#if you set return_sequences=True, you'll have one label per timestep.

#So in the example, [ [[1,2,3,4,5], [2,3,4,5,6]], [[2,4,6,8,0], [9,8,7,6,5]] ]

#input_shape is (2, 2, 5).

#And a 'sequence' is '[[1,2,3,4,5], [2,3,4,5,6]]' I assume.
#and has 2 timesteps
apache-2.0
-1,277,704,131,615,162,400
25.64
139
0.631381
false
2.648111
false
false
false
SegundoBob/GNXrepeats
hrngpS2.py
1
1831
#!/usr/bin/python
#coding=utf-8
#@+leo-ver=5-thin
#@+node:bob07.20140715160011.1575: * @file hrngpS2.py
#@@first
#@@first
#@@language python
#@@tabwidth -4

import os
import sys

import leo.core.leoBridge as leoBridge

from leo_lib import lib_leo03

#@+others
#@+node:bob07.20140715160011.1576: ** gnxRepeats()
def gnxRepeats(cmdrx, infoList):
    hrnGnx = cmdrx.hiddenRootNode.gnx
    gnxDict = {hrnGnx: cmdrx.hiddenRootNode.h}
    errorFlag = False
    for vnode in lib_leo03.bffvWalk(cmdrx):
        hdr = '"{0}"'.format(vnode.h)
        if vnode.gnx in gnxDict:
            errorFlag = True
            hdr = '"{0}" {1}'.format(gnxDict[vnode.gnx], hdr)
        gnxDict[vnode.gnx] = hdr
    infoList.append('Error: {0}'.format(errorFlag))
    gnxList = gnxDict.keys()
    gnxList.sort()
    for gnx in gnxList:
        infoList.append('{gnx} {hdrs}'.format(gnx=gnx, hdrs=gnxDict[gnx]))
#@-others

TestDir = 'hidden_root_tsts'

def main():
    infoList = list()
    fpn1 = sys.argv[1]

    bridge = leoBridge.controller(gui='nullGui', verbose=False, loadPlugins=False, readSettings=False)
    leoG = bridge.globals()
    infoList.append('After bridge create: {0}'.format(leoG.app.nodeIndices.lastIndex))
    cmdr1 = bridge.openLeoFile(fpn1)
    infoList.append('After {fpn} open: {idx}'.format(fpn=fpn1, idx=leoG.app.nodeIndices.lastIndex))

    rp = cmdr1.rootPosition()
    posx = rp.insertAfter()
    posx.h = '{cnt} - {idx}'.format(cnt=2, idx=1)
    infoList.append('After adding 1 vnode: {idx}'.format(fpn=fpn1, idx=leoG.app.nodeIndices.lastIndex))

    gnxRepeats(cmdr1, infoList)
    cmdr1.save()
    cmdr1.close()

    fpnError = os.path.join(TestDir, 'SlaveLog.txt')
    fdError = open(fpnError, 'w')
    fdError.write('\n'.join(infoList) + '\n')
    fdError.close()

if __name__ == "__main__":
    main()
#@-leo
mit
6,549,371,217,770,367,000
27.609375
103
0.647187
false
2.653623
false
false
false
SINGROUP/pycp2k
pycp2k/classes/_opt_ri_basis3.py
1
1543
from pycp2k.inputsection import InputSection


class _opt_ri_basis3(InputSection):
    def __init__(self):
        InputSection.__init__(self)
        self.Delta_i_rel = None
        self.Delta_ri = None
        self.Eps_deriv = None
        self.Max_iter = None
        self.Num_func = None
        self.Basis_size = None
        self._name = "OPT_RI_BASIS"
        self._keywords = {'Num_func': 'NUM_FUNC', 'Delta_i_rel': 'DELTA_I_REL', 'Basis_size': 'BASIS_SIZE', 'Delta_ri': 'DELTA_RI', 'Eps_deriv': 'EPS_DERIV', 'Max_iter': 'MAX_ITER'}
        self._aliases = {'Max_num_iter': 'Max_iter', 'Dri': 'Delta_ri', 'Di_rel': 'Delta_i_rel', 'Eps_num_deriv': 'Eps_deriv'}

    @property
    def Di_rel(self):
        """
        See documentation for Delta_i_rel
        """
        return self.Delta_i_rel

    @property
    def Dri(self):
        """
        See documentation for Delta_ri
        """
        return self.Delta_ri

    @property
    def Eps_num_deriv(self):
        """
        See documentation for Eps_deriv
        """
        return self.Eps_deriv

    @property
    def Max_num_iter(self):
        """
        See documentation for Max_iter
        """
        return self.Max_iter

    @Di_rel.setter
    def Di_rel(self, value):
        self.Delta_i_rel = value

    @Dri.setter
    def Dri(self, value):
        self.Delta_ri = value

    @Eps_num_deriv.setter
    def Eps_num_deriv(self, value):
        self.Eps_deriv = value

    @Max_num_iter.setter
    def Max_num_iter(self, value):
        self.Max_iter = value
lgpl-3.0
7,370,216,220,963,202,000
24.716667
181
0.552819
false
3.269068
false
false
false
uglyfruitcake/Axelrod
axelrod/tests/unit/test_cooperator.py
1
1499
"""Test for the cooperator strategy.""" import axelrod from .test_player import TestPlayer C, D = axelrod.Actions.C, axelrod.Actions.D class TestCooperator(TestPlayer): name = "Cooperator" player = axelrod.Cooperator expected_classifier = { 'memory_depth': 0, 'stochastic': False, 'inspects_source': False, 'manipulates_source': False, 'manipulates_source': False, 'manipulates_state': False } def test_strategy(self): """Starts by cooperating.""" self.first_play_test(C) def test_effect_of_strategy(self): """Simply does the opposite to what the strategy did last time.""" self.markov_test([C, C, C, C]) class TestTrickyCooperator(TestPlayer): name = "Tricky Cooperator" player = axelrod.TrickyCooperator expected_classifier = { 'memory_depth': 10, 'stochastic': False, 'inspects_source': False, 'manipulates_source': False, 'manipulates_state': False } def test_strategy(self): """Starts by cooperating.""" self.first_play_test(C) def test_effect_of_strategy(self): """Test if it tries to trick opponent""" self.responses_test([C, C, C], [C, C, C], [D]) self.responses_test([C, C, C, D, D], [C, C, C, C, D], [C]) history = [C, C, C, D, D] + [C] * 11 opponent_histroy = [C, C, C, C, D] + [D] + [C] * 10 self.responses_test(history, opponent_histroy,[D])
mit
-220,543,415,429,763,360
27.283019
74
0.588392
false
3.301762
true
false
false
souravbadami/oppia
core/storage/base_model/gae_models.py
1
36051
# Copyright 2014 The Oppia Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Base model class.""" from constants import constants from core.platform import models import utils from google.appengine.datastore import datastore_query from google.appengine.ext import ndb transaction_services = models.Registry.import_transaction_services() # The delimiter used to separate the version number from the model instance # id. To get the instance id from a snapshot id, use Python's rfind() # method to find the location of this delimiter. _VERSION_DELIMITER = '-' # Constants used for generating ids. MAX_RETRIES = 10 RAND_RANGE = (1 << 30) - 1 ID_LENGTH = 12 class BaseModel(ndb.Model): """Base model for all persistent object storage classes.""" # When this entity was first created. This can be overwritten and # set explicitly. created_on = ndb.DateTimeProperty(auto_now_add=True, indexed=True) # When this entity was last updated. This cannot be set directly. last_updated = ndb.DateTimeProperty(auto_now=True, indexed=True) # Whether the current version of the model instance is deleted. deleted = ndb.BooleanProperty(indexed=True, default=False) @property def id(self): """A unique id for this model instance.""" return self.key.id() def _pre_put_hook(self): """This is run before model instances are saved to the datastore. Subclasses of BaseModel should override this method. """ pass class EntityNotFoundError(Exception): """Raised when no entity for a given id exists in the datastore.""" pass @staticmethod def export_data(user_id): """This method should be implemented by subclasses. Args: user_id: str. The ID of the user whose data should be exported. Raises: NotImplementedError: The method is not overwritten in derived classes. """ raise NotImplementedError @classmethod def get(cls, entity_id, strict=True): """Gets an entity by id. Args: entity_id: str. strict: bool. Whether to fail noisily if no entity with the given id exists in the datastore. Default is True. Returns: None, if strict == False and no undeleted entity with the given id exists in the datastore. Otherwise, the entity instance that corresponds to the given id. Raises: base_models.BaseModel.EntityNotFoundError: if strict == True and no undeleted entity with the given id exists in the datastore. """ entity = cls.get_by_id(entity_id) if entity and entity.deleted: entity = None if strict and entity is None: raise cls.EntityNotFoundError( 'Entity for class %s with id %s not found' % (cls.__name__, entity_id)) return entity @classmethod def get_multi(cls, entity_ids, include_deleted=False): """Gets list of entities by list of ids. Args: entity_ids: list(str). include_deleted: bool. Whether to include deleted entities in the return list. Default is False. Returns: list(*|None). A list that contains model instances that match the corresponding entity_ids in the input list. If an instance is not found, or it has been deleted and include_deleted is False, then the corresponding entry is None. 
""" entity_keys = [] none_argument_indices = [] for index, entity_id in enumerate(entity_ids): if entity_id: entity_keys.append(ndb.Key(cls, entity_id)) else: none_argument_indices.append(index) entities = ndb.get_multi(entity_keys) for index in none_argument_indices: entities.insert(index, None) if not include_deleted: for i in xrange(len(entities)): if entities[i] and entities[i].deleted: entities[i] = None return entities @classmethod def put_multi(cls, entities): """Stores the given ndb.Model instances. Args: entities: list(ndb.Model). """ ndb.put_multi(entities) @classmethod def delete_multi(cls, entities): """Deletes the given ndb.Model instances. Args: entities: list(ndb.Model). """ keys = [entity.key for entity in entities] ndb.delete_multi(keys) def delete(self): """Deletes this instance.""" super(BaseModel, self).key.delete() @classmethod def get_all(cls, include_deleted=False): """Gets iterable of all entities of this class. Args: include_deleted: bool. If True, then entities that have been marked deleted are returned as well. Defaults to False. Returns: iterable. Filterable iterable of all entities of this class. """ query = cls.query() if not include_deleted: query = query.filter(cls.deleted == False) # pylint: disable=singleton-comparison return query @classmethod def get_new_id(cls, entity_name): """Gets a new id for an entity, based on its name. The returned id is guaranteed to be unique among all instances of this entity. Args: entity_name: The name of the entity. Coerced to a utf-8 encoded string. Defaults to ''. Returns: str. New unique id for this entity class. Raises: Exception: An ID cannot be generated within a reasonable number of attempts. """ try: entity_name = unicode(entity_name).encode(encoding='utf-8') except Exception: entity_name = '' for _ in range(MAX_RETRIES): new_id = utils.convert_to_hash( '%s%s' % (entity_name, utils.get_random_int(RAND_RANGE)), ID_LENGTH) if not cls.get_by_id(new_id): return new_id raise Exception('New id generator is producing too many collisions.') @classmethod def _fetch_page_sorted_by_last_updated( cls, query, page_size, urlsafe_start_cursor): """Fetches a page of entities sorted by their last_updated attribute in descending order (newly updated first). Args: query: ndb.Query. page_size: int. The maximum number of entities to be returned. urlsafe_start_cursor: str or None. If provided, the list of returned entities starts from this datastore cursor. Otherwise, the returned entities start from the beginning of the full list of entities. Returns: 3-tuple of (results, cursor, more) as described in fetch_page() at: https://developers.google.com/appengine/docs/python/ndb/queryclass, where: results: List of query results. cursor: str or None. A query cursor pointing to the next batch of results. If there are no more results, this will be None. more: bool. If True, there are (probably) more results after this batch. If False, there are no further results after this batch. """ if urlsafe_start_cursor: start_cursor = datastore_query.Cursor(urlsafe=urlsafe_start_cursor) else: start_cursor = None result = query.order(-cls.last_updated).fetch_page( page_size, start_cursor=start_cursor) return ( result[0], (result[1].urlsafe() if result[1] else None), result[2]) class BaseCommitLogEntryModel(BaseModel): """Base Model for the models that store the log of commits to a construct. """ # Update superclass model to make these properties indexed. 
created_on = ndb.DateTimeProperty(auto_now_add=True, indexed=True) last_updated = ndb.DateTimeProperty(auto_now=True, indexed=True) # The id of the user. user_id = ndb.StringProperty(indexed=True, required=True) # The username of the user, at the time of the edit. username = ndb.StringProperty(indexed=True, required=True) # The type of the commit: 'create', 'revert', 'edit', 'delete'. commit_type = ndb.StringProperty(indexed=True, required=True) # The commit message. commit_message = ndb.TextProperty(indexed=False) # The commit_cmds dict for this commit. commit_cmds = ndb.JsonProperty(indexed=False, required=True) # The status of the entity after the edit event ('private', 'public'). post_commit_status = ndb.StringProperty(indexed=True, required=True) # Whether the entity is community-owned after the edit event. post_commit_community_owned = ndb.BooleanProperty(indexed=True) # Whether the entity is private after the edit event. Having a # separate field for this makes queries faster, since an equality query # on this property is faster than an inequality query on # post_commit_status. post_commit_is_private = ndb.BooleanProperty(indexed=True) # The version number of the model after this commit. version = ndb.IntegerProperty() @classmethod def create( cls, entity_id, version, committer_id, committer_username, commit_type, commit_message, commit_cmds, status, community_owned): """This method returns an instance of the CommitLogEntryModel for a construct with the common fields filled. Args: entity_id: str. The ID of the construct corresponding to this commit log entry model (e.g. the exp_id for an exploration, the story_id for a story, etc.). version: int. The version number of the model after the commit. committer_id: str. The user_id of the user who committed the change. committer_username: str. The username of the user who committed the change. commit_type: str. The type of commit. Possible values are in core.storage.base_models.COMMIT_TYPE_CHOICES. commit_message: str. The commit description message. commit_cmds: list(dict). A list of commands, describing changes made in this model, which should give sufficient information to reconstruct the commit. Each dict always contains: cmd: str. Unique command. and then additional arguments for that command. status: str. The status of the entity after the commit. community_owned: bool. Whether the entity is community_owned after the commit. Returns: CommitLogEntryModel. Returns the respective CommitLogEntryModel instance of the construct from which this is called. """ return cls( id=cls._get_instance_id(entity_id, version), user_id=committer_id, username=committer_username, commit_type=commit_type, commit_message=commit_message, commit_cmds=commit_cmds, version=version, post_commit_status=status, post_commit_community_owned=community_owned, post_commit_is_private=( status == constants.ACTIVITY_STATUS_PRIVATE) ) @classmethod def _get_instance_id(cls, target_entity_id, version): """This method should be implemented in the inherited classes. Args: target_entity_id: str. The ID of the construct corresponding to this commit log entry model (e.g. the exp_id for an exploration, the story_id for a story, etc.). version: int. The version number of the model after the commit. Raises: NotImplementedError: The method is not overwritten in derived classes. """ raise NotImplementedError @classmethod def get_all_commits(cls, page_size, urlsafe_start_cursor): """Fetches a list of all the commits sorted by their last updated attribute. Args: page_size: int. 
The maximum number of entities to be returned. urlsafe_start_cursor: str or None. If provided, the list of returned entities starts from this datastore cursor. Otherwise, the returned entities start from the beginning of the full list of entities. Returns: 3-tuple of (results, cursor, more) as described in fetch_page() at: https://developers.google.com/appengine/docs/python/ndb/queryclass, where: results: List of query results. cursor: str or None. A query cursor pointing to the next batch of results. If there are no more results, this might be None. more: bool. If True, there are (probably) more results after this batch. If False, there are no further results after this batch. """ return cls._fetch_page_sorted_by_last_updated( cls.query(), page_size, urlsafe_start_cursor) @classmethod def get_commit(cls, target_entity_id, version): """Returns the commit corresponding to an instance id and version number. Args: target_entity_id: str. The ID of the construct corresponding to this commit log entry model (e.g. the exp_id for an exploration, the story_id for a story, etc.). version: int. The version number of the instance after the commit. Returns: BaseCommitLogEntryModel. The commit with the target entity id and version number. """ commit_id = cls._get_instance_id(target_entity_id, version) return cls.get_by_id(commit_id) class VersionedModel(BaseModel): """Model that handles storage of the version history of model instances. To use this class, you must declare a SNAPSHOT_METADATA_CLASS and a SNAPSHOT_CONTENT_CLASS. The former must contain the String fields 'committer_id', 'commit_type' and 'commit_message', and a JSON field for the Python list of dicts, 'commit_cmds'. The latter must contain the JSON field 'content'. The item that is being versioned must be serializable to a JSON blob. Note that commit() should be used for VersionedModels, as opposed to put() for direct subclasses of BaseModel. """ # The class designated as the snapshot model. This should be a subclass of # BaseSnapshotMetadataModel. SNAPSHOT_METADATA_CLASS = None # The class designated as the snapshot content model. This should be a # subclass of BaseSnapshotContentModel. SNAPSHOT_CONTENT_CLASS = None # Whether reverting is allowed. Default is False. ALLOW_REVERT = False # IMPORTANT: Subclasses should only overwrite things above this line. # The possible commit types. _COMMIT_TYPE_CREATE = 'create' _COMMIT_TYPE_REVERT = 'revert' _COMMIT_TYPE_EDIT = 'edit' _COMMIT_TYPE_DELETE = 'delete' # A list containing the possible commit types. COMMIT_TYPE_CHOICES = [ _COMMIT_TYPE_CREATE, _COMMIT_TYPE_REVERT, _COMMIT_TYPE_EDIT, _COMMIT_TYPE_DELETE ] # The reserved prefix for keys that are automatically inserted into a # commit_cmd dict by this model. _AUTOGENERATED_PREFIX = 'AUTO' # The command string for a revert commit. CMD_REVERT_COMMIT = '%s_revert_version_number' % _AUTOGENERATED_PREFIX # The command string for a delete commit. CMD_DELETE_COMMIT = '%s_mark_deleted' % _AUTOGENERATED_PREFIX # The current version number of this instance. In each PUT operation, # this number is incremented and a snapshot of the modified instance is # stored in the snapshot metadata and content models. The snapshot # version number starts at 1 when the model instance is first created. # All data in this instance represents the version at HEAD; data about the # previous versions is stored in the snapshot models. 
version = ndb.IntegerProperty(default=0) def _require_not_marked_deleted(self): """Checks whether the model instance is deleted.""" if self.deleted: raise Exception('This model instance has been deleted.') def _compute_snapshot(self): """Generates a snapshot (dict) from the model property values.""" return self.to_dict(exclude=['created_on', 'last_updated']) def _reconstitute(self, snapshot_dict): """Populates the model instance with the snapshot. Args: snapshot_dict: dict(str, *). The snapshot with the model property values. Returns: VersionedModel. The instance of the VersionedModel class populated with the the snapshot. """ self.populate(**snapshot_dict) return self def _reconstitute_from_snapshot_id(self, snapshot_id): """Gets a reconstituted instance of this model class, based on the given snapshot id. Args: snapshot_id: str. Returns: VersionedModel. Reconstituted instance. """ snapshot_model = self.SNAPSHOT_CONTENT_CLASS.get(snapshot_id) snapshot_dict = snapshot_model.content reconstituted_model = self._reconstitute(snapshot_dict) # TODO(sll): The 'created_on' and 'last_updated' values here will be # slightly different from the values the entity model would have had, # since they correspond to the corresponding fields for the snapshot # content model instead. Figure out whether this is a problem or not, # and whether we need to record the contents of those fields in the # actual entity model (in which case we also need a way to deal with # old snapshots that don't have this information). reconstituted_model.created_on = snapshot_model.created_on reconstituted_model.last_updated = snapshot_model.last_updated return reconstituted_model @classmethod def _get_snapshot_id(cls, instance_id, version_number): """Gets a unique snapshot id for this instance and version. Args: instance_id: str. version_number: int. Returns: str. The unique snapshot id corresponding to the given instance and version. """ return '%s%s%s' % ( instance_id, _VERSION_DELIMITER, version_number) def _trusted_commit( self, committer_id, commit_type, commit_message, commit_cmds): """Evaluates and executes commit. Main function for all commit types. Args: committer_id: str. The user_id of the user who committed the change. commit_type: str. Unique identifier of commit type. Possible values are in COMMIT_TYPE_CHOICES. commit_message: str. commit_cmds: list(dict). A list of commands, describing changes made in this model, should give sufficient information to reconstruct the commit. Dict always contains: cmd: str. Unique command. And then additional arguments for that command. For example: {'cmd': 'AUTO_revert_version_number' 'version_number': 4} Raises: Exception: No snapshot metadata class has been defined. Exception: No snapshot content class has been defined. Exception: commit_cmds is not a list of dicts. 
""" if self.SNAPSHOT_METADATA_CLASS is None: raise Exception('No snapshot metadata class defined.') if self.SNAPSHOT_CONTENT_CLASS is None: raise Exception('No snapshot content class defined.') if not isinstance(commit_cmds, list): raise Exception( 'Expected commit_cmds to be a list of dicts, received %s' % commit_cmds) self.version += 1 snapshot = self._compute_snapshot() snapshot_id = self._get_snapshot_id(self.id, self.version) snapshot_metadata_instance = self.SNAPSHOT_METADATA_CLASS( # pylint: disable=not-callable id=snapshot_id, committer_id=committer_id, commit_type=commit_type, commit_message=commit_message, commit_cmds=commit_cmds) snapshot_content_instance = self.SNAPSHOT_CONTENT_CLASS( # pylint: disable=not-callable id=snapshot_id, content=snapshot) transaction_services.run_in_transaction( ndb.put_multi, [snapshot_metadata_instance, snapshot_content_instance, self]) def delete(self, committer_id, commit_message, force_deletion=False): """Deletes this model instance. Args: committer_id: str. The user_id of the user who committed the change. commit_message: str. force_deletion: bool. If True this model is deleted completely from storage, otherwise it is only marked as deleted. Default is False. Raises: Exception: This model instance has been already deleted. """ if force_deletion: current_version = self.version version_numbers = [str(num + 1) for num in range(current_version)] snapshot_ids = [ self._get_snapshot_id(self.id, version_number) for version_number in version_numbers] metadata_keys = [ ndb.Key(self.SNAPSHOT_METADATA_CLASS, snapshot_id) for snapshot_id in snapshot_ids] ndb.delete_multi(metadata_keys) content_keys = [ ndb.Key(self.SNAPSHOT_CONTENT_CLASS, snapshot_id) for snapshot_id in snapshot_ids] ndb.delete_multi(content_keys) super(VersionedModel, self).delete() else: self._require_not_marked_deleted() # pylint: disable=protected-access self.deleted = True commit_cmds = [{ 'cmd': self.CMD_DELETE_COMMIT }] self._trusted_commit( committer_id, self._COMMIT_TYPE_DELETE, commit_message, commit_cmds) def put(self, *args, **kwargs): """For VersionedModels, this method is replaced with commit().""" raise NotImplementedError def commit(self, committer_id, commit_message, commit_cmds): """Saves a version snapshot and updates the model. Args: committer_id: str. The user_id of the user who committed the change. commit_message: str. commit_cmds: list(dict). A list of commands, describing changes made in this model, should give sufficient information to reconstruct the commit. Dict always contains: cmd: str. Unique command. And then additional arguments for that command. For example: {'cmd': 'AUTO_revert_version_number' 'version_number': 4} Raises: Exception: This model instance has been already deleted. Exception: commit_cmd is in invalid format. """ self._require_not_marked_deleted() for item in commit_cmds: if not isinstance(item, dict): raise Exception( 'Expected commit_cmds to be a list of dicts, received %s' % commit_cmds) for commit_cmd in commit_cmds: if 'cmd' not in commit_cmd: raise Exception( 'Invalid commit_cmd: %s. Expected a \'cmd\' key.' % commit_cmd) if commit_cmd['cmd'].startswith(self._AUTOGENERATED_PREFIX): raise Exception( 'Invalid change list command: %s' % commit_cmd['cmd']) commit_type = ( self._COMMIT_TYPE_CREATE if self.version == 0 else self._COMMIT_TYPE_EDIT) self._trusted_commit( committer_id, commit_type, commit_message, commit_cmds) @classmethod def revert(cls, model, committer_id, commit_message, version_number): """Reverts model to previous version. 
Args: model: VersionedModel. committer_id: str. The user_id of the user who committed the change. commit_message: str. version_number: int. Version to revert to. Raises: Exception: This model instance has been deleted. Exception: Reverting is not allowed on this model. """ model._require_not_marked_deleted() # pylint: disable=protected-access if not model.ALLOW_REVERT: raise Exception( 'Reverting objects of type %s is not allowed.' % model.__class__.__name__) commit_cmds = [{ 'cmd': model.CMD_REVERT_COMMIT, 'version_number': version_number }] # Do not overwrite the version number. current_version = model.version # If a new property is introduced after a certain version of a model, # the property should be its default value when an old snapshot of the # model is applied during reversion. E.g. states_schema_version in # ExplorationModel may be added after some version of a saved # exploration. If that exploration is reverted to a version that does # not have a states_schema_version property, it should revert to the # default states_schema_version value rather than taking the # states_schema_version value from the latest exploration version. # pylint: disable=protected-access snapshot_id = model._get_snapshot_id(model.id, version_number) new_model = cls(id=model.id) new_model._reconstitute_from_snapshot_id(snapshot_id) new_model.version = current_version new_model._trusted_commit( committer_id, cls._COMMIT_TYPE_REVERT, commit_message, commit_cmds) # pylint: enable=protected-access @classmethod def get_version(cls, entity_id, version_number): """Gets model instance representing the given version. The snapshot content is used to populate this model instance. The snapshot metadata is not used. Args: entity_id: str. version_number: int. Returns: VersionedModel. Model instance representing given version. Raises: Exception: This model instance has been deleted. """ # pylint: disable=protected-access cls.get(entity_id)._require_not_marked_deleted() snapshot_id = cls._get_snapshot_id(entity_id, version_number) return cls( id=entity_id, version=version_number)._reconstitute_from_snapshot_id(snapshot_id) # pylint: enable=protected-access @classmethod def get_multi_versions(cls, entity_id, version_numbers): """Gets model instances for each version specified in version_numbers. Args: entity_id: str. ID of the entity. version_numbers: list(int). List of version numbers. Returns: list(VersionedModel). Model instances representing the given versions. Raises: ValueError. The given entity_id is invalid. ValueError. Requested version number cannot be higher than the current version number. ValueError. At least one version number is invalid. """ instances = [] entity = cls.get(entity_id, strict=False) if not entity: raise ValueError('The given entity_id %s is invalid.' % (entity_id)) current_version = entity.version max_version = max(version_numbers) if max_version > current_version: raise ValueError( 'Requested version number %s cannot be higher than the current ' 'version number %s.' 
% (max_version, current_version)) snapshot_ids = [] # pylint: disable=protected-access for version in version_numbers: snapshot_id = cls._get_snapshot_id(entity_id, version) snapshot_ids.append(snapshot_id) snapshot_models = cls.SNAPSHOT_CONTENT_CLASS.get_multi(snapshot_ids) for snapshot_model in snapshot_models: if snapshot_model is None: raise ValueError( 'At least one version number is invalid.') snapshot_dict = snapshot_model.content reconstituted_model = cls(id=entity_id)._reconstitute( snapshot_dict) reconstituted_model.created_on = snapshot_model.created_on reconstituted_model.last_updated = snapshot_model.last_updated instances.append(reconstituted_model) # pylint: enable=protected-access return instances @classmethod def get(cls, entity_id, strict=True, version=None): """Gets model instance. Args: entity_id: str. strict: bool. Whether to fail noisily if no entity with the given id exists in the datastore. Default is True. version: int. Version we want to get. Default is None. Returns: VersionedModel. If version is None, get the newest version of the model. Otherwise, get the specified version. """ if version is None: return super(VersionedModel, cls).get(entity_id, strict=strict) else: return cls.get_version(entity_id, version) @classmethod def get_snapshots_metadata( cls, model_instance_id, version_numbers, allow_deleted=False): """Gets a list of dicts, each representing a model snapshot. One dict is returned for each version number in the list of version numbers requested. If any of the version numbers does not exist, an error is raised. Args: model_instance_id: str. Id of requested model. version_numbers: list(int). List of version numbers. allow_deleted: bool. If is False, an error is raised if the current model has been deleted. Default is False. Returns: list(dict). Each dict contains metadata for a particular snapshot. It has the following keys: committer_id: str. The user_id of the user who committed the change. commit_message: str. commit_cmds: list(dict). A list of commands, describing changes made in this model, should give sufficient information to reconstruct the commit. Dict always contains: cmd: str. Unique command. And then additional arguments for that command. For example: {'cmd': 'AUTO_revert_version_number' 'version_number': 4} commit_type: str. Unique identifier of commit type. Possible values are in COMMIT_TYPE_CHOICES. version_number: int. created_on_ms: float. Snapshot creation time in milliseconds since the Epoch. Raises: Exception: There is no model instance corresponding to at least one of the given version numbers. 
""" # pylint: disable=protected-access if not allow_deleted: cls.get(model_instance_id)._require_not_marked_deleted() snapshot_ids = [ cls._get_snapshot_id(model_instance_id, version_number) for version_number in version_numbers] # pylint: enable=protected-access metadata_keys = [ ndb.Key(cls.SNAPSHOT_METADATA_CLASS, snapshot_id) for snapshot_id in snapshot_ids] returned_models = ndb.get_multi(metadata_keys) for ind, model in enumerate(returned_models): if model is None: raise Exception( 'Invalid version number %s for model %s with id %s' % (version_numbers[ind], cls.__name__, model_instance_id)) return [{ 'committer_id': model.committer_id, 'commit_message': model.commit_message, 'commit_cmds': model.commit_cmds, 'commit_type': model.commit_type, 'version_number': version_numbers[ind], 'created_on_ms': utils.get_time_in_millisecs(model.created_on), } for (ind, model) in enumerate(returned_models)] class BaseSnapshotMetadataModel(BaseModel): """Base class for snapshot metadata classes. The id of this model is computed using VersionedModel.get_snapshot_id(). """ # The id of the user who committed this revision. committer_id = ndb.StringProperty(required=True) # The type of the commit associated with this snapshot. commit_type = ndb.StringProperty( required=True, choices=VersionedModel.COMMIT_TYPE_CHOICES) # The commit message associated with this snapshot. commit_message = ndb.TextProperty(indexed=False) # A sequence of commands that can be used to describe this commit. # Represented as a list of dicts. commit_cmds = ndb.JsonProperty(indexed=False) def get_unversioned_instance_id(self): """Gets the instance id from the snapshot id. Returns: str. Instance id part of snapshot id. """ return self.id[:self.id.rfind(_VERSION_DELIMITER)] def get_version_string(self): """Gets the version number from the snapshot id. Returns: str. Version number part of snapshot id. """ return self.id[self.id.rfind(_VERSION_DELIMITER) + 1:] class BaseSnapshotContentModel(BaseModel): """Base class for snapshot content classes. The id of this model is computed using VersionedModel.get_snapshot_id(). """ # The snapshot content, as a JSON blob. content = ndb.JsonProperty(indexed=False) def get_unversioned_instance_id(self): """Gets the instance id from the snapshot id. Returns: str. Instance id part of snapshot id. """ return self.id[:self.id.rfind(_VERSION_DELIMITER)] def get_version_string(self): """Gets the version number from the snapshot id. Returns: str. Version number part of snapshot id. """ return self.id[self.id.rfind(_VERSION_DELIMITER) + 1:] class BaseMapReduceBatchResultsModel(BaseModel): """Base model for batch storage for MR jobs. This model turns off caching, because this results in stale data being shown after each MapReduce job run. Classes which are used by a MR job to store its batch results should subclass this class. """ _use_cache = False _use_memcache = False
apache-2.0
8,921,728,561,893,953,000
38.100868
98
0.618596
false
4.546727
false
false
false
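As an illustration of how the versioned-model machinery above is meant to be used, here is a minimal sketch of a concrete subclass and a commit/revert round trip. NoteModel, the snapshot classes and the commands are hypothetical names, and the sketch assumes the same ndb environment as the models above.

# Illustrative sketch; NoteModel, the snapshot classes and the commands are
# hypothetical, and the same ndb environment as the models above is assumed.
class NoteSnapshotMetadataModel(BaseSnapshotMetadataModel):
    pass


class NoteSnapshotContentModel(BaseSnapshotContentModel):
    pass


class NoteModel(VersionedModel):
    """A tiny versioned model with a single text field."""

    SNAPSHOT_METADATA_CLASS = NoteSnapshotMetadataModel
    SNAPSHOT_CONTENT_CLASS = NoteSnapshotContentModel
    ALLOW_REVERT = True

    text = ndb.TextProperty(default='')


# VersionedModel instances are saved with commit() rather than put(), so every
# change is recorded as a snapshot that can be inspected or reverted later.
note = NoteModel(id='note_1', text='first draft')
note.commit('user_1', 'Create note', [{'cmd': 'create_new'}])

note.text = 'second draft'
note.commit('user_1', 'Edit note', [{'cmd': 'edit_text', 'new_value': 'second draft'}])

old = NoteModel.get_version('note_1', 1)              # read back version 1
NoteModel.revert(note, 'user_1', 'Undo the edit', 1)  # records a new revert version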
pettarin/penelope
setup.py
1
2657
#!/usr/bin/env python
# coding=utf-8

"""
Set penelope package up
"""

from setuptools import Extension
from setuptools import setup

__author__ = "Alberto Pettarin"
__copyright__ = "Copyright 2012-2016, Alberto Pettarin (www.albertopettarin.it)"
__license__ = "MIT"
__version__ = "3.1.3"
__email__ = "[email protected]"
__status__ = "Production"

setup(
    name="penelope",
    packages=["penelope"],
    package_data={"penelope": ["res/*"]},
    version="3.1.3.0",
    description="Penelope is a multi-tool for creating, editing and converting dictionaries, especially for eReader devices",
    author="Alberto Pettarin",
    author_email="[email protected]",
    url="https://github.com/pettarin/penelope",
    license="MIT License",
    long_description=open("README.rst", "r").read(),
    install_requires=["lxml>=3.0", "marisa-trie>=0.7.2"],
    scripts=["bin/penelope"],
    keywords=[
        "Dictionary", "Dictionaries", "Index", "Merge", "Flatten",
        "eReader", "eReaders", "Bookeen", "CSV", "EPUB", "MOBI",
        "Kindle", "Kobo", "StarDict", "XML", "MARISA", "kindlegen",
        "dictzip",
    ],
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Intended Audience :: End Users/Desktop",
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: Microsoft :: Windows",
        "Operating System :: POSIX :: Linux",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Topic :: Desktop Environment",
        "Topic :: Documentation",
        "Topic :: Office/Business",
        "Topic :: Software Development :: Internationalization",
        "Topic :: Software Development :: Localization",
        "Topic :: Text Editors",
        "Topic :: Text Editors :: Text Processing",
        "Topic :: Text Processing",
        "Topic :: Text Processing :: General",
        "Topic :: Text Processing :: Indexing",
        "Topic :: Text Processing :: Linguistic",
        "Topic :: Text Processing :: Markup",
        "Topic :: Text Processing :: Markup :: HTML",
        "Topic :: Text Processing :: Markup :: XML",
        "Topic :: Utilities"
    ],
)
mit
-1,803,094,169,845,251,800
31.012048
125
0.572074
false
3.924668
false
false
false
DomainDrivenConsulting/dogen
projects/masd.dogen.dia/python/add_to_package.py
1
1625
# -*- mode: python; tab-width: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2012-2015 Marco Craveiro <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#

#
# First locate the parent
#
for layer in dia.active_display().diagram.data.layers:
    for object in layer.objects:
        if object.type.name == "UML - LargePackage":
            if object.properties["name"].value == "transforms":
                parent = object
                print "found parent"

#
# Then update all objects without a parent. Make sure all classes
# that are orphaned actually belong to this package before running.
#
if parent != None:
    for layer in dia.active_display().diagram.data.layers:
        for object in layer.objects:
            if object.type.name == "UML - Class":
                if object.parent == None:
                    print object.properties["name"].value
                    object.parent = parent

print "done"
gpl-3.0
-5,694,978,738,083,802,000
37.690476
70
0.675077
false
4.072682
false
false
false
kikocorreoso/mplutils
mplutils/axes.py
1
8516
# -*- coding: utf-8 -*- """ Created on Sun Feb 21 23:43:37 2016 @author: kiko """ from __future__ import division, absolute_import from .settings import RICH_DISPLAY import numpy as np if RICH_DISPLAY: from IPython.display import display def axes_set_better_defaults(ax, axes_color = '#777777', grid = False, show = False): """ Enter an Axes instance and it will change the defaults to an opinionated version of how a simple plot should be. Parameters: ----------- ax : matplotlib.axes.Axes or matplotlib.axes.Subplot instance axes_color : str A string indicating a valid matplotlib color. grid : bool If `True` the grid of the axes will be shown, if `False` (default) the grid, if active, will be supressed. show : bool if `True` the figure will be shown. If you are working in a rich display environment like the IPython qtconsole or the Jupyter notebook it will use `IPython.display.display` to show the figure. If you are working otherwise it will call the `show` of the `Figure` instance. """ ax.set_axis_bgcolor((1, 1, 1)) ax.grid(grid) for key in ax.spines.keys(): if ax.spines[key].get_visible(): ax.spines[key].set_color(axes_color) ax.tick_params(axis = 'x', colors = axes_color) ax.tick_params(axis = 'y', colors = axes_color) ax.figure.set_facecolor('white') ax.figure.canvas.draw() if show: if RICH_DISPLAY: display(ax.figure) else: ax.figure.show() # http://matplotlib.org/examples/pylab_examples/spine_placement_demo.html def axes_set_axis_position(ax, spines = ['bottom', 'left'], pan = 0, show = False): """ Enter an Axes instance and depending the options it will display the axis where you selected. Parameters: ----------- ax : matplotlib.axes.Axes or matplotlib.axes.Subplot instance spines : str or iterable A string or an iterable of strings with the following valid options: 'bottom' : To active the bottom x-axis. 'top' : To active the top x-axis. 'left' : To active the left y-axis. 'right' : To active the right y-axis. pan : int or iterable A integer value or an iterable of integer values indicating the value to pan the axis. It has to have the same lenght and the same order than the spines input. show : bool if `True` the figure will be shown. If you are working in a rich display environment like the IPython qtconsole or the Jupyter notebook it will use `IPython.display.display` to show the figure. If you are working otherwise it will call the `show` of the `Figure` instance. """ if np.isscalar(spines): spines = (spines,) len_spines = 1 else: len_spines = len(spines) if np.isscalar(pan): pan = np.repeat(pan, len_spines) len_pan = 1 else: len_pan = len(pan) if len_pan > 1 and len_pan != len_spines: raise ValueError(('Length of `spines` and `pan` mismatch. 
`pan` ') ('should be a scalar or should have the same length than `spines`.')) i = 0 for loc, spine in ax.spines.items(): if loc in spines: spine.set_position(('outward', pan[i])) # outward by `pan` points spine.set_smart_bounds(True) i += 1 else: #spine.set_color('none') # don't draw spine spine.set_visible(False) # turn off ticks where there is no spine if 'left' in spines: ax.yaxis.set_ticks_position('left') ax.tick_params(labelleft = True) if 'right' in spines: ax.yaxis.set_ticks_position('right') ax.tick_params(labelright = True) if 'left' in spines and 'right' in spines: ax.yaxis.set_ticks_position('both') ax.tick_params(labelleft = True, labelright = True) if 'left' not in spines and 'right' not in spines: ax.yaxis.set_ticks([]) if 'bottom' in spines: ax.xaxis.set_ticks_position('bottom') ax.tick_params(labelbottom = True) if 'top' in spines: ax.xaxis.set_ticks_position('top') ax.tick_params(labeltop = True) if 'bottom' in spines and 'top' in spines: ax.xaxis.set_ticks_position('both') ax.tick_params(labelbottom = True, labeltop = True) if 'bottom' not in spines and 'top' not in spines: ax.xaxis.set_ticks([]) ax.figure.canvas.draw() if show: if RICH_DISPLAY: display(ax.figure) else: ax.figure.show() def axes_set_origin(ax, x = 0, y = 0, xticks_position = 'bottom', yticks_position = 'left', xticks_visible = True, yticks_visible = True, show = False): """ function to locate x-axis and y-axis on the position you want. Parameters: ----------- ax : matplotlib.axes.Axes or matplotlib.axes.Subplot instance x : int or float Value indicating the position on the y-axis where you want the x-axis to be located. y : int or float Value indicating the position on the x-axis where you want the y-axis to be located. xticks_position : str Default value is 'bottom' if you want the ticks to be located below the x-axis. 'top' if you want the ticks to be located above the x-axis. yticks_position : str Default value is 'left' if you want the ticks to be located on the left side of the y-axis. 'right' if you want the ticks to be located on the right side of the y-axis. xticks_visible : bool Default value is True if you want ticks visible on the x-axis. False if you don't want to see the ticks on the x-axis. yticks_visible : bool Default value is True if you want ticks visible on the y-axis. False if you don't want to see the ticks on the y-axis. show : bool if `True` the figure will be shown. If you are working in a rich display environment like the IPython qtconsole or the Jupyter notebook it will use `IPython.display.display` to show the figure. If you are working otherwise it will call the `show` of the `Figure` instance. 
""" ax.spines['right'].set_visible(False) ax.spines['top'].set_visible(False) ax.xaxis.set_ticks_position(xticks_position) ax.spines['bottom'].set_position(('data', x)) ax.yaxis.set_ticks_position(yticks_position) ax.spines['left'].set_position(('data', y)) if not xticks_visible: ax.set_xticks([]) if not yticks_visible: ax.set_yticks([]) ax.figure.canvas.draw() if show: if RICH_DISPLAY: display(ax.figure) else: ax.figure.show() def axes_set_aspect_ratio(ax, ratio = 'equal', show = True): """ function that accepts an Axes instance and update the information setting the aspect ratio of the axis to the defined quantity Parameters: ----------- ax : matplotlib.axes.Axes or matplotlib.axes.Subplot instance ratio : str or int/float The value can be a string with the following values: 'equal' : (default) same scaling from data to plot units for x and y 'auto' : automatic; fill position rectangle with data Or a: number (int or float) : a circle will be stretched such that the height is num times the width. aspec t =1 is the same as aspect='equal'. show : bool if `True` the figure will be shown. If you are working in a rich display environment like the IPython qtconsole or the Jupyter notebook it will use `IPython.display.display` to show the figure. If you are working otherwise it will call the `show` of the `Figure` instance. """ ax.set_aspect(ratio, adjustable = None) if show: if RICH_DISPLAY: display(ax.figure) else: ax.figure.show()
mit
-877,875,270,445,470,800
36.685841
80
0.588539
false
3.95174
false
false
false
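To illustrate the axes helpers defined above, a small usage sketch follows. It assumes a matplotlib version contemporary with the snippet and that the mplutils package is importable; the data and styling values are arbitrary examples.

# Usage sketch; assumes a matplotlib version contemporary with the snippet and
# that the mplutils package is importable. Data and style values are examples.
import numpy as np
import matplotlib.pyplot as plt

from mplutils.axes import (axes_set_axis_position, axes_set_better_defaults,
                           axes_set_origin)

x = np.linspace(-2 * np.pi, 2 * np.pi, 200)
fig, (ax1, ax2) = plt.subplots(1, 2)

# Left panel: lighter defaults plus bottom/left spines pushed outward by 10 points.
ax1.plot(x, np.sin(x))
axes_set_better_defaults(ax1, axes_color="#555555", grid=True)
axes_set_axis_position(ax1, spines=["bottom", "left"], pan=10)

# Right panel: both axes crossing at the data origin.
ax2.plot(x, np.cos(x))
axes_set_origin(ax2, x=0, y=0)

plt.show()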
ancafarcas/superdesk-core
superdesk/media/media_operations.py
1
5561
# -*- coding: utf-8; -*- # # This file is part of Superdesk. # # Copyright 2013, 2014 Sourcefabric z.u. and contributors. # # For the full copyright and license information, please see the # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license import arrow import magic import hashlib import logging import requests from bson import ObjectId from io import BytesIO from PIL import Image from flask import json from .image import get_meta, fix_orientation from .video import get_meta as video_meta import base64 from superdesk.errors import SuperdeskApiError logger = logging.getLogger(__name__) def hash_file(afile, hasher, blocksize=65536): buf = afile.read(blocksize) while len(buf) > 0: hasher.update(buf) buf = afile.read(blocksize) return hasher.hexdigest() def get_file_name(file): return hash_file(file, hashlib.sha256()) def download_file_from_url(url): rv = requests.get(url, timeout=15) if rv.status_code not in (200, 201): raise SuperdeskApiError.internalError('Failed to retrieve file from URL: %s' % url) mime = magic.from_buffer(rv.content, mime=True) ext = str(mime).split('/')[1] name = str(ObjectId()) + ext return BytesIO(rv.content), name, str(mime) def download_file_from_encoded_str(encoded_str): content = encoded_str.split(';base64,') mime = content[0].split(':')[1] ext = content[0].split('/')[1] name = str(ObjectId()) + ext content = base64.b64decode(content[1]) return BytesIO(content), name, mime def process_file_from_stream(content, content_type=None): content_type = content_type or content.content_type content = BytesIO(content.read()) if 'application/' in content_type: content_type = magic.from_buffer(content.getvalue(), mime=True) content.seek(0) file_type, ext = content_type.split('/') try: metadata = process_file(content, file_type) except OSError: # error from PIL when image is supposed to be an image but is not. raise SuperdeskApiError.internalError('Failed to process file') file_name = get_file_name(content) content.seek(0) metadata = encode_metadata(metadata) metadata.update({'length': json.dumps(len(content.getvalue()))}) return file_name, content_type, metadata def encode_metadata(metadata): return dict((k.lower(), json.dumps(v)) for k, v in metadata.items()) def decode_metadata(metadata): return dict((k.lower(), decode_val(v)) for k, v in metadata.items()) def decode_val(string_val): """Format dates that elastic will try to convert automatically.""" val = json.loads(string_val) try: arrow.get(val, 'YYYY-MM-DD') # test if it will get matched by elastic return str(arrow.get(val)) except (Exception): return val def process_file(content, type): """Retrieves the media file metadata :param BytesIO content: content stream :param str type: type of media file :return: dict metadata related to media file. """ if type == 'image': return process_image(content) if type in ('audio', 'video'): return process_video(content) return {} def process_video(content): """Retrieves the video/audio metadata :param BytesIO content: content stream :return: dict video/audio metadata """ content.seek(0) meta = video_meta(content) content.seek(0) return meta def process_image(content): """Retrieves the image metadata :param BytesIO content: content stream :return: dict image metadata """ content.seek(0) meta = get_meta(content) fix_orientation(content) content.seek(0) return meta def _get_cropping_data(doc): """Get PIL Image crop data from doc with superdesk crops specs. 
:param doc: crop dict """ if all([doc.get('CropTop', None) is not None, doc.get('CropLeft', None) is not None, doc.get('CropRight', None) is not None, doc.get('CropBottom', None) is not None]): return (int(doc['CropLeft']), int(doc['CropTop']), int(doc['CropRight']), int(doc['CropBottom'])) def crop_image(content, file_name, cropping_data, exact_size=None, image_format=None): """Crop image stream to given crop. :param content: image file stream :param file_name :param cropping_data: superdesk crop dict ({'CropLeft': 0, 'CropTop': 0, ...}) :param exact_size: dict with `width` and `height` values """ if not isinstance(cropping_data, tuple): cropping_data = _get_cropping_data(cropping_data) if cropping_data: logger.debug('Opened image {} from stream, going to crop it'.format(file_name)) content.seek(0) img = Image.open(content) cropped = img.crop(cropping_data) if exact_size and 'width' in exact_size and 'height' in exact_size: cropped = cropped.resize((int(exact_size['width']), int(exact_size['height'])), Image.ANTIALIAS) logger.debug('Cropped image {} from stream, going to save it'.format(file_name)) try: out = BytesIO() cropped.save(out, image_format or img.format) out.seek(0) setattr(out, 'width', cropped.size[0]) setattr(out, 'height', cropped.size[1]) return True, out except Exception as io: logger.exception('Failed to generate crop for filename: {}. Crop: {}'.format(file_name, cropping_data)) return False, io return False, content
agpl-3.0
2,342,765,068,469,414,000
30.95977
115
0.660133
false
3.656147
false
false
false
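As a usage illustration of the media helpers above, a short sketch follows; the file names and the crop box are example values only.

# Usage sketch; 'photo.jpg' and the crop box are example values.
from io import BytesIO

from superdesk.media.media_operations import crop_image, process_image

with open("photo.jpg", "rb") as f:
    stream = BytesIO(f.read())

# Extract image metadata and normalise the orientation.
metadata = process_image(stream)

# Crop a 400x300 box from the top-left corner, then scale it down to 200x150.
crop = {"CropLeft": 0, "CropTop": 0, "CropRight": 400, "CropBottom": 300}
ok, cropped = crop_image(stream, "photo.jpg", crop,
                         exact_size={"width": 200, "height": 150})
if ok:
    with open("photo_cropped.jpg", "wb") as out:
        out.write(cropped.read())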
FirmlyReality/docklet
src/master/testTaskMgr.py
2
5417
import master.taskmgr from concurrent import futures import grpc from protos.rpc_pb2 import * from protos.rpc_pb2_grpc import * import threading, json, time, random from utils import env class SimulatedNodeMgr(): def get_batch_nodeips(self): return ['0.0.0.0'] class SimulatedMonitorFetcher(): def __init__(self, ip): self.info = {} self.info['cpuconfig'] = [1,1,1,1,1,1,1,1] self.info['meminfo'] = {} self.info['meminfo']['free'] = 8 * 1024 * 1024 # (kb) simulate 8 GB memory self.info['meminfo']['buffers'] = 8 * 1024 * 1024 self.info['meminfo']['cached'] = 8 * 1024 * 1024 self.info['diskinfo'] = [] self.info['diskinfo'].append({}) self.info['diskinfo'][0]['free'] = 16 * 1024 * 1024 * 1024 # (b) simulate 16 GB disk self.info['gpuinfo'] = [1,1] class SimulatedTaskController(WorkerServicer): def __init__(self, worker): self.worker = worker def start_vnode(self, vnodeinfo, context): print('[SimulatedTaskController] start vnode, taskid [%s] vnodeid [%d]' % (vnodeinfo.taskid, vnodeinfo.vnodeid)) return Reply(status=Reply.ACCEPTED,message="") def stop_vnode(self, vnodeinfo, context): print('[SimulatedTaskController] stop vnode, taskid [%s] vnodeid [%d]' % (vnodeinfo.taskid, vnodeinfo.vnodeid)) return Reply(status=Reply.ACCEPTED,message="") def start_task(self, taskinfo, context): print('[SimulatedTaskController] start task, taskid [%s] vnodeid [%d] token [%s]' % (taskinfo.taskid, taskinfo.vnodeid, taskinfo.token)) worker.process(taskinfo) return Reply(status=Reply.ACCEPTED,message="") def stop_task(self, taskinfo, context): print('[SimulatedTaskController] stop task, taskid [%s] vnodeid [%d] token [%s]' % (taskinfo.taskid, taskinfo.vnodeid, taskinfo.token)) return Reply(status=Reply.ACCEPTED,message="") class SimulatedWorker(threading.Thread): def __init__(self): threading.Thread.__init__(self) self.thread_stop = False self.tasks = [] def run(self): worker_port = env.getenv('BATCH_WORKER_PORT') server = grpc.server(futures.ThreadPoolExecutor(max_workers=5)) add_WorkerServicer_to_server(SimulatedTaskController(self), server) server.add_insecure_port('[::]:' + worker_port) server.start() while not self.thread_stop: for task in self.tasks: seed = random.random() if seed < 0.25: report(task.taskid, task.vnodeid, RUNNING, task.token) elif seed < 0.5: report(task.taskid, task.vnodeid, COMPLETED, task.token) self.tasks.remove(task) break elif seed < 0.75: report(task.taskid, task.vnodeid, FAILED, task.token) self.tasks.remove(task) break else: pass time.sleep(5) server.stop(0) def stop(self): self.thread_stop = True def process(self, task): self.tasks.append(task) class SimulatedJobMgr(threading.Thread): def __init__(self): threading.Thread.__init__(self) self.thread_stop = False def run(self): while not self.thread_stop: time.sleep(5) server.stop(0) def stop(self): self.thread_stop = True def report(self, task): print('[SimulatedJobMgr] task[%s] status %d' % (task.info.id, task.status)) def assignTask(self, taskmgr, taskid, instance_count, retry_count, timeout, cpu, memory, disk, gpu): task = {} task['instCount'] = instance_count task['retryCount'] = retry_count task['expTime'] = timeout task['at_same_time'] = True task['multicommand'] = True task['command'] = 'ls' task['srcAddr'] = '' task['envVars'] = {'a':'1'} task['stdErrRedPth'] = '' task['stdOutRedPth'] = '' task['image'] = 'root_root_base' task['cpuSetting'] = cpu task['memorySetting'] = memory task['diskSetting'] = disk task['gpuSetting'] = 0 task['mapping'] = [] taskmgr.add_task('root', taskid, task) class SimulatedLogger(): def info(self, msg): 
print('[INFO] ' + msg) def warning(self, msg): print('[WARNING] ' + msg) def error(self, msg): print('[ERROR] ' + msg) def test(): global worker global jobmgr global taskmgr worker = SimulatedWorker() worker.start() jobmgr = SimulatedJobMgr() jobmgr.start() taskmgr = master.taskmgr.TaskMgr(SimulatedNodeMgr(), SimulatedMonitorFetcher, master_ip='', scheduler_interval=2, external_logger=SimulatedLogger()) # taskmgr.set_jobmgr(jobmgr) taskmgr.start() add('task_0', instance_count=2, retry_count=2, timeout=60, cpu=2, memory=2048, disk=2048, gpu=0) def test2(): global jobmgr global taskmgr jobmgr = SimulatedJobMgr() jobmgr.start() taskmgr = master.taskmgr.TaskMgr(SimulatedNodeMgr(), SimulatedMonitorFetcher, master_ip='', scheduler_interval=2, external_logger=SimulatedLogger()) taskmgr.set_jobmgr(jobmgr) taskmgr.start() add('task_0', instance_count=2, retry_count=2, timeout=60, cpu=2, memory=2048, disk=2048, gpu=0) def add(taskid, instance_count, retry_count, timeout, cpu, memory, disk, gpu): global jobmgr global taskmgr jobmgr.assignTask(taskmgr, taskid, instance_count, retry_count, timeout, cpu, memory, disk, gpu) def report(taskid, instanceid, status, token): global taskmgr master_port = env.getenv('BATCH_MASTER_PORT') channel = grpc.insecure_channel('%s:%s' % ('0.0.0.0', master_port)) stub = MasterStub(channel) response = stub.report(ReportMsg(taskmsgs=[TaskMsg(taskid=taskid, username='root', vnodeid=instanceid, subTaskStatus=status, token=token)])) def stop(): global worker global jobmgr global taskmgr worker.stop() jobmgr.stop() taskmgr.stop()
bsd-3-clause
-676,241,921,716,462,500
27.067358
149
0.690419
false
2.918642
false
false
false
fastinetserver/portage-idfetch
pym/portage/cache/sql_template.py
1
9336
# Copyright: 2005 Gentoo Foundation # Author(s): Brian Harring ([email protected]) # License: GPL2 import sys from portage.cache import template, cache_errors from portage.cache.template import reconstruct_eclasses class SQLDatabase(template.database): """template class for RDBM based caches This class is designed such that derivatives don't have to change much code, mostly constant strings. _BaseError must be an exception class that all Exceptions thrown from the derived RDBMS are derived from. SCHEMA_INSERT_CPV_INTO_PACKAGE should be modified dependant on the RDBMS, as should SCHEMA_PACKAGE_CREATE- basically you need to deal with creation of a unique pkgid. If the dbapi2 rdbms class has a method of recovering that id, then modify _insert_cpv to remove the extra select. Creation of a derived class involves supplying _initdb_con, and table_exists. Additionally, the default schemas may have to be modified. """ SCHEMA_PACKAGE_NAME = "package_cache" SCHEMA_PACKAGE_CREATE = "CREATE TABLE %s (\ pkgid INTEGER PRIMARY KEY, label VARCHAR(255), cpv VARCHAR(255), UNIQUE(label, cpv))" % SCHEMA_PACKAGE_NAME SCHEMA_PACKAGE_DROP = "DROP TABLE %s" % SCHEMA_PACKAGE_NAME SCHEMA_VALUES_NAME = "values_cache" SCHEMA_VALUES_CREATE = "CREATE TABLE %s ( pkgid integer references %s (pkgid) on delete cascade, \ key varchar(255), value text, UNIQUE(pkgid, key))" % (SCHEMA_VALUES_NAME, SCHEMA_PACKAGE_NAME) SCHEMA_VALUES_DROP = "DROP TABLE %s" % SCHEMA_VALUES_NAME SCHEMA_INSERT_CPV_INTO_PACKAGE = "INSERT INTO %s (label, cpv) VALUES(%%s, %%s)" % SCHEMA_PACKAGE_NAME _BaseError = () _dbClass = None autocommits = False # cleanse_keys = True # boolean indicating if the derived RDBMS class supports replace syntax _supports_replace = False def __init__(self, location, label, auxdbkeys, *args, **config): """initialize the instance. derived classes shouldn't need to override this""" super(SQLDatabase, self).__init__(location, label, auxdbkeys, *args, **config) config.setdefault("host","127.0.0.1") config.setdefault("autocommit", self.autocommits) self._initdb_con(config) self.label = self._sfilter(self.label) def _dbconnect(self, config): """should be overridden if the derived class needs special parameters for initializing the db connection, or cursor""" self.db = self._dbClass(**config) self.con = self.db.cursor() def _initdb_con(self,config): """ensure needed tables are in place. If the derived class needs a different set of table creation commands, overload the approriate SCHEMA_ attributes. 
If it needs additional execution beyond, override""" self._dbconnect(config) if not self._table_exists(self.SCHEMA_PACKAGE_NAME): if self.readonly: raise cache_errors.ReadOnlyRestriction("table %s doesn't exist" % \ self.SCHEMA_PACKAGE_NAME) try: self.con.execute(self.SCHEMA_PACKAGE_CREATE) except self._BaseError as e: raise cache_errors.InitializationError(self.__class__, e) if not self._table_exists(self.SCHEMA_VALUES_NAME): if self.readonly: raise cache_errors.ReadOnlyRestriction("table %s doesn't exist" % \ self.SCHEMA_VALUES_NAME) try: self.con.execute(self.SCHEMA_VALUES_CREATE) except self._BaseError as e: raise cache_errors.InitializationError(self.__class__, e) def _table_exists(self, tbl): """return true if a table exists derived classes must override this""" raise NotImplementedError def _sfilter(self, s): """meta escaping, returns quoted string for use in sql statements""" return "\"%s\"" % s.replace("\\","\\\\").replace("\"","\\\"") def _getitem(self, cpv): try: self.con.execute("SELECT key, value FROM %s NATURAL JOIN %s " "WHERE label=%s AND cpv=%s" % (self.SCHEMA_PACKAGE_NAME, self.SCHEMA_VALUES_NAME, self.label, self._sfilter(cpv))) except self._BaseError as e: raise cache_errors.CacheCorruption(self, cpv, e) rows = self.con.fetchall() if len(rows) == 0: raise KeyError(cpv) vals = dict([(k,"") for k in self._known_keys]) vals.update(dict(rows)) return vals def _delitem(self, cpv): """delete a cpv cache entry derived RDBM classes for this *must* either support cascaded deletes, or override this method""" try: try: self.con.execute("DELETE FROM %s WHERE label=%s AND cpv=%s" % \ (self.SCHEMA_PACKAGE_NAME, self.label, self._sfilter(cpv))) if self.autocommits: self.commit() except self._BaseError as e: raise cache_errors.CacheCorruption(self, cpv, e) if self.con.rowcount <= 0: raise KeyError(cpv) except Exception: if not self.autocommits: self.db.rollback() # yes, this can roll back a lot more then just the delete. deal. raise def __del__(self): # just to be safe. if "db" in self.__dict__ and self.db != None: self.commit() self.db.close() def _setitem(self, cpv, values): try: # insert. try: pkgid = self._insert_cpv(cpv) except self._BaseError as e: raise cache_errors.CacheCorruption(cpv, e) # __getitem__ fills out missing values, # so we store only what's handed to us and is a known key db_values = [] for key in self._known_keys: if key in values and values[key]: db_values.append({"key":key, "value":values[key]}) if len(db_values) > 0: try: self.con.executemany("INSERT INTO %s (pkgid, key, value) VALUES(\"%s\", %%(key)s, %%(value)s)" % \ (self.SCHEMA_VALUES_NAME, str(pkgid)), db_values) except self._BaseError as e: raise cache_errors.CacheCorruption(cpv, e) if self.autocommits: self.commit() except Exception: if not self.autocommits: try: self.db.rollback() except self._BaseError: pass raise def _insert_cpv(self, cpv): """uses SCHEMA_INSERT_CPV_INTO_PACKAGE, which must be overloaded if the table definition doesn't support auto-increment columns for pkgid. returns the cpvs new pkgid note this doesn't commit the transaction. The caller is expected to.""" cpv = self._sfilter(cpv) if self._supports_replace: query_str = self.SCHEMA_INSERT_CPV_INTO_PACKAGE.replace("INSERT","REPLACE",1) else: # just delete it. 
try: del self[cpv] except (cache_errors.CacheCorruption, KeyError): pass query_str = self.SCHEMA_INSERT_CPV_INTO_PACKAGE try: self.con.execute(query_str % (self.label, cpv)) except self._BaseError: self.db.rollback() raise self.con.execute("SELECT pkgid FROM %s WHERE label=%s AND cpv=%s" % \ (self.SCHEMA_PACKAGE_NAME, self.label, cpv)) if self.con.rowcount != 1: raise cache_error.CacheCorruption(cpv, "Tried to insert the cpv, but found " " %i matches upon the following select!" % len(rows)) return self.con.fetchone()[0] def __contains__(self, cpv): if not self.autocommits: try: self.commit() except self._BaseError as e: raise cache_errors.GeneralCacheCorruption(e) try: self.con.execute("SELECT cpv FROM %s WHERE label=%s AND cpv=%s" % \ (self.SCHEMA_PACKAGE_NAME, self.label, self._sfilter(cpv))) except self._BaseError as e: raise cache_errors.GeneralCacheCorruption(e) return self.con.rowcount > 0 def __iter__(self): if not self.autocommits: try: self.commit() except self._BaseError as e: raise cache_errors.GeneralCacheCorruption(e) try: self.con.execute("SELECT cpv FROM %s WHERE label=%s" % (self.SCHEMA_PACKAGE_NAME, self.label)) except self._BaseError as e: raise cache_errors.GeneralCacheCorruption(e) # return [ row[0] for row in self.con.fetchall() ] for x in self.con.fetchall(): yield x[0] def iteritems(self): try: self.con.execute("SELECT cpv, key, value FROM %s NATURAL JOIN %s " "WHERE label=%s" % (self.SCHEMA_PACKAGE_NAME, self.SCHEMA_VALUES_NAME, self.label)) except self._BaseError as e: raise cache_errors.CacheCorruption(self, cpv, e) oldcpv = None l = [] for x, y, v in self.con.fetchall(): if oldcpv != x: if oldcpv != None: d = dict(l) if "_eclasses_" in d: d["_eclasses_"] = reconstruct_eclasses(oldcpv, d["_eclasses_"]) else: d["_eclasses_"] = {} yield cpv, d l.clear() oldcpv = x l.append((y,v)) if oldcpv != None: d = dict(l) if "_eclasses_" in d: d["_eclasses_"] = reconstruct_eclasses(oldcpv, d["_eclasses_"]) else: d["_eclasses_"] = {} yield cpv, d def commit(self): self.db.commit() def get_matches(self,match_dict): query_list = [] for k,v in match_dict.items(): if k not in self._known_keys: raise cache_errors.InvalidRestriction(k, v, "key isn't known to this cache instance") v = v.replace("%","\\%") v = v.replace(".*","%") query_list.append("(key=%s AND value LIKE %s)" % (self._sfilter(k), self._sfilter(v))) if len(query_list): query = " AND "+" AND ".join(query_list) else: query = '' print("query = SELECT cpv from package_cache natural join values_cache WHERE label=%s %s" % (self.label, query)) try: self.con.execute("SELECT cpv from package_cache natural join values_cache WHERE label=%s %s" % \ (self.label, query)) except self._BaseError as e: raise cache_errors.GeneralCacheCorruption(e) return [ row[0] for row in self.con.fetchall() ] if sys.hexversion >= 0x3000000: items = iteritems keys = __iter__
gpl-2.0
8,476,169,925,880,248,000
30.434343
114
0.678877
false
3.053974
true
false
false
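The class docstring above lists what a derived RDBMS cache has to supply (_BaseError, _dbClass, _table_exists, and possibly adjusted schemas). A purely structural sketch of such a subclass follows; `somedbmodule` stands in for any DB-API 2.0 driver and is hypothetical, and a real backend would also have to match the template's paramstyle and connection handling.

# Structural sketch only: `somedbmodule` is a hypothetical DB-API 2.0 driver.
import somedbmodule

from portage.cache import sql_template


class ExampleDatabase(sql_template.SQLDatabase):

    # all exceptions raised by the driver derive from this
    _BaseError = somedbmodule.Error
    # called as self._dbClass(**config) by _dbconnect()
    _dbClass = somedbmodule.connect
    autocommits = False

    def _table_exists(self, tbl):
        """Return True if `tbl` already exists (query is driver/schema specific)."""
        self.con.execute(
            "SELECT table_name FROM information_schema.tables WHERE table_name=%s" %
            self._sfilter(tbl))
        return self.con.rowcount > 0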
Ryex/Rabbyt
rabbyt/sprites.py
1
6567
from rabbyt._sprites import cBaseSprite, cSprite from rabbyt._rabbyt import pick_texture_target from rabbyt.anims import anim_slot, swizzle, Animable from rabbyt.primitives import Quad class BaseSprite(cBaseSprite, Animable): """ ``BaseSprite(...)`` This class provides some basic functionality for sprites: * transformations (x, y, rot, scale) * color (red, green, blue, alpha) * bounding_radius (for collision detection) ``BaseSprite`` doesn't render anything itself You'll want to subclass it and override either ``render()`` or ``render_after_transform()``. You can pass any of the ``BaseSprite`` properties as keyword arguments. (``x``, ``y``, ``xy``, etc.) """ x = anim_slot(default=0, index=0, doc="x coordinate of the sprite") y = anim_slot(default=0, index=1, doc="y coordinate of the sprite") rot = anim_slot(default=0, index=2, doc="rotation angle in degrees.") red = anim_slot(default=1, index=3, doc="red color component") green = anim_slot(default=1, index=4, doc="green color component") blue = anim_slot(default=1, index=5, doc="blue color component") alpha = anim_slot(default=1, index=6, doc="alpha color component") scale_x = anim_slot(default=1, index=7, doc="x component of ``scale``") scale_y = anim_slot(default=1, index=8, doc="y component of ``scale``") xy = swizzle("x", "y") rgb = swizzle("red", "green", "blue") rgba = swizzle("red", "green", "blue", "alpha") def _get_scale(self): if self.scale_x == self.scale_y: return self.scale_x else: return (self.scale_x, self.scale_y) def _set_scale(self, s): if hasattr(s, "__len__"): self.scale_x, self.scale_y = s else: self.scale_x = self.scale_y = s scale = property(_get_scale, _set_scale, doc= """ scale ``1.0`` is normal size; ``0.5`` is half size, ``2.0`` is double size... you get the point. You can scale the x and y axes independently by assigning a tuple with a length of two. """) class Sprite(cSprite, BaseSprite): """ ``Sprite(texture=None, shape=None, tex_shape=(0,1,1,0), ...)`` This class provides a basic, four point, textured sprite. All arguments are optional. ``texture`` should be an image filename, a pyglet texture object, or an OpenGL texture id. (See ``Sprite.texture`` for more information.) If ``shape`` is not given it will default to the dimensions of the texture if they are available. For more information on ``shape`` and ``tex_shape`` read the docstrings for ``Sprite.shape`` and ``Sprite.tex_shape`` Additionally, you can pass values for most of the properties as keyword arguments. (``x``, ``y``, ``xy``, ``u``, ``v``, ``uv``, etc...) """ u = anim_slot(default=0, index=9, doc="texture offset") v = anim_slot(default=0, index=10, doc="texture offset") uv = swizzle("u", "v") def __init__(self, texture=None, shape=None, tex_shape=None, **kwargs): BaseSprite.__init__(self) cSprite.__init__(self) self.red = self.green = self.blue = self.alpha = 1 self.x = self.y = 0 self.scale = 1 self.rot = 0 self.texture_id = -1 # If no shape or tex_shape was given, we want to have useful defaults # in case the texture doesn't set them. if shape is None: s = 10. self.shape = [s, s, -s, -s] if tex_shape is None: self.tex_shape = (0,1,1,0) self.texture = texture # If shape or tex_shape were given, we want them to override the # values set when we set the texture. 
if shape is not None: self.shape = shape if tex_shape is not None: self.tex_shape = tex_shape for name, value in list(kwargs.items()): if hasattr(self.__class__, name) and isinstance( getattr(self.__class__, name), (swizzle, anim_slot, property)): setattr(self, name, value) else: raise ValueError("unexpected keyword argument %r" % name) def ensure_target(self): if not self.texture_target: target = pick_texture_target() self.texture_target = target def _get_texture(self): return self._tex_obj def _set_texture(self, texture): self._tex_obj = texture tex_size = None if isinstance(texture, str): from rabbyt._rabbyt import load_texture_file_hook res = load_texture_file_hook(texture) if isinstance(res, tuple) and len(res) == 2: self.texture_id, tex_size = res else: self.texture = res # Recursive elif isinstance(texture, int): self.texture_id = texture elif hasattr(texture, "id"): if hasattr(texture, "target"): self.texture_target = texture.target self.texture_id = texture.id if hasattr(texture, "tex_coords"): self.tex_shape = texture.tex_coords self.uv = 0,0 elif hasattr(texture, "tex_shape"): self.tex_shape = texture.tex_shape if hasattr(texture, "width") and hasattr(texture, "height"): tex_size = (texture.width, texture.height) elif texture is None: self.texture_id = 0 else: raise ValueError("texture should be either an int or str.") if tex_size: w, h = tex_size self.shape = [-w/2, h/2, w/2, -h/2] texture = property(_get_texture, _set_texture, doc= """ ``Sprite.texture`` The texture used for this sprite. The value can be in a variety of formats: If it's a string, it will be used as a filename to load the texture. If it's an integer, it will be used as an OpenGL texture id. If it's an object with an ``id`` attribute, it will be treated as a pyglet texture object. (The ``width``, ``height``, and ``tex_coords`` attributes will set the sprite's ``shape`` and ``tex_shape`` properties.) """) __docs_all__ = ["BaseSprite", "Sprite"]
mit
-8,949,477,883,439,346,000
35.525714
78
0.559769
false
3.908929
false
false
false
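A brief usage sketch of the Sprite class above follows; the texture file name is a placeholder and a working rabbyt/OpenGL window is assumed.

# Usage sketch; 'ship.png' is a placeholder texture and an OpenGL context
# (e.g. a pygame or pyglet window set up for rabbyt) is assumed.
import rabbyt
from rabbyt.sprites import Sprite

ship = Sprite(texture="ship.png", x=100, y=50)
ship.scale = 2.0            # uniform scale; a (sx, sy) tuple scales axes independently
ship.rgba = (1, 1, 1, 0.5)  # draw half transparent
ship.rot = 45               # degrees

# Inside the frame loop:
#     rabbyt.clear()
#     ship.render()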
aronasorman/kolibri
kolibri/logger/serializers.py
1
2680
from kolibri.logger.models import AttemptLog, ContentRatingLog, ContentSessionLog, ContentSummaryLog, MasteryLog, UserSessionLog
from rest_framework import serializers


class ContentSessionLogSerializer(serializers.ModelSerializer):

    class Meta:
        model = ContentSessionLog
        fields = ('pk', 'user', 'content_id', 'channel_id', 'start_timestamp',
                  'end_timestamp', 'time_spent', 'kind', 'extra_fields', 'progress')


class MasteryLogSerializer(serializers.ModelSerializer):

    pastattempts = serializers.SerializerMethodField()
    totalattempts = serializers.SerializerMethodField()

    class Meta:
        model = MasteryLog
        fields = ('id', 'summarylog', 'start_timestamp', 'pastattempts', 'totalattempts',
                  'end_timestamp', 'completion_timestamp', 'mastery_criterion',
                  'mastery_level', 'complete')

    def get_pastattempts(self, obj):
        # will return a list of the latest 10 correct and hint_taken fields for each attempt.
        return AttemptLog.objects.filter(masterylog__summarylog=obj.summarylog).values('correct', 'hinted').order_by('-start_timestamp')[:10]

    def get_totalattempts(self, obj):
        return AttemptLog.objects.filter(masterylog__summarylog=obj.summarylog).count()


class AttemptLogSerializer(serializers.ModelSerializer):

    class Meta:
        model = AttemptLog
        fields = ('id', 'masterylog', 'start_timestamp', 'sessionlog', 'end_timestamp',
                  'completion_timestamp', 'item', 'time_spent', 'complete', 'correct',
                  'hinted', 'answer', 'simple_answer', 'interaction_history')


class ContentSummaryLogSerializer(serializers.ModelSerializer):

    currentmasterylog = serializers.SerializerMethodField()

    class Meta:
        model = ContentSummaryLog
        fields = ('pk', 'user', 'content_id', 'channel_id', 'start_timestamp',
                  'currentmasterylog', 'end_timestamp', 'completion_timestamp',
                  'time_spent', 'progress', 'kind', 'extra_fields')

    def get_currentmasterylog(self, obj):
        try:
            current_log = obj.masterylogs.latest('end_timestamp')
            return MasteryLogSerializer(current_log).data
        except MasteryLog.DoesNotExist:
            return None


class ContentRatingLogSerializer(serializers.ModelSerializer):

    class Meta:
        model = ContentRatingLog
        fields = ('pk', 'user', 'content_id', 'channel_id', 'quality', 'ease', 'learning', 'feedback')


class UserSessionLogSerializer(serializers.ModelSerializer):

    class Meta:
        model = UserSessionLog
        fields = ('pk', 'user', 'channels', 'start_timestamp', 'last_interaction_timestamp', 'pages')
mit
401,927,789,942,079,940
39.606061
141
0.680597
false
4.135802
false
false
false
SAAVY/magpie
client/blacklist.py
1
1546
from flask import current_app
from netaddr import IPNetwork, IPAddress
from netaddr.core import AddrFormatError

bl_website_ip = []  # array of tuples (network mask, port)


def build_website_blacklist(logger):
    with open("config/blacklist_website_ip.txt") as f:
        for line in f:
            network_address = line.strip()
            ip, separator, port = network_address.rpartition(':')
            if not separator:
                address = (network_address, '')
            else:
                address = (ip, port)
                if not port:
                    logger.error("check blacklist_website_ip.txt: must specify port number after ':' in ip")
                    continue
            try:
                IPNetwork(address[0])
                bl_website_ip.append(address)
            except AddrFormatError as e:
                logger.error("Format error. check blacklist_website_ip.txt: %s" % str(e))


def is_website_blacklisted(website_ip, website_port):
    logger = current_app.logger
    logger.debug("FUNC: is_website_blacklisted ip_address: %s port: %s" % (website_ip, website_port))
    for network_mask, port in bl_website_ip:
        try:
            if IPAddress(website_ip) in IPNetwork(network_mask):
                if port and website_port == port:
                    return True
                elif port:
                    return False
                return True
        except Exception as e:
            logger.exception("FUNC: is_website_blacklisted Exception: %s" % str(e))
    return False
mit
-8,088,540,729,857,940,000
36.707317
108
0.574386
false
4.270718
false
false
false
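To show how the two helpers above might be wired into a Flask application, here is a small sketch; the module path, route and checked values are examples only.

# Usage sketch; module path and route are examples, and the text file read by
# build_website_blacklist() is expected at config/blacklist_website_ip.txt.
from flask import Flask, jsonify

from client import blacklist

app = Flask(__name__)

# Load the blacklist once at startup.
blacklist.build_website_blacklist(app.logger)


@app.route("/check/<ip>/<port>")
def check(ip, port):
    # is_website_blacklisted() logs through current_app, so it has to run
    # inside an application context, as it does while serving a request.
    return jsonify(blocked=blacklist.is_website_blacklisted(ip, port))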
pycrystem/pycrystem
pyxem/tests/test_signals/test_power2d.py
1
3662
# -*- coding: utf-8 -*- # Copyright 2017-2019 The pyXem developers # # This file is part of pyXem. # # pyXem is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyXem is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pyXem. If not, see <http://www.gnu.org/licenses/>. import pytest import numpy as np import dask.array as da from hyperspy.signals import Signal2D from pyxem.signals.power2d import Power2D, LazyPower2D class TestComputeAndAsLazy2D: def test_2d_data_compute(self): dask_array = da.random.random((100, 150), chunks=(50, 50)) s = LazyPower2D(dask_array) scale0, scale1, metadata_string = 0.5, 1.5, "test" s.axes_manager[0].scale = scale0 s.axes_manager[1].scale = scale1 s.metadata.Test = metadata_string s.compute() assert s.__class__ == Power2D assert not hasattr(s.data, "compute") assert s.axes_manager[0].scale == scale0 assert s.axes_manager[1].scale == scale1 assert s.metadata.Test == metadata_string assert dask_array.shape == s.data.shape def test_4d_data_compute(self): dask_array = da.random.random((4, 4, 10, 15), chunks=(1, 1, 10, 15)) s = LazyPower2D(dask_array) s.compute() assert s.__class__ == Power2D assert dask_array.shape == s.data.shape def test_2d_data_as_lazy(self): data = np.random.random((100, 150)) s = Power2D(data) scale0, scale1, metadata_string = 0.5, 1.5, "test" s.axes_manager[0].scale = scale0 s.axes_manager[1].scale = scale1 s.metadata.Test = metadata_string s_lazy = s.as_lazy() assert s_lazy.__class__ == LazyPower2D assert hasattr(s_lazy.data, "compute") assert s_lazy.axes_manager[0].scale == scale0 assert s_lazy.axes_manager[1].scale == scale1 assert s_lazy.metadata.Test == metadata_string assert data.shape == s_lazy.data.shape def test_4d_data_as_lazy(self): data = np.random.random((4, 10, 15)) s = Power2D(data) s_lazy = s.as_lazy() assert s_lazy.__class__ == LazyPower2D assert data.shape == s_lazy.data.shape class TestPower: @pytest.fixture def flat_pattern(self): pd = Power2D(data=np.ones(shape=(2, 2, 5, 5))) return pd @pytest.mark.parametrize("k_region", [None, [2.0, 4.0]]) @pytest.mark.parametrize("sym", [None, 4, [2, 4]]) def test_power_signal_get_map(self, flat_pattern, k_region, sym): flat_pattern.get_map(k_region=k_region, symmetry=sym) @pytest.mark.parametrize("k_region", [None, [2.0, 4.0]]) @pytest.mark.parametrize("sym", [[2, 4]]) def test_power_signal_plot_symmetries(self, flat_pattern, k_region, sym): flat_pattern.plot_symmetries(k_region=k_region, symmetry=sym) class TestDecomposition: def test_decomposition_is_performed(self, diffraction_pattern): s = Power2D(diffraction_pattern) s.decomposition() assert s.learning_results is not None def test_decomposition_class_assignment(self, diffraction_pattern): s = Power2D(diffraction_pattern) s.decomposition() assert isinstance(s, Power2D)
gpl-3.0
3,197,397,849,401,754,000
35.62
77
0.647733
false
3.326067
true
false
false
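The test file above drives Power2D mostly through fixtures; the short sketch below repeats the same calls on a small in-memory signal, with shapes and symmetry values copied from the parametrised tests rather than chosen for physical meaning.

import numpy as np
from pyxem.signals.power2d import Power2D

# 2x2 navigation grid of 5x5 power spectra, as in the flat_pattern fixture.
pd = Power2D(data=np.ones(shape=(2, 2, 5, 5)))

# Same calls as test_power_signal_get_map / test_power_signal_plot_symmetries.
pd.get_map(k_region=[2.0, 4.0], symmetry=[2, 4])
pd.plot_symmetries(k_region=[2.0, 4.0], symmetry=[2, 4])

# Lazy round-trip, as covered by TestComputeAndAsLazy2D.
lazy = pd.as_lazy()
lazy.compute()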
McGillX/edx_data_research
edx_data_research/parsing/parse_course_structure.py
1
4815
import json from edx_data_research.parsing.parse import Parse class CourseStructure(Parse): def __init__(self, args): super(CourseStructure, self).__init__(args) self.collections = ['course_structure'] self.course_structure_file = args.course_structure_file self.drop = args.drop def migrate(self): if self.drop: self.collections['course_structure'].drop() json_data = self._load_json_data(self.course_structure_file) json_data = self._parse_key_names(json_data) json_data = self._delete_category(json_data, 'conditional') json_data = self._delete_category(json_data, 'wrapper') json_data = self._build_parent_data(json_data) json_data = self._update_parent_data(json_data) for key in json_data: self.collections['course_structure'].insert(json_data[key]) def _load_json_data(self, file_name): '''Retrieve data from the json file''' with open(file_name) as file_handler: json_data = json.load(file_handler) return json_data def _parse_key_names(self, json_data): '''Parse key names''' new_json_data = {} for key in json_data: new_key = key.split('/')[-1] json_data[key]['_id'] = new_key if json_data[key]['children']: for index, child in enumerate(json_data[key]['children']): json_data[key]['children'][index] = child.split('/')[-1] new_json_data[new_key] = json_data[key] return new_json_data def _delete_category(self, json_data, category): '''Delete data with given category from json_data ''' for key in json_data.keys(): if json_data[key]['category'] == category: for item in json_data.keys(): if json_data[item]['children'] and key in json_data[item]['children']: parent_id = item index_child = json_data[parent_id]['children'].index(key) left_list = json_data[parent_id]['children'][:index_child] right_list = json_data[parent_id]['children'][index_child + 1:] json_data[parent_id]['children'] = left_list + json_data[key]['children'] + right_list del json_data[key] return json_data def _build_parent_data(self, json_data): '''Build parent data''' error_count = 0 for key in json_data: if json_data[key]['children']: for index, child_key in enumerate(json_data[key]['children']): try: json_data[child_key]['parent_data'] = {} except: error_count += 1 continue parent_category = json_data[key]['category'] parent_order_key = parent_category + '_order' parent_id_key = parent_category + '_id' parent_display_name_key = parent_category + '_display_name' json_data[child_key]['parent_data'][parent_order_key] = index json_data[child_key]['parent_data'][parent_id_key] = json_data[key]['_id'] json_data[child_key]['parent_data'][parent_display_name_key] = json_data[key]['metadata']['display_name'] print "Number of errors when building parent data: {0}".format(error_count) return json_data def _update_parent_data(self, json_data): for key in json_data: if json_data[key]['category'] == 'sequential': chapter_id = json_data[key]['parent_data']['chapter_id'] chapter_parent_data = json_data[chapter_id]['parent_data'] json_data[key]['parent_data'].update(chapter_parent_data) for key in json_data: if json_data[key]['category'] == 'vertical': sequential_id = json_data[key]['parent_data']['sequential_id'] sequential_parent_data = json_data[sequential_id]['parent_data'] json_data[key]['parent_data'].update(sequential_parent_data) for key in json_data: if json_data[key]['category'] not in set(['vertical', 'sequential', 'chapter', 'course']): try: vertical_id = json_data[key]['parent_data']['vertical_id'] vertical_parent_data = json_data[vertical_id]['parent_data'] json_data[key]['parent_data'].update(vertical_parent_data) except: print 
"ERROR: {0}".format(json_data[key]) return json_data
mit
5,415,329,089,375,148,000
46.636364
125
0.538525
false
4.143718
false
false
false
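CourseStructure.migrate() is normally driven by the package's command-line tooling; the sketch below calls it directly and is heavily hedged, since the Parse base class (database connection handling, any attributes on args beyond course_structure_file and drop) is not shown in this file.

# Hypothetical invocation; the real tool builds 'args' with argparse and the
# Parse base class turns it into MongoDB collections.
from argparse import Namespace
from edx_data_research.parsing.parse_course_structure import CourseStructure

args = Namespace(course_structure_file='course_structure.json', drop=True)
# ...plus whatever connection fields the Parse base class expects (assumed).
cs = CourseStructure(args)
cs.migrate()  # load JSON, strip 'wrapper'/'conditional' nodes, insert into Mongo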
lizardsystem/threedilib
threedilib/modeling/convert.py
1
8275
# (c) Nelen & Schuurmans. GPL licensed, see LICENSE.rst. # -*- coding: utf-8 -*- """ Convert shapefiles with z coordinates. Choose from the following formats: 'inp' to create an inp file, 'img' to create an image with a plot of the feature, or 'shp' to output a shapefile with the average height of a feature stored in an extra attribute. """ from __future__ import print_function from __future__ import unicode_literals from __future__ import absolute_import from __future__ import division import argparse import math import os import shutil import tempfile from matplotlib.backends import backend_agg from matplotlib import figure from osgeo import gdal from osgeo import ogr from PIL import Image ogr.UseExceptions() def get_parser(): """ Return argument parser. """ parser = argparse.ArgumentParser( description=__doc__, ) parser.add_argument('source_path', metavar='SOURCE', help=('Path to source shapefile.')) parser.add_argument('target_path', metavar='TARGET', help=('Path to target file.')) parser.add_argument('-of', '--output-format', metavar='FORMAT', choices=['inp', 'img', 'shp'], default='shp', help=("Path to output.")) return parser class InputFileWriter(object): """ Writer for input files. """ def __init__(self, path): """ Init the counters and tmpdirs """ self.path = path self.node_count = 0 self.link_count = 0 def __enter__(self): """ Setup tempfiles. """ self.temp_directory = tempfile.mkdtemp() self.node_file = open( os.path.join(self.temp_directory, 'nodes'), 'a+', ) self.link_file = open( os.path.join(self.temp_directory, 'links'), 'a+', ) return self def __exit__(self, type, value, traceback): """ Write 'inputfile' at path. """ with open(self.path, 'w') as input_file: self.node_file.seek(0) input_file.write(self.node_file.read()) input_file.write('-1\n') self.link_file.seek(0) input_file.write(self.link_file.read()) self.node_file.close() self.link_file.close() shutil.rmtree(self.temp_directory) def _write_node(self, node): """ Write a node. """ self.node_count += 1 self.node_file.write('{} {} {} {}\n'.format( self.node_count, node[0], node[1], -node[2] # Depth, not height! )) def _write_link(self): """ Write a link between previous node and next node.""" self.link_count += 1 self.link_file.write('{} {} {}\n'.format( self.link_count, self.node_count, self.node_count + 1, )) def _add_wkb_line_string(self, wkb_line_string): """ Add linestring as nodes and links. """ nodes = [wkb_line_string.GetPoint(i) for i in range(wkb_line_string.GetPointCount())] # Add nodes and links up to the last node for i in range(len(nodes) - 1): self._write_node(nodes[i]) self._write_link() # Add last node, link already covered. self._write_node(nodes[-1]) def add_feature(self, feature): """ Add feature as nodes and links. """ geometry = feature.geometry() geometry_type = geometry.GetGeometryType() if geometry_type == ogr.wkbLineString25D: self._add_wkb_line_string(geometry) elif geometry_type == ogr.wkbMultiLineString25D: for wkb_line_string in geometry: self._add_wkb_line_string(wkb_line_string) class ImageWriter(object): """ Writer for images. """ def __init__(self, path): self.count = 0 self.path = path def __enter__(self): return self def _add_wkb_line_string(self, wkb_line_string, label): """ Plot linestring as separate image. 
""" # Get data x, y, z = zip(*[wkb_line_string.GetPoint(i) for i in range(wkb_line_string.GetPointCount())]) # Determine distance along line l = [0] for i in range(len(z) - 1): l.append(l[-1] + math.sqrt( (x[i + 1] - x[i]) ** 2 + (y[i + 1] - y[i]) ** 2, )) # Plot in matplotlib fig = figure.Figure() axes = fig.add_axes([0.1, 0.1, 0.8, 0.8]) axes.plot(l, z, label=label) axes.legend(loc='best', frameon=False) # Write to image backend_agg.FigureCanvasAgg(fig) buf, size = fig.canvas.print_to_buffer() image = Image.fromstring('RGBA', size, buf) root, ext = os.path.splitext(self.path) image.save(root + '{:00.0f}'.format(self.count) + ext) self.count += 1 def add_feature(self, feature): """ Currently saves every feature in a separate image. """ # Plotlabel label = '\n'.join([': '.join(str(v) for v in item) for item in feature.items().items()]) # Plot according to geometry type geometry = feature.geometry() geometry_type = geometry.GetGeometryType() if geometry_type == ogr.wkbLineString25D: self._add_wkb_line_string(geometry, label=label) elif geometry_type == ogr.wkbMultiLineString25D: for wkb_line_string in geometry: self._add_wkb_line_string(wkb_line_string, label=label) def __exit__(self, type, value, traceback): pass class ShapefileWriter(object): """ Writer for shapefiles. """ ATTRIBUTE = b'kruinhoogt' def __init__(self, path): self.count = 0 self.path = path self.datasource = None self.layer = None def __enter__(self): return self def create_datasource(self, feature): """ Create a datasource based on feature. """ root, ext = os.path.splitext(os.path.basename(self.path)) driver = ogr.GetDriverByName(b'ESRI Shapefile') datasource = driver.CreateDataSource(self.path) layer = datasource.CreateLayer(root) for i in range(feature.GetFieldCount()): layer.CreateField(feature.GetFieldDefnRef(i)) field_defn = ogr.FieldDefn(self.ATTRIBUTE, ogr.OFTReal) layer.CreateField(field_defn) self.datasource = datasource self.layer = layer def add_feature(self, feature): """ Currently saves every feature in a separate image. """ if self.layer is None: self.create_datasource(feature) layer_defn = self.layer.GetLayerDefn() # elevation geometry = feature.geometry().Clone() geometry_type = geometry.GetGeometryType() if geometry_type == ogr.wkbLineString25D: elevation = min([p[2] for p in geometry.GetPoints()]) else: # multilinestring elevation = min([p[2] for g in geometry for p in g.GetPoints()]) geometry.FlattenTo2D() new_feature = ogr.Feature(layer_defn) new_feature.SetGeometry(geometry) for k, v in feature.items().items(): new_feature[k] = v new_feature[self.ATTRIBUTE] = elevation self.layer.CreateFeature(new_feature) def __exit__(self, type, value, traceback): pass def convert(source_path, target_path, output_format): """ Convert shapefile to inp file.""" source_dataset = ogr.Open(str(source_path)) writers = dict( inp=InputFileWriter, img=ImageWriter, shp=ShapefileWriter, ) with writers[output_format](target_path) as writer: for source_layer in source_dataset: total = source_layer.GetFeatureCount() for count, source_feature in enumerate(source_layer, 1): writer.add_feature(source_feature) gdal.TermProgress_nocb(count / total) def main(): """ Call convert() with commandline args. """ convert(**vars(get_parser().parse_args())) if __name__ == '__main__': exit(main())
gpl-3.0
6,584,589,084,668,582,000
32.1
77
0.570997
false
3.927385
false
false
false
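The converter above is argparse-driven; the invocations below are a sketch with placeholder file names, using only the three output formats the parser declares.

# Shell usage sketch (file names are placeholders):
#   python convert.py levees.shp levees.inp -of inp   # node/link input file
#   python convert.py levees.shp levees.png -of img   # one plot image per feature
#   python convert.py levees.shp levees_out.shp       # default: shp with elevation attribute
# The same conversion can be called from Python:
from threedilib.modeling.convert import convert

convert('levees.shp', 'levees.inp', 'inp')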
holzenburg/feedshare
feedshare/feedlists/migrations/0002_auto__add_feedlistfeed__del_field_feed_feedlist__del_field_feed_tags__.py
1
8074
# -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'FeedListFeed' db.create_table(u'feedlists_feedlistfeed', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('feedlist', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['feedlists.FeedList'])), ('feed', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['feedlists.Feed'])), ('title', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), )) db.send_create_signal(u'feedlists', ['FeedListFeed']) # Deleting field 'Feed.feedlist' db.delete_column(u'feedlists_feed', 'feedlist_id') # Deleting field 'Feed.tags' db.delete_column(u'feedlists_feed', 'tags') # Adding field 'Feed.site_url' db.add_column(u'feedlists_feed', 'site_url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True), keep_default=False) # Changing field 'Feed.description' db.alter_column(u'feedlists_feed', 'description', self.gf('django.db.models.fields.TextField')(null=True)) # Changing field 'Feed.title' db.alter_column(u'feedlists_feed', 'title', self.gf('django.db.models.fields.CharField')(max_length=255, null=True)) # Changing field 'FeedList.description' db.alter_column(u'feedlists_feedlist', 'description', self.gf('django.db.models.fields.TextField')(null=True)) # Changing field 'FeedList.title' db.alter_column(u'feedlists_feedlist', 'title', self.gf('django.db.models.fields.CharField')(max_length=255, null=True)) # Changing field 'FeedList.author_email' db.alter_column(u'feedlists_feedlist', 'author_email', self.gf('django.db.models.fields.EmailField')(max_length=255, null=True)) # Changing field 'FeedList.url' db.alter_column(u'feedlists_feedlist', 'url', self.gf('django.db.models.fields.URLField')(max_length=255, null=True)) # Changing field 'FeedList.author' db.alter_column(u'feedlists_feedlist', 'author', self.gf('django.db.models.fields.CharField')(max_length=255, null=True)) # Changing field 'FeedList.file' db.alter_column(u'feedlists_feedlist', 'file', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True)) def backwards(self, orm): # Deleting model 'FeedListFeed' db.delete_table(u'feedlists_feedlistfeed') # User chose to not deal with backwards NULL issues for 'Feed.feedlist' raise RuntimeError("Cannot reverse this migration. 
'Feed.feedlist' and its values cannot be restored.") # The following code is provided here to aid in writing a correct migration # Adding field 'Feed.feedlist' db.add_column(u'feedlists_feed', 'feedlist', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['feedlists.FeedList']), keep_default=False) # Adding field 'Feed.tags' db.add_column(u'feedlists_feed', 'tags', self.gf('django.db.models.fields.TextField')(default='', blank=True), keep_default=False) # Deleting field 'Feed.site_url' db.delete_column(u'feedlists_feed', 'site_url') # Changing field 'Feed.description' db.alter_column(u'feedlists_feed', 'description', self.gf('django.db.models.fields.TextField')(default='')) # Changing field 'Feed.title' db.alter_column(u'feedlists_feed', 'title', self.gf('django.db.models.fields.CharField')(default='', max_length=255)) # Changing field 'FeedList.description' db.alter_column(u'feedlists_feedlist', 'description', self.gf('django.db.models.fields.TextField')(default='')) # Changing field 'FeedList.title' db.alter_column(u'feedlists_feedlist', 'title', self.gf('django.db.models.fields.CharField')(default='', max_length=255)) # Changing field 'FeedList.author_email' db.alter_column(u'feedlists_feedlist', 'author_email', self.gf('django.db.models.fields.EmailField')(default='', max_length=255)) # Changing field 'FeedList.url' db.alter_column(u'feedlists_feedlist', 'url', self.gf('django.db.models.fields.URLField')(default='', max_length=255)) # Changing field 'FeedList.author' db.alter_column(u'feedlists_feedlist', 'author', self.gf('django.db.models.fields.CharField')(default='', max_length=255)) # User chose to not deal with backwards NULL issues for 'FeedList.file' raise RuntimeError("Cannot reverse this migration. 'FeedList.file' and its values cannot be restored.") # The following code is provided here to aid in writing a correct migration # Changing field 'FeedList.file' db.alter_column(u'feedlists_feedlist', 'file', self.gf('django.db.models.fields.files.FileField')(max_length=100)) models = { u'feedlists.feed': { 'Meta': {'object_name': 'Feed'}, 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'site_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'url': ('django.db.models.fields.TextField', [], {}) }, u'feedlists.feedlist': { 'Meta': {'object_name': 'FeedList'}, 'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'author_email': ('django.db.models.fields.EmailField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'datetime_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'datetime_updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'feeds': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['feedlists.Feed']", 'through': u"orm['feedlists.FeedListFeed']", 'symmetrical': 'False'}), 'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'processing_error': ('django.db.models.fields.BooleanField', [], 
{'default': 'False'}), 'secret': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'views': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}) }, u'feedlists.feedlistfeed': { 'Meta': {'object_name': 'FeedListFeed'}, 'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feedlists.Feed']"}), 'feedlist': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feedlists.FeedList']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}) } } complete_apps = ['feedlists']
mit
8,905,969,177,572,606,000
56.269504
180
0.614194
false
3.607685
false
false
false
RealP/Everpy
examples.py
1
1776
"""Some examples of how to use modules.""" # from everpy_extras import EverPyExtras from everpy_pro import EverPyPro import everpy_utilities PATH_TO_ENSCRIPT = r"C:\Program Files (x86)\Evernote\Evernote\ENScript.exe" def createnote(epy): """Example of how to make a note from python.""" content = open("README.md", "r").read() notebook = "_INBOX" title = "Everpy Generated Note" tags = ["everpy"] attachments = ["README.md"] epy.create_note_from_content(content, notebook_name=notebook, title=title, tags=tags, file_attachments=attachments) def main(): """Example usages.""" dev_token = everpy_utilities.get_token() try: my_evernote = EverPyPro(dev_token, PATH_TO_ENSCRIPT) except: everpy_utilities.refresh_token() my_evernote = EverPyPro(dev_token, PATH_TO_ENSCRIPT) # Find and replace # my_evernote.find_and_replace("evernote", "Evernote", "any:") # Creating a note. # createnote(my_evernote) # Opening client with specific search attributes # my_evernote.get_notes_to_manage() # or # my_evernote.search_notes("stack:Work intitle:\"new employee\"") # Creating a note from an hmtl template # my_evernote.create_note(open("Templates/testnote.html", "r").read(), title="testnote", notebook="_INBOX", tags=["everpy"], attachments=["Templates/testnote.html"]) ############################## # VVVV Tests may not work VVVV. # my_evernote.create_template("Templates/simple_sections.txt") my_evernote.create_template("Templates/card_template.txt") # my_evernote.create_textnote_from_file("template.html", notebook_name="_INBOX") # my_evernote.learn_notebooks() # print(my_evernote.note_book_dict) if __name__ == '__main__': main()
gpl-3.0
-8,499,846,340,086,754,000
33.823529
169
0.662162
false
3.338346
false
false
false
Stemer114/Reprap_KTY-84-130
repetier/KTY84-130_repetier.py
1
1879
# based on python script from
# http://diyhpl.us/reprap/trunk/users/wizard23/python/lookupTables/KTY84-130.py
#
# adapted by Stemer114 for usage with 4.7k pull-up resistor
# table format for repetier firmware
# https://github.com/Stemer114/Reprap_KTY-84-130
#
# generates a lookup table for the following thermistor
# KTY 84-130
# http://www.datasheetcatalog.org/datasheet/philips/KTY84_SERIES_5.pdf

# usage:
# python KTY84-130.py >ThermistorTable.h
# copy ThermistorTable.h into your firmware dir
# enable the lookup table in firmware config.h (depends on firmware)

# resistor values are taken from data sheet page 4, table 1
# temperature range is 0C to 300C in steps of 10K
# the negative temperature entries and the entry for 25C are omitted
resistorValues = [
    498, 538, 581, 626, 672, 722, 773, 826, 882, 940,
    1000, 1062, 1127, 1194, 1262, 1334, 1407, 1482, 1560, 1640,
    1722, 1807, 1893, 1982, 2073, 2166, 2261, 2357, 2452, 2542,
    2624]

tempValues = range(0, 301, 10)

if len(tempValues) != len(resistorValues):
    print "Length of tempValues %d and resistorValues %d does not match" % (len(tempValues), len(resistorValues))
else:
    print "// reprap thermistor table for KTY 84-130 temperature sensor"
    print "// adapted for repetier firmware user thermistortable 1 format"
    print "// for further details see https://github.com/Stemer114/Reprap_KTY-84-130"
    print ""
    print "// consult the readme for how to insert the table into"
    print "// repetier Configuration.h"
    print "#define NUM_TEMPS_USERTHERMISTOR1 %d" % (len(tempValues))
    print "#define USER_THERMISTORTABLE1 {\ "

    suffix = ","
    for i in range(0, len(tempValues)):
        current = 5.0/(4700.0+resistorValues[i])
        voltage = current*resistorValues[i]
        adValue = round(voltage*1023.0/5.0)
        if i == len(tempValues)-1:
            suffix = ""
        print "  {%d*4, %d*8}%s \ " % (adValue, tempValues[i], suffix)

    print "};"
mit
9,178,785,433,663,180,000
24.391892
109
0.722193
false
2.727141
false
false
false
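Each emitted table row is a plain voltage-divider calculation; the lines below rework the first row (0 °C, 498 Ω against the 4.7 kΩ pull-up) outside the loop to show where the ADC value comes from.

# Worked example for the 0 degC row (498 ohm sensor, 4.7k pull-up, 10-bit ADC at 5 V):
current = 5.0 / (4700.0 + 498)            # ~0.96 mA through the divider
voltage = current * 498                   # ~0.48 V across the thermistor
adValue = round(voltage * 1023.0 / 5.0)   # ~98, emitted as "{98*4, 0*8},"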
cs411-entree-app/entree
entree_project/entree_project/urls.py
1
1445
"""entree_project URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf import settings from django.conf.urls.static import static from django.conf.urls import url, include from django.contrib import admin from django.views.generic import RedirectView from entree import views handler400 = 'entree.views.bad_request' handler403 = 'entree.views.permission_denied' handler404 = 'entree.views.page_not_found' handler500 = 'entree.views.server_error' urlpatterns = [ url(r'^$', RedirectView.as_view(url='entree/')), url(r'^entree/', include('entree.urls', namespace='entree')), url(r'^admin/', admin.site.urls), ] if settings.DEBUG: urlpatterns += [ url(r'^400/$', views.bad_request), url(r'^403/$', views.permission_denied), url(r'^404/$', views.page_not_found), url(r'^500/$', views.server_error), ]
apache-2.0
-1,928,786,586,874,997,500
35.125
79
0.692734
false
3.424171
false
false
false
Ruide/angr-dev
angr/angr/blade.py
1
12198
import networkx import pyvex from .slicer import SimSlicer class Blade(object): """ Blade is a light-weight program slicer that works with networkx DiGraph containing CFGNodes. It is meant to be used in angr for small or on-the-fly analyses. """ def __init__(self, graph, dst_run, dst_stmt_idx, direction='backward', project=None, cfg=None, ignore_sp=False, ignore_bp=False, ignored_regs=None, max_level=3): """ :param networkx.DiGraph graph: A graph representing the control flow graph. Note that it does not take angr.analyses.CFGAccurate or angr.analyses.CFGFast. :param int dst_run: An address specifying the target SimRun. :param int dst_stmt_idx: The target statement index. -1 means executing until the last statement. :param str direction: 'backward' or 'forward' slicing. Forward slicing is not yet supported. :param angr.Project project: The project instance. :param angr.analyses.CFGBase cfg: the CFG instance. It will be made mandatory later. :param bool ignore_sp: Whether the stack pointer should be ignored in dependency tracking. Any dependency from/to stack pointers will be ignored if this options is True. :param bool ignore_bp: Whether the base pointer should be ignored or not. :param int max_level: The maximum number of blocks that we trace back for. :return: None """ self._graph = graph self._dst_run = dst_run self._dst_stmt_idx = dst_stmt_idx self._ignore_sp = ignore_sp self._ignore_bp = ignore_bp self._max_level = max_level self._slice = networkx.DiGraph() self.project = project self._cfg = cfg if self._cfg is None: # `cfg` is made optional only for compatibility concern. It will be made a positional parameter later. raise AngrBladeError('"cfg" must be specified.') if not self._in_graph(self._dst_run): raise AngrBladeError("The specified SimRun %s doesn't exist in graph." % self._dst_run) self._ignored_regs = set() if ignored_regs: for r in ignored_regs: if isinstance(r, (int, long)): self._ignored_regs.add(r) else: self._ignored_regs.add(self.project.arch.registers[r][0]) self._run_cache = { } self._traced_runs = set() if direction == 'backward': self._backward_slice() elif direction == 'forward': raise AngrBladeError('Forward slicing is not implemented yet') else: raise AngrBladeError("Unknown slicing direction %s", direction) # # Properties # @property def slice(self): return self._slice # # Public methods # def dbg_repr(self, arch=None): if arch is None and self.project is not None: arch = self.project.arch s = "" block_addrs = list(set([ a for a, _ in self.slice.nodes_iter() ])) for block_addr in block_addrs: block_str = "IRSB %#x\n" % block_addr block = self.project.factory.block(block_addr).vex included_stmts = set([ stmt for _, stmt in self.slice.nodes_iter() if _ == block_addr ]) for i, stmt in enumerate(block.statements): if arch is not None: if isinstance(stmt, pyvex.IRStmt.Put): reg_name = arch.translate_register_name(stmt.offset) stmt_str = stmt.__str__(reg_name=reg_name) elif isinstance(stmt, pyvex.IRStmt.WrTmp) and isinstance(stmt.data, pyvex.IRExpr.Get): reg_name = arch.translate_register_name(stmt.data.offset) stmt_str = stmt.__str__(reg_name=reg_name) else: stmt_str = str(stmt) else: stmt_str = str(stmt) block_str += "%02s: %s\n" % ("+" if i in included_stmts else "-", stmt_str ) s += block_str s += "\n" return s # # Private methods # def _get_irsb(self, v): """ Get the IRSB object from an address, a SimRun, or a CFGNode. :param v: Can be one of the following: an address, or a CFGNode. :return: The IRSB instance. 
:rtype: pyvex.IRSB """ if isinstance(v, CFGNode): v = v.addr if type(v) in (int, long): # Generate an IRSB from self._project if v in self._run_cache: return self._run_cache[v] if self.project: irsb = self.project.factory.block(v).vex self._run_cache[v] = irsb return irsb else: raise AngrBladeError("Project must be specified if you give me all addresses for SimRuns") else: raise AngrBladeError('Unsupported SimRun argument type %s', type(v)) def _get_cfgnode(self, thing): """ Get the CFGNode corresponding to the specific address. :param thing: Can be anything that self._normalize() accepts. Usually it's the address of the node :return: the CFGNode instance :rtype: CFGNode """ return self._cfg.get_any_node(self._get_addr(thing)) def _get_addr(self, v): """ Get address of the basic block or CFG node specified by v. :param v: Can be one of the following: a CFGNode, or an address. :return: The address. :rtype: int """ if isinstance(v, CFGNode): return v.addr elif type(v) in (int, long): return v else: raise AngrBladeError('Unsupported SimRun argument type %s' % type(v)) def _in_graph(self, v): return self._get_cfgnode(v) in self._graph def _inslice_callback(self, stmt_idx, stmt, infodict): # pylint:disable=unused-argument tpl = (infodict['irsb_addr'], stmt_idx) if 'prev' in infodict and infodict['prev']: prev = infodict['prev'] self._slice.add_edge(tpl, prev) else: self._slice.add_node(tpl) infodict['prev'] = tpl infodict['has_statement'] = True def _backward_slice(self): """ Backward slicing. We support the following IRStmts: # WrTmp # Put We support the following IRExprs: # Get # RdTmp # Const :return: """ temps = set() regs = set() # Retrieve the target: are we slicing from a register(IRStmt.Put), or a temp(IRStmt.WrTmp)? stmts = self._get_irsb(self._dst_run).statements if self._dst_stmt_idx != -1: dst_stmt = stmts[self._dst_stmt_idx] if type(dst_stmt) is pyvex.IRStmt.Put: regs.add(dst_stmt.offset) elif type(dst_stmt) is pyvex.IRStmt.WrTmp: temps.add(dst_stmt.tmp) else: raise AngrBladeError('Incorrect type of the specified target statement. We only support Put and WrTmp.') prev = (self._get_addr(self._dst_run), self._dst_stmt_idx) else: next_expr = self._get_irsb(self._dst_run).next if type(next_expr) is pyvex.IRExpr.RdTmp: temps.add(next_expr.tmp) elif type(next_expr) is pyvex.IRExpr.Const: # A const doesn't rely on anything else! pass else: raise AngrBladeError('Unsupported type for irsb.next: %s' % type(next_expr)) # Then we gotta start from the very last statement! 
self._dst_stmt_idx = len(stmts) - 1 prev = (self._get_addr(self._dst_run), 'default') slicer = SimSlicer(self.project.arch, stmts, target_tmps=temps, target_regs=regs, target_stack_offsets=None, inslice_callback=self._inslice_callback, inslice_callback_infodict={ 'irsb_addr': self._get_irsb(self._dst_run)._addr, 'prev': prev, }) regs = slicer.final_regs if self._ignore_sp and self.project.arch.sp_offset in regs: regs.remove(self.project.arch.sp_offset) if self._ignore_bp and self.project.arch.bp_offset in regs: regs.remove(self.project.arch.bp_offset) for offset in self._ignored_regs: if offset in regs: regs.remove(offset) stack_offsets = slicer.final_stack_offsets prev = slicer.inslice_callback_infodict['prev'] if regs or stack_offsets: cfgnode = self._get_cfgnode(self._dst_run) in_edges = self._graph.in_edges(cfgnode, data=True) for pred, _, data in in_edges: if 'jumpkind' in data and data['jumpkind'] == 'Ijk_FakeRet': continue self._backward_slice_recursive(self._max_level - 1, pred, regs, stack_offsets, prev, data.get('stmt_idx', None) ) def _backward_slice_recursive(self, level, run, regs, stack_offsets, prev, exit_stmt_idx): if level <= 0: return temps = set() regs = regs.copy() stmts = self._get_irsb(run).statements if exit_stmt_idx is None or exit_stmt_idx == 'default': # Initialize the temps set with whatever in the `next` attribute of this irsb next_expr = self._get_irsb(run).next if type(next_expr) is pyvex.IRExpr.RdTmp: temps.add(next_expr.tmp) else: exit_stmt = self._get_irsb(run).statements[exit_stmt_idx] if type(exit_stmt.guard) is pyvex.IRExpr.RdTmp: temps.add(exit_stmt.guard.tmp) # Put it in our slice irsb_addr = self._get_addr(run) self._inslice_callback(exit_stmt_idx, exit_stmt, {'irsb_addr': irsb_addr, 'prev': prev}) prev = (irsb_addr, exit_stmt_idx) infodict = {'irsb_addr' : self._get_addr(run), 'prev' : prev, 'has_statement': False } slicer = SimSlicer(self.project.arch, stmts, target_tmps=temps, target_regs=regs, target_stack_offsets=stack_offsets, inslice_callback=self._inslice_callback, inslice_callback_infodict=infodict ) if not infodict['has_statement']: # put this block into the slice self._inslice_callback(0, None, infodict) if run in self._traced_runs: return self._traced_runs.add(run) regs = slicer.final_regs if self._ignore_sp and self.project.arch.sp_offset in regs: regs.remove(self.project.arch.sp_offset) if self._ignore_bp and self.project.arch.bp_offset in regs: regs.remove(self.project.arch.bp_offset) stack_offsets = slicer.final_stack_offsets prev = slicer.inslice_callback_infodict['prev'] if regs or stack_offsets: in_edges = self._graph.in_edges(self._get_cfgnode(run), data=True) for pred, _, data in in_edges: if 'jumpkind' in data and data['jumpkind'] == 'Ijk_FakeRet': continue self._backward_slice_recursive(level - 1, pred, regs, stack_offsets, prev, data.get('stmt_idx', None)) from .errors import AngrBladeError, AngrBladeSimProcError from .analyses.cfg.cfg_node import CFGNode
bsd-2-clause
6,807,302,043,918,270,000
34.876471
120
0.542712
false
3.948851
false
false
false
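A rough sketch of how Blade is usually fed from a CFG; the binary path and block address are placeholders that must exist in the analysed program, and the import path simply mirrors this file's location.

# Hedged usage sketch ('my_binary' and 0x400a00 are placeholders).
import angr
from angr.blade import Blade

proj = angr.Project('my_binary', auto_load_libs=False)
cfg = proj.analyses.CFGFast()

# Backward slice from the last statement (-1) of the block at the target address.
b = Blade(cfg.graph, 0x400a00, -1, project=proj, cfg=cfg)

# The slice is a networkx DiGraph of (block_addr, stmt_idx) nodes.
for block_addr, stmt_idx in b.slice.nodes():
    print("%#x %s" % (block_addr, stmt_idx))
print(b.dbg_repr(arch=proj.arch))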
blomquisg/heat
heat/common/client.py
1
21833
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2010-2011 OpenStack, LLC # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # HTTPSClientAuthConnection code comes courtesy of ActiveState website: # http://code.activestate.com/recipes/ # 577548-https-httplib-client-connection-with-certificate-v/ import collections import errno import functools import httplib import logging import os import urllib import urlparse try: from eventlet.green import socket, ssl except ImportError: import socket import ssl try: import sendfile SENDFILE_SUPPORTED = True except ImportError: SENDFILE_SUPPORTED = False from heat.common import auth from heat.common import exception, utils # common chunk size for get and put CHUNKSIZE = 65536 def handle_unauthorized(func): """ Wrap a function to re-authenticate and retry. """ @functools.wraps(func) def wrapped(self, *args, **kwargs): try: return func(self, *args, **kwargs) except exception.NotAuthorized: self._authenticate(force_reauth=True) return func(self, *args, **kwargs) return wrapped def handle_redirects(func): """ Wrap the _do_request function to handle HTTP redirects. """ MAX_REDIRECTS = 5 @functools.wraps(func) def wrapped(self, method, url, body, headers): for _ in xrange(MAX_REDIRECTS): try: return func(self, method, url, body, headers) except exception.RedirectException as redirect: if redirect.url is None: raise exception.InvalidRedirect() url = redirect.url raise exception.MaxRedirectsExceeded(redirects=MAX_REDIRECTS) return wrapped class ImageBodyIterator(object): """ A class that acts as an iterator over an image file's chunks of data. This is returned as part of the result tuple from `heat.client.Client.get_image` """ def __init__(self, source): """ Constructs the object from a readable image source (such as an HTTPResponse or file-like object) """ self.source = source def __iter__(self): """ Exposes an iterator over the chunks of data in the image file. """ while True: chunk = self.source.read(CHUNKSIZE) if chunk: yield chunk else: break class SendFileIterator: """ Emulate iterator pattern over sendfile, in order to allow send progress be followed by wrapping the iteration. 
""" def __init__(self, connection, body): self.connection = connection self.body = body self.offset = 0 self.sending = True def __iter__(self): class OfLength: def __init__(self, len): self.len = len def __len__(self): return self.len while self.sending: sent = sendfile.sendfile(self.connection.sock.fileno(), self.body.fileno(), self.offset, CHUNKSIZE) self.sending = (sent != 0) self.offset += sent yield OfLength(sent) class HTTPSClientAuthConnection(httplib.HTTPSConnection): """ Class to make a HTTPS connection, with support for full client-based SSL Authentication :see http://code.activestate.com/recipes/ 577548-https-httplib-client-connection-with-certificate-v/ """ def __init__(self, host, port, key_file, cert_file, ca_file, timeout=None, insecure=False): httplib.HTTPSConnection.__init__(self, host, port, key_file=key_file, cert_file=cert_file) self.key_file = key_file self.cert_file = cert_file self.ca_file = ca_file self.timeout = timeout self.insecure = insecure def connect(self): """ Connect to a host on a given (SSL) port. If ca_file is pointing somewhere, use it to check Server Certificate. Redefined/copied and extended from httplib.py:1105 (Python 2.6.x). This is needed to pass cert_reqs=ssl.CERT_REQUIRED as parameter to ssl.wrap_socket(), which forces SSL to check server certificate against our client certificate. """ sock = socket.create_connection((self.host, self.port), self.timeout) if self._tunnel_host: self.sock = sock self._tunnel() # Check CA file unless 'insecure' is specificed if self.insecure is True: self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, cert_reqs=ssl.CERT_NONE) else: self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, ca_certs=self.ca_file, cert_reqs=ssl.CERT_REQUIRED) class BaseClient(object): """A base client class""" DEFAULT_PORT = 80 DEFAULT_DOC_ROOT = None # Standard CA file locations for Debian/Ubuntu, RedHat/Fedora, # Suse, FreeBSD/OpenBSD DEFAULT_CA_FILE_PATH = '/etc/ssl/certs/ca-certificates.crt:'\ '/etc/pki/tls/certs/ca-bundle.crt:'\ '/etc/ssl/ca-bundle.pem:'\ '/etc/ssl/cert.pem' OK_RESPONSE_CODES = ( httplib.OK, httplib.CREATED, httplib.ACCEPTED, httplib.NO_CONTENT, ) REDIRECT_RESPONSE_CODES = ( httplib.MOVED_PERMANENTLY, httplib.FOUND, httplib.SEE_OTHER, httplib.USE_PROXY, httplib.TEMPORARY_REDIRECT, ) def __init__(self, host, port=None, use_ssl=False, auth_tok=None, creds=None, doc_root=None, key_file=None, cert_file=None, ca_file=None, insecure=False, configure_via_auth=True): """ Creates a new client to some service. :param host: The host where service resides :param port: The port where service resides :param use_ssl: Should we use HTTPS? :param auth_tok: The auth token to pass to the server :param creds: The credentials to pass to the auth plugin :param doc_root: Prefix for all URLs we request from host :param key_file: Optional PEM-formatted file that contains the private key. If use_ssl is True, and this param is None (the default), then an environ variable heat_CLIENT_KEY_FILE is looked for. If no such environ variable is found, ClientConnectionError will be raised. :param cert_file: Optional PEM-formatted certificate chain file. If use_ssl is True, and this param is None (the default), then an environ variable heat_CLIENT_CERT_FILE is looked for. If no such environ variable is found, ClientConnectionError will be raised. :param ca_file: Optional CA cert file to use in SSL connections If use_ssl is True, and this param is None (the default), then an environ variable heat_CLIENT_CA_FILE is looked for. 
:param insecure: Optional. If set then the server's certificate will not be verified. """ self.host = host self.port = port or self.DEFAULT_PORT self.use_ssl = use_ssl self.auth_tok = auth_tok self.creds = creds or {} self.connection = None self.configure_via_auth = configure_via_auth # doc_root can be a nullstring, which is valid, and why we # cannot simply do doc_root or self.DEFAULT_DOC_ROOT below. self.doc_root = (doc_root if doc_root is not None else self.DEFAULT_DOC_ROOT) self.auth_plugin = self.make_auth_plugin(self.creds) self.key_file = key_file self.cert_file = cert_file self.ca_file = ca_file self.insecure = insecure self.connect_kwargs = self.get_connect_kwargs() def get_connect_kwargs(self): connect_kwargs = {} if self.use_ssl: if self.key_file is None: self.key_file = os.environ.get('heat_CLIENT_KEY_FILE') if self.cert_file is None: self.cert_file = os.environ.get('heat_CLIENT_CERT_FILE') if self.ca_file is None: self.ca_file = os.environ.get('heat_CLIENT_CA_FILE') # Check that key_file/cert_file are either both set or both unset if self.cert_file is not None and self.key_file is None: msg = _("You have selected to use SSL in connecting, " "and you have supplied a cert, " "however you have failed to supply either a " "key_file parameter or set the " "heat_CLIENT_KEY_FILE environ variable") raise exception.ClientConnectionError(msg) if self.key_file is not None and self.cert_file is None: msg = _("You have selected to use SSL in connecting, " "and you have supplied a key, " "however you have failed to supply either a " "cert_file parameter or set the " "heat_CLIENT_CERT_FILE environ variable") raise exception.ClientConnectionError(msg) if (self.key_file is not None and not os.path.exists(self.key_file)): msg = _("The key file you specified %s does not " "exist") % self.key_file raise exception.ClientConnectionError(msg) connect_kwargs['key_file'] = self.key_file if (self.cert_file is not None and not os.path.exists(self.cert_file)): msg = _("The cert file you specified %s does not " "exist") % self.cert_file raise exception.ClientConnectionError(msg) connect_kwargs['cert_file'] = self.cert_file if (self.ca_file is not None and not os.path.exists(self.ca_file)): msg = _("The CA file you specified %s does not " "exist") % self.ca_file raise exception.ClientConnectionError(msg) if self.ca_file is None: for ca in self.DEFAULT_CA_FILE_PATH.split(":"): if os.path.exists(ca): self.ca_file = ca break connect_kwargs['ca_file'] = self.ca_file connect_kwargs['insecure'] = self.insecure return connect_kwargs def set_auth_token(self, auth_tok): """ Updates the authentication token for this client connection. """ # FIXME(sirp): Nova image/heat.py currently calls this. Since this # method isn't really doing anything useful[1], we should go ahead and # rip it out, first in Nova, then here. Steps: # # 1. Change auth_tok in heat to auth_token # 2. Change image/heat.py in Nova to use client.auth_token # 3. Remove this method # # [1] http://mail.python.org/pipermail/tutor/2003-October/025932.html self.auth_tok = auth_tok def configure_from_url(self, url): """ Setups the connection based on the given url. 
The form is: <http|https>://<host>:port/doc_root """ parsed = urlparse.urlparse(url) self.use_ssl = parsed.scheme == 'https' self.host = parsed.hostname self.port = parsed.port or 80 self.doc_root = parsed.path # ensure connection kwargs are re-evaluated after the service catalog # publicURL is parsed for potential SSL usage self.connect_kwargs = self.get_connect_kwargs() def make_auth_plugin(self, creds): """ Returns an instantiated authentication plugin. """ strategy = creds.get('strategy', 'noauth') plugin = auth.get_plugin_from_strategy(strategy, creds) return plugin def get_connection_type(self): """ Returns the proper connection type """ if self.use_ssl: return HTTPSClientAuthConnection else: return httplib.HTTPConnection def _authenticate(self, force_reauth=False): """ Use the authentication plugin to authenticate and set the auth token. :param force_reauth: For re-authentication to bypass cache. """ auth_plugin = self.auth_plugin if not auth_plugin.is_authenticated or force_reauth: auth_plugin.authenticate() self.auth_tok = auth_plugin.auth_token management_url = auth_plugin.management_url if management_url and self.configure_via_auth: self.configure_from_url(management_url) @handle_unauthorized def do_request(self, method, action, body=None, headers=None, params=None): """ Make a request, returning an HTTP response object. :param method: HTTP verb (GET, POST, PUT, etc.) :param action: Requested path to append to self.doc_root :param body: Data to send in the body of the request :param headers: Headers to send with the request :param params: Key/value pairs to use in query string :returns: HTTP response object """ if not self.auth_tok: self._authenticate() url = self._construct_url(action, params) return self._do_request(method=method, url=url, body=body, headers=headers) def _construct_url(self, action, params=None): """ Create a URL object we can use to pass to _do_request(). """ path = '/'.join([self.doc_root or '', action.lstrip('/')]) scheme = "https" if self.use_ssl else "http" netloc = "%s:%d" % (self.host, self.port) if isinstance(params, dict): for (key, value) in params.items(): if value is None: del params[key] query = urllib.urlencode(params) else: query = None return urlparse.ParseResult(scheme, netloc, path, '', query, '') @handle_redirects def _do_request(self, method, url, body, headers): """ Connects to the server and issues a request. Handles converting any returned HTTP error status codes to OpenStack/heat exceptions and closing the server connection. Returns the result data, or raises an appropriate exception. :param method: HTTP method ("GET", "POST", "PUT", etc...) :param url: urlparse.ParsedResult object with URL information :param body: data to send (as string, filelike or iterable), or None (default) :param headers: mapping of key/value pairs to add as headers :note If the body param has a read attribute, and method is either POST or PUT, this method will automatically conduct a chunked-transfer encoding and use the body as a file object or iterable, transferring chunks of data using the connection's send() method. This allows large objects to be transferred efficiently without buffering the entire body in memory. """ if url.query: path = url.path + "?" 
+ url.query else: path = url.path try: connection_type = self.get_connection_type() headers = headers or {} if 'x-auth-token' not in headers and self.auth_tok: headers['x-auth-token'] = self.auth_tok c = connection_type(url.hostname, url.port, **self.connect_kwargs) def _pushing(method): return method.lower() in ('post', 'put') def _simple(body): return body is None or isinstance(body, basestring) def _filelike(body): return hasattr(body, 'read') def _sendbody(connection, iter): connection.endheaders() for sent in iter: # iterator has done the heavy lifting pass def _chunkbody(connection, iter): connection.putheader('Transfer-Encoding', 'chunked') connection.endheaders() for chunk in iter: connection.send('%x\r\n%s\r\n' % (len(chunk), chunk)) connection.send('0\r\n\r\n') # Do a simple request or a chunked request, depending # on whether the body param is file-like or iterable and # the method is PUT or POST # if not _pushing(method) or _simple(body): # Simple request... c.request(method, path, body, headers) elif _filelike(body) or self._iterable(body): c.putrequest(method, path) for header, value in headers.items(): c.putheader(header, value) iter = self.image_iterator(c, headers, body) if self._sendable(body): # send actual file without copying into userspace _sendbody(c, iter) else: # otherwise iterate and chunk _chunkbody(c, iter) else: raise TypeError('Unsupported image type: %s' % body.__class__) res = c.getresponse() status_code = self.get_status_code(res) if status_code in self.OK_RESPONSE_CODES: return res elif status_code in self.REDIRECT_RESPONSE_CODES: raise exception.RedirectException(res.getheader('Location')) elif status_code == httplib.UNAUTHORIZED: raise exception.NotAuthorized(res.read()) elif status_code == httplib.FORBIDDEN: raise exception.NotAuthorized(res.read()) elif status_code == httplib.NOT_FOUND: raise exception.NotFound(res.read()) elif status_code == httplib.CONFLICT: raise exception.Duplicate(res.read()) elif status_code == httplib.BAD_REQUEST: raise exception.Invalid(res.read()) elif status_code == httplib.MULTIPLE_CHOICES: raise exception.MultipleChoices(body=res.read()) elif status_code == httplib.INTERNAL_SERVER_ERROR: raise Exception("Internal Server error: %s" % res.read()) else: raise Exception("Unknown error occurred! %s" % res.read()) except (socket.error, IOError), e: raise exception.ClientConnectionError(e) def _seekable(self, body): # pipes are not seekable, avoids sendfile() failure on e.g. # cat /path/to/image | heat add ... # or where add command is launched via popen try: os.lseek(body.fileno(), 0, os.SEEK_SET) return True except OSError as e: return (e.errno != errno.ESPIPE) def _sendable(self, body): return (SENDFILE_SUPPORTED and hasattr(body, 'fileno') and self._seekable(body) and not self.use_ssl) def _iterable(self, body): return isinstance(body, collections.Iterable) def image_iterator(self, connection, headers, body): if self._sendable(body): return SendFileIterator(connection, body) elif self._iterable(body): return utils.chunkreadable(body) else: return ImageBodyIterator(body) def get_status_code(self, response): """ Returns the integer status code from the response, which can be either a Webob.Response (used in testing) or httplib.Response """ if hasattr(response, 'status_int'): return response.status_int else: return response.status def _extract_params(self, actual_params, allowed_params): """ Extract a subset of keys from a dictionary. 
The filters key will also be extracted, and each of its values will be returned as an individual param. :param actual_params: dict of keys to filter :param allowed_params: list of keys that 'actual_params' will be reduced to :retval subset of 'params' dict """ result = {} for param in actual_params: if param in allowed_params: result[param] = actual_params[param] elif 'Parameters.member.' in param: result[param] = actual_params[param] return result
apache-2.0
-8,148,688,654,965,902,000
35.880068
79
0.573032
false
4.439406
true
false
false
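BaseClient is intended to be subclassed per service, but the request path can be sketched directly; the host, port, doc_root, token and path below are placeholders, and with no creds the client falls back to the 'noauth' strategy.

# Hedged sketch; a real client would subclass BaseClient, set
# DEFAULT_PORT / DEFAULT_DOC_ROOT, and wrap do_request() in typed methods.
client = BaseClient(host='heat.example.com', port=8004,
                    auth_tok='my-token', doc_root='/v1')
resp = client.do_request('GET', '/stacks', params={'limit': 10})
print(resp.read())  # httplib response returned on the OK_RESPONSE_CODES path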
Kriechi/mitmproxy
mitmproxy/addons/tlsconfig.py
1
12516
import os from pathlib import Path from typing import List, Optional, TypedDict, Any from OpenSSL import SSL from mitmproxy import certs, ctx, exceptions, connection from mitmproxy.net import tls as net_tls from mitmproxy.options import CONF_BASENAME from mitmproxy.proxy import context from mitmproxy.proxy.layers import tls # We manually need to specify this, otherwise OpenSSL may select a non-HTTP2 cipher by default. # https://ssl-config.mozilla.org/#config=old DEFAULT_CIPHERS = ( 'ECDHE-ECDSA-AES128-GCM-SHA256', 'ECDHE-RSA-AES128-GCM-SHA256', 'ECDHE-ECDSA-AES256-GCM-SHA384', 'ECDHE-RSA-AES256-GCM-SHA384', 'ECDHE-ECDSA-CHACHA20-POLY1305', 'ECDHE-RSA-CHACHA20-POLY1305', 'DHE-RSA-AES128-GCM-SHA256', 'DHE-RSA-AES256-GCM-SHA384', 'DHE-RSA-CHACHA20-POLY1305', 'ECDHE-ECDSA-AES128-SHA256', 'ECDHE-RSA-AES128-SHA256', 'ECDHE-ECDSA-AES128-SHA', 'ECDHE-RSA-AES128-SHA', 'ECDHE-ECDSA-AES256-SHA384', 'ECDHE-RSA-AES256-SHA384', 'ECDHE-ECDSA-AES256-SHA', 'ECDHE-RSA-AES256-SHA', 'DHE-RSA-AES128-SHA256', 'DHE-RSA-AES256-SHA256', 'AES128-GCM-SHA256', 'AES256-GCM-SHA384', 'AES128-SHA256', 'AES256-SHA256', 'AES128-SHA', 'AES256-SHA', 'DES-CBC3-SHA' ) class AppData(TypedDict): server_alpn: Optional[bytes] http2: bool def alpn_select_callback(conn: SSL.Connection, options: List[bytes]) -> Any: app_data: AppData = conn.get_app_data() server_alpn = app_data["server_alpn"] http2 = app_data["http2"] if server_alpn and server_alpn in options: return server_alpn http_alpns = tls.HTTP_ALPNS if http2 else tls.HTTP1_ALPNS for alpn in options: # client sends in order of preference, so we are nice and respect that. if alpn in http_alpns: return alpn else: return SSL.NO_OVERLAPPING_PROTOCOLS class TlsConfig: """ This addon supplies the proxy core with the desired OpenSSL connection objects to negotiate TLS. """ certstore: certs.CertStore = None # type: ignore # TODO: We should support configuring TLS 1.3 cipher suites (https://github.com/mitmproxy/mitmproxy/issues/4260) # TODO: We should re-use SSL.Context options here, if only for TLS session resumption. # This may require patches to pyOpenSSL, as some functionality is only exposed on contexts. 
# TODO: This addon should manage the following options itself, which are current defined in mitmproxy/options.py: # - upstream_cert # - add_upstream_certs_to_client_chain # - ciphers_client # - ciphers_server # - key_size # - certs # - cert_passphrase # - ssl_verify_upstream_trusted_ca # - ssl_verify_upstream_trusted_confdir def load(self, loader): loader.add_option( name="tls_version_client_min", typespec=str, default=net_tls.DEFAULT_MIN_VERSION.name, choices=[x.name for x in net_tls.Version], help=f"Set the minimum TLS version for client connections.", ) loader.add_option( name="tls_version_client_max", typespec=str, default=net_tls.DEFAULT_MAX_VERSION.name, choices=[x.name for x in net_tls.Version], help=f"Set the maximum TLS version for client connections.", ) loader.add_option( name="tls_version_server_min", typespec=str, default=net_tls.DEFAULT_MIN_VERSION.name, choices=[x.name for x in net_tls.Version], help=f"Set the minimum TLS version for server connections.", ) loader.add_option( name="tls_version_server_max", typespec=str, default=net_tls.DEFAULT_MAX_VERSION.name, choices=[x.name for x in net_tls.Version], help=f"Set the maximum TLS version for server connections.", ) def tls_clienthello(self, tls_clienthello: tls.ClientHelloData): conn_context = tls_clienthello.context only_non_http_alpns = ( conn_context.client.alpn_offers and all(x not in tls.HTTP_ALPNS for x in conn_context.client.alpn_offers) ) tls_clienthello.establish_server_tls_first = conn_context.server.tls and ( ctx.options.connection_strategy == "eager" or ctx.options.add_upstream_certs_to_client_chain or ctx.options.upstream_cert and ( only_non_http_alpns or not conn_context.client.sni ) ) def tls_start(self, tls_start: tls.TlsStartData): if tls_start.conn == tls_start.context.client: self.create_client_proxy_ssl_conn(tls_start) else: self.create_proxy_server_ssl_conn(tls_start) def create_client_proxy_ssl_conn(self, tls_start: tls.TlsStartData) -> None: client: connection.Client = tls_start.context.client server: connection.Server = tls_start.context.server entry = self.get_cert(tls_start.context) if not client.cipher_list and ctx.options.ciphers_client: client.cipher_list = ctx.options.ciphers_client.split(":") # don't assign to client.cipher_list, doesn't need to be stored. cipher_list = client.cipher_list or DEFAULT_CIPHERS if ctx.options.add_upstream_certs_to_client_chain: # pragma: no cover # exempted from coverage until https://bugs.python.org/issue18233 is fixed. 
extra_chain_certs = server.certificate_list else: extra_chain_certs = [] ssl_ctx = net_tls.create_client_proxy_context( min_version=net_tls.Version[ctx.options.tls_version_client_min], max_version=net_tls.Version[ctx.options.tls_version_client_max], cipher_list=cipher_list, cert=entry.cert, key=entry.privatekey, chain_file=entry.chain_file, request_client_cert=False, alpn_select_callback=alpn_select_callback, extra_chain_certs=extra_chain_certs, dhparams=self.certstore.dhparams, ) tls_start.ssl_conn = SSL.Connection(ssl_ctx) tls_start.ssl_conn.set_app_data(AppData( server_alpn=server.alpn, http2=ctx.options.http2, )) tls_start.ssl_conn.set_accept_state() def create_proxy_server_ssl_conn(self, tls_start: tls.TlsStartData) -> None: client: connection.Client = tls_start.context.client server: connection.Server = tls_start.context.server assert server.address if ctx.options.ssl_insecure: verify = net_tls.Verify.VERIFY_NONE else: verify = net_tls.Verify.VERIFY_PEER if server.sni is True: server.sni = client.sni or server.address[0] if not server.alpn_offers: if client.alpn_offers: if ctx.options.http2: server.alpn_offers = tuple(client.alpn_offers) else: server.alpn_offers = tuple(x for x in client.alpn_offers if x != b"h2") elif client.tls_established: # We would perfectly support HTTP/1 -> HTTP/2, but we want to keep things on the same protocol version. # There are some edge cases where we want to mirror the regular server's behavior accurately, # for example header capitalization. server.alpn_offers = [] elif ctx.options.http2: server.alpn_offers = tls.HTTP_ALPNS else: server.alpn_offers = tls.HTTP1_ALPNS if not server.cipher_list and ctx.options.ciphers_server: server.cipher_list = ctx.options.ciphers_server.split(":") # don't assign to client.cipher_list, doesn't need to be stored. cipher_list = server.cipher_list or DEFAULT_CIPHERS client_cert: Optional[str] = None if ctx.options.client_certs: client_certs = os.path.expanduser(ctx.options.client_certs) if os.path.isfile(client_certs): client_cert = client_certs else: server_name: str = server.sni or server.address[0] p = os.path.join(client_certs, f"{server_name}.pem") if os.path.isfile(p): client_cert = p ssl_ctx = net_tls.create_proxy_server_context( min_version=net_tls.Version[ctx.options.tls_version_client_min], max_version=net_tls.Version[ctx.options.tls_version_client_max], cipher_list=cipher_list, verify=verify, sni=server.sni, ca_path=ctx.options.ssl_verify_upstream_trusted_confdir, ca_pemfile=ctx.options.ssl_verify_upstream_trusted_ca, client_cert=client_cert, alpn_protos=server.alpn_offers, ) tls_start.ssl_conn = SSL.Connection(ssl_ctx) if server.sni: tls_start.ssl_conn.set_tlsext_host_name(server.sni.encode()) tls_start.ssl_conn.set_connect_state() def running(self): # FIXME: We have a weird bug where the contract for configure is not followed and it is never called with # confdir or command_history as updated. 
self.configure("confdir") # pragma: no cover def configure(self, updated): if "confdir" not in updated and "certs" not in updated: return certstore_path = os.path.expanduser(ctx.options.confdir) self.certstore = certs.CertStore.from_store( path=certstore_path, basename=CONF_BASENAME, key_size=ctx.options.key_size, passphrase=ctx.options.cert_passphrase.encode("utf8") if ctx.options.cert_passphrase else None, ) if self.certstore.default_ca.has_expired(): ctx.log.warn( "The mitmproxy certificate authority has expired!\n" "Please delete all CA-related files in your ~/.mitmproxy folder.\n" "The CA will be regenerated automatically after restarting mitmproxy.\n" "See https://docs.mitmproxy.org/stable/concepts-certificates/ for additional help.", ) for certspec in ctx.options.certs: parts = certspec.split("=", 1) if len(parts) == 1: parts = ["*", parts[0]] cert = Path(parts[1]).expanduser() if not cert.exists(): raise exceptions.OptionsError(f"Certificate file does not exist: {cert}") try: self.certstore.add_cert_file( parts[0], cert, passphrase=ctx.options.cert_passphrase.encode("utf8") if ctx.options.cert_passphrase else None, ) except ValueError as e: raise exceptions.OptionsError(f"Invalid certificate format for {cert}: {e}") from e def get_cert(self, conn_context: context.Context) -> certs.CertStoreEntry: """ This function determines the Common Name (CN), Subject Alternative Names (SANs) and Organization Name our certificate should have and then fetches a matching cert from the certstore. """ altnames: List[str] = [] organization: Optional[str] = None # Use upstream certificate if available. if conn_context.server.certificate_list: upstream_cert = conn_context.server.certificate_list[0] if upstream_cert.cn: altnames.append(upstream_cert.cn) altnames.extend(upstream_cert.altnames) if upstream_cert.organization: organization = upstream_cert.organization # Add SNI. If not available, try the server address as well. if conn_context.client.sni: altnames.append(conn_context.client.sni) elif conn_context.server.address: altnames.append(conn_context.server.address[0]) # As a last resort, add *something* so that we have a certificate to serve. if not altnames: altnames.append("mitmproxy") # only keep first occurrence of each hostname altnames = list(dict.fromkeys(altnames)) # RFC 2818: If a subjectAltName extension of type dNSName is present, that MUST be used as the identity. # In other words, the Common Name is irrelevant then. return self.certstore.get_cert(altnames[0], altnames, organization)
mit
-6,556,963,244,387,718,000
42.762238
119
0.622803
false
3.723892
true
false
false
Carreau/difflib2.py
examples/lcs_cutmodule.py
1
5199
from __future__ import print_function from array import array from itertools import islice def lcs_cut2(s1, s2, lcs_low_bound=0, bg=None, debug=False): """Compule the length of the LCS 2 sequences s1 and s2. lcs_low_bound : (int), hint of lower bound for the lenght of the lcs to search for. Default to 0. Algorithmic description: This is a derivation of Hirschberg's algorithm which include some optimisation for specific case. This shoudl use an O(n) memory (n = len(s1)) and should have a worse case scenario time complexity of O(n**2). In the best case scenario, (l ~ n) the time complexity is closer to O(n*l) where l is the lenght of the longest common subsequence. Though, detail of implementaiton of s1 and s2 object slicing will affect the optimal performace. bg is four debug purpose, to see how the algorithme behave visually using iptyhonblocks. uncomment bg lines below to use. """ m = len(s1) n = len(s2) if n==0 or m==0: return 0 # rng is for row "rang" in french, "c" is for current and "p" for previous. # array are n+1 so that last elemnt is 0. This allow # to avoid special casing j=0 as j-1 will wrap arround. # alternative is to offset all indexes by 1, wichi becames hard to # track rngc = array('i',[0 for x in range(n+1)]) ## current row rngp = array('i',[0 for x in range(n+1)]) ## previous row # current max value of the LCS durrgin the search. currentmax = lcs_low_bound # correspond to rngc[j-1], used to avoid lookup in the array # through the loop to shave off soem execution time. rngcjm = None # lower and upper bound for current loop on s2/j limm,limpp = 0,0 # lower bound for iteration on s1/i and # another lower bound s2/j mini,minj = 0,0 if debug: import pdb; pdb.set_trace() for i,c1 in enumerate(s1): # current row become previous, and we reuse previous to avoid # creating a new empty list. rngc, rngp = rngp, rngc limm,limp= max(i-m+currentmax,0,minj-1),min(i+n-currentmax+1,n) rngcjm = rngc[limm-1] if i < mini: print('continue') continue isl = islice(s2,limm,limp) rsl = range(limm,limp) zsl = zip(rsl,isl) for j,c2 in zsl: # if bg: # bg[i,j].green=255 if c1 == c2 : if i == 0 or j == 0: newval = 1 else: newval = rngp[j-1]+1 # here we will peak ahead as far as possible # while the two string are matching, # for strings with high similarity # this with give us hints on which part of the # lcs matrix we do not need to explore. # # we do this only once, if we are at # the beginning of the matching streem. if s1[i-1] != s2[j-1] or i==0 or j==0: lookahead = -1 k = min(m-i,n-j) for cx,cy in zip(s1[i:i+k],s2[j:j+k]): if cx==cy: lookahead +=1 else: break # if bg: # for xx in range(0,lookahead): # bg[i+xx,j+xx].blue=255 tmp = rngc[j]+lookahead # if we are on i,j and have a value M # then it is useless to process columns that have : # - a j value lower than M-j # - a i value lower than M-i lminj=tmp-j lmini=tmp-i if lmini > mini: mini=lmini if lminj > minj: minj=lminj for xx in range(0,minj): rngp[xx]=tmp-1 rngc[xx]=tmp-1 # if bg: # for xx in range(0,lminj): # for lh in range(i,m): # bg[lh,xx].red =255 # for xx in range(0,lmini): # for lh in range(j,n): # bg[xx,lh].red =255 # bg[i+lookahead,j+lookahead].red =255 if j >= limp+1: break if tmp > currentmax: currentmax = tmp assert(currentmax <=m) assert(currentmax <=n) limp= min(i+n-currentmax+1,n) if newval > currentmax: currentmax = newval else : b = rngp[j] newval = rngcjm if rngcjm > b else b # assert(newval <= i+1) # assert(newval <= j+1) rngc[j] = rngcjm = newval print(rngc) print('==',rngc) return rngc[-2]
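# --- Editor's note: illustrative usage sketch, not part of the original file. ---
# lcs_cut2() returns only the *length* of the longest common subsequence.
# A quick, hypothetical sanity check: the LCS of "abcde" and "ace" is "ace",
# so a correct implementation should report 3. The lcs_low_bound hint only
# narrows the search window; passing a bound no larger than the true LCS
# length is assumed to leave the result unchanged.
if __name__ == "__main__":
    print("LCS length:", lcs_cut2("abcde", "ace"))                   # expected: 3
    print("with hint :", lcs_cut2("abcde", "ace", lcs_low_bound=2))  # expected: 3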
bsd-3-clause
-6,169,857,475,565,440,000
36.956204
79
0.476053
false
3.8369
false
false
false
myriadrf/pyLMS7002M
pyLMS7002M/LimeSDRMini.py
1
11270
#*************************************************************** #* Name: LimeSDRMini.py #* Purpose: Class implementing LimeSDRMini functions #* Author: Lime Microsystems () #* Created: 2018-04-16 #* Copyright: Lime Microsystems (limemicro.com) #* License: #************************************************************** from weakproxy import * from copy import copy from LMS7002 import * from timeit import default_timer as timer import atexit from cyLimeLib import * class LimeSDRMini(object): def __init__(self, fRef = 40.0e6, verbose=0): """ Initialize communication with LimeSDRMini. """ boards = cyLimeLib.getDeviceList() if len(boards)==0: raise ValueError("LimeSDR not found") self.cyDev = None for i in range(0,len(boards)): if "LimeSDR Mini" in boards[i]: self.cyDev = cyLimeLib(boards[i]) break if self.cyDev==None: raise ValueError("LimeSDRMini not found") self.usb = self.cyDev # http://stackoverflow.com/questions/8907905/del-myclass-doesnt-call-object-del # https://docs.python.org/3/reference/datamodel.html#object.__del__ # solution is to avoid __del__, define an explict close() and call it atexit atexit.register(self.close) #self.usb.setConfiguration() self.verbose = verbose self.bulkControl = False self.fRef = fRef # reference frequency FW_VER, DEV_TYPE, LMS_PROTOCOL_VER, HW_VER, EXP_BOARD = self.getInfo() if DEV_TYPE!=17: ret = "FW_VER : "+str(FW_VER)+"\n" ret += "DEV_TYPE : "+str(DEV_TYPE)+"\n" ret += "LMS_PROTOCOL_VER : " + str(LMS_PROTOCOL_VER)+"\n" ret += "HW_VER : " + str(HW_VER)+"\n" ret += "EXP_BOARD : " + str(EXP_BOARD)+"\n" raise ValueError("The board is not LimeSDR.\nBoard info:\n"+ret) if verbose>0: self.printInfo() # # Initialize on-board chips # self.LMS7002 = LMS7002(SPIwriteFn=Proxy(self.LMS7002_Write), SPIreadFn=Proxy(self.LMS7002_Read) , verbose=verbose, MCUProgram=Proxy(self.MCUProgram), fRef = self.fRef) self.LMS7002.MIMO = 'MIMO' def close(self): """ Close communication with LimeSDR """ del self.cyDev @staticmethod def findLMS7002(backend="PyUSB"): return cyLimeLib.getDeviceList() def log(self, logMsg): print logMsg def getCommandNumber(self, cmdName): if cmdName == "CMD_GET_INFO": return 0x00 elif cmdName == "CMD_LMS7002_RST": return 0x20 elif cmdName == "LMS_RST_DEACTIVATE": return 0x00 elif cmdName == "LMS_RST_ACTIVATE": return 0x01 elif cmdName == "LMS_RST_PULSE": return 0x02 elif cmdName == "CMD_LMS7002_WR": return 0x21 elif cmdName == "CMD_LMS7002_RD": return 0x22 elif cmdName == "CMD_PROG_MCU": return 0x2C else: raise ValueError("Unknown command "+cmdName) def getLMS7002(self): return self.LMS7002 # # Low level communication # @staticmethod def bytes2string(bytes): """ Convert the byte array to string. Used for serial communication. """ s = "" for i in range(0,len(bytes)): s += chr(bytes[i]) return s @staticmethod def string2bytes(string): """ Convert the string to byte array. Used for serial communication. """ bytes = [0]*int(len(string)) for i in range(0, len(string)): bytes[i] = ord(string[i]) return bytes def sendCommand(self, command, nDataBlocks=0, periphID=0, data=[]): """ Send the command to LimeSDR. Function returns (status, data) """ nData = len(data) if nData>56: raise ValueError("Length of data must be less than 56, "+str(nData)+" bytes given") return self.cyDev.transferLMS64C(command, data) # # Utility functions # def getInfo(self): """ Get the information about LimeSDR. 
Function returns (FW_VER, DEV_TYPE, LMS_PROTOCOL_VER, HW_VER, EXP_BOARD) """ command = self.getCommandNumber("CMD_GET_INFO") status, rxData = self.sendCommand(command) if status != 1: raise IOError("Command returned with status "+str(status)) FW_VER = rxData[0] DEV_TYPE = rxData[1] LMS_PROTOCOL_VER = rxData[2] HW_VER = rxData[3] EXP_BOARD = rxData[4] return (FW_VER, DEV_TYPE, LMS_PROTOCOL_VER, HW_VER, EXP_BOARD) def printInfo(self): """ Print info about LimeSDR """ FW_VER, DEV_TYPE, LMS_PROTOCOL_VER, HW_VER, EXP_BOARD = self.getInfo() self.log("FW_VER : "+str(FW_VER)) self.log("DEV_TYPE : "+str(DEV_TYPE)) self.log("LMS_PROTOCOL_VER : " + str(LMS_PROTOCOL_VER)) self.log("HW_VER : " + str(HW_VER)) self.log("EXP_BOARD : " + str(EXP_BOARD)) def LMS7002_Reset(self, rstType="pulse"): """ Reset LMS7002. rstType specifies the type of reset: pulse - activate and deactivate reset activate - activate reset deactivate - deactivate reset """ command = self.getCommandNumber("CMD_LMS7002_RST") if rstType=="pulse": data = [self.getCommandNumber("LMS_RST_PULSE")] elif rstType=="activate": data = [self.getCommandNumber("LMS_RST_ACTIVATE")] elif rstType=="deactivate": data = [self.getCommandNumber("LMS_RST_DEACTIVATE")] else: raise ValueError("Invalid reset type "+str(rstType)) rxStatus, rxData = self.sendCommand(command, data=data) if rxStatus != 1: raise IOError("Command returned with status "+str(status)) self.LMS7002.loadResetValues() self.cyDev.LMSInit() def LMS7002_Write(self, regList, packetSize=14): """ Write the data to LMS7002 via SPI interface. regList is a list of registers to write in the format: [ (regAddr, regData), (regAddr, regData), ...] packetSize controls the number of register writes in a single USB transfer """ command = self.getCommandNumber("CMD_LMS7002_WR") nDataBlocks = len(regList) toSend = copy(regList) while len(toSend)>0: nPackets = 0 data = [] while nPackets<packetSize and len(toSend)>0: regAddr, regData = toSend[0] toSend.pop(0) regAddrH = regAddr >> 8 regAddrL = regAddr % 256 regDataH = regData >> 8 regDataL = regData % 256 data += [regAddrH, regAddrL, regDataH, regDataL] nPackets += 1 rxStatus, rxData = self.sendCommand(command, nDataBlocks = nPackets, data=data) if rxStatus != 1: raise IOError("Command returned with status "+str(rxStatus)) def LMS7002_Read(self, regList, packetSize=14): """ Read the data from LMS7002 via SPI interface. regList is a list of registers to read in the format: [ regAddr, regAddr, ...] packetSize controls the number of register writes in a single USB transfer """ command = self.getCommandNumber("CMD_LMS7002_RD") nDataBlocks = len(regList) toRead = copy(regList) regData = [] while len(toRead)>0: nPackets = 0 data = [] while nPackets<packetSize and len(toRead)>0: regAddr = toRead[0] toRead.pop(0) regAddrH = regAddr >> 8 regAddrL = regAddr % 256 data += [regAddrH, regAddrL] nPackets += 1 rxStatus, rxData = self.sendCommand(command, nDataBlocks = nPackets, data=data) if rxStatus != 1: raise IOError("Command returned with status "+str(rxStatus)) for i in range(0, nPackets): regDataH = rxData[i*4+2] regDataL = rxData[i*4+3] regData.append( (regDataH << 8) + regDataL) return regData # # LMS7002 MCU program # def MCUProgram(self, mcuProgram, Mode): ver, rev, mask = self.getLMS7002().chipInfo if mask==1: # MCU has 16k RAM if len(mcuProgram)>16384: raise ValueError("MCU program for mask 1 chips must be less than 16 kB. 
Given program size:"+str(len(mcuProgram))) if len(mcuProgram)==8192: # Check if program is 8k mcuProgram += [0]*8192 # Extend it to 16k self._MCUProgram_Direct(mcuProgram, Mode) else: # MCU has 8k RAM if len(mcuProgram)>8192: raise ValueError("MCU program for mask 0 chips must be less than 8 kB. Given program size:"+str(len(mcuProgram))) self._MCUProgram_Direct(mcuProgram, Mode) def _MCUProgram_Direct(self, mcuProgram, Mode): """ Write the data to LMS7002 MCU via SPI interface. MCU is programmed directly by using bulk interface MCU commands. mcuProgram is 8192 or 16384 bytes long array holding the MCU program. mode selects the MCU programming mode. """ if Mode not in [0, 1,2,3, 'EEPROM_AND_SRAM', 'SRAM', 'SRAM_FROM_EEPROM']: raise ValueError("Mode should be [1,2,3, 'EEPROM_AND_SRAM', 'SRAM', 'SRAM_FROM_EEPROM']") if Mode==0: return elif Mode==1 or Mode=='EEPROM_AND_SRAM': mode = 1 elif Mode==2 or Mode=='SRAM': mode = 2 else: mode = 3 if len(mcuProgram)!=8192 and len(mcuProgram)!=16384: raise ValueError("MCU program should be 8192 or 16384 bytes long") toSend = [ (2, 0), (2, mode)] # Write 0 to address 2, write mode to address 2 (mSPI_CTRL) self.LMS7002_Write(toSend) lms7002 = self.getLMS7002() pos = 0 while pos<len(mcuProgram): startTime = timer() while lms7002.mSPI.EMPTY_WRITE_BUFF==0: if timer()-startTime>1: raise IOError("MCU programming timeout") for j in range(0, 4): toSend = [] for i in range(0, 8): toSend.append( (4, mcuProgram[pos]) ) pos += 1 self.LMS7002_Write(toSend) if mode==3: break startTime = timer() while lms7002.mSPI.PROGRAMMED==0: if timer()-startTime>1: raise IOError("MCU programming timeout")
apache-2.0
-7,336,002,023,781,494,000
34.329154
130
0.536823
false
3.78569
false
false
false
rgblabs/rgbTools
rgbTools/utils/filesystem.py
1
2692
import maya.cmds as cmds

def which (program):
    '''
    If the application is found, returns its path.
    Works with both full application paths and applications
    available on the OS's defined PATH.
    '''
    import os
    def is_exe (fpath):
        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)

    fpath, fname = os.path.split(program)
    if fpath:
        if is_exe(program):
            return program
    else:
        for path in os.environ["PATH"].split(os.pathsep):
            path = path.strip('"')
            exe_file = os.path.join(path, program)
            if is_exe(exe_file):
                return exe_file

    return None

def getOSPaths ():
    '''Return the directories on the OS PATH as a list.'''
    import os
    paths = []
    for path in os.environ["PATH"].split(os.pathsep):
        paths.append(path.strip('"'))
    return paths

def getPythonPaths ():
    '''Return the current Python module search paths (sys.path).'''
    import sys
    paths = []
    for pythonPath in sys.path:
        paths.append(pythonPath)
    return paths

def getUserPaths ():
    '''Return Maya's per-user directories as a dict keyed by internalVar name.'''
    upaths = {}
    upaths['userAppDir'] = cmds.internalVar(userAppDir=1)
    upaths['userScriptDir'] = cmds.internalVar(userScriptDir=1)
    upaths['userPrefDir'] = cmds.internalVar(userPrefDir=1)
    upaths['userPresetsDir'] = cmds.internalVar(userPresetsDir=1)
    upaths['userShelfDir'] = cmds.internalVar(userShelfDir=1)
    upaths['userMarkingMenuDir'] = cmds.internalVar(userMarkingMenuDir=1)
    upaths['userBitmapsDir'] = cmds.internalVar(userBitmapsDir=1)
    upaths['userTmpDir'] = cmds.internalVar(userTmpDir=1)
    upaths['userWorkspaceDir'] = cmds.internalVar(userWorkspaceDir=1)
    return upaths

def getEnvPaths():
    '''Return Maya's script, plug-in, Python, icon and PATH search paths as lists.'''
    import os
    import sys
    import maya.mel as mel

    scriptPaths = mel.eval("getenv \"MAYA_SCRIPT_PATH\"")
    plugInPaths = mel.eval("getenv \"MAYA_PLUG_IN_PATH\"")
    pythonPaths = mel.eval("getenv \"PYTHONPATH\"")
    iconPaths = mel.eval("getenv \"XBMLANGPATH\"")
    pathPaths = mel.eval("getenv \"PATH\"")
    sysPaths = sys.path

    return {
        'MAYA_SCRIPT_PATH' : scriptPaths.split(os.pathsep),
        'MAYA_PLUG_IN_PATH' : plugInPaths.split(os.pathsep),
        'PYTHONPATH' : pythonPaths.split(os.pathsep),
        'XBMLANGPATH' : iconPaths.split(os.pathsep),
        'PATH' : pathPaths.split(os.pathsep),
        'sys' : sysPaths
    }

def getCurrentFilePath ():
    '''Return the full path of the currently open scene file.'''
    return cmds.file(query=True, sceneName=True)

def crashRecoverDialog ():
    '''Prompt for a crash-recovery .ma file in the user temp dir and open it.'''
    dirpath = cmds.internalVar(userTmpDir=1)
    mask = dirpath+'*.ma'
    filepath = cmds.fileDialog(title='Recover Crash File...', directoryMask=mask)

    # fileDialog returns an empty string when the user cancels; compare by
    # value rather than identity ("is not" on strings is unreliable).
    if filepath != '':
        cmds.file(filepath, open=True)
        cmds.file(renameToSave=True)
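# --- Editor's note: illustrative usage sketch, not part of the original file. ---
# which() mirrors the Unix `which` command: it returns the resolved path when
# the executable exists (as a full path or on the OS PATH) and None otherwise.
# "python" is used here only as a hypothetical example program.
def _report_tool(program="python"):
    path = which(program)
    if path:
        print("%s resolves to %s" % (program, path))
    else:
        print("%s was not found on PATH" % program)
    return path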
mit
-6,761,105,692,595,262,000
28.911111
81
0.635587
false
3.403287
false
false
false
sevagas/macro_pack
src/vbLib/Base64ToText.py
1
1559
VBA = \ r""" Function Base64ToText(ByVal vCode) Dim oXML, oNode Dim tempString As String tempString = "Msxm" tempString = tempString & "l2.DO" tempString = tempString & "MDoc" tempString = tempString & "ument.3.0" Set oXML = CreateObject(tempString) Set oNode = oXML.CreateElement("base64") oNode.DataType = "bin.base64" oNode.Text = vCode Base64ToText = Stream_BinaryToString(oNode.nodeTypedValue) Set oNode = Nothing Set oXML = Nothing End Function 'Stream_BinaryToString Function '2003 Antonin Foller, http://www.motobit.com 'Binary - VT_UI1 | VT_ARRAY data To convert To a string Private Function Stream_BinaryToString(Binary) Const adTypeText = 2 Const adTypeBinary = 1 'Create Stream object Dim BinaryStream 'As New Stream Dim tmpString As String tmpString = "ADO" tmpString = tmpString & "DB.St" tmpString = tmpString & "ream" Set BinaryStream = CreateObject(tmpString) 'Specify stream type - we want To save binary data. BinaryStream.Type = adTypeBinary 'Open the stream And write binary data To the object BinaryStream.Open BinaryStream.Write Binary 'Change stream type To text/string BinaryStream.Position = 0 BinaryStream.Type = adTypeText 'Specify charset For the output text (unicode) data. BinaryStream.Charset = "us-ascii" 'Open the stream And get text/string data from the object Stream_BinaryToString = BinaryStream.ReadText Set BinaryStream = Nothing End Function """
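# --- Editor's note: illustrative sketch, not part of the original file. ---
# The VBA above decodes base64 through an MSXML "bin.base64" node and then
# converts the resulting bytes to text with an ADODB.Stream forced to the
# "us-ascii" charset. A rough Python equivalent of that transformation,
# included only for comparison (it is not used by the generator itself):
import base64

def base64_to_text_py(vcode):
    # Decode base64, then interpret the bytes as ASCII text, mirroring the
    # macro's Base64ToText behaviour.
    return base64.b64decode(vcode).decode("ascii")

# Example: base64_to_text_py("aGVsbG8=") -> "hello"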
apache-2.0
-7,680,315,020,665,851,000
27.87037
62
0.695959
false
3.676887
false
false
false
wevoice/wesub
apps/socialauth/views.py
1
12656
from django.shortcuts import render_to_response, redirect from django.contrib import messages from django.template import RequestContext from django.contrib.auth import authenticate, login from django.http import HttpResponseRedirect, HttpResponse from django.core.urlresolvers import reverse from django.conf import settings from django.contrib.auth.decorators import login_required from django.contrib.auth.views import logout from django.utils.http import urlencode from auth.backends import OpenIdBackend from socialauth.models import AuthMeta from socialauth.forms import EditProfileForm from thirdpartyaccounts.models import TwitterAccount """ from socialauth.models import YahooContact, TwitterContact, FacebookContact,\ SocialProfile, GmailContact """ from openid_consumer.views import begin from socialauth.lib import oauthtwitter2 as oauthtwitter from socialauth.lib.facebook import get_facebook_signature from oauth import oauth from datetime import datetime from django.utils.http import urlquote from utils.translation import get_user_languages_from_cookie from auth.models import UserLanguage TWITTER_CONSUMER_KEY = getattr(settings, 'TWITTER_CONSUMER_KEY', '') TWITTER_CONSUMER_SECRET = getattr(settings, 'TWITTER_CONSUMER_SECRET', '') def get_url_host(request): # FIXME: Duplication if request.is_secure(): protocol = 'https' else: protocol = 'http' host = request.get_host() return '%s://%s' % (protocol, host) def login_page(request): payload = {'fb_api_key':settings.FACEBOOK_API_KEY,} return render_to_response('socialauth/login_page.html', payload, RequestContext(request)) def twitter_login(request, next=None): callback_url = None if next is not None: callback_url = '%s%s?next=%s' % \ (get_url_host(request), reverse("socialauth_twitter_login_done"), urlquote(next)) twitter = oauthtwitter.TwitterOAuthClient(settings.TWITTER_CONSUMER_KEY, settings.TWITTER_CONSUMER_SECRET) request_token = twitter.fetch_request_token(callback_url) request.session['request_token'] = request_token.to_string() signin_url = twitter.authorize_token_url(request_token) return HttpResponseRedirect(signin_url) def twitter_login_done(request): request_token = request.session.get('request_token', None) oauth_verifier = request.GET.get("oauth_verifier", None) # If there is no request_token for session, # Means we didn't redirect user to twitter if not request_token: # Redirect the user to the login page, # So the user can click on the sign-in with twitter button return HttpResponse("We didn't redirect you to twitter...") token = oauth.OAuthToken.from_string(request_token) # If the token from session and token from twitter does not match # means something bad happened to tokens if token.key != request.GET.get('oauth_token', 'no-token'): del request.session['request_token'] if request.GET.get('denied', None) is not None: messages.info(request, "Twitter authorization cancelled.") return redirect('profiles:account') messages.error(request, "Something wrong! Tokens do not match...") # Redirect the user to the login page return redirect('auth:login') twitter = oauthtwitter.TwitterOAuthClient(settings.TWITTER_CONSUMER_KEY, settings.TWITTER_CONSUMER_SECRET) access_token = twitter.fetch_access_token(token, oauth_verifier) request.session['access_token'] = access_token.to_string() if request.session.get('no-login', False): # The user is trying to link a Twitter account to their Amara account. 
if not request.user.is_authenticated(): messages.error(request, 'You must be logged in.') return redirect('auth:login') try: from socialauth.lib.oauthtwitter import OAuthApi twitter = OAuthApi(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET, access_token) userinfo = twitter.GetUserInfo() except Exception, e: # TODO: Raise something more useful here raise e username = userinfo.screen_name try: account = TwitterAccount.objects.get(username=username) if request.user.pk != account.user.pk: messages.error(request, 'Account already linked') return redirect('profiles:account') except TwitterAccount.DoesNotExist: TwitterAccount.objects.create(user=request.user, username=username, access_token=access_token.to_string()) del request.session['no-login'] messages.info(request, 'Successfully linked a Twitter account') return redirect('profiles:account') request.session['access_token'] = access_token.to_string() user = authenticate(access_token=access_token) # if user is authenticated then login user if user: if not user.userlanguage_set.exists(): langs = get_user_languages_from_cookie(request) for l in langs: UserLanguage.objects.get_or_create(user=user, language=l) login(request, user) else: # We were not able to authenticate user # Redirect to login page del request.session['access_token'] del request.session['request_token'] return HttpResponseRedirect(reverse('socialauth_login_page')) # authentication was successful, use is now logged in return HttpResponseRedirect(request.GET.get('next', settings.LOGIN_REDIRECT_URL)) def openid_login(request, confirmed=True): if 'openid_identifier' in request.GET: user_url = request.GET.get('openid_identifier') request.session['openid_provider'] = user_url return begin(request, user_url = user_url, confirmed=confirmed) else: if 'google.com' in request.POST.get('openid_url', ''): request.session['openid_provider'] = 'Google' return begin(request, user_url='https://www.google.com/accounts/o8/id', confirmed=confirmed) elif 'yahoo.com' in request.POST.get('openid_url', ''): request.session['openid_provider'] = 'Yahoo' else: request.session['openid_provider'] = 'Openid' return begin(request, confirmed=confirmed) def gmail_login(request): request.session['openid_provider'] = 'Google' return begin(request, user_url='https://www.google.com/accounts/o8/id') def udacity_login(request, confirmed=True): request.session['openid_provider'] = 'Udacity' return begin(request, user_url='https://www.udacity.com/openid/server', confirmed=confirmed) def gmail_login_complete(request): pass def yahoo_login(request): request.session['openid_provider'] = 'Yahoo' return begin(request, user_url='http://yahoo.com/') def openid_done(request, provider=None, confirmed=True): """ When the request reaches here, the user has completed the Openid authentication flow. He has authorised us to login via Openid, so request.openid is populated. After coming here, we want to check if we are seeing this openid first time. If we are, we will create a new Django user for this Openid, else login the existing openid. 
""" if not provider: provider = request.session.get('openid_provider', '') if request.openid: #check for already existing associations openid_key = str(request.openid) #authenticate and login if not confirmed: (existing, suggested_email) = OpenIdBackend.pre_authenticate(openid_key=openid_key, request=request, provider=provider) if not existing: if provider == 'Udacity': return redirect('auth:confirm_create_user', 'udacity', suggested_email) elif provider == 'Openid': openid_url = request.GET.get('openid_url', '') response = redirect('auth:confirm_create_user', 'openid', suggested_email) if openid_url: response['Location'] += '?' + urlencode({'openid_url': openid_url}) return response else: return redirect(reverse('auth:confirm_create_user', provider, suggested_email)) email = request.GET.get('email', None) user = authenticate(openid_key=openid_key, request=request, provider=provider, email=email) if user: if not user.userlanguage_set.exists(): langs = get_user_languages_from_cookie(request) for l in langs: UserLanguage.objects.get_or_create(user=user, language=l) login(request, user) next = None if 'openid_next' in request.session: next = request.session.get('openid_next') if 'next' in request.GET: next = request.GET['next'] if next is not None and len(next.strip()) > 0 : return HttpResponseRedirect(next) redirect_url = reverse('profiles:profile', args=(user,)) return HttpResponseRedirect(redirect_url) else: return HttpResponseRedirect(settings.LOGIN_URL) else: return HttpResponseRedirect(settings.LOGIN_URL) def facebook_login_done(request): API_KEY = settings.FACEBOOK_API_KEY API_SECRET = settings.FACEBOOK_SECRET_KEY REST_SERVER = 'http://api.facebook.com/restserver.php' # FB Connect will set a cookie with a key == FB App API Key if the user has been authenticated if API_KEY in request.COOKIES: signature_hash = get_facebook_signature(API_KEY, API_SECRET, request.COOKIES, True) # The hash of the values in the cookie to make sure they're not forged # AND If session hasn't expired if(signature_hash == request.COOKIES[API_KEY]) and (datetime.fromtimestamp(float(request.COOKIES[API_KEY+'_expires'])) > datetime.now()): #Log the user in now. user = authenticate(cookies=request.COOKIES) if user: # if user is authenticated then login user login(request, user) return HttpResponseRedirect(reverse('socialauth_signin_complete')) else: #Delete cookies and redirect to main Login page. del request.COOKIES[API_KEY + '_session_key'] del request.COOKIES[API_KEY + '_user'] return HttpResponseRedirect(reverse('socialauth_login_page')) return HttpResponseRedirect(reverse('socialauth_login_page')) def openid_login_page(request): return render_to_response('openid/index.html', {}, RequestContext(request)) def signin_complete(request): payload = {} return render_to_response('socialauth/signin_complete.html', payload, RequestContext(request)) @login_required def editprofile(request): if request.method == 'POST': edit_form = EditProfileForm(user=request.user, data=request.POST) if edit_form.is_valid(): user = edit_form.save() try: user.authmeta.is_profile_modified = True user.authmeta.save() except AuthMeta.DoesNotExist: pass if user.openidprofile_set.all().count(): openid_profile = user.openidprofile_set.all()[0] openid_profile.is_valid_username = True openid_profile.save() try: #If there is a profile. 
notify that we have set the username profile = user.get_profile() profile.is_valid_username = True profile.save() except: pass request.user.message_set.create(message='Your profile has been updated.') return HttpResponseRedirect('.') if request.method == 'GET': edit_form = EditProfileForm(user = request.user) payload = {'edit_form':edit_form} return render_to_response('socialauth/editprofile.html', payload, RequestContext(request)) def social_logout(request): # Todo # still need to handle FB cookies, session etc. # let the openid_consumer app handle openid-related cleanup from openid_consumer.views import signout as oid_signout oid_signout(request) # normal logout logout_response = logout(request) if getattr(settings, 'LOGOUT_REDIRECT_URL', None): return HttpResponseRedirect(settings.LOGOUT_REDIRECT_URL) else: return logout_response
agpl-3.0
7,060,478,658,976,568,000
41.469799
145
0.656606
false
4.200465
false
false
false
itu-oss-project-team/oss-github-analysis-project
github_analysis_tool/analyzer/tf-idf.py
1
3362
import os.path import sys sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))) from math import log10 import yaml from github_analysis_tool.services.database_service import DatabaseService class Tfidf: def __init__(self, secret_config): # Generate a github_requester with imported GitHub tokens self.__databaseService = DatabaseService(secret_config['mysql']) self.__commits = [] def addCommitToDictionary(self, commit_sha, commit_message): commit_msg = str(commit_message).encode('utf-8') commit_msg = str(commit_msg) #sha, message, tf-idf self.__commits.append([commit_sha, commit_msg, 0]) def printValues(self, commitList): print("size: " + str(len(commitList)) + "\n") for commit in commitList: commit_msg = str(commit[1]) print(commit_msg + " tf-idf: " + str(commit[2])) def generateContainer(self): repos = self.__databaseService.getAllRepos(get_only_ids=True) for repo_id in repos: commits = self.__databaseService.getCommitsOfRepo(repo_id, get_only_shas=False) for commit in commits: self.addCommitToDictionary(commit["sha"], commit["message"]) return def tf_idf(self, keywords, threshold_value=0): scored_commits = [] count_of_all_occurances=0 print("Total number of commits: " + str(len(self.__commits))) #idf calculation for commit in self.__commits: commit_msg = commit[1] for word in commit_msg.split(): for keyword in keywords: if word == keyword: count_of_all_occurances += 1 break idf = log10(len(self.__commits)/count_of_all_occurances) print("idf: " + str(idf)) #tf calculation for each commit message for commit in self.__commits: commit_msg = commit[1] count_of_similarities_in_msg=0 for word in commit_msg.split(): for keyword in keywords: if word == keyword: count_of_similarities_in_msg += 1 score = count_of_similarities_in_msg / len(commit_msg.split()) score = score * idf commit[2] = score if score > threshold_value: #sha, message, score scored_commits.append([commit[0], commit[1], commit[2]]) scored_commits.sort(key=lambda x:x[2]) return scored_commits def main(): with open(os.path.join(os.path.dirname(__file__), os.pardir, 'config_secret.yaml'), 'r') as ymlfile: secret_config = yaml.load(ymlfile) tfidf = Tfidf(secret_config) tfidf.generateContainer() print("\nBUG-FIX COMMITS\n") bugfix_commits = tfidf.tf_idf(["Fix", "fixed", "edit", "edited", "modify", "modified", "correct", "corrected"], 0.0) tfidf.printValues(bugfix_commits) print("\nADD NEW FEATURE COMMITS\n") add_commits = tfidf.tf_idf(["add", "added", "implement", "implemented", "feat", "feature"], 0.0) tfidf.printValues(add_commits) print("\nREMOVE COMMITS\n") remove_commits = tfidf.tf_idf(["delete", "deleted", "remove", "removed"], 0.0) tfidf.printValues(remove_commits) return main()
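# --- Editor's note: illustrative sketch, not part of the original file. ---
# A self-contained, toy-sized illustration of the tf * idf scoring style used
# above, kept separate from the Tfidf class so it needs no database access.
# The commit messages and keywords below are hypothetical.
def _toy_scores():
    commits = [
        "fix null pointer in parser",
        "add new export feature",
        "fix typo in readme",
    ]
    keywords = {"fix", "fixed"}

    # Keyword hits over the whole corpus drive the idf term...
    total_hits = sum(1 for msg in commits for word in msg.split() if word in keywords)
    idf = log10(len(commits) / total_hits)      # log10(3 / 2) ~= 0.176

    # ...and the per-message hit ratio is the tf term.
    scores = []
    for msg in commits:
        words = msg.split()
        hits = sum(1 for word in words if word in keywords)
        tf = hits / float(len(words))           # e.g. 1 hit / 5 words = 0.2
        scores.append((msg, tf * idf))
    return scores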
mit
8,553,894,782,115,481,000
34.020833
120
0.594289
false
3.735556
false
false
false