# LockScreen/Backend: venv/lib/python2.7/site-packages/awscli/customizations/codedeploy/utils.py
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import platform
import re
from awscli.compat import urlopen, URLError
from awscli.customizations.codedeploy.systems import System, Ubuntu, Windows, RHEL
from socket import timeout
MAX_INSTANCE_NAME_LENGTH = 100
MAX_TAGS_PER_INSTANCE = 10
MAX_TAG_KEY_LENGTH = 128
MAX_TAG_VALUE_LENGTH = 256
INSTANCE_NAME_PATTERN = r'^[A-Za-z0-9+=,.@_-]+$'
IAM_USER_ARN_PATTERN = r'^arn:aws:iam::[0-9]{12}:user/[A-Za-z0-9/+=,.@_-]+$'
INSTANCE_NAME_ARG = {
    'name': 'instance-name',
    'synopsis': '--instance-name <instance-name>',
    'required': True,
    'help_text': (
        'Required. The name of the on-premises instance.'
    )
}
IAM_USER_ARN_ARG = {
    'name': 'iam-user-arn',
    'synopsis': '--iam-user-arn <iam-user-arn>',
    'required': False,
    'help_text': (
        'Optional. The IAM user associated with the on-premises instance.'
    )
}


def validate_region(params, parsed_globals):
    if parsed_globals.region:
        params.region = parsed_globals.region
    else:
        params.region = params.session.get_config_variable('region')
    if not params.region:
        raise RuntimeError('Region not specified.')


def validate_instance_name(params):
    if params.instance_name:
        if not re.match(INSTANCE_NAME_PATTERN, params.instance_name):
            raise ValueError('Instance name contains invalid characters.')
        if params.instance_name.startswith('i-'):
            raise ValueError('Instance name cannot start with \'i-\'.')
        if len(params.instance_name) > MAX_INSTANCE_NAME_LENGTH:
            raise ValueError(
                'Instance name cannot be longer than {0} characters.'.format(
                    MAX_INSTANCE_NAME_LENGTH
                )
            )


def validate_tags(params):
    if params.tags:
        if len(params.tags) > MAX_TAGS_PER_INSTANCE:
            raise ValueError(
                'Instances can only have a maximum of {0} tags.'.format(
                    MAX_TAGS_PER_INSTANCE
                )
            )
        for tag in params.tags:
            if len(tag['Key']) > MAX_TAG_KEY_LENGTH:
                raise ValueError(
                    'Tag Key cannot be longer than {0} characters.'.format(
                        MAX_TAG_KEY_LENGTH
                    )
                )
            if len(tag['Value']) > MAX_TAG_VALUE_LENGTH:
                raise ValueError(
                    'Tag Value cannot be longer than {0} characters.'.format(
                        MAX_TAG_VALUE_LENGTH
                    )
                )


def validate_iam_user_arn(params):
    if params.iam_user_arn and \
            not re.match(IAM_USER_ARN_PATTERN, params.iam_user_arn):
        raise ValueError('Invalid IAM user ARN.')


def validate_instance(params):
    if platform.system() == 'Linux':
        if 'Ubuntu' in platform.linux_distribution()[0]:
            params.system = Ubuntu(params)
        if 'Red Hat Enterprise Linux Server' in \
                platform.linux_distribution()[0]:
            params.system = RHEL(params)
    elif platform.system() == 'Windows':
        params.system = Windows(params)
    if 'system' not in params:
        raise RuntimeError(System.UNSUPPORTED_SYSTEM_MSG)
    try:
        urlopen('http://169.254.169.254/latest/meta-data/', timeout=1)
        raise RuntimeError('Amazon EC2 instances are not supported.')
    except (URLError, timeout):
        pass


def validate_s3_location(params, arg_name):
    arg_name = arg_name.replace('-', '_')
    if arg_name in params:
        s3_location = getattr(params, arg_name)
        if s3_location:
            matcher = re.match('s3://(.+?)/(.+)', str(s3_location))
            if matcher:
                params.bucket = matcher.group(1)
                params.key = matcher.group(2)
            else:
                raise ValueError(
                    '--{0} must specify the Amazon S3 URL format as '
                    's3://<bucket>/<key>.'.format(
                        arg_name.replace('_', '-')
                    )
                )
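

# Minimal usage sketch (an addition, not part of the original module): the
# validators mutate an argparse-style namespace in place. _Params below is a
# hypothetical stand-in for the parameter object awscli passes in; it only
# needs attribute access and an ``in`` test.
if __name__ == '__main__':
    class _Params(object):
        def __contains__(self, name):
            return hasattr(self, name)

    params = _Params()
    params.bundle_location = 's3://my-bucket/releases/app.zip'
    validate_s3_location(params, 'bundle-location')
    assert params.bucket == 'my-bucket'
    assert params.key == 'releases/app.zip'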
# jdahlin/stoq-wubi: src/winui/defs.py
#
# Copyright (c) 2007, 2008 Agostino Russo
#
# Written by Agostino Russo <[email protected]>
# Mostly copied from win32con.py
#
# winui is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# winui is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
'''
win32 constants, structures and functions
'''
import ctypes
INT = ctypes.c_int
WINVER = 1280
WM_USER = 1024
PY_0U = 0
OFN_READONLY = 1
OFN_OVERWRITEPROMPT = 2
OFN_HIDEREADONLY = 4
OFN_NOCHANGEDIR = 8
OFN_SHOWHELP = 16
OFN_ENABLEHOOK = 32
OFN_ENABLETEMPLATE = 64
OFN_ENABLETEMPLATEHANDLE = 128
OFN_NOVALIDATE = 256
OFN_ALLOWMULTISELECT = 512
OFN_EXTENSIONDIFFERENT = 1024
OFN_PATHMUSTEXIST = 2048
OFN_FILEMUSTEXIST = 4096
OFN_CREATEPROMPT = 8192
OFN_SHAREAWARE = 16384
OFN_NOREADONLYRETURN = 32768
OFN_NOTESTFILECREATE = 65536
OFN_NONETWORKBUTTON = 131072
OFN_NOLONGNAMES = 262144
OFN_EXPLORER = 524288 # new look commdlg
OFN_NODEREFERENCELINKS = 1048576
OFN_LONGNAMES = 2097152 # force long names for 3.x modules
OFN_ENABLEINCLUDENOTIFY = 4194304 # send include message to callback
OFN_ENABLESIZING = 8388608
OFN_DONTADDTORECENT = 33554432
OFN_FORCESHOWHIDDEN = 268435456 # Show All files including System and hidden files
OFN_EX_NOPLACESBAR = 1
OFN_SHAREFALLTHROUGH = 2
OFN_SHARENOWARN = 1
OFN_SHAREWARN = 0
CDN_FIRST = (PY_0U-601)
CDN_LAST = (PY_0U-699)
CDN_INITDONE = (CDN_FIRST - 0)
CDN_SELCHANGE = (CDN_FIRST - 1)
CDN_FOLDERCHANGE = (CDN_FIRST - 2)
CDN_SHAREVIOLATION = (CDN_FIRST - 3)
CDN_HELP = (CDN_FIRST - 4)
CDN_FILEOK = (CDN_FIRST - 5)
CDN_TYPECHANGE = (CDN_FIRST - 6)
CDN_INCLUDEITEM = (CDN_FIRST - 7)
CDM_FIRST = (WM_USER + 100)
CDM_LAST = (WM_USER + 200)
CDM_GETSPEC = (CDM_FIRST + 0)
CDM_GETFILEPATH = (CDM_FIRST + 1)
CDM_GETFOLDERPATH = (CDM_FIRST + 2)
CDM_GETFOLDERIDLIST = (CDM_FIRST + 3)
CDM_SETCONTROLTEXT = (CDM_FIRST + 4)
CDM_HIDECONTROL = (CDM_FIRST + 5)
CDM_SETDEFEXT = (CDM_FIRST + 6)
CC_RGBINIT = 1
CC_FULLOPEN = 2
CC_PREVENTFULLOPEN = 4
CC_SHOWHELP = 8
CC_ENABLEHOOK = 16
CC_ENABLETEMPLATE = 32
CC_ENABLETEMPLATEHANDLE = 64
CC_SOLIDCOLOR = 128
CC_ANYCOLOR = 256
FR_DOWN = 1
FR_WHOLEWORD = 2
FR_MATCHCASE = 4
FR_FINDNEXT = 8
FR_REPLACE = 16
FR_REPLACEALL = 32
FR_DIALOGTERM = 64
FR_SHOWHELP = 128
FR_ENABLEHOOK = 256
FR_ENABLETEMPLATE = 512
FR_NOUPDOWN = 1024
FR_NOMATCHCASE = 2048
FR_NOWHOLEWORD = 4096
FR_ENABLETEMPLATEHANDLE = 8192
FR_HIDEUPDOWN = 16384
FR_HIDEMATCHCASE = 32768
FR_HIDEWHOLEWORD = 65536
CF_SCREENFONTS = 1
CF_PRINTERFONTS = 2
CF_BOTH = (CF_SCREENFONTS | CF_PRINTERFONTS)
CF_SHOWHELP = 4
CF_ENABLEHOOK = 8
CF_ENABLETEMPLATE = 16
CF_ENABLETEMPLATEHANDLE = 32
CF_INITTOLOGFONTSTRUCT = 64
CF_USESTYLE = 128
CF_EFFECTS = 256
CF_APPLY = 512
CF_ANSIONLY = 1024
CF_SCRIPTSONLY = CF_ANSIONLY
CF_NOVECTORFONTS = 2048
CF_NOOEMFONTS = CF_NOVECTORFONTS
CF_NOSIMULATIONS = 4096
CF_LIMITSIZE = 8192
CF_FIXEDPITCHONLY = 16384
CF_WYSIWYG = 32768 # must also have CF_SCREENFONTS & CF_PRINTERFONTS
CF_FORCEFONTEXIST = 65536
CF_SCALABLEONLY = 131072
CF_TTONLY = 262144
CF_NOFACESEL = 524288
CF_NOSTYLESEL = 1048576
CF_NOSIZESEL = 2097152
CF_SELECTSCRIPT = 4194304
CF_NOSCRIPTSEL = 8388608
CF_NOVERTFONTS = 16777216
SIMULATED_FONTTYPE = 32768
PRINTER_FONTTYPE = 16384
SCREEN_FONTTYPE = 8192
BOLD_FONTTYPE = 256
ITALIC_FONTTYPE = 512
REGULAR_FONTTYPE = 1024
OPENTYPE_FONTTYPE = 65536
TYPE1_FONTTYPE = 131072
DSIG_FONTTYPE = 262144
WM_CHOOSEFONT_GETLOGFONT = (WM_USER + 1)
WM_CHOOSEFONT_SETLOGFONT = (WM_USER + 101)
WM_CHOOSEFONT_SETFLAGS = (WM_USER + 102)
LBSELCHSTRINGA = "commdlg_LBSelChangedNotify"
SHAREVISTRINGA = "commdlg_ShareViolation"
FILEOKSTRINGA = "commdlg_FileNameOK"
COLOROKSTRINGA = "commdlg_ColorOK"
SETRGBSTRINGA = "commdlg_SetRGBColor"
HELPMSGSTRINGA = "commdlg_help"
FINDMSGSTRINGA = "commdlg_FindReplace"
LBSELCHSTRING = LBSELCHSTRINGA
SHAREVISTRING = SHAREVISTRINGA
FILEOKSTRING = FILEOKSTRINGA
COLOROKSTRING = COLOROKSTRINGA
SETRGBSTRING = SETRGBSTRINGA
HELPMSGSTRING = HELPMSGSTRINGA
FINDMSGSTRING = FINDMSGSTRINGA
CD_LBSELNOITEMS = -1
CD_LBSELCHANGE = 0
CD_LBSELSUB = 1
CD_LBSELADD = 2
PD_ALLPAGES = 0
PD_SELECTION = 1
PD_PAGENUMS = 2
PD_NOSELECTION = 4
PD_NOPAGENUMS = 8
PD_COLLATE = 16
PD_PRINTTOFILE = 32
PD_PRINTSETUP = 64
PD_NOWARNING = 128
PD_RETURNDC = 256
PD_RETURNIC = 512
PD_RETURNDEFAULT = 1024
PD_SHOWHELP = 2048
PD_ENABLEPRINTHOOK = 4096
PD_ENABLESETUPHOOK = 8192
PD_ENABLEPRINTTEMPLATE = 16384
PD_ENABLESETUPTEMPLATE = 32768
PD_ENABLEPRINTTEMPLATEHANDLE = 65536
PD_ENABLESETUPTEMPLATEHANDLE = 131072
PD_USEDEVMODECOPIES = 262144
PD_DISABLEPRINTTOFILE = 524288
PD_HIDEPRINTTOFILE = 1048576
PD_NONETWORKBUTTON = 2097152
DN_DEFAULTPRN = 1
WM_PSD_PAGESETUPDLG = (WM_USER )
WM_PSD_FULLPAGERECT = (WM_USER+1)
WM_PSD_MINMARGINRECT = (WM_USER+2)
WM_PSD_MARGINRECT = (WM_USER+3)
WM_PSD_GREEKTEXTRECT = (WM_USER+4)
WM_PSD_ENVSTAMPRECT = (WM_USER+5)
WM_PSD_YAFULLPAGERECT = (WM_USER+6)
PSD_DEFAULTMINMARGINS = 0 # default (printer's)
PSD_INWININIINTLMEASURE = 0 # 1st of 4 possible
PSD_MINMARGINS = 1 # use caller's
PSD_MARGINS = 2 # use caller's
PSD_INTHOUSANDTHSOFINCHES = 4 # 2nd of 4 possible
PSD_INHUNDREDTHSOFMILLIMETERS = 8 # 3rd of 4 possible
PSD_DISABLEMARGINS = 16
PSD_DISABLEPRINTER = 32
PSD_NOWARNING = 128 # must be same as PD_*
PSD_DISABLEORIENTATION = 256
PSD_RETURNDEFAULT = 1024 # must be same as PD_*
PSD_DISABLEPAPER = 512
PSD_SHOWHELP = 2048 # must be same as PD_*
PSD_ENABLEPAGESETUPHOOK = 8192 # must be same as PD_*
PSD_ENABLEPAGESETUPTEMPLATE = 32768 # must be same as PD_*
PSD_ENABLEPAGESETUPTEMPLATEHANDLE = 131072 # must be same as PD_*
PSD_ENABLEPAGEPAINTHOOK = 262144
PSD_DISABLEPAGEPAINTING = 524288
PSD_NONETWORKBUTTON = 2097152 # must be same as PD_*
# Generated by h2py from winreg.h
HKEY_CLASSES_ROOT = -2147483648
HKEY_CURRENT_USER = -2147483647
HKEY_LOCAL_MACHINE = -2147483646
HKEY_USERS = -2147483645
HKEY_PERFORMANCE_DATA = -2147483644
HKEY_CURRENT_CONFIG = -2147483643
HKEY_DYN_DATA = -2147483642
HKEY_PERFORMANCE_TEXT = -2147483568 # ?? 4Jan98
HKEY_PERFORMANCE_NLSTEXT = -2147483552 # ?? 4Jan98
# Generated by h2py from winuser.h
HWND_BROADCAST = 65535
HWND_DESKTOP = 0
HWND_TOP = 0
HWND_BOTTOM = 1
HWND_TOPMOST = -1
HWND_NOTOPMOST = -2
HWND_MESSAGE = -3
# winuser.h line 4601
SM_CXSCREEN = 0
SM_CYSCREEN = 1
SM_CXVSCROLL = 2
SM_CYHSCROLL = 3
SM_CYCAPTION = 4
SM_CXBORDER = 5
SM_CYBORDER = 6
SM_CXDLGFRAME = 7
SM_CYDLGFRAME = 8
SM_CYVTHUMB = 9
SM_CXHTHUMB = 10
SM_CXICON = 11
SM_CYICON = 12
SM_CXCURSOR = 13
SM_CYCURSOR = 14
SM_CYMENU = 15
SM_CXFULLSCREEN = 16
SM_CYFULLSCREEN = 17
SM_CYKANJIWINDOW = 18
SM_MOUSEPRESENT = 19
SM_CYVSCROLL = 20
SM_CXHSCROLL = 21
SM_DEBUG = 22
SM_SWAPBUTTON = 23
SM_RESERVED1 = 24
SM_RESERVED2 = 25
SM_RESERVED3 = 26
SM_RESERVED4 = 27
SM_CXMIN = 28
SM_CYMIN = 29
SM_CXSIZE = 30
SM_CYSIZE = 31
SM_CXFRAME = 32
SM_CYFRAME = 33
SM_CXMINTRACK = 34
SM_CYMINTRACK = 35
SM_CXDOUBLECLK = 36
SM_CYDOUBLECLK = 37
SM_CXICONSPACING = 38
SM_CYICONSPACING = 39
SM_MENUDROPALIGNMENT = 40
SM_PENWINDOWS = 41
SM_DBCSENABLED = 42
SM_CMOUSEBUTTONS = 43
SM_CXFIXEDFRAME = SM_CXDLGFRAME
SM_CYFIXEDFRAME = SM_CYDLGFRAME
SM_CXSIZEFRAME = SM_CXFRAME
SM_CYSIZEFRAME = SM_CYFRAME
SM_SECURE = 44
SM_CXEDGE = 45
SM_CYEDGE = 46
SM_CXMINSPACING = 47
SM_CYMINSPACING = 48
SM_CXSMICON = 49
SM_CYSMICON = 50
SM_CYSMCAPTION = 51
SM_CXSMSIZE = 52
SM_CYSMSIZE = 53
SM_CXMENUSIZE = 54
SM_CYMENUSIZE = 55
SM_ARRANGE = 56
SM_CXMINIMIZED = 57
SM_CYMINIMIZED = 58
SM_CXMAXTRACK = 59
SM_CYMAXTRACK = 60
SM_CXMAXIMIZED = 61
SM_CYMAXIMIZED = 62
SM_NETWORK = 63
SM_CLEANBOOT = 67
SM_CXDRAG = 68
SM_CYDRAG = 69
SM_SHOWSOUNDS = 70
SM_CXMENUCHECK = 71
SM_CYMENUCHECK = 72
SM_SLOWMACHINE = 73
SM_MIDEASTENABLED = 74
SM_MOUSEWHEELPRESENT = 75
SM_XVIRTUALSCREEN = 76
SM_YVIRTUALSCREEN = 77
SM_CXVIRTUALSCREEN = 78
SM_CYVIRTUALSCREEN = 79
SM_CMONITORS = 80
SM_SAMEDISPLAYFORMAT = 81
SM_CMETRICS = 83
MNC_IGNORE = 0
MNC_CLOSE = 1
MNC_EXECUTE = 2
MNC_SELECT = 3
MNS_NOCHECK = -2147483648
MNS_MODELESS = 1073741824
MNS_DRAGDROP = 536870912
MNS_AUTODISMISS = 268435456
MNS_NOTIFYBYPOS = 134217728
MNS_CHECKORBMP = 67108864
MIM_MAXHEIGHT = 1
MIM_BACKGROUND = 2
MIM_HELPID = 4
MIM_MENUDATA = 8
MIM_STYLE = 16
MIM_APPLYTOSUBMENUS = -2147483648
MND_CONTINUE = 0
MND_ENDMENU = 1
MNGOF_GAP = 3
MNGO_NOINTERFACE = 0
MNGO_NOERROR = 1
MIIM_STATE = 1
MIIM_ID = 2
MIIM_SUBMENU = 4
MIIM_CHECKMARKS = 8
MIIM_TYPE = 16
MIIM_DATA = 32
MIIM_STRING = 64
MIIM_BITMAP = 128
MIIM_FTYPE = 256
HBMMENU_CALLBACK = -1
HBMMENU_SYSTEM = 1
HBMMENU_MBAR_RESTORE = 2
HBMMENU_MBAR_MINIMIZE = 3
HBMMENU_MBAR_CLOSE = 5
HBMMENU_MBAR_CLOSE_D = 6
HBMMENU_MBAR_MINIMIZE_D = 7
HBMMENU_POPUP_CLOSE = 8
HBMMENU_POPUP_RESTORE = 9
HBMMENU_POPUP_MAXIMIZE = 10
HBMMENU_POPUP_MINIMIZE = 11
GMDI_USEDISABLED = 1
GMDI_GOINTOPOPUPS = 2
TPM_LEFTBUTTON = 0
TPM_RIGHTBUTTON = 2
TPM_LEFTALIGN = 0
TPM_CENTERALIGN = 4
TPM_RIGHTALIGN = 8
TPM_TOPALIGN = 0
TPM_VCENTERALIGN = 16
TPM_BOTTOMALIGN = 32
TPM_HORIZONTAL = 0
TPM_VERTICAL = 64
TPM_NONOTIFY = 128
TPM_RETURNCMD = 256
TPM_RECURSE = 1
DOF_EXECUTABLE = 32769
DOF_DOCUMENT = 32770
DOF_DIRECTORY = 32771
DOF_MULTIPLE = 32772
DOF_PROGMAN = 1
DOF_SHELLDATA = 2
DO_DROPFILE = 1162627398
DO_PRINTFILE = 1414419024
DT_TOP = 0
DT_LEFT = 0
DT_CENTER = 1
DT_RIGHT = 2
DT_VCENTER = 4
DT_BOTTOM = 8
DT_WORDBREAK = 16
DT_SINGLELINE = 32
DT_EXPANDTABS = 64
DT_TABSTOP = 128
DT_NOCLIP = 256
DT_EXTERNALLEADING = 512
DT_CALCRECT = 1024
DT_NOPREFIX = 2048
DT_INTERNAL = 4096
DT_EDITCONTROL = 8192
DT_PATH_ELLIPSIS = 16384
DT_END_ELLIPSIS = 32768
DT_MODIFYSTRING = 65536
DT_RTLREADING = 131072
DT_WORD_ELLIPSIS = 262144
DST_COMPLEX = 0
DST_TEXT = 1
DST_PREFIXTEXT = 2
DST_ICON = 3
DST_BITMAP = 4
DSS_NORMAL = 0
DSS_UNION = 16
DSS_DISABLED = 32
DSS_MONO = 128
DSS_RIGHT = 32768
DCX_WINDOW = 1
DCX_CACHE = 2
DCX_NORESETATTRS = 4
DCX_CLIPCHILDREN = 8
DCX_CLIPSIBLINGS = 16
DCX_PARENTCLIP = 32
DCX_EXCLUDERGN = 64
DCX_INTERSECTRGN = 128
DCX_EXCLUDEUPDATE = 256
DCX_INTERSECTUPDATE = 512
DCX_LOCKWINDOWUPDATE = 1024
DCX_VALIDATE = 2097152
CUDR_NORMAL = 0
CUDR_NOSNAPTOGRID = 1
CUDR_NORESOLVEPOSITIONS = 2
CUDR_NOCLOSEGAPS = 4
CUDR_NEGATIVECOORDS = 8
CUDR_NOPRIMARY = 16
RDW_INVALIDATE = 1
RDW_INTERNALPAINT = 2
RDW_ERASE = 4
RDW_VALIDATE = 8
RDW_NOINTERNALPAINT = 16
RDW_NOERASE = 32
RDW_NOCHILDREN = 64
RDW_ALLCHILDREN = 128
RDW_UPDATENOW = 256
RDW_ERASENOW = 512
RDW_FRAME = 1024
RDW_NOFRAME = 2048
SW_SCROLLCHILDREN = 1
SW_INVALIDATE = 2
SW_ERASE = 4
SW_SMOOTHSCROLL = 16 # Use smooth scrolling
ESB_ENABLE_BOTH = 0
ESB_DISABLE_BOTH = 3
ESB_DISABLE_LEFT = 1
ESB_DISABLE_RIGHT = 2
ESB_DISABLE_UP = 1
ESB_DISABLE_DOWN = 2
ESB_DISABLE_LTUP = ESB_DISABLE_LEFT
ESB_DISABLE_RTDN = ESB_DISABLE_RIGHT
HELPINFO_WINDOW = 1
HELPINFO_MENUITEM = 2
MB_OK = 0
MB_OKCANCEL = 1
MB_ABORTRETRYIGNORE = 2
MB_YESNOCANCEL = 3
MB_YESNO = 4
MB_RETRYCANCEL = 5
MB_ICONHAND = 16
MB_ICONQUESTION = 32
MB_ICONEXCLAMATION = 48
MB_ICONASTERISK = 64
MB_ICONWARNING = MB_ICONEXCLAMATION
MB_ICONERROR = MB_ICONHAND
MB_ICONINFORMATION = MB_ICONASTERISK
MB_ICONSTOP = MB_ICONHAND
MB_DEFBUTTON1 = 0
MB_DEFBUTTON2 = 256
MB_DEFBUTTON3 = 512
MB_DEFBUTTON4 = 768
MB_APPLMODAL = 0
MB_SYSTEMMODAL = 4096
MB_TASKMODAL = 8192
MB_HELP = 16384
MB_NOFOCUS = 32768
MB_SETFOREGROUND = 65536
MB_DEFAULT_DESKTOP_ONLY = 131072
MB_TOPMOST = 262144L
MB_RIGHT = 524288
MB_RTLREADING = 1048576
MB_SERVICE_NOTIFICATION = 2097152
MB_TYPEMASK = 15
MB_USERICON = 128
MB_ICONMASK = 240
MB_DEFMASK = 3840
MB_MODEMASK = 12288
MB_MISCMASK = 49152
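# Usage sketch (an addition; assumes a Windows host): the MB_* values are bit
# flags OR-ed together and passed to user32.MessageBoxW, which returns one of
# the ID* codes defined further down, e.g.:
#
#   choice = ctypes.windll.user32.MessageBoxW(
#       0, u'Proceed?', u'winui', MB_YESNO | MB_ICONQUESTION | MB_TOPMOST)
#   if choice == IDYES:
#       ...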
# winuser.h line 6373
CWP_ALL = 0
CWP_SKIPINVISIBLE = 1
CWP_SKIPDISABLED = 2
CWP_SKIPTRANSPARENT = 4
CTLCOLOR_MSGBOX = 0
CTLCOLOR_EDIT = 1
CTLCOLOR_LISTBOX = 2
CTLCOLOR_BTN = 3
CTLCOLOR_DLG = 4
CTLCOLOR_SCROLLBAR = 5
CTLCOLOR_STATIC = 6
CTLCOLOR_MAX = 7
COLOR_SCROLLBAR = 0
COLOR_BACKGROUND = 1
COLOR_ACTIVECAPTION = 2
COLOR_INACTIVECAPTION = 3
COLOR_MENU = 4
COLOR_WINDOW = 5
COLOR_WINDOWFRAME = 6
COLOR_MENUTEXT = 7
COLOR_WINDOWTEXT = 8
COLOR_CAPTIONTEXT = 9
COLOR_ACTIVEBORDER = 10
COLOR_INACTIVEBORDER = 11
COLOR_APPWORKSPACE = 12
COLOR_HIGHLIGHT = 13
COLOR_HIGHLIGHTTEXT = 14
COLOR_BTNFACE = 15
COLOR_BTNSHADOW = 16
COLOR_GRAYTEXT = 17
COLOR_BTNTEXT = 18
COLOR_INACTIVECAPTIONTEXT = 19
COLOR_BTNHIGHLIGHT = 20
COLOR_3DDKSHADOW = 21
COLOR_3DLIGHT = 22
COLOR_INFOTEXT = 23
COLOR_INFOBK = 24
COLOR_HOTLIGHT = 26
COLOR_GRADIENTACTIVECAPTION = 27
COLOR_GRADIENTINACTIVECAPTION = 28
COLOR_DESKTOP = COLOR_BACKGROUND
COLOR_3DFACE = COLOR_BTNFACE
COLOR_3DSHADOW = COLOR_BTNSHADOW
COLOR_3DHIGHLIGHT = COLOR_BTNHIGHLIGHT
COLOR_3DHILIGHT = COLOR_BTNHIGHLIGHT
COLOR_BTNHILIGHT = COLOR_BTNHIGHLIGHT
GW_HWNDFIRST = 0
GW_HWNDLAST = 1
GW_HWNDNEXT = 2
GW_HWNDPREV = 3
GW_OWNER = 4
GW_CHILD = 5
GW_ENABLEDPOPUP = 6
GW_MAX = 6
MF_INSERT = 0
MF_CHANGE = 128
MF_APPEND = 256
MF_DELETE = 512
MF_REMOVE = 4096
MF_BYCOMMAND = 0
MF_BYPOSITION = 1024
MF_SEPARATOR = 2048
MF_ENABLED = 0
MF_GRAYED = 1
MF_DISABLED = 2
MF_UNCHECKED = 0
MF_CHECKED = 8
MF_USECHECKBITMAPS = 512
MF_STRING = 0
MF_BITMAP = 4
MF_OWNERDRAW = 256
MF_POPUP = 16
MF_MENUBARBREAK = 32
MF_MENUBREAK = 64
MF_UNHILITE = 0
MF_HILITE = 128
MF_DEFAULT = 4096
MF_SYSMENU = 8192
MF_HELP = 16384
MF_RIGHTJUSTIFY = 16384
MF_MOUSESELECT = 32768
MF_END = 128
MFT_STRING = MF_STRING
MFT_BITMAP = MF_BITMAP
MFT_MENUBARBREAK = MF_MENUBARBREAK
MFT_MENUBREAK = MF_MENUBREAK
MFT_OWNERDRAW = MF_OWNERDRAW
MFT_RADIOCHECK = 512
MFT_SEPARATOR = MF_SEPARATOR
MFT_RIGHTORDER = 8192
MFT_RIGHTJUSTIFY = MF_RIGHTJUSTIFY
MFS_GRAYED = 3
MFS_DISABLED = MFS_GRAYED
MFS_CHECKED = MF_CHECKED
MFS_HILITE = MF_HILITE
MFS_ENABLED = MF_ENABLED
MFS_UNCHECKED = MF_UNCHECKED
MFS_UNHILITE = MF_UNHILITE
MFS_DEFAULT = MF_DEFAULT
MFS_MASK = 4235L
MFS_HOTTRACKDRAWN = 268435456
MFS_CACHEDBMP = 536870912
MFS_BOTTOMGAPDROP = 1073741824
MFS_TOPGAPDROP = -2147483648
MFS_GAPDROP = -1073741824
SC_SIZE = 61440
SC_MOVE = 61456
SC_MINIMIZE = 61472
SC_MAXIMIZE = 61488
SC_NEXTWINDOW = 61504
SC_PREVWINDOW = 61520
SC_CLOSE = 61536
SC_VSCROLL = 61552
SC_HSCROLL = 61568
SC_MOUSEMENU = 61584
SC_KEYMENU = 61696
SC_ARRANGE = 61712
SC_RESTORE = 61728
SC_TASKLIST = 61744
SC_SCREENSAVE = 61760
SC_HOTKEY = 61776
SC_DEFAULT = 61792
SC_MONITORPOWER = 61808
SC_CONTEXTHELP = 61824
SC_SEPARATOR = 61455
SC_ICON = SC_MINIMIZE
SC_ZOOM = SC_MAXIMIZE
IDC_ARROW = 32512
IDC_IBEAM = 32513
IDC_WAIT = 32514
IDC_CROSS = 32515
IDC_UPARROW = 32516
IDC_SIZE = 32640 # OBSOLETE: use IDC_SIZEALL
IDC_ICON = 32641 # OBSOLETE: use IDC_ARROW
IDC_SIZENWSE = 32642
IDC_SIZENESW = 32643
IDC_SIZEWE = 32644
IDC_SIZENS = 32645
IDC_SIZEALL = 32646
IDC_NO = 32648
IDC_HAND = 32649
IDC_APPSTARTING = 32650
IDC_HELP = 32651
IMAGE_BITMAP = 0
IMAGE_ICON = 1
IMAGE_CURSOR = 2
IMAGE_ENHMETAFILE = 3
LR_DEFAULTCOLOR = 0
LR_MONOCHROME = 1
LR_COLOR = 2
LR_COPYRETURNORG = 4
LR_COPYDELETEORG = 8
LR_LOADFROMFILE = 16
LR_LOADTRANSPARENT = 32
LR_DEFAULTSIZE = 64
LR_LOADREALSIZE = 128
LR_LOADMAP3DCOLORS = 4096
LR_CREATEDIBSECTION = 8192
LR_COPYFROMRESOURCE = 16384
LR_SHARED = 32768
DI_MASK = 1
DI_IMAGE = 2
DI_NORMAL = 3
DI_COMPAT = 4
DI_DEFAULTSIZE = 8
RES_ICON = 1
RES_CURSOR = 2
OBM_CLOSE = 32754
OBM_UPARROW = 32753
OBM_DNARROW = 32752
OBM_RGARROW = 32751
OBM_LFARROW = 32750
OBM_REDUCE = 32749
OBM_ZOOM = 32748
OBM_RESTORE = 32747
OBM_REDUCED = 32746
OBM_ZOOMD = 32745
OBM_RESTORED = 32744
OBM_UPARROWD = 32743
OBM_DNARROWD = 32742
OBM_RGARROWD = 32741
OBM_LFARROWD = 32740
OBM_MNARROW = 32739
OBM_COMBO = 32738
OBM_UPARROWI = 32737
OBM_DNARROWI = 32736
OBM_RGARROWI = 32735
OBM_LFARROWI = 32734
OBM_OLD_CLOSE = 32767
OBM_SIZE = 32766
OBM_OLD_UPARROW = 32765
OBM_OLD_DNARROW = 32764
OBM_OLD_RGARROW = 32763
OBM_OLD_LFARROW = 32762
OBM_BTSIZE = 32761
OBM_CHECK = 32760
OBM_CHECKBOXES = 32759
OBM_BTNCORNERS = 32758
OBM_OLD_REDUCE = 32757
OBM_OLD_ZOOM = 32756
OBM_OLD_RESTORE = 32755
OCR_NORMAL = 32512
OCR_IBEAM = 32513
OCR_WAIT = 32514
OCR_CROSS = 32515
OCR_UP = 32516
OCR_SIZE = 32640
OCR_ICON = 32641
OCR_SIZENWSE = 32642
OCR_SIZENESW = 32643
OCR_SIZEWE = 32644
OCR_SIZENS = 32645
OCR_SIZEALL = 32646
OCR_ICOCUR = 32647
OCR_NO = 32648
OCR_HAND = 32649
OCR_APPSTARTING = 32650
# winuser.h line 7455
OIC_SAMPLE = 32512
OIC_HAND = 32513
OIC_QUES = 32514
OIC_BANG = 32515
OIC_NOTE = 32516
OIC_WINLOGO = 32517
OIC_WARNING = OIC_BANG
OIC_ERROR = OIC_HAND
OIC_INFORMATION = OIC_NOTE
ORD_LANGDRIVER = 1
IDI_APPLICATION = 32512
IDI_HAND = 32513
IDI_QUESTION = 32514
IDI_EXCLAMATION = 32515
IDI_ASTERISK = 32516
IDI_WINLOGO = 32517
IDI_WARNING = IDI_EXCLAMATION
IDI_ERROR = IDI_HAND
IDI_INFORMATION = IDI_ASTERISK
IDOK = 1
IDCANCEL = 2
IDABORT = 3
IDRETRY = 4
IDIGNORE = 5
IDYES = 6
IDNO = 7
IDCLOSE = 8
IDHELP = 9
ES_LEFT = 0
ES_CENTER = 1
ES_RIGHT = 2
ES_MULTILINE = 4
ES_UPPERCASE = 8
ES_LOWERCASE = 16
ES_PASSWORD = 32
ES_AUTOVSCROLL = 64
ES_AUTOHSCROLL = 128
ES_NOHIDESEL = 256
ES_OEMCONVERT = 1024
ES_READONLY = 2048
ES_WANTRETURN = 4096
ES_NUMBER = 8192
EN_SETFOCUS = 256
EN_KILLFOCUS = 512
EN_CHANGE = 768
EN_UPDATE = 1024
EN_ERRSPACE = 1280
EN_MAXTEXT = 1281
EN_HSCROLL = 1537
EN_VSCROLL = 1538
EC_LEFTMARGIN = 1
EC_RIGHTMARGIN = 2
EC_USEFONTINFO = 65535
EMSIS_COMPOSITIONSTRING = 1
EIMES_GETCOMPSTRATONCE = 1
EIMES_CANCELCOMPSTRINFOCUS = 2
EIMES_COMPLETECOMPSTRKILLFOCUS = 4
EM_GETSEL = 176
EM_SETSEL = 177
EM_GETRECT = 178
EM_SETRECT = 179
EM_SETRECTNP = 180
EM_SCROLL = 181
EM_LINESCROLL = 182
EM_SCROLLCARET = 183
EM_GETMODIFY = 184
EM_SETMODIFY = 185
EM_GETLINECOUNT = 186
EM_LINEINDEX = 187
EM_SETHANDLE = 188
EM_GETHANDLE = 189
EM_GETTHUMB = 190
EM_LINELENGTH = 193
EM_REPLACESEL = 194
EM_GETLINE = 196
EM_LIMITTEXT = 197
EM_CANUNDO = 198
EM_UNDO = 199
EM_FMTLINES = 200
EM_LINEFROMCHAR = 201
EM_SETTABSTOPS = 203
EM_SETPASSWORDCHAR = 204
EM_EMPTYUNDOBUFFER = 205
EM_GETFIRSTVISIBLELINE = 206
EM_SETREADONLY = 207
EM_SETWORDBREAKPROC = 208
EM_GETWORDBREAKPROC = 209
EM_GETPASSWORDCHAR = 210
EM_SETMARGINS = 211
EM_GETMARGINS = 212
EM_SETLIMITTEXT = EM_LIMITTEXT
EM_GETLIMITTEXT = 213
EM_POSFROMCHAR = 214
EM_CHARFROMPOS = 215
EM_SETIMESTATUS = 216
EM_GETIMESTATUS = 217
WB_LEFT = 0
WB_RIGHT = 1
WB_ISDELIMITER = 2
BS_PUSHBUTTON = 0
BS_DEFPUSHBUTTON = 1
BS_CHECKBOX = 2
BS_AUTOCHECKBOX = 3
BS_RADIOBUTTON = 4
BS_3STATE = 5
BS_AUTO3STATE = 6
BS_GROUPBOX = 7
BS_USERBUTTON = 8
BS_AUTORADIOBUTTON = 9
BS_OWNERDRAW = 11L
BS_LEFTTEXT = 32
BS_TEXT = 0
BS_ICON = 64
BS_BITMAP = 128
BS_LEFT = 256
BS_RIGHT = 512
BS_CENTER = 768
BS_TOP = 1024
BS_BOTTOM = 2048
BS_VCENTER = 3072
BS_PUSHLIKE = 4096
BS_MULTILINE = 8192
BS_NOTIFY = 16384
BS_FLAT = 32768
BS_RIGHTBUTTON = BS_LEFTTEXT
BN_CLICKED = 0
BN_PAINT = 1
BN_HILITE = 2
BN_UNHILITE = 3
BN_DISABLE = 4
BN_DOUBLECLICKED = 5
BN_PUSHED = BN_HILITE
BN_UNPUSHED = BN_UNHILITE
BN_DBLCLK = BN_DOUBLECLICKED
BN_SETFOCUS = 6
BN_KILLFOCUS = 7
BM_GETCHECK = 240
BM_SETCHECK = 241
BM_GETSTATE = 242
BM_SETSTATE = 243
BM_SETSTYLE = 244
BM_CLICK = 245
BM_GETIMAGE = 246
BM_SETIMAGE = 247
BST_UNCHECKED = 0
BST_CHECKED = 1
BST_INDETERMINATE = 2
BST_PUSHED = 4
BST_FOCUS = 8
SS_LEFT = 0
SS_CENTER = 1
SS_RIGHT = 2
SS_ICON = 3
SS_BLACKRECT = 4
SS_GRAYRECT = 5
SS_WHITERECT = 6
SS_BLACKFRAME = 7
SS_GRAYFRAME = 8
SS_WHITEFRAME = 9
SS_USERITEM = 10
SS_SIMPLE = 11
SS_LEFTNOWORDWRAP = 12
SS_BITMAP = 14
SS_OWNERDRAW = 13
SS_ENHMETAFILE = 15
SS_ETCHEDHORZ = 16
SS_ETCHEDVERT = 17
SS_ETCHEDFRAME = 18
SS_TYPEMASK = 31
SS_NOPREFIX = 128
SS_NOTIFY = 256
SS_CENTERIMAGE = 512
SS_RIGHTJUST = 1024
SS_REALSIZEIMAGE = 2048
SS_SUNKEN = 4096
SS_ENDELLIPSIS = 16384
SS_PATHELLIPSIS = 32768
SS_WORDELLIPSIS = 49152
SS_ELLIPSISMASK = 49152
STM_SETICON = 368
STM_GETICON = 369
STM_SETIMAGE = 370
STM_GETIMAGE = 371
STN_CLICKED = 0
STN_DBLCLK = 1
STN_ENABLE = 2
STN_DISABLE = 3
STM_MSGMAX = 372
DWL_MSGRESULT = 0
DWL_DLGPROC = 4
DWL_USER = 8
DDL_READWRITE = 0
DDL_READONLY = 1
DDL_HIDDEN = 2
DDL_SYSTEM = 4
DDL_DIRECTORY = 16
DDL_ARCHIVE = 32
DDL_POSTMSGS = 8192
DDL_DRIVES = 16384
DDL_EXCLUSIVE = 32768
#from winuser.h line 153
RT_CURSOR = 1
RT_BITMAP = 2
RT_ICON = 3
RT_MENU = 4
RT_DIALOG = 5
RT_STRING = 6
RT_FONTDIR = 7
RT_FONT = 8
RT_ACCELERATOR = 9
RT_RCDATA = 10
RT_MESSAGETABLE = 11
DIFFERENCE = 11
RT_GROUP_CURSOR = (RT_CURSOR + DIFFERENCE)
RT_GROUP_ICON = (RT_ICON + DIFFERENCE)
RT_VERSION = 16
RT_DLGINCLUDE = 17
RT_PLUGPLAY = 19
RT_VXD = 20
RT_ANICURSOR = 21
RT_ANIICON = 22
RT_HTML = 23
# from winuser.h line 218
SB_HORZ = 0
SB_VERT = 1
SB_CTL = 2
SB_BOTH = 3
SB_LINEUP = 0
SB_LINELEFT = 0
SB_LINEDOWN = 1
SB_LINERIGHT = 1
SB_PAGEUP = 2
SB_PAGELEFT = 2
SB_PAGEDOWN = 3
SB_PAGERIGHT = 3
SB_THUMBPOSITION = 4
SB_THUMBTRACK = 5
SB_TOP = 6
SB_LEFT = 6
SB_BOTTOM = 7
SB_RIGHT = 7
SB_ENDSCROLL = 8
SW_HIDE = 0
SW_SHOWNORMAL = 1
SW_NORMAL = 1
SW_SHOWMINIMIZED = 2
SW_SHOWMAXIMIZED = 3
SW_MAXIMIZE = 3
SW_SHOWNOACTIVATE = 4
SW_SHOW = 5
SW_MINIMIZE = 6
SW_SHOWMINNOACTIVE = 7
SW_SHOWNA = 8
SW_RESTORE = 9
SW_SHOWDEFAULT = 10
SW_FORCEMINIMIZE = 11
SW_MAX = 11
HIDE_WINDOW = 0
SHOW_OPENWINDOW = 1
SHOW_ICONWINDOW = 2
SHOW_FULLSCREEN = 3
SHOW_OPENNOACTIVATE = 4
SW_PARENTCLOSING = 1
SW_OTHERZOOM = 2
SW_PARENTOPENING = 3
SW_OTHERUNZOOM = 4
AW_HOR_POSITIVE = 1
AW_HOR_NEGATIVE = 2
AW_VER_POSITIVE = 4
AW_VER_NEGATIVE = 8
AW_CENTER = 16
AW_HIDE = 65536
AW_ACTIVATE = 131072
AW_SLIDE = 262144
AW_BLEND = 524288
KF_EXTENDED = 256
KF_DLGMODE = 2048
KF_MENUMODE = 4096
KF_ALTDOWN = 8192
KF_REPEAT = 16384
KF_UP = 32768
VK_LBUTTON = 1
VK_RBUTTON = 2
VK_CANCEL = 3
VK_MBUTTON = 4
VK_BACK = 8
VK_TAB = 9
VK_CLEAR = 12
VK_RETURN = 13
VK_SHIFT = 16
VK_CONTROL = 17
VK_MENU = 18
VK_PAUSE = 19
VK_CAPITAL = 20
VK_KANA = 21
VK_HANGEUL = 21 # old name - should be here for compatibility
VK_HANGUL = 21
VK_JUNJA = 23
VK_FINAL = 24
VK_HANJA = 25
VK_KANJI = 25
VK_ESCAPE = 27
VK_CONVERT = 28
VK_NONCONVERT = 29
VK_ACCEPT = 30
VK_MODECHANGE = 31
VK_SPACE = 32
VK_PRIOR = 33
VK_NEXT = 34
VK_END = 35
VK_HOME = 36
VK_LEFT = 37
VK_UP = 38
VK_RIGHT = 39
VK_DOWN = 40
VK_SELECT = 41
VK_PRINT = 42
VK_EXECUTE = 43
VK_SNAPSHOT = 44
VK_INSERT = 45
VK_DELETE = 46
VK_HELP = 47
VK_LWIN = 91
VK_RWIN = 92
VK_APPS = 93
VK_NUMPAD0 = 96
VK_NUMPAD1 = 97
VK_NUMPAD2 = 98
VK_NUMPAD3 = 99
VK_NUMPAD4 = 100
VK_NUMPAD5 = 101
VK_NUMPAD6 = 102
VK_NUMPAD7 = 103
VK_NUMPAD8 = 104
VK_NUMPAD9 = 105
VK_MULTIPLY = 106
VK_ADD = 107
VK_SEPARATOR = 108
VK_SUBTRACT = 109
VK_DECIMAL = 110
VK_DIVIDE = 111
VK_F1 = 112
VK_F2 = 113
VK_F3 = 114
VK_F4 = 115
VK_F5 = 116
VK_F6 = 117
VK_F7 = 118
VK_F8 = 119
VK_F9 = 120
VK_F10 = 121
VK_F11 = 122
VK_F12 = 123
VK_F13 = 124
VK_F14 = 125
VK_F15 = 126
VK_F16 = 127
VK_F17 = 128
VK_F18 = 129
VK_F19 = 130
VK_F20 = 131
VK_F21 = 132
VK_F22 = 133
VK_F23 = 134
VK_F24 = 135
VK_NUMLOCK = 144
VK_SCROLL = 145
VK_LSHIFT = 160
VK_RSHIFT = 161
VK_LCONTROL = 162
VK_RCONTROL = 163
VK_LMENU = 164
VK_RMENU = 165
VK_PROCESSKEY = 229
VK_ATTN = 246
VK_CRSEL = 247
VK_EXSEL = 248
VK_EREOF = 249
VK_PLAY = 250
VK_ZOOM = 251
VK_NONAME = 252
VK_PA1 = 253
VK_OEM_CLEAR = 254
# multi-media related "keys"
MOUSEEVENTF_XDOWN = 0x0080
MOUSEEVENTF_XUP = 0x0100
MOUSEEVENTF_WHEEL = 0x0800
VK_XBUTTON1 = 0x05
VK_XBUTTON2 = 0x06
VK_VOLUME_MUTE = 0xAD
VK_VOLUME_DOWN = 0xAE
VK_VOLUME_UP = 0xAF
VK_MEDIA_NEXT_TRACK = 0xB0
VK_MEDIA_PREV_TRACK = 0xB1
VK_MEDIA_PLAY_PAUSE = 0xB3
VK_BROWSER_BACK = 0xA6
VK_BROWSER_FORWARD = 0xA7
WH_MIN = (-1)
WH_MSGFILTER = (-1)
WH_JOURNALRECORD = 0
WH_JOURNALPLAYBACK = 1
WH_KEYBOARD = 2
WH_GETMESSAGE = 3
WH_CALLWNDPROC = 4
WH_CBT = 5
WH_SYSMSGFILTER = 6
WH_MOUSE = 7
WH_HARDWARE = 8
WH_DEBUG = 9
WH_SHELL = 10
WH_FOREGROUNDIDLE = 11
WH_CALLWNDPROCRET = 12
WH_KEYBOARD_LL = 13
WH_MOUSE_LL = 14
WH_MAX = 14
WH_MINHOOK = WH_MIN
WH_MAXHOOK = WH_MAX
HC_ACTION = 0
HC_GETNEXT = 1
HC_SKIP = 2
HC_NOREMOVE = 3
HC_NOREM = HC_NOREMOVE
HC_SYSMODALON = 4
HC_SYSMODALOFF = 5
HCBT_MOVESIZE = 0
HCBT_MINMAX = 1
HCBT_QS = 2
HCBT_CREATEWND = 3
HCBT_DESTROYWND = 4
HCBT_ACTIVATE = 5
HCBT_CLICKSKIPPED = 6
HCBT_KEYSKIPPED = 7
HCBT_SYSCOMMAND = 8
HCBT_SETFOCUS = 9
MSGF_DIALOGBOX = 0
MSGF_MESSAGEBOX = 1
MSGF_MENU = 2
#MSGF_MOVE = 3
#MSGF_SIZE = 4
MSGF_SCROLLBAR = 5
MSGF_NEXTWINDOW = 6
#MSGF_MAINLOOP = 8
MSGF_MAX = 8
MSGF_USER = 4096
HSHELL_WINDOWCREATED = 1
HSHELL_WINDOWDESTROYED = 2
HSHELL_ACTIVATESHELLWINDOW = 3
HSHELL_WINDOWACTIVATED = 4
HSHELL_GETMINRECT = 5
HSHELL_REDRAW = 6
HSHELL_TASKMAN = 7
HSHELL_LANGUAGE = 8
HSHELL_ACCESSIBILITYSTATE = 11
ACCESS_STICKYKEYS = 1
ACCESS_FILTERKEYS = 2
ACCESS_MOUSEKEYS = 3
# winuser.h line 624
LLKHF_EXTENDED = 1
LLKHF_INJECTED = 16
LLKHF_ALTDOWN = 32
LLKHF_UP = 128
LLMHF_INJECTED = 1
# line 692
HKL_PREV = 0
HKL_NEXT = 1
KLF_ACTIVATE = 1
KLF_SUBSTITUTE_OK = 2
KLF_UNLOADPREVIOUS = 4
KLF_REORDER = 8
KLF_REPLACELANG = 16
KLF_NOTELLSHELL = 128
KLF_SETFORPROCESS = 256
KL_NAMELENGTH = 9
DESKTOP_READOBJECTS = 1
DESKTOP_CREATEWINDOW = 2
DESKTOP_CREATEMENU = 4
DESKTOP_HOOKCONTROL = 8
DESKTOP_JOURNALRECORD = 16
DESKTOP_JOURNALPLAYBACK = 32
DESKTOP_ENUMERATE = 64
DESKTOP_WRITEOBJECTS = 128
DESKTOP_SWITCHDESKTOP = 256
DF_ALLOWOTHERACCOUNTHOOK = 1
WINSTA_ENUMDESKTOPS = 1
WINSTA_READATTRIBUTES = 2
WINSTA_ACCESSCLIPBOARD = 4
WINSTA_CREATEDESKTOP = 8
WINSTA_WRITEATTRIBUTES = 16
WINSTA_ACCESSGLOBALATOMS = 32
WINSTA_EXITWINDOWS = 64
WINSTA_ENUMERATE = 256
WINSTA_READSCREEN = 512
WSF_VISIBLE = 1
UOI_FLAGS = 1
UOI_NAME = 2
UOI_TYPE = 3
UOI_USER_SID = 4
GWL_WNDPROC = (-4)
GWL_HINSTANCE = (-6)
GWL_HWNDPARENT = (-8)
GWL_STYLE = (-16)
GWL_EXSTYLE = (-20)
GWL_USERDATA = (-21)
GWL_ID = (-12)
GCL_MENUNAME = (-8)
GCL_HBRBACKGROUND = (-10)
GCL_HCURSOR = (-12)
GCL_HICON = (-14)
GCL_HMODULE = (-16)
GCL_CBWNDEXTRA = (-18)
GCL_CBCLSEXTRA = (-20)
GCL_WNDPROC = (-24)
GCL_STYLE = (-26)
GCW_ATOM = (-32)
GCL_HICONSM = (-34)
# line 1291
WM_NULL = 0
WM_CREATE = 1
WM_DESTROY = 2
WM_MOVE = 3
WM_SIZE = 5
WM_ACTIVATE = 6
WA_INACTIVE = 0
WA_ACTIVE = 1
WA_CLICKACTIVE = 2
WM_SETFOCUS = 7
WM_KILLFOCUS = 8
WM_ENABLE = 10
WM_SETREDRAW = 11
WM_SETTEXT = 12
WM_GETTEXT = 13
WM_GETTEXTLENGTH = 14
WM_PAINT = 15
WM_CLOSE = 16
WM_QUERYENDSESSION = 17
WM_QUIT = 18
WM_QUERYOPEN = 19
WM_ERASEBKGND = 20
WM_SYSCOLORCHANGE = 21
WM_ENDSESSION = 22
WM_SHOWWINDOW = 24
WM_WININICHANGE = 26
WM_SETTINGCHANGE = WM_WININICHANGE
WM_DEVMODECHANGE = 27
WM_ACTIVATEAPP = 28
WM_FONTCHANGE = 29
WM_TIMECHANGE = 30
WM_CANCELMODE = 31
WM_SETCURSOR = 32
WM_MOUSEACTIVATE = 33
WM_CHILDACTIVATE = 34
WM_QUEUESYNC = 35
WM_GETMINMAXINFO = 36
WM_PAINTICON = 38
WM_ICONERASEBKGND = 39
WM_NEXTDLGCTL = 40
WM_SPOOLERSTATUS = 42
WM_DRAWITEM = 43
WM_MEASUREITEM = 44
WM_DELETEITEM = 45
WM_VKEYTOITEM = 46
WM_CHARTOITEM = 47
WM_SETFONT = 48
WM_GETFONT = 49
WM_SETHOTKEY = 50
WM_GETHOTKEY = 51
WM_QUERYDRAGICON = 55
WM_COMPAREITEM = 57
WM_GETOBJECT = 61
WM_COMPACTING = 65
WM_COMMNOTIFY = 68
WM_WINDOWPOSCHANGING = 70
WM_WINDOWPOSCHANGED = 71
WM_POWER = 72
PWR_OK = 1
PWR_FAIL = (-1)
PWR_SUSPENDREQUEST = 1
PWR_SUSPENDRESUME = 2
PWR_CRITICALRESUME = 3
WM_COPYDATA = 74
WM_CANCELJOURNAL = 75
WM_NOTIFY = 78
WM_INPUTLANGCHANGEREQUEST = 80
WM_INPUTLANGCHANGE = 81
WM_TCARD = 82
WM_HELP = 83
WM_USERCHANGED = 84
WM_NOTIFYFORMAT = 85
NFR_ANSI = 1
NFR_UNICODE = 2
NF_QUERY = 3
NF_REQUERY = 4
WM_CONTEXTMENU = 123
WM_STYLECHANGING = 124
WM_STYLECHANGED = 125
WM_DISPLAYCHANGE = 126
WM_GETICON = 127
WM_SETICON = 128
WM_NCCREATE = 129
WM_NCDESTROY = 130
WM_NCCALCSIZE = 131
WM_NCHITTEST = 132
WM_NCPAINT = 133
WM_NCACTIVATE = 134
WM_GETDLGCODE = 135
WM_SYNCPAINT = 136
WM_NCMOUSEMOVE = 160
WM_NCLBUTTONDOWN = 161
WM_NCLBUTTONUP = 162
WM_NCLBUTTONDBLCLK = 163
WM_NCRBUTTONDOWN = 164
WM_NCRBUTTONUP = 165
WM_NCRBUTTONDBLCLK = 166
WM_NCMBUTTONDOWN = 167
WM_NCMBUTTONUP = 168
WM_NCMBUTTONDBLCLK = 169
WM_KEYFIRST = 256
WM_KEYDOWN = 256
WM_KEYUP = 257
WM_CHAR = 258
WM_DEADCHAR = 259
WM_SYSKEYDOWN = 260
WM_SYSKEYUP = 261
WM_SYSCHAR = 262
WM_SYSDEADCHAR = 263
WM_KEYLAST = 264
WM_IME_STARTCOMPOSITION = 269
WM_IME_ENDCOMPOSITION = 270
WM_IME_COMPOSITION = 271
WM_IME_KEYLAST = 271
WM_INITDIALOG = 272
WM_COMMAND = 273
WM_SYSCOMMAND = 274
WM_TIMER = 275
WM_HSCROLL = 276
WM_VSCROLL = 277
WM_INITMENU = 278
WM_INITMENUPOPUP = 279
WM_MENUSELECT = 287
WM_MENUCHAR = 288
WM_ENTERIDLE = 289
WM_MENURBUTTONUP = 290
WM_MENUDRAG = 291
WM_MENUGETOBJECT = 292
WM_UNINITMENUPOPUP = 293
WM_MENUCOMMAND = 294
WM_CTLCOLORMSGBOX = 306
WM_CTLCOLOREDIT = 307
WM_CTLCOLORLISTBOX = 308
WM_CTLCOLORBTN = 309
WM_CTLCOLORDLG = 310
WM_CTLCOLORSCROLLBAR = 311
WM_CTLCOLORSTATIC = 312
WM_MOUSEFIRST = 512
WM_MOUSEMOVE = 512
WM_LBUTTONDOWN = 513
WM_LBUTTONUP = 514
WM_LBUTTONDBLCLK = 515
WM_RBUTTONDOWN = 516
WM_RBUTTONUP = 517
WM_RBUTTONDBLCLK = 518
WM_MBUTTONDOWN = 519
WM_MBUTTONUP = 520
WM_MBUTTONDBLCLK = 521
WM_MOUSEWHEEL = 522
WM_MOUSELAST = 522
WHEEL_DELTA = 120 # Value for rolling one detent
WHEEL_PAGESCROLL = -1 # Scroll one page
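# Illustrative helper (an addition, not part of win32con): WM_MOUSEWHEEL
# reports rotation in the signed high word of wParam as a multiple of
# WHEEL_DELTA, so one notch forward is +120 and one notch back is -120.
def wheel_detents(wparam):
    # Reinterpret the high word as a signed 16-bit value, then scale.
    return ctypes.c_short((wparam >> 16) & 0xFFFF).value // WHEEL_DELTA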
WM_PARENTNOTIFY = 528
MENULOOP_WINDOW = 0
MENULOOP_POPUP = 1
WM_ENTERMENULOOP = 529
WM_EXITMENULOOP = 530
WM_NEXTMENU = 531
WM_SIZING = 532
WM_CAPTURECHANGED = 533
WM_MOVING = 534
WM_POWERBROADCAST = 536
PBT_APMQUERYSUSPEND = 0
PBT_APMQUERYSTANDBY = 1
PBT_APMQUERYSUSPENDFAILED = 2
PBT_APMQUERYSTANDBYFAILED = 3
PBT_APMSUSPEND = 4
PBT_APMSTANDBY = 5
PBT_APMRESUMECRITICAL = 6
PBT_APMRESUMESUSPEND = 7
PBT_APMRESUMESTANDBY = 8
PBTF_APMRESUMEFROMFAILURE = 1
PBT_APMBATTERYLOW = 9
PBT_APMPOWERSTATUSCHANGE = 10
PBT_APMOEMEVENT = 11
PBT_APMRESUMEAUTOMATIC = 18
WM_DEVICECHANGE = 537
WM_MDICREATE = 544
WM_MDIDESTROY = 545
WM_MDIACTIVATE = 546
WM_MDIRESTORE = 547
WM_MDINEXT = 548
WM_MDIMAXIMIZE = 549
WM_MDITILE = 550
WM_MDICASCADE = 551
WM_MDIICONARRANGE = 552
WM_MDIGETACTIVE = 553
WM_MDISETMENU = 560
WM_ENTERSIZEMOVE = 561
WM_EXITSIZEMOVE = 562
WM_DROPFILES = 563
WM_MDIREFRESHMENU = 564
WM_IME_SETCONTEXT = 641
WM_IME_NOTIFY = 642
WM_IME_CONTROL = 643
WM_IME_COMPOSITIONFULL = 644
WM_IME_SELECT = 645
WM_IME_CHAR = 646
WM_IME_REQUEST = 648
WM_IME_KEYDOWN = 656
WM_IME_KEYUP = 657
WM_MOUSEHOVER = 673
WM_MOUSELEAVE = 675
WM_CUT = 768
WM_COPY = 769
WM_PASTE = 770
WM_CLEAR = 771
WM_UNDO = 772
WM_RENDERFORMAT = 773
WM_RENDERALLFORMATS = 774
WM_DESTROYCLIPBOARD = 775
WM_DRAWCLIPBOARD = 776
WM_PAINTCLIPBOARD = 777
WM_VSCROLLCLIPBOARD = 778
WM_SIZECLIPBOARD = 779
WM_ASKCBFORMATNAME = 780
WM_CHANGECBCHAIN = 781
WM_HSCROLLCLIPBOARD = 782
WM_QUERYNEWPALETTE = 783
WM_PALETTEISCHANGING = 784
WM_PALETTECHANGED = 785
WM_HOTKEY = 786
WM_PRINT = 791
WM_PRINTCLIENT = 792
WM_HANDHELDFIRST = 856
WM_HANDHELDLAST = 863
WM_AFXFIRST = 864
WM_AFXLAST = 895
WM_PENWINFIRST = 896
WM_PENWINLAST = 911
WM_APP = 32768
WMSZ_LEFT = 1
WMSZ_RIGHT = 2
WMSZ_TOP = 3
WMSZ_TOPLEFT = 4
WMSZ_TOPRIGHT = 5
WMSZ_BOTTOM = 6
WMSZ_BOTTOMLEFT = 7
WMSZ_BOTTOMRIGHT = 8
#ST_BEGINSWP = 0
#ST_ENDSWP = 1
HTERROR = (-2)
HTTRANSPARENT = (-1)
HTNOWHERE = 0
HTCLIENT = 1
HTCAPTION = 2
HTSYSMENU = 3
HTGROWBOX = 4
HTSIZE = HTGROWBOX
HTMENU = 5
HTHSCROLL = 6
HTVSCROLL = 7
HTMINBUTTON = 8
HTMAXBUTTON = 9
HTLEFT = 10
HTRIGHT = 11
HTTOP = 12
HTTOPLEFT = 13
HTTOPRIGHT = 14
HTBOTTOM = 15
HTBOTTOMLEFT = 16
HTBOTTOMRIGHT = 17
HTBORDER = 18
HTREDUCE = HTMINBUTTON
HTZOOM = HTMAXBUTTON
HTSIZEFIRST = HTLEFT
HTSIZELAST = HTBOTTOMRIGHT
HTOBJECT = 19
HTCLOSE = 20
HTHELP = 21
SMTO_NORMAL = 0
SMTO_BLOCK = 1
SMTO_ABORTIFHUNG = 2
SMTO_NOTIMEOUTIFNOTHUNG = 8
MA_ACTIVATE = 1
MA_ACTIVATEANDEAT = 2
MA_NOACTIVATE = 3
MA_NOACTIVATEANDEAT = 4
ICON_SMALL = 0
ICON_BIG = 1
SIZE_RESTORED = 0
SIZE_MINIMIZED = 1
SIZE_MAXIMIZED = 2
SIZE_MAXSHOW = 3
SIZE_MAXHIDE = 4
SIZENORMAL = SIZE_RESTORED
SIZEICONIC = SIZE_MINIMIZED
SIZEFULLSCREEN = SIZE_MAXIMIZED
SIZEZOOMSHOW = SIZE_MAXSHOW
SIZEZOOMHIDE = SIZE_MAXHIDE
WVR_ALIGNTOP = 16
WVR_ALIGNLEFT = 32
WVR_ALIGNBOTTOM = 64
WVR_ALIGNRIGHT = 128
WVR_HREDRAW = 256
WVR_VREDRAW = 512
WVR_REDRAW = (WVR_HREDRAW | WVR_VREDRAW)
WVR_VALIDRECTS = 1024
MK_LBUTTON = 1
MK_RBUTTON = 2
MK_SHIFT = 4
MK_CONTROL = 8
MK_MBUTTON = 16
TME_HOVER = 1
TME_LEAVE = 2
TME_QUERY = 1073741824
TME_CANCEL = -2147483648
HOVER_DEFAULT = -1
WS_OVERLAPPED = 0
WS_POPUP = -2147483648
WS_CHILD = 1073741824
WS_MINIMIZE = 536870912
WS_VISIBLE = 268435456
WS_DISABLED = 134217728
WS_CLIPSIBLINGS = 67108864
WS_CLIPCHILDREN = 33554432
WS_MAXIMIZE = 16777216
WS_CAPTION = 12582912
WS_BORDER = 8388608
WS_DLGFRAME = 4194304
WS_VSCROLL = 2097152
WS_HSCROLL = 1048576
WS_SYSMENU = 524288
WS_THICKFRAME = 262144
WS_GROUP = 131072
WS_TABSTOP = 65536
WS_MINIMIZEBOX = 131072
WS_MAXIMIZEBOX = 65536
WS_TILED = WS_OVERLAPPED
WS_ICONIC = WS_MINIMIZE
WS_SIZEBOX = WS_THICKFRAME
WS_OVERLAPPEDWINDOW = (WS_OVERLAPPED | \
WS_CAPTION | \
WS_SYSMENU | \
WS_THICKFRAME | \
WS_MINIMIZEBOX | \
WS_MAXIMIZEBOX)
WS_POPUPWINDOW = (WS_POPUP | \
WS_BORDER | \
WS_SYSMENU)
WS_CHILDWINDOW = (WS_CHILD)
WS_TILEDWINDOW = WS_OVERLAPPEDWINDOW
WS_EX_DLGMODALFRAME = 1
WS_EX_NOPARENTNOTIFY = 4
WS_EX_TOPMOST = 8
WS_EX_ACCEPTFILES = 16
WS_EX_TRANSPARENT = 32
WS_EX_MDICHILD = 64
WS_EX_TOOLWINDOW = 128
WS_EX_WINDOWEDGE = 256
WS_EX_CLIENTEDGE = 512
WS_EX_CONTEXTHELP = 1024
WS_EX_RIGHT = 4096
WS_EX_LEFT = 0
WS_EX_RTLREADING = 8192
WS_EX_LTRREADING = 0
WS_EX_LEFTSCROLLBAR = 16384
WS_EX_RIGHTSCROLLBAR = 0
WS_EX_CONTROLPARENT = 65536
WS_EX_STATICEDGE = 131072
WS_EX_APPWINDOW = 262144
WS_EX_OVERLAPPEDWINDOW = (WS_EX_WINDOWEDGE | WS_EX_CLIENTEDGE)
WS_EX_PALETTEWINDOW = (WS_EX_WINDOWEDGE | WS_EX_TOOLWINDOW | WS_EX_TOPMOST)
WS_EX_LAYERED = 0x00080000
WS_EX_NOINHERITLAYOUT = 0x00100000
WS_EX_LAYOUTRTL = 0x00400000
WS_EX_COMPOSITED = 0x02000000
WS_EX_NOACTIVATE = 0x08000000
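# Usage sketch (an addition; assumes a Windows host): WS_* and WS_EX_* are
# window style bit masks. They can be read back for an existing hwnd through
# user32.GetWindowLongW with GWL_STYLE / GWL_EXSTYLE (defined above), e.g.:
#
#   style = ctypes.windll.user32.GetWindowLongW(hwnd, GWL_STYLE)
#   if style & WS_VISIBLE and not style & WS_DISABLED:
#       ...  # window is shown and accepts input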
CS_VREDRAW = 1
CS_HREDRAW = 2
#CS_KEYCVTWINDOW = 0x0004
CS_DBLCLKS = 8
CS_OWNDC = 32
CS_CLASSDC = 64
CS_PARENTDC = 128
#CS_NOKEYCVT = 0x0100
CS_NOCLOSE = 512
CS_SAVEBITS = 2048
CS_BYTEALIGNCLIENT = 4096
CS_BYTEALIGNWINDOW = 8192
CS_GLOBALCLASS = 16384
CS_IME = 65536
PRF_CHECKVISIBLE = 1
PRF_NONCLIENT = 2
PRF_CLIENT = 4
PRF_ERASEBKGND = 8
PRF_CHILDREN = 16
PRF_OWNED = 32
BDR_RAISEDOUTER = 1
BDR_SUNKENOUTER = 2
BDR_RAISEDINNER = 4
BDR_SUNKENINNER = 8
BDR_OUTER = 3
BDR_INNER = 12
#BDR_RAISED = 0x0005
#BDR_SUNKEN = 0x000a
EDGE_RAISED = (BDR_RAISEDOUTER | BDR_RAISEDINNER)
EDGE_SUNKEN = (BDR_SUNKENOUTER | BDR_SUNKENINNER)
EDGE_ETCHED = (BDR_SUNKENOUTER | BDR_RAISEDINNER)
EDGE_BUMP = (BDR_RAISEDOUTER | BDR_SUNKENINNER)
# winuser.h line 2879
ISMEX_NOSEND = 0
ISMEX_SEND = 1
ISMEX_NOTIFY = 2
ISMEX_CALLBACK = 4
ISMEX_REPLIED = 8
CW_USEDEFAULT = -2147483648
FLASHW_STOP = 0
FLASHW_CAPTION = 1
FLASHW_TRAY = 2
FLASHW_ALL = (FLASHW_CAPTION | FLASHW_TRAY)
FLASHW_TIMER = 4
FLASHW_TIMERNOFG = 12
# winuser.h line 7963
DS_ABSALIGN = 1
DS_SYSMODAL = 2
DS_LOCALEDIT = 32
DS_SETFONT = 64
DS_MODALFRAME = 128
DS_NOIDLEMSG = 256
DS_SETFOREGROUND = 512
DS_3DLOOK = 4
DS_FIXEDSYS = 8
DS_NOFAILCREATE = 16
DS_CONTROL = 1024
DS_CENTER = 2048
DS_CENTERMOUSE = 4096
DS_CONTEXTHELP = 8192
DM_GETDEFID = (WM_USER+0)
DM_SETDEFID = (WM_USER+1)
DM_REPOSITION = (WM_USER+2)
#PSM_PAGEINFO = (WM_USER+100)
#PSM_SHEETINFO = (WM_USER+101)
#PSI_SETACTIVE = 0x0001
#PSI_KILLACTIVE = 0x0002
#PSI_APPLY = 0x0003
#PSI_RESET = 0x0004
#PSI_HASHELP = 0x0005
#PSI_HELP = 0x0006
#PSI_CHANGED = 0x0001
#PSI_GUISTART = 0x0002
#PSI_REBOOT = 0x0003
#PSI_GETSIBLINGS = 0x0004
DC_HASDEFID = 21323
DLGC_WANTARROWS = 1
DLGC_WANTTAB = 2
DLGC_WANTALLKEYS = 4
DLGC_WANTMESSAGE = 4
DLGC_HASSETSEL = 8
DLGC_DEFPUSHBUTTON = 16
DLGC_UNDEFPUSHBUTTON = 32
DLGC_RADIOBUTTON = 64
DLGC_WANTCHARS = 128
DLGC_STATIC = 256
DLGC_BUTTON = 8192
LB_CTLCODE = 0
LB_OKAY = 0
LB_ERR = (-1)
LB_ERRSPACE = (-2)
LBN_ERRSPACE = (-2)
LBN_SELCHANGE = 1
LBN_DBLCLK = 2
LBN_SELCANCEL = 3
LBN_SETFOCUS = 4
LBN_KILLFOCUS = 5
LB_ADDSTRING = 384
LB_INSERTSTRING = 385
LB_DELETESTRING = 386
LB_SELITEMRANGEEX = 387
LB_RESETCONTENT = 388
LB_SETSEL = 389
LB_SETCURSEL = 390
LB_GETSEL = 391
LB_GETCURSEL = 392
LB_GETTEXT = 393
LB_GETTEXTLEN = 394
LB_GETCOUNT = 395
LB_SELECTSTRING = 396
LB_DIR = 397
LB_GETTOPINDEX = 398
LB_FINDSTRING = 399
LB_GETSELCOUNT = 400
LB_GETSELITEMS = 401
LB_SETTABSTOPS = 402
LB_GETHORIZONTALEXTENT = 403
LB_SETHORIZONTALEXTENT = 404
LB_SETCOLUMNWIDTH = 405
LB_ADDFILE = 406
LB_SETTOPINDEX = 407
LB_GETITEMRECT = 408
LB_GETITEMDATA = 409
LB_SETITEMDATA = 410
LB_SELITEMRANGE = 411
LB_SETANCHORINDEX = 412
LB_GETANCHORINDEX = 413
LB_SETCARETINDEX = 414
LB_GETCARETINDEX = 415
LB_SETITEMHEIGHT = 416
LB_GETITEMHEIGHT = 417
LB_FINDSTRINGEXACT = 418
LB_SETLOCALE = 421
LB_GETLOCALE = 422
LB_SETCOUNT = 423
LB_INITSTORAGE = 424
LB_ITEMFROMPOINT = 425
LB_MSGMAX = 432
LBS_NOTIFY = 1
LBS_SORT = 2
LBS_NOREDRAW = 4
LBS_MULTIPLESEL = 8
LBS_OWNERDRAWFIXED = 16
LBS_OWNERDRAWVARIABLE = 32
LBS_HASSTRINGS = 64
LBS_USETABSTOPS = 128
LBS_NOINTEGRALHEIGHT = 256
LBS_MULTICOLUMN = 512
LBS_WANTKEYBOARDINPUT = 1024
LBS_EXTENDEDSEL = 2048
LBS_DISABLENOSCROLL = 4096
LBS_NODATA = 8192
LBS_NOSEL = 16384
LBS_STANDARD = (LBS_NOTIFY | LBS_SORT | WS_VSCROLL | WS_BORDER)
CB_OKAY = 0
CB_ERR = (-1)
CB_ERRSPACE = (-2)
CBN_ERRSPACE = (-1)
CBN_SELCHANGE = 1
CBN_DBLCLK = 2
CBN_SETFOCUS = 3
CBN_KILLFOCUS = 4
CBN_EDITCHANGE = 5
CBN_EDITUPDATE = 6
CBN_DROPDOWN = 7
CBN_CLOSEUP = 8
CBN_SELENDOK = 9
CBN_SELENDCANCEL = 10
CBS_SIMPLE = 1
CBS_DROPDOWN = 2
CBS_DROPDOWNLIST = 3
CBS_OWNERDRAWFIXED = 16
CBS_OWNERDRAWVARIABLE = 32
CBS_AUTOHSCROLL = 64
CBS_OEMCONVERT = 128
CBS_SORT = 256
CBS_HASSTRINGS = 512
CBS_NOINTEGRALHEIGHT = 1024
CBS_DISABLENOSCROLL = 2048
CBS_UPPERCASE = 8192
CBS_LOWERCASE = 16384
CB_GETEDITSEL = 320
CB_LIMITTEXT = 321
CB_SETEDITSEL = 322
CB_ADDSTRING = 323
CB_DELETESTRING = 324
CB_DIR = 325
CB_GETCOUNT = 326
CB_GETCURSEL = 327
CB_GETLBTEXT = 328
CB_GETLBTEXTLEN = 329
CB_INSERTSTRING = 330
CB_RESETCONTENT = 331
CB_FINDSTRING = 332
CB_SELECTSTRING = 333
CB_SETCURSEL = 334
CB_SHOWDROPDOWN = 335
CB_GETITEMDATA = 336
CB_SETITEMDATA = 337
CB_GETDROPPEDCONTROLRECT = 338
CB_SETITEMHEIGHT = 339
CB_GETITEMHEIGHT = 340
CB_SETEXTENDEDUI = 341
CB_GETEXTENDEDUI = 342
CB_GETDROPPEDSTATE = 343
CB_FINDSTRINGEXACT = 344
CB_SETLOCALE = 345
CB_GETLOCALE = 346
CB_GETTOPINDEX = 347
CB_SETTOPINDEX = 348
CB_GETHORIZONTALEXTENT = 349
CB_SETHORIZONTALEXTENT = 350
CB_GETDROPPEDWIDTH = 351
CB_SETDROPPEDWIDTH = 352
CB_INITSTORAGE = 353
CB_MSGMAX = 354
SBS_HORZ = 0
SBS_VERT = 1
SBS_TOPALIGN = 2
SBS_LEFTALIGN = 2
SBS_BOTTOMALIGN = 4
SBS_RIGHTALIGN = 4
SBS_SIZEBOXTOPLEFTALIGN = 2
SBS_SIZEBOXBOTTOMRIGHTALIGN = 4
SBS_SIZEBOX = 8
SBS_SIZEGRIP = 16
SBM_SETPOS = 224
SBM_GETPOS = 225
SBM_SETRANGE = 226
SBM_SETRANGEREDRAW = 230
SBM_GETRANGE = 227
SBM_ENABLE_ARROWS = 228
SBM_SETSCROLLINFO = 233
SBM_GETSCROLLINFO = 234
SIF_RANGE = 1
SIF_PAGE = 2
SIF_POS = 4
SIF_DISABLENOSCROLL = 8
SIF_TRACKPOS = 16
SIF_ALL = (SIF_RANGE | SIF_PAGE | SIF_POS | SIF_TRACKPOS)
MDIS_ALLCHILDSTYLES = 1
MDITILE_VERTICAL = 0
MDITILE_HORIZONTAL = 1
MDITILE_SKIPDISABLED = 2
IMC_GETCANDIDATEPOS = 7
IMC_SETCANDIDATEPOS = 8
IMC_GETCOMPOSITIONFONT = 9
IMC_SETCOMPOSITIONFONT = 10
IMC_GETCOMPOSITIONWINDOW = 11
IMC_SETCOMPOSITIONWINDOW = 12
IMC_GETSTATUSWINDOWPOS = 15
IMC_SETSTATUSWINDOWPOS = 16
IMC_CLOSESTATUSWINDOW = 33
IMC_OPENSTATUSWINDOW = 34
# Generated by h2py from \msvc20\include\winnt.h
# hacked and split by mhammond.
DELETE = (65536)
READ_CONTROL = (131072)
WRITE_DAC = (262144)
WRITE_OWNER = (524288)
SYNCHRONIZE = (1048576)
STANDARD_RIGHTS_REQUIRED = (983040)
STANDARD_RIGHTS_READ = (READ_CONTROL)
STANDARD_RIGHTS_WRITE = (READ_CONTROL)
STANDARD_RIGHTS_EXECUTE = (READ_CONTROL)
STANDARD_RIGHTS_ALL = (2031616)
SPECIFIC_RIGHTS_ALL = (65535)
ACCESS_SYSTEM_SECURITY = (16777216)
MAXIMUM_ALLOWED = (33554432)
GENERIC_READ = (-2147483648)
GENERIC_WRITE = (1073741824)
GENERIC_EXECUTE = (536870912)
GENERIC_ALL = (268435456)
SERVICE_KERNEL_DRIVER = 1
SERVICE_FILE_SYSTEM_DRIVER = 2
SERVICE_ADAPTER = 4
SERVICE_RECOGNIZER_DRIVER = 8
SERVICE_DRIVER = (SERVICE_KERNEL_DRIVER | \
SERVICE_FILE_SYSTEM_DRIVER | \
SERVICE_RECOGNIZER_DRIVER)
SERVICE_WIN32_OWN_PROCESS = 16
SERVICE_WIN32_SHARE_PROCESS = 32
SERVICE_WIN32 = (SERVICE_WIN32_OWN_PROCESS | \
SERVICE_WIN32_SHARE_PROCESS)
SERVICE_INTERACTIVE_PROCESS = 256
SERVICE_TYPE_ALL = (SERVICE_WIN32 | \
SERVICE_ADAPTER | \
SERVICE_DRIVER | \
SERVICE_INTERACTIVE_PROCESS)
SERVICE_BOOT_START = 0
SERVICE_SYSTEM_START = 1
SERVICE_AUTO_START = 2
SERVICE_DEMAND_START = 3
SERVICE_DISABLED = 4
SERVICE_ERROR_IGNORE = 0
SERVICE_ERROR_NORMAL = 1
SERVICE_ERROR_SEVERE = 2
SERVICE_ERROR_CRITICAL = 3
TAPE_ERASE_SHORT = 0
TAPE_ERASE_LONG = 1
TAPE_LOAD = 0
TAPE_UNLOAD = 1
TAPE_TENSION = 2
TAPE_LOCK = 3
TAPE_UNLOCK = 4
TAPE_FORMAT = 5
TAPE_SETMARKS = 0
TAPE_FILEMARKS = 1
TAPE_SHORT_FILEMARKS = 2
TAPE_LONG_FILEMARKS = 3
TAPE_ABSOLUTE_POSITION = 0
TAPE_LOGICAL_POSITION = 1
TAPE_PSEUDO_LOGICAL_POSITION = 2
TAPE_REWIND = 0
TAPE_ABSOLUTE_BLOCK = 1
TAPE_LOGICAL_BLOCK = 2
TAPE_PSEUDO_LOGICAL_BLOCK = 3
TAPE_SPACE_END_OF_DATA = 4
TAPE_SPACE_RELATIVE_BLOCKS = 5
TAPE_SPACE_FILEMARKS = 6
TAPE_SPACE_SEQUENTIAL_FMKS = 7
TAPE_SPACE_SETMARKS = 8
TAPE_SPACE_SEQUENTIAL_SMKS = 9
TAPE_DRIVE_FIXED = 1
TAPE_DRIVE_SELECT = 2
TAPE_DRIVE_INITIATOR = 4
TAPE_DRIVE_ERASE_SHORT = 16
TAPE_DRIVE_ERASE_LONG = 32
TAPE_DRIVE_ERASE_BOP_ONLY = 64
TAPE_DRIVE_ERASE_IMMEDIATE = 128
TAPE_DRIVE_TAPE_CAPACITY = 256
TAPE_DRIVE_TAPE_REMAINING = 512
TAPE_DRIVE_FIXED_BLOCK = 1024
TAPE_DRIVE_VARIABLE_BLOCK = 2048
TAPE_DRIVE_WRITE_PROTECT = 4096
TAPE_DRIVE_EOT_WZ_SIZE = 8192
TAPE_DRIVE_ECC = 65536
TAPE_DRIVE_COMPRESSION = 131072
TAPE_DRIVE_PADDING = 262144
TAPE_DRIVE_REPORT_SMKS = 524288
TAPE_DRIVE_GET_ABSOLUTE_BLK = 1048576
TAPE_DRIVE_GET_LOGICAL_BLK = 2097152
TAPE_DRIVE_SET_EOT_WZ_SIZE = 4194304
TAPE_DRIVE_LOAD_UNLOAD = -2147483647
TAPE_DRIVE_TENSION = -2147483646
TAPE_DRIVE_LOCK_UNLOCK = -2147483644
TAPE_DRIVE_REWIND_IMMEDIATE = -2147483640
TAPE_DRIVE_SET_BLOCK_SIZE = -2147483632
TAPE_DRIVE_LOAD_UNLD_IMMED = -2147483616
TAPE_DRIVE_TENSION_IMMED = -2147483584
TAPE_DRIVE_LOCK_UNLK_IMMED = -2147483520
TAPE_DRIVE_SET_ECC = -2147483392
TAPE_DRIVE_SET_COMPRESSION = -2147483136
TAPE_DRIVE_SET_PADDING = -2147482624
TAPE_DRIVE_SET_REPORT_SMKS = -2147481600
TAPE_DRIVE_ABSOLUTE_BLK = -2147479552
TAPE_DRIVE_ABS_BLK_IMMED = -2147475456
TAPE_DRIVE_LOGICAL_BLK = -2147467264
TAPE_DRIVE_LOG_BLK_IMMED = -2147450880
TAPE_DRIVE_END_OF_DATA = -2147418112
TAPE_DRIVE_RELATIVE_BLKS = -2147352576
TAPE_DRIVE_FILEMARKS = -2147221504
TAPE_DRIVE_SEQUENTIAL_FMKS = -2146959360
TAPE_DRIVE_SETMARKS = -2146435072
TAPE_DRIVE_SEQUENTIAL_SMKS = -2145386496
TAPE_DRIVE_REVERSE_POSITION = -2143289344
TAPE_DRIVE_SPACE_IMMEDIATE = -2139095040
TAPE_DRIVE_WRITE_SETMARKS = -2130706432
TAPE_DRIVE_WRITE_FILEMARKS = -2113929216
TAPE_DRIVE_WRITE_SHORT_FMKS = -2080374784
TAPE_DRIVE_WRITE_LONG_FMKS = -2013265920
TAPE_DRIVE_WRITE_MARK_IMMED = -1879048192
TAPE_DRIVE_FORMAT = -1610612736
TAPE_DRIVE_FORMAT_IMMEDIATE = -1073741824
TAPE_FIXED_PARTITIONS = 0
TAPE_SELECT_PARTITIONS = 1
TAPE_INITIATOR_PARTITIONS = 2
# Generated by h2py from \msvc20\include\winnt.h
# hacked and split by mhammond.
APPLICATION_ERROR_MASK = 536870912
ERROR_SEVERITY_SUCCESS = 0
ERROR_SEVERITY_INFORMATIONAL = 1073741824
ERROR_SEVERITY_WARNING = -2147483648
ERROR_SEVERITY_ERROR = -1073741824
MINCHAR = 128
MAXCHAR = 127
MINSHORT = 32768
MAXSHORT = 32767
MINLONG = -2147483648
MAXLONG = 2147483647
MAXBYTE = 255
MAXWORD = 65535
MAXDWORD = -1
LANG_NEUTRAL = 0
LANG_BULGARIAN = 2
LANG_CHINESE = 4
LANG_CROATIAN = 26
LANG_CZECH = 5
LANG_DANISH = 6
LANG_DUTCH = 19
LANG_ENGLISH = 9
LANG_FINNISH = 11
LANG_FRENCH = 12
LANG_GERMAN = 7
LANG_GREEK = 8
LANG_HUNGARIAN = 14
LANG_ICELANDIC = 15
LANG_ITALIAN = 16
LANG_JAPANESE = 17
LANG_KOREAN = 18
LANG_NORWEGIAN = 20
LANG_POLISH = 21
LANG_PORTUGUESE = 22
LANG_ROMANIAN = 24
LANG_RUSSIAN = 25
LANG_SLOVAK = 27
LANG_SLOVENIAN = 36
LANG_SPANISH = 10
LANG_SWEDISH = 29
LANG_TURKISH = 31
SUBLANG_NEUTRAL = 0
SUBLANG_DEFAULT = 1
SUBLANG_SYS_DEFAULT = 2
SUBLANG_CHINESE_TRADITIONAL = 1
SUBLANG_CHINESE_SIMPLIFIED = 2
SUBLANG_CHINESE_HONGKONG = 3
SUBLANG_CHINESE_SINGAPORE = 4
SUBLANG_DUTCH = 1
SUBLANG_DUTCH_BELGIAN = 2
SUBLANG_ENGLISH_US = 1
SUBLANG_ENGLISH_UK = 2
SUBLANG_ENGLISH_AUS = 3
SUBLANG_ENGLISH_CAN = 4
SUBLANG_ENGLISH_NZ = 5
SUBLANG_ENGLISH_EIRE = 6
SUBLANG_FRENCH = 1
SUBLANG_FRENCH_BELGIAN = 2
SUBLANG_FRENCH_CANADIAN = 3
SUBLANG_FRENCH_SWISS = 4
SUBLANG_GERMAN = 1
SUBLANG_GERMAN_SWISS = 2
SUBLANG_GERMAN_AUSTRIAN = 3
SUBLANG_ITALIAN = 1
SUBLANG_ITALIAN_SWISS = 2
SUBLANG_NORWEGIAN_BOKMAL = 1
SUBLANG_NORWEGIAN_NYNORSK = 2
SUBLANG_PORTUGUESE = 2
SUBLANG_PORTUGUESE_BRAZILIAN = 1
SUBLANG_SPANISH = 1
SUBLANG_SPANISH_MEXICAN = 2
SUBLANG_SPANISH_MODERN = 3
SORT_DEFAULT = 0
SORT_JAPANESE_XJIS = 0
SORT_JAPANESE_UNICODE = 1
SORT_CHINESE_BIG5 = 0
SORT_CHINESE_UNICODE = 1
SORT_KOREAN_KSC = 0
SORT_KOREAN_UNICODE = 1
def PRIMARYLANGID(lgid): return ((lgid) & 1023)
def SUBLANGID(lgid): return ((lgid) >> 10)
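# Illustrative helper (an addition): the inverse of the two macros above,
# mirroring MAKELANGID from winnt.h. A LANGID packs the primary language in
# bits 0-9 and the sublanguage in bits 10-15, e.g.
# MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US) == 1033, US English.
def MAKELANGID(primary, sublang):
    return (sublang << 10) | primary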
NLS_VALID_LOCALE_MASK = 1048575
CONTEXT_PORTABLE_32BIT = 1048576
CONTEXT_ALPHA = 131072
CONTEXT_CONTROL = (CONTEXT_ALPHA | 1)
CONTEXT_FLOATING_POINT = (CONTEXT_ALPHA | 2)
CONTEXT_INTEGER = (CONTEXT_ALPHA | 4)
CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER)
SIZE_OF_80387_REGISTERS = 80
CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER)
CONTEXT_CONTROL = 1
CONTEXT_FLOATING_POINT = 2
CONTEXT_INTEGER = 4
CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER)
PROCESS_TERMINATE = (1)
PROCESS_CREATE_THREAD = (2)
PROCESS_VM_OPERATION = (8)
PROCESS_VM_READ = (16)
PROCESS_VM_WRITE = (32)
PROCESS_DUP_HANDLE = (64)
PROCESS_CREATE_PROCESS = (128)
PROCESS_SET_QUOTA = (256)
PROCESS_SET_INFORMATION = (512)
PROCESS_QUERY_INFORMATION = (1024)
PROCESS_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 4095)
THREAD_TERMINATE = (1)
THREAD_SUSPEND_RESUME = (2)
THREAD_GET_CONTEXT = (8)
THREAD_SET_CONTEXT = (16)
THREAD_SET_INFORMATION = (32)
THREAD_QUERY_INFORMATION = (64)
THREAD_SET_THREAD_TOKEN = (128)
THREAD_IMPERSONATE = (256)
THREAD_DIRECT_IMPERSONATION = (512)
TLS_MINIMUM_AVAILABLE = 64
EVENT_MODIFY_STATE = 2
MUTANT_QUERY_STATE = 1
SEMAPHORE_MODIFY_STATE = 2
TIME_ZONE_ID_UNKNOWN = 0
TIME_ZONE_ID_STANDARD = 1
TIME_ZONE_ID_DAYLIGHT = 2
PROCESSOR_INTEL_386 = 386
PROCESSOR_INTEL_486 = 486
PROCESSOR_INTEL_PENTIUM = 586
PROCESSOR_INTEL_860 = 860
PROCESSOR_MIPS_R2000 = 2000
PROCESSOR_MIPS_R3000 = 3000
PROCESSOR_MIPS_R4000 = 4000
PROCESSOR_ALPHA_21064 = 21064
PROCESSOR_PPC_601 = 601
PROCESSOR_PPC_603 = 603
PROCESSOR_PPC_604 = 604
PROCESSOR_PPC_620 = 620
SECTION_QUERY = 1
SECTION_MAP_WRITE = 2
SECTION_MAP_READ = 4
SECTION_MAP_EXECUTE = 8
SECTION_EXTEND_SIZE = 16
PAGE_NOACCESS = 1
PAGE_READONLY = 2
PAGE_READWRITE = 4
PAGE_WRITECOPY = 8
PAGE_EXECUTE = 16
PAGE_EXECUTE_READ = 32
PAGE_EXECUTE_READWRITE = 64
PAGE_EXECUTE_WRITECOPY = 128
PAGE_GUARD = 256
PAGE_NOCACHE = 512
MEM_COMMIT = 4096
MEM_RESERVE = 8192
MEM_DECOMMIT = 16384
MEM_RELEASE = 32768
MEM_FREE = 65536
MEM_PRIVATE = 131072
MEM_MAPPED = 262144
MEM_TOP_DOWN = 1048576
# Generated by h2py from \msvc20\include\winnt.h
# hacked and split by mhammond.
INVALID_HANDLE_VALUE = 4294967295L
SEC_FILE = 8388608
SEC_IMAGE = 16777216
SEC_RESERVE = 67108864
SEC_COMMIT = 134217728
SEC_NOCACHE = 268435456
MEM_IMAGE = SEC_IMAGE
FILE_SHARE_READ = 1
FILE_SHARE_WRITE = 2
FILE_SHARE_DELETE = 4
FILE_ATTRIBUTE_READONLY = 1
FILE_ATTRIBUTE_HIDDEN = 2
FILE_ATTRIBUTE_SYSTEM = 4
FILE_ATTRIBUTE_DIRECTORY = 16
FILE_ATTRIBUTE_ARCHIVE = 32
FILE_ATTRIBUTE_NORMAL = 128
FILE_ATTRIBUTE_TEMPORARY = 256
FILE_ATTRIBUTE_ATOMIC_WRITE = 512
FILE_ATTRIBUTE_XACTION_WRITE = 1024
FILE_ATTRIBUTE_COMPRESSED = 2048
FILE_NOTIFY_CHANGE_FILE_NAME = 1
FILE_NOTIFY_CHANGE_DIR_NAME = 2
FILE_NOTIFY_CHANGE_ATTRIBUTES = 4
FILE_NOTIFY_CHANGE_SIZE = 8
FILE_NOTIFY_CHANGE_LAST_WRITE = 16
FILE_NOTIFY_CHANGE_SECURITY = 256
FILE_CASE_SENSITIVE_SEARCH = 1
FILE_CASE_PRESERVED_NAMES = 2
FILE_UNICODE_ON_DISK = 4
FILE_PERSISTENT_ACLS = 8
FILE_FILE_COMPRESSION = 16
FILE_VOLUME_IS_COMPRESSED = 32768
IO_COMPLETION_MODIFY_STATE = 2
DUPLICATE_CLOSE_SOURCE = 1
DUPLICATE_SAME_ACCESS = 2
SID_MAX_SUB_AUTHORITIES = (15)
SECURITY_NULL_RID = (0)
SECURITY_WORLD_RID = (0)
SECURITY_LOCAL_RID = (0X00000000)
SECURITY_CREATOR_OWNER_RID = (0)
SECURITY_CREATOR_GROUP_RID = (1)
SECURITY_DIALUP_RID = (1)
SECURITY_NETWORK_RID = (2)
SECURITY_BATCH_RID = (3)
SECURITY_INTERACTIVE_RID = (4)
SECURITY_SERVICE_RID = (6)
SECURITY_ANONYMOUS_LOGON_RID = (7)
SECURITY_LOGON_IDS_RID = (5)
SECURITY_LOGON_IDS_RID_COUNT = (3)
SECURITY_LOCAL_SYSTEM_RID = (18)
SECURITY_NT_NON_UNIQUE = (21)
SECURITY_BUILTIN_DOMAIN_RID = (32)
DOMAIN_USER_RID_ADMIN = (500)
DOMAIN_USER_RID_GUEST = (501)
DOMAIN_GROUP_RID_ADMINS = (512)
DOMAIN_GROUP_RID_USERS = (513)
DOMAIN_GROUP_RID_GUESTS = (514)
DOMAIN_ALIAS_RID_ADMINS = (544)
DOMAIN_ALIAS_RID_USERS = (545)
DOMAIN_ALIAS_RID_GUESTS = (546)
DOMAIN_ALIAS_RID_POWER_USERS = (547)
DOMAIN_ALIAS_RID_ACCOUNT_OPS = (548)
DOMAIN_ALIAS_RID_SYSTEM_OPS = (549)
DOMAIN_ALIAS_RID_PRINT_OPS = (550)
DOMAIN_ALIAS_RID_BACKUP_OPS = (551)
DOMAIN_ALIAS_RID_REPLICATOR = (552)
SE_GROUP_MANDATORY = (1)
SE_GROUP_ENABLED_BY_DEFAULT = (2)
SE_GROUP_ENABLED = (4)
SE_GROUP_OWNER = (8)
SE_GROUP_LOGON_ID = (-1073741824)
ACL_REVISION = (2)
ACL_REVISION1 = (1)
ACL_REVISION2 = (2)
ACCESS_ALLOWED_ACE_TYPE = (0)
ACCESS_DENIED_ACE_TYPE = (1)
SYSTEM_AUDIT_ACE_TYPE = (2)
SYSTEM_ALARM_ACE_TYPE = (3)
OBJECT_INHERIT_ACE = (1)
CONTAINER_INHERIT_ACE = (2)
NO_PROPAGATE_INHERIT_ACE = (4)
INHERIT_ONLY_ACE = (8)
VALID_INHERIT_FLAGS = (15)
SUCCESSFUL_ACCESS_ACE_FLAG = (64)
FAILED_ACCESS_ACE_FLAG = (128)
SECURITY_DESCRIPTOR_REVISION = (1)
SECURITY_DESCRIPTOR_REVISION1 = (1)
SECURITY_DESCRIPTOR_MIN_LENGTH = (20)
SE_OWNER_DEFAULTED = (1)
SE_GROUP_DEFAULTED = (2)
SE_DACL_PRESENT = (4)
SE_DACL_DEFAULTED = (8)
SE_SACL_PRESENT = (16)
SE_SACL_DEFAULTED = (32)
SE_SELF_RELATIVE = (32768)
SE_PRIVILEGE_ENABLED_BY_DEFAULT = (1)
SE_PRIVILEGE_ENABLED = (2)
SE_PRIVILEGE_USED_FOR_ACCESS = (-2147483648)
PRIVILEGE_SET_ALL_NECESSARY = (1)
SE_CREATE_TOKEN_NAME = "SeCreateTokenPrivilege"
SE_ASSIGNPRIMARYTOKEN_NAME = "SeAssignPrimaryTokenPrivilege"
SE_LOCK_MEMORY_NAME = "SeLockMemoryPrivilege"
SE_INCREASE_QUOTA_NAME = "SeIncreaseQuotaPrivilege"
SE_UNSOLICITED_INPUT_NAME = "SeUnsolicitedInputPrivilege"
SE_MACHINE_ACCOUNT_NAME = "SeMachineAccountPrivilege"
SE_TCB_NAME = "SeTcbPrivilege"
SE_SECURITY_NAME = "SeSecurityPrivilege"
SE_TAKE_OWNERSHIP_NAME = "SeTakeOwnershipPrivilege"
SE_LOAD_DRIVER_NAME = "SeLoadDriverPrivilege"
SE_SYSTEM_PROFILE_NAME = "SeSystemProfilePrivilege"
SE_SYSTEMTIME_NAME = "SeSystemtimePrivilege"
SE_PROF_SINGLE_PROCESS_NAME = "SeProfileSingleProcessPrivilege"
SE_INC_BASE_PRIORITY_NAME = "SeIncreaseBasePriorityPrivilege"
SE_CREATE_PAGEFILE_NAME = "SeCreatePagefilePrivilege"
SE_CREATE_PERMANENT_NAME = "SeCreatePermanentPrivilege"
SE_BACKUP_NAME = "SeBackupPrivilege"
SE_RESTORE_NAME = "SeRestorePrivilege"
SE_SHUTDOWN_NAME = "SeShutdownPrivilege"
SE_DEBUG_NAME = "SeDebugPrivilege"
SE_AUDIT_NAME = "SeAuditPrivilege"
SE_SYSTEM_ENVIRONMENT_NAME = "SeSystemEnvironmentPrivilege"
SE_CHANGE_NOTIFY_NAME = "SeChangeNotifyPrivilege"
SE_REMOTE_SHUTDOWN_NAME = "SeRemoteShutdownPrivilege"
TOKEN_ASSIGN_PRIMARY = (1)
TOKEN_DUPLICATE = (2)
TOKEN_IMPERSONATE = (4)
TOKEN_QUERY = (8)
TOKEN_QUERY_SOURCE = (16)
TOKEN_ADJUST_PRIVILEGES = (32)
TOKEN_ADJUST_GROUPS = (64)
TOKEN_ADJUST_DEFAULT = (128)
TOKEN_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED |\
TOKEN_ASSIGN_PRIMARY |\
TOKEN_DUPLICATE |\
TOKEN_IMPERSONATE |\
TOKEN_QUERY |\
TOKEN_QUERY_SOURCE |\
TOKEN_ADJUST_PRIVILEGES |\
TOKEN_ADJUST_GROUPS |\
TOKEN_ADJUST_DEFAULT)
TOKEN_READ = (STANDARD_RIGHTS_READ |\
TOKEN_QUERY)
TOKEN_WRITE = (STANDARD_RIGHTS_WRITE |\
TOKEN_ADJUST_PRIVILEGES |\
TOKEN_ADJUST_GROUPS |\
TOKEN_ADJUST_DEFAULT)
TOKEN_EXECUTE = (STANDARD_RIGHTS_EXECUTE)
TOKEN_SOURCE_LENGTH = 8
KEY_QUERY_VALUE = (1)
KEY_SET_VALUE = (2)
KEY_CREATE_SUB_KEY = (4)
KEY_ENUMERATE_SUB_KEYS = (8)
KEY_NOTIFY = (16)
KEY_CREATE_LINK = (32)
KEY_WOW64_32KEY = 512
KEY_WOW64_64KEY = 256
KEY_WOW64_RES = 768
KEY_READ = ((STANDARD_RIGHTS_READ |\
KEY_QUERY_VALUE |\
KEY_ENUMERATE_SUB_KEYS |\
KEY_NOTIFY) \
& \
(~SYNCHRONIZE))
KEY_WRITE = ((STANDARD_RIGHTS_WRITE |\
KEY_SET_VALUE |\
KEY_CREATE_SUB_KEY) \
& \
(~SYNCHRONIZE))
KEY_EXECUTE = ((KEY_READ) \
& \
(~SYNCHRONIZE))
KEY_ALL_ACCESS = ((STANDARD_RIGHTS_ALL |\
KEY_QUERY_VALUE |\
KEY_SET_VALUE |\
KEY_CREATE_SUB_KEY |\
KEY_ENUMERATE_SUB_KEYS |\
KEY_NOTIFY |\
KEY_CREATE_LINK) \
& \
(~SYNCHRONIZE))
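# Usage sketch (an addition; assumes a Windows host): the KEY_* masks are the
# access-rights ("sam") argument of the registry API, e.g. with the Python 2
# _winreg module:
#
#   import _winreg
#   key = _winreg.OpenKey(
#       _winreg.HKEY_LOCAL_MACHINE,
#       r'SOFTWARE\Microsoft\Windows NT\CurrentVersion',
#       0, KEY_READ | KEY_WOW64_64KEY)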
REG_NOTIFY_CHANGE_ATTRIBUTES = (2)
REG_NOTIFY_CHANGE_SECURITY = (8)
REG_RESOURCE_REQUIREMENTS_LIST = ( 10 )
REG_NONE = ( 0 ) # No value type
REG_SZ = ( 1 ) # Unicode nul terminated string
REG_EXPAND_SZ = ( 2 ) # Unicode nul terminated string
# (with environment variable references)
REG_BINARY = ( 3 ) # Free form binary
REG_DWORD = ( 4 ) # 32-bit number
REG_DWORD_LITTLE_ENDIAN = ( 4 ) # 32-bit number (same as REG_DWORD)
REG_DWORD_BIG_ENDIAN = ( 5 ) # 32-bit number
REG_LINK = ( 6 ) # Symbolic Link (unicode)
REG_MULTI_SZ = ( 7 ) # Multiple Unicode strings
REG_RESOURCE_LIST = ( 8 ) # Resource list in the resource map
REG_FULL_RESOURCE_DESCRIPTOR = ( 9 ) # Resource list in the hardware description
REG_RESOURCE_REQUIREMENTS_LIST = ( 10 )
REG_QWORD = ( 11 ) # 64-bit number
REG_QWORD_LITTLE_ENDIAN = ( 11 ) # 64-bit number (same as REG_QWORD)
# Generated by h2py from \msvc20\include\winnt.h
# hacked and split by mhammond.
# Included from string.h
_NLSCMPERROR = 2147483647
NULL = 0
HEAP_NO_SERIALIZE = 1
HEAP_GROWABLE = 2
HEAP_GENERATE_EXCEPTIONS = 4
HEAP_ZERO_MEMORY = 8
HEAP_REALLOC_IN_PLACE_ONLY = 16
HEAP_TAIL_CHECKING_ENABLED = 32
HEAP_FREE_CHECKING_ENABLED = 64
HEAP_DISABLE_COALESCE_ON_FREE = 128
IS_TEXT_UNICODE_ASCII16 = 1
IS_TEXT_UNICODE_REVERSE_ASCII16 = 16
IS_TEXT_UNICODE_STATISTICS = 2
IS_TEXT_UNICODE_REVERSE_STATISTICS = 32
IS_TEXT_UNICODE_CONTROLS = 4
IS_TEXT_UNICODE_REVERSE_CONTROLS = 64
IS_TEXT_UNICODE_SIGNATURE = 8
IS_TEXT_UNICODE_REVERSE_SIGNATURE = 128
IS_TEXT_UNICODE_ILLEGAL_CHARS = 256
IS_TEXT_UNICODE_ODD_LENGTH = 512
IS_TEXT_UNICODE_DBCS_LEADBYTE = 1024
IS_TEXT_UNICODE_NULL_BYTES = 4096
IS_TEXT_UNICODE_UNICODE_MASK = 15
IS_TEXT_UNICODE_REVERSE_MASK = 240
IS_TEXT_UNICODE_NOT_UNICODE_MASK = 3840
IS_TEXT_UNICODE_NOT_ASCII_MASK = 61440
COMPRESSION_FORMAT_NONE = (0)
COMPRESSION_FORMAT_DEFAULT = (1)
COMPRESSION_FORMAT_LZNT1 = (2)
COMPRESSION_ENGINE_STANDARD = (0)
COMPRESSION_ENGINE_MAXIMUM = (256)
MESSAGE_RESOURCE_UNICODE = 1
RTL_CRITSECT_TYPE = 0
RTL_RESOURCE_TYPE = 1
DLL_PROCESS_ATTACH = 1
DLL_THREAD_ATTACH = 2
DLL_THREAD_DETACH = 3
DLL_PROCESS_DETACH = 0
EVENTLOG_SEQUENTIAL_READ = 0x0001
EVENTLOG_SEEK_READ = 0x0002
EVENTLOG_FORWARDS_READ = 0x0004
EVENTLOG_BACKWARDS_READ = 0x0008
EVENTLOG_SUCCESS = 0x0000
EVENTLOG_ERROR_TYPE = 1
EVENTLOG_WARNING_TYPE = 2
EVENTLOG_INFORMATION_TYPE = 4
EVENTLOG_AUDIT_SUCCESS = 8
EVENTLOG_AUDIT_FAILURE = 16
EVENTLOG_START_PAIRED_EVENT = 1
EVENTLOG_END_PAIRED_EVENT = 2
EVENTLOG_END_ALL_PAIRED_EVENTS = 4
EVENTLOG_PAIRED_EVENT_ACTIVE = 8
EVENTLOG_PAIRED_EVENT_INACTIVE = 16
# Generated by h2py from \msvc20\include\winnt.h
# hacked and split by mhammond.
OWNER_SECURITY_INFORMATION = (0x00000001)
GROUP_SECURITY_INFORMATION = (0x00000002)
DACL_SECURITY_INFORMATION = (0x00000004)
SACL_SECURITY_INFORMATION = (0x00000008)
IMAGE_SIZEOF_FILE_HEADER = 20
IMAGE_FILE_MACHINE_UNKNOWN = 0
IMAGE_NUMBEROF_DIRECTORY_ENTRIES = 16
IMAGE_SIZEOF_ROM_OPTIONAL_HEADER = 56
IMAGE_SIZEOF_STD_OPTIONAL_HEADER = 28
IMAGE_SIZEOF_NT_OPTIONAL_HEADER = 224
IMAGE_NT_OPTIONAL_HDR_MAGIC = 267
IMAGE_ROM_OPTIONAL_HDR_MAGIC = 263
IMAGE_SIZEOF_SHORT_NAME = 8
IMAGE_SIZEOF_SECTION_HEADER = 40
IMAGE_SIZEOF_SYMBOL = 18
IMAGE_SYM_CLASS_NULL = 0
IMAGE_SYM_CLASS_AUTOMATIC = 1
IMAGE_SYM_CLASS_EXTERNAL = 2
IMAGE_SYM_CLASS_STATIC = 3
IMAGE_SYM_CLASS_REGISTER = 4
IMAGE_SYM_CLASS_EXTERNAL_DEF = 5
IMAGE_SYM_CLASS_LABEL = 6
IMAGE_SYM_CLASS_UNDEFINED_LABEL = 7
IMAGE_SYM_CLASS_MEMBER_OF_STRUCT = 8
IMAGE_SYM_CLASS_ARGUMENT = 9
IMAGE_SYM_CLASS_STRUCT_TAG = 10
IMAGE_SYM_CLASS_MEMBER_OF_UNION = 11
IMAGE_SYM_CLASS_UNION_TAG = 12
IMAGE_SYM_CLASS_TYPE_DEFINITION = 13
IMAGE_SYM_CLASS_UNDEFINED_STATIC = 14
IMAGE_SYM_CLASS_ENUM_TAG = 15
IMAGE_SYM_CLASS_MEMBER_OF_ENUM = 16
IMAGE_SYM_CLASS_REGISTER_PARAM = 17
IMAGE_SYM_CLASS_BIT_FIELD = 18
IMAGE_SYM_CLASS_BLOCK = 100
IMAGE_SYM_CLASS_FUNCTION = 101
IMAGE_SYM_CLASS_END_OF_STRUCT = 102
IMAGE_SYM_CLASS_FILE = 103
IMAGE_SYM_CLASS_SECTION = 104
IMAGE_SYM_CLASS_WEAK_EXTERNAL = 105
N_BTMASK = 0o17
N_TMASK = 0o60
N_TMASK1 = 0o300
N_TMASK2 = 0o360
N_BTSHFT = 4
N_TSHIFT = 2
IMAGE_SIZEOF_AUX_SYMBOL = 18
IMAGE_COMDAT_SELECT_NODUPLICATES = 1
IMAGE_COMDAT_SELECT_ANY = 2
IMAGE_COMDAT_SELECT_SAME_SIZE = 3
IMAGE_COMDAT_SELECT_EXACT_MATCH = 4
IMAGE_COMDAT_SELECT_ASSOCIATIVE = 5
IMAGE_WEAK_EXTERN_SEARCH_NOLIBRARY = 1
IMAGE_WEAK_EXTERN_SEARCH_LIBRARY = 2
IMAGE_WEAK_EXTERN_SEARCH_ALIAS = 3
IMAGE_SIZEOF_RELOCATION = 10
IMAGE_REL_I386_SECTION = 0o12
IMAGE_REL_I386_SECREL = 0o13
IMAGE_REL_MIPS_REFHALF = 0o1
IMAGE_REL_MIPS_REFWORD = 0o2
IMAGE_REL_MIPS_JMPADDR = 0o3
IMAGE_REL_MIPS_REFHI = 0o4
IMAGE_REL_MIPS_REFLO = 0o5
IMAGE_REL_MIPS_GPREL = 0o6
IMAGE_REL_MIPS_LITERAL = 0o7
IMAGE_REL_MIPS_SECTION = 0o12
IMAGE_REL_MIPS_SECREL = 0o13
IMAGE_REL_MIPS_REFWORDNB = 0o42
IMAGE_REL_MIPS_PAIR = 0o45
IMAGE_REL_ALPHA_ABSOLUTE = 0
IMAGE_REL_ALPHA_REFLONG = 1
IMAGE_REL_ALPHA_REFQUAD = 2
IMAGE_REL_ALPHA_GPREL32 = 3
IMAGE_REL_ALPHA_LITERAL = 4
IMAGE_REL_ALPHA_LITUSE = 5
IMAGE_REL_ALPHA_GPDISP = 6
IMAGE_REL_ALPHA_BRADDR = 7
IMAGE_REL_ALPHA_HINT = 8
IMAGE_REL_ALPHA_INLINE_REFLONG = 9
IMAGE_REL_ALPHA_REFHI = 10
IMAGE_REL_ALPHA_REFLO = 11
IMAGE_REL_ALPHA_PAIR = 12
IMAGE_REL_ALPHA_MATCH = 13
IMAGE_REL_ALPHA_SECTION = 14
IMAGE_REL_ALPHA_SECREL = 15
IMAGE_REL_ALPHA_REFLONGNB = 16
IMAGE_SIZEOF_BASE_RELOCATION = 8
IMAGE_REL_BASED_ABSOLUTE = 0
IMAGE_REL_BASED_HIGH = 1
IMAGE_REL_BASED_LOW = 2
IMAGE_REL_BASED_HIGHLOW = 3
IMAGE_REL_BASED_HIGHADJ = 4
IMAGE_REL_BASED_MIPS_JMPADDR = 5
IMAGE_SIZEOF_LINENUMBER = 6
IMAGE_ARCHIVE_START_SIZE = 8
IMAGE_ARCHIVE_START = "!<arch>\n"
IMAGE_ARCHIVE_END = "`\n"
IMAGE_ARCHIVE_PAD = "\n"
IMAGE_ARCHIVE_LINKER_MEMBER = "/ "
IMAGE_ARCHIVE_LONGNAMES_MEMBER = "// "
IMAGE_SIZEOF_ARCHIVE_MEMBER_HDR = 60
IMAGE_ORDINAL_FLAG = -2147483648
def IMAGE_SNAP_BY_ORDINAL(Ordinal): return ((Ordinal & IMAGE_ORDINAL_FLAG) != 0)
def IMAGE_ORDINAL(Ordinal): return (Ordinal & 65535)
IMAGE_RESOURCE_NAME_IS_STRING = -2147483648
IMAGE_RESOURCE_DATA_IS_DIRECTORY = -2147483648
IMAGE_DEBUG_TYPE_UNKNOWN = 0
IMAGE_DEBUG_TYPE_COFF = 1
IMAGE_DEBUG_TYPE_CODEVIEW = 2
IMAGE_DEBUG_TYPE_FPO = 3
IMAGE_DEBUG_TYPE_MISC = 4
IMAGE_DEBUG_TYPE_EXCEPTION = 5
IMAGE_DEBUG_TYPE_FIXUP = 6
IMAGE_DEBUG_TYPE_OMAP_TO_SRC = 7
IMAGE_DEBUG_TYPE_OMAP_FROM_SRC = 8
FRAME_FPO = 0
FRAME_TRAP = 1
FRAME_TSS = 2
SIZEOF_RFPO_DATA = 16
IMAGE_DEBUG_MISC_EXENAME = 1
IMAGE_SEPARATE_DEBUG_SIGNATURE = 18756
# Generated by h2py from \msvcnt\include\wingdi.h
# hacked and split manually by mhammond.
NEWFRAME = 1
ABORTDOC = 2
NEXTBAND = 3
SETCOLORTABLE = 4
GETCOLORTABLE = 5
FLUSHOUTPUT = 6
DRAFTMODE = 7
QUERYESCSUPPORT = 8
SETABORTPROC = 9
STARTDOC = 10
ENDDOC = 11
GETPHYSPAGESIZE = 12
GETPRINTINGOFFSET = 13
GETSCALINGFACTOR = 14
MFCOMMENT = 15
GETPENWIDTH = 16
SETCOPYCOUNT = 17
SELECTPAPERSOURCE = 18
DEVICEDATA = 19
PASSTHROUGH = 19
GETTECHNOLGY = 20       # historical misspelling from wingdi.h, kept for compatibility
GETTECHNOLOGY = 20
SETLINECAP = 21
SETLINEJOIN = 22
SETMITERLIMIT = 23
BANDINFO = 24
DRAWPATTERNRECT = 25
GETVECTORPENSIZE = 26
GETVECTORBRUSHSIZE = 27
ENABLEDUPLEX = 28
GETSETPAPERBINS = 29
GETSETPRINTORIENT = 30
ENUMPAPERBINS = 31
SETDIBSCALING = 32
EPSPRINTING = 33
ENUMPAPERMETRICS = 34
GETSETPAPERMETRICS = 35
POSTSCRIPT_DATA = 37
POSTSCRIPT_IGNORE = 38
MOUSETRAILS = 39
GETDEVICEUNITS = 42
GETEXTENDEDTEXTMETRICS = 256
GETEXTENTTABLE = 257
GETPAIRKERNTABLE = 258
GETTRACKKERNTABLE = 259
EXTTEXTOUT = 512
GETFACENAME = 513
DOWNLOADFACE = 514
ENABLERELATIVEWIDTHS = 768
ENABLEPAIRKERNING = 769
SETKERNTRACK = 770
SETALLJUSTVALUES = 771
SETCHARSET = 772
STRETCHBLT = 2048
GETSETSCREENPARAMS = 3072
BEGIN_PATH = 4096
CLIP_TO_PATH = 4097
END_PATH = 4098
EXT_DEVICE_CAPS = 4099
RESTORE_CTM = 4100
SAVE_CTM = 4101
SET_ARC_DIRECTION = 4102
SET_BACKGROUND_COLOR = 4103
SET_POLY_MODE = 4104
SET_SCREEN_ANGLE = 4105
SET_SPREAD = 4106
TRANSFORM_CTM = 4107
SET_CLIP_BOX = 4108
SET_BOUNDS = 4109
SET_MIRROR_MODE = 4110
OPENCHANNEL = 4110
DOWNLOADHEADER = 4111
CLOSECHANNEL = 4112
POSTSCRIPT_PASSTHROUGH = 4115
ENCAPSULATED_POSTSCRIPT = 4116
SP_NOTREPORTED = 16384
SP_ERROR = (-1)
SP_APPABORT = (-2)
SP_USERABORT = (-3)
SP_OUTOFDISK = (-4)
SP_OUTOFMEMORY = (-5)
PR_JOBSTATUS = 0
## GDI object types
OBJ_PEN = 1
OBJ_BRUSH = 2
OBJ_DC = 3
OBJ_METADC = 4
OBJ_PAL = 5
OBJ_FONT = 6
OBJ_BITMAP = 7
OBJ_REGION = 8
OBJ_METAFILE = 9
OBJ_MEMDC = 10
OBJ_EXTPEN = 11
OBJ_ENHMETADC = 12
OBJ_ENHMETAFILE = 13
OBJ_COLORSPACE = 14
MWT_IDENTITY = 1
MWT_LEFTMULTIPLY = 2
MWT_RIGHTMULTIPLY = 3
MWT_MIN = MWT_IDENTITY
MWT_MAX = MWT_RIGHTMULTIPLY
BI_RGB = 0
BI_RLE8 = 1
BI_RLE4 = 2
BI_BITFIELDS = 3
TMPF_FIXED_PITCH = 1
TMPF_VECTOR = 2
TMPF_DEVICE = 8
TMPF_TRUETYPE = 4
NTM_REGULAR = 64
NTM_BOLD = 32
NTM_ITALIC = 1
LF_FACESIZE = 32
LF_FULLFACESIZE = 64
OUT_DEFAULT_PRECIS = 0
OUT_STRING_PRECIS = 1
OUT_CHARACTER_PRECIS = 2
OUT_STROKE_PRECIS = 3
OUT_TT_PRECIS = 4
OUT_DEVICE_PRECIS = 5
OUT_RASTER_PRECIS = 6
OUT_TT_ONLY_PRECIS = 7
OUT_OUTLINE_PRECIS = 8
CLIP_DEFAULT_PRECIS = 0
CLIP_CHARACTER_PRECIS = 1
CLIP_STROKE_PRECIS = 2
CLIP_MASK = 15
CLIP_LH_ANGLES = (1<<4)
CLIP_TT_ALWAYS = (2<<4)
CLIP_EMBEDDED = (8<<4)
DEFAULT_QUALITY = 0
DRAFT_QUALITY = 1
PROOF_QUALITY = 2
NONANTIALIASED_QUALITY = 3
ANTIALIASED_QUALITY = 4
CLEARTYPE_QUALITY = 5
CLEARTYPE_NATURAL_QUALITY = 6
DEFAULT_PITCH = 0
FIXED_PITCH = 1
VARIABLE_PITCH = 2
ANSI_CHARSET = 0
DEFAULT_CHARSET = 1
SYMBOL_CHARSET = 2
SHIFTJIS_CHARSET = 128
HANGEUL_CHARSET = 129
CHINESEBIG5_CHARSET = 136
OEM_CHARSET = 255
JOHAB_CHARSET = 130
HEBREW_CHARSET = 177
ARABIC_CHARSET = 178
GREEK_CHARSET = 161
TURKISH_CHARSET = 162
VIETNAMESE_CHARSET = 163
THAI_CHARSET = 222
EASTEUROPE_CHARSET = 238
RUSSIAN_CHARSET = 204
MAC_CHARSET = 77
BALTIC_CHARSET = 186
FF_DONTCARE = (0<<4)
FF_ROMAN = (1<<4)
FF_SWISS = (2<<4)
FF_MODERN = (3<<4)
FF_SCRIPT = (4<<4)
FF_DECORATIVE = (5<<4)
FW_DONTCARE = 0
FW_THIN = 100
FW_EXTRALIGHT = 200
FW_LIGHT = 300
FW_NORMAL = 400
FW_MEDIUM = 500
FW_SEMIBOLD = 600
FW_BOLD = 700
FW_EXTRABOLD = 800
FW_HEAVY = 900
FW_ULTRALIGHT = FW_EXTRALIGHT
FW_REGULAR = FW_NORMAL
FW_DEMIBOLD = FW_SEMIBOLD
FW_ULTRABOLD = FW_EXTRABOLD
FW_BLACK = FW_HEAVY
# Generated by h2py from \msvcnt\include\wingdi.h
# hacked and split manually by mhammond.
BS_SOLID = 0
BS_NULL = 1
BS_HOLLOW = BS_NULL
BS_HATCHED = 2
BS_PATTERN = 3
BS_INDEXED = 4
BS_DIBPATTERN = 5
BS_DIBPATTERNPT = 6
BS_PATTERN8X8 = 7
BS_DIBPATTERN8X8 = 8
HS_HORIZONTAL = 0
HS_VERTICAL = 1
HS_FDIAGONAL = 2
HS_BDIAGONAL = 3
HS_CROSS = 4
HS_DIAGCROSS = 5
HS_FDIAGONAL1 = 6
HS_BDIAGONAL1 = 7
HS_SOLID = 8
HS_DENSE1 = 9
HS_DENSE2 = 10
HS_DENSE3 = 11
HS_DENSE4 = 12
HS_DENSE5 = 13
HS_DENSE6 = 14
HS_DENSE7 = 15
HS_DENSE8 = 16
HS_NOSHADE = 17
HS_HALFTONE = 18
HS_SOLIDCLR = 19
HS_DITHEREDCLR = 20
HS_SOLIDTEXTCLR = 21
HS_DITHEREDTEXTCLR = 22
HS_SOLIDBKCLR = 23
HS_DITHEREDBKCLR = 24
HS_API_MAX = 25
PS_SOLID = 0
PS_DASH = 1
PS_DOT = 2
PS_DASHDOT = 3
PS_DASHDOTDOT = 4
PS_NULL = 5
PS_INSIDEFRAME = 6
PS_USERSTYLE = 7
PS_ALTERNATE = 8
PS_STYLE_MASK = 15
PS_ENDCAP_ROUND = 0
PS_ENDCAP_SQUARE = 256
PS_ENDCAP_FLAT = 512
PS_ENDCAP_MASK = 3840
PS_JOIN_ROUND = 0
PS_JOIN_BEVEL = 4096
PS_JOIN_MITER = 8192
PS_JOIN_MASK = 61440
PS_COSMETIC = 0
PS_GEOMETRIC = 65536
PS_TYPE_MASK = 983040
AD_COUNTERCLOCKWISE = 1
AD_CLOCKWISE = 2
DRIVERVERSION = 0
TECHNOLOGY = 2
HORZSIZE = 4
VERTSIZE = 6
HORZRES = 8
VERTRES = 10
BITSPIXEL = 12
PLANES = 14
NUMBRUSHES = 16
NUMPENS = 18
NUMMARKERS = 20
NUMFONTS = 22
NUMCOLORS = 24
PDEVICESIZE = 26
CURVECAPS = 28
LINECAPS = 30
POLYGONALCAPS = 32
TEXTCAPS = 34
CLIPCAPS = 36
RASTERCAPS = 38
ASPECTX = 40
ASPECTY = 42
ASPECTXY = 44
LOGPIXELSX = 88
LOGPIXELSY = 90
SIZEPALETTE = 104
NUMRESERVED = 106
COLORRES = 108
PHYSICALWIDTH = 110
PHYSICALHEIGHT = 111
PHYSICALOFFSETX = 112
PHYSICALOFFSETY = 113
SCALINGFACTORX = 114
SCALINGFACTORY = 115
VREFRESH = 116
DESKTOPVERTRES = 117
DESKTOPHORZRES = 118
BLTALIGNMENT = 119
SHADEBLENDCAPS = 120
COLORMGMTCAPS = 121
DT_PLOTTER = 0
DT_RASDISPLAY = 1
DT_RASPRINTER = 2
DT_RASCAMERA = 3
DT_CHARSTREAM = 4
DT_METAFILE = 5
DT_DISPFILE = 6
CC_NONE = 0
CC_CIRCLES = 1
CC_PIE = 2
CC_CHORD = 4
CC_ELLIPSES = 8
CC_WIDE = 16
CC_STYLED = 32
CC_WIDESTYLED = 64
CC_INTERIORS = 128
CC_ROUNDRECT = 256
LC_NONE = 0
LC_POLYLINE = 2
LC_MARKER = 4
LC_POLYMARKER = 8
LC_WIDE = 16
LC_STYLED = 32
LC_WIDESTYLED = 64
LC_INTERIORS = 128
PC_NONE = 0
PC_POLYGON = 1
PC_RECTANGLE = 2
PC_WINDPOLYGON = 4
PC_TRAPEZOID = 4
PC_SCANLINE = 8
PC_WIDE = 16
PC_STYLED = 32
PC_WIDESTYLED = 64
PC_INTERIORS = 128
CP_NONE = 0
CP_RECTANGLE = 1
CP_REGION = 2
TC_OP_CHARACTER = 1
TC_OP_STROKE = 2
TC_CP_STROKE = 4
TC_CR_90 = 8
TC_CR_ANY = 16
TC_SF_X_YINDEP = 32
TC_SA_DOUBLE = 64
TC_SA_INTEGER = 128
TC_SA_CONTIN = 256
TC_EA_DOUBLE = 512
TC_IA_ABLE = 1024
TC_UA_ABLE = 2048
TC_SO_ABLE = 4096
TC_RA_ABLE = 8192
TC_VA_ABLE = 16384
TC_RESERVED = 32768
TC_SCROLLBLT = 65536
RC_BITBLT = 1
RC_BANDING = 2
RC_SCALING = 4
RC_BITMAP64 = 8
RC_GDI20_OUTPUT = 16
RC_GDI20_STATE = 32
RC_SAVEBITMAP = 64
RC_DI_BITMAP = 128
RC_PALETTE = 256
RC_DIBTODEV = 512
RC_BIGFONT = 1024
RC_STRETCHBLT = 2048
RC_FLOODFILL = 4096
RC_STRETCHDIB = 8192
RC_OP_DX_OUTPUT = 16384
RC_DEVBITS = 32768
DIB_RGB_COLORS = 0
DIB_PAL_COLORS = 1
DIB_PAL_INDICES = 2
DIB_PAL_PHYSINDICES = 2
DIB_PAL_LOGINDICES = 4
SYSPAL_ERROR = 0
SYSPAL_STATIC = 1
SYSPAL_NOSTATIC = 2
CBM_CREATEDIB = 2
CBM_INIT = 4
FLOODFILLBORDER = 0
FLOODFILLSURFACE = 1
CCHDEVICENAME = 32
CCHFORMNAME = 32
# Generated by h2py from \msvcnt\include\wingdi.h
# hacked and split manually by mhammond.
# DEVMODE.dmFields
DM_SPECVERSION = 800
DM_ORIENTATION = 1
DM_PAPERSIZE = 2
DM_PAPERLENGTH = 4
DM_PAPERWIDTH = 8
DM_SCALE = 16
DM_POSITION = 32
DM_NUP = 64
DM_DISPLAYORIENTATION = 128
DM_COPIES = 256
DM_DEFAULTSOURCE = 512
DM_PRINTQUALITY = 1024
DM_COLOR = 2048
DM_DUPLEX = 4096
DM_YRESOLUTION = 8192
DM_TTOPTION = 16384
DM_COLLATE = 32768
DM_FORMNAME = 65536
DM_LOGPIXELS = 131072
DM_BITSPERPEL = 262144
DM_PELSWIDTH = 524288
DM_PELSHEIGHT = 1048576
DM_DISPLAYFLAGS = 2097152
DM_DISPLAYFREQUENCY = 4194304
DM_ICMMETHOD = 8388608
DM_ICMINTENT = 16777216
DM_MEDIATYPE = 33554432
DM_DITHERTYPE = 67108864
DM_PANNINGWIDTH = 134217728
DM_PANNINGHEIGHT = 268435456
DM_DISPLAYFIXEDOUTPUT = 536870912
# DEVMODE.dmOrientation
DMORIENT_PORTRAIT = 1
DMORIENT_LANDSCAPE = 2
# DEVMODE.dmDisplayOrientation
DMDO_DEFAULT = 0
DMDO_90 = 1
DMDO_180 = 2
DMDO_270 = 3
# DEVMODE.dmDisplayFixedOutput
DMDFO_DEFAULT = 0
DMDFO_STRETCH = 1
DMDFO_CENTER = 2
# DEVMODE.dmPaperSize
DMPAPER_LETTER = 1
DMPAPER_LETTERSMALL = 2
DMPAPER_TABLOID = 3
DMPAPER_LEDGER = 4
DMPAPER_LEGAL = 5
DMPAPER_STATEMENT = 6
DMPAPER_EXECUTIVE = 7
DMPAPER_A3 = 8
DMPAPER_A4 = 9
DMPAPER_A4SMALL = 10
DMPAPER_A5 = 11
DMPAPER_B4 = 12
DMPAPER_B5 = 13
DMPAPER_FOLIO = 14
DMPAPER_QUARTO = 15
DMPAPER_10X14 = 16
DMPAPER_11X17 = 17
DMPAPER_NOTE = 18
DMPAPER_ENV_9 = 19
DMPAPER_ENV_10 = 20
DMPAPER_ENV_11 = 21
DMPAPER_ENV_12 = 22
DMPAPER_ENV_14 = 23
DMPAPER_CSHEET = 24
DMPAPER_DSHEET = 25
DMPAPER_ESHEET = 26
DMPAPER_ENV_DL = 27
DMPAPER_ENV_C5 = 28
DMPAPER_ENV_C3 = 29
DMPAPER_ENV_C4 = 30
DMPAPER_ENV_C6 = 31
DMPAPER_ENV_C65 = 32
DMPAPER_ENV_B4 = 33
DMPAPER_ENV_B5 = 34
DMPAPER_ENV_B6 = 35
DMPAPER_ENV_ITALY = 36
DMPAPER_ENV_MONARCH = 37
DMPAPER_ENV_PERSONAL = 38
DMPAPER_FANFOLD_US = 39
DMPAPER_FANFOLD_STD_GERMAN = 40
DMPAPER_FANFOLD_LGL_GERMAN = 41
DMPAPER_ISO_B4 = 42
DMPAPER_JAPANESE_POSTCARD = 43
DMPAPER_9X11 = 44
DMPAPER_10X11 = 45
DMPAPER_15X11 = 46
DMPAPER_ENV_INVITE = 47
DMPAPER_RESERVED_48 = 48
DMPAPER_RESERVED_49 = 49
DMPAPER_LETTER_EXTRA = 50
DMPAPER_LEGAL_EXTRA = 51
DMPAPER_TABLOID_EXTRA = 52
DMPAPER_A4_EXTRA = 53
DMPAPER_LETTER_TRANSVERSE = 54
DMPAPER_A4_TRANSVERSE = 55
DMPAPER_LETTER_EXTRA_TRANSVERSE = 56
DMPAPER_A_PLUS = 57
DMPAPER_B_PLUS = 58
DMPAPER_LETTER_PLUS = 59
DMPAPER_A4_PLUS = 60
DMPAPER_A5_TRANSVERSE = 61
DMPAPER_B5_TRANSVERSE = 62
DMPAPER_A3_EXTRA = 63
DMPAPER_A5_EXTRA = 64
DMPAPER_B5_EXTRA = 65
DMPAPER_A2 = 66
DMPAPER_A3_TRANSVERSE = 67
DMPAPER_A3_EXTRA_TRANSVERSE = 68
DMPAPER_DBL_JAPANESE_POSTCARD = 69
DMPAPER_A6 = 70
DMPAPER_JENV_KAKU2 = 71
DMPAPER_JENV_KAKU3 = 72
DMPAPER_JENV_CHOU3 = 73
DMPAPER_JENV_CHOU4 = 74
DMPAPER_LETTER_ROTATED = 75
DMPAPER_A3_ROTATED = 76
DMPAPER_A4_ROTATED = 77
DMPAPER_A5_ROTATED = 78
DMPAPER_B4_JIS_ROTATED = 79
DMPAPER_B5_JIS_ROTATED = 80
DMPAPER_JAPANESE_POSTCARD_ROTATED = 81
DMPAPER_DBL_JAPANESE_POSTCARD_ROTATED = 82
DMPAPER_A6_ROTATED = 83
DMPAPER_JENV_KAKU2_ROTATED = 84
DMPAPER_JENV_KAKU3_ROTATED = 85
DMPAPER_JENV_CHOU3_ROTATED = 86
DMPAPER_JENV_CHOU4_ROTATED = 87
DMPAPER_B6_JIS = 88
DMPAPER_B6_JIS_ROTATED = 89
DMPAPER_12X11 = 90
DMPAPER_JENV_YOU4 = 91
DMPAPER_JENV_YOU4_ROTATED = 92
DMPAPER_P16K = 93
DMPAPER_P32K = 94
DMPAPER_P32KBIG = 95
DMPAPER_PENV_1 = 96
DMPAPER_PENV_2 = 97
DMPAPER_PENV_3 = 98
DMPAPER_PENV_4 = 99
DMPAPER_PENV_5 = 100
DMPAPER_PENV_6 = 101
DMPAPER_PENV_7 = 102
DMPAPER_PENV_8 = 103
DMPAPER_PENV_9 = 104
DMPAPER_PENV_10 = 105
DMPAPER_P16K_ROTATED = 106
DMPAPER_P32K_ROTATED = 107
DMPAPER_P32KBIG_ROTATED = 108
DMPAPER_PENV_1_ROTATED = 109
DMPAPER_PENV_2_ROTATED = 110
DMPAPER_PENV_3_ROTATED = 111
DMPAPER_PENV_4_ROTATED = 112
DMPAPER_PENV_5_ROTATED = 113
DMPAPER_PENV_6_ROTATED = 114
DMPAPER_PENV_7_ROTATED = 115
DMPAPER_PENV_8_ROTATED = 116
DMPAPER_PENV_9_ROTATED = 117
DMPAPER_PENV_10_ROTATED = 118
DMPAPER_LAST = DMPAPER_PENV_10_ROTATED
DMPAPER_USER = 256
# DEVMODE.dmDefaultSource
DMBIN_UPPER = 1
DMBIN_ONLYONE = 1
DMBIN_LOWER = 2
DMBIN_MIDDLE = 3
DMBIN_MANUAL = 4
DMBIN_ENVELOPE = 5
DMBIN_ENVMANUAL = 6
DMBIN_AUTO = 7
DMBIN_TRACTOR = 8
DMBIN_SMALLFMT = 9
DMBIN_LARGEFMT = 10
DMBIN_LARGECAPACITY = 11
DMBIN_CASSETTE = 14
DMBIN_FORMSOURCE = 15
DMBIN_LAST = DMBIN_FORMSOURCE
DMBIN_USER = 256
# DEVMODE.dmPrintQuality
DMRES_DRAFT = (-1)
DMRES_LOW = (-2)
DMRES_MEDIUM = (-3)
DMRES_HIGH = (-4)
# DEVMODE.dmColor
DMCOLOR_MONOCHROME = 1
DMCOLOR_COLOR = 2
# DEVMODE.dmDuplex
DMDUP_SIMPLEX = 1
DMDUP_VERTICAL = 2
DMDUP_HORIZONTAL = 3
# DEVMODE.dmTTOption
DMTT_BITMAP = 1
DMTT_DOWNLOAD = 2
DMTT_SUBDEV = 3
DMTT_DOWNLOAD_OUTLINE = 4
# DEVMODE.dmCollate
DMCOLLATE_FALSE = 0
DMCOLLATE_TRUE = 1
# DEVMODE.dmDisplayFlags
DM_GRAYSCALE = 1
DM_INTERLACED = 2
# DEVMODE.dmICMMethod
DMICMMETHOD_NONE = 1
DMICMMETHOD_SYSTEM = 2
DMICMMETHOD_DRIVER = 3
DMICMMETHOD_DEVICE = 4
DMICMMETHOD_USER = 256
# DEVMODE.dmICMIntent
DMICM_SATURATE = 1
DMICM_CONTRAST = 2
DMICM_COLORIMETRIC = 3
DMICM_ABS_COLORIMETRIC = 4
DMICM_USER = 256
# DEVMODE.dmMediaType
DMMEDIA_STANDARD = 1
DMMEDIA_TRANSPARENCY = 2
DMMEDIA_GLOSSY = 3
DMMEDIA_USER = 256
# DEVMODE.dmDitherType
DMDITHER_NONE = 1
DMDITHER_COARSE = 2
DMDITHER_FINE = 3
DMDITHER_LINEART = 4
DMDITHER_ERRORDIFFUSION = 5
DMDITHER_RESERVED6 = 6
DMDITHER_RESERVED7 = 7
DMDITHER_RESERVED8 = 8
DMDITHER_RESERVED9 = 9
DMDITHER_GRAYSCALE = 10
DMDITHER_USER = 256
# DEVMODE.dmNup
DMNUP_SYSTEM = 1
DMNUP_ONEUP = 2
# used with ExtEscape
FEATURESETTING_NUP = 0
FEATURESETTING_OUTPUT = 1
FEATURESETTING_PSLEVEL = 2
FEATURESETTING_CUSTPAPER = 3
FEATURESETTING_MIRROR = 4
FEATURESETTING_NEGATIVE = 5
FEATURESETTING_PROTOCOL = 6
FEATURESETTING_PRIVATE_BEGIN = 0x1000
FEATURESETTING_PRIVATE_END = 0x1FFF
RDH_RECTANGLES = 1
GGO_METRICS = 0
GGO_BITMAP = 1
GGO_NATIVE = 2
TT_POLYGON_TYPE = 24
TT_PRIM_LINE = 1
TT_PRIM_QSPLINE = 2
TT_AVAILABLE = 1
TT_ENABLED = 2
DM_UPDATE = 1
DM_COPY = 2
DM_PROMPT = 4
DM_MODIFY = 8
DM_IN_BUFFER = DM_MODIFY
DM_IN_PROMPT = DM_PROMPT
DM_OUT_BUFFER = DM_COPY
DM_OUT_DEFAULT = DM_UPDATE
# DISPLAY_DEVICE.StateFlags
DISPLAY_DEVICE_ATTACHED_TO_DESKTOP = 1
DISPLAY_DEVICE_MULTI_DRIVER = 2
DISPLAY_DEVICE_PRIMARY_DEVICE = 4
DISPLAY_DEVICE_MIRRORING_DRIVER = 8
DISPLAY_DEVICE_VGA_COMPATIBLE = 16
DISPLAY_DEVICE_REMOVABLE = 32
DISPLAY_DEVICE_MODESPRUNED = 134217728
DISPLAY_DEVICE_REMOTE = 67108864
DISPLAY_DEVICE_DISCONNECT = 33554432
# DeviceCapabilities types
DC_FIELDS = 1
DC_PAPERS = 2
DC_PAPERSIZE = 3
DC_MINEXTENT = 4
DC_MAXEXTENT = 5
DC_BINS = 6
DC_DUPLEX = 7
DC_SIZE = 8
DC_EXTRA = 9
DC_VERSION = 10
DC_DRIVER = 11
DC_BINNAMES = 12
DC_ENUMRESOLUTIONS = 13
DC_FILEDEPENDENCIES = 14
DC_TRUETYPE = 15
DC_PAPERNAMES = 16
DC_ORIENTATION = 17
DC_COPIES = 18
DC_BINADJUST = 19
DC_EMF_COMPLIANT = 20
DC_DATATYPE_PRODUCED = 21
DC_COLLATE = 22
DC_MANUFACTURER = 23
DC_MODEL = 24
DC_PERSONALITY = 25
DC_PRINTRATE = 26
DC_PRINTRATEUNIT = 27
DC_PRINTERMEM = 28
DC_MEDIAREADY = 29
DC_STAPLE = 30
DC_PRINTRATEPPM = 31
DC_COLORDEVICE = 32
DC_NUP = 33
DC_MEDIATYPENAMES = 34
DC_MEDIATYPES = 35
PRINTRATEUNIT_PPM = 1
PRINTRATEUNIT_CPS = 2
PRINTRATEUNIT_LPM = 3
PRINTRATEUNIT_IPM = 4
# TrueType constants
DCTT_BITMAP = 1
DCTT_DOWNLOAD = 2
DCTT_SUBDEV = 4
DCTT_DOWNLOAD_OUTLINE = 8
DCBA_FACEUPNONE = 0
DCBA_FACEUPCENTER = 1
DCBA_FACEUPLEFT = 2
DCBA_FACEUPRIGHT = 3
DCBA_FACEDOWNNONE = 256
DCBA_FACEDOWNCENTER = 257
DCBA_FACEDOWNLEFT = 258
DCBA_FACEDOWNRIGHT = 259
CA_NEGATIVE = 1
CA_LOG_FILTER = 2
ILLUMINANT_DEVICE_DEFAULT = 0
ILLUMINANT_A = 1
ILLUMINANT_B = 2
ILLUMINANT_C = 3
ILLUMINANT_D50 = 4
ILLUMINANT_D55 = 5
ILLUMINANT_D65 = 6
ILLUMINANT_D75 = 7
ILLUMINANT_F2 = 8
ILLUMINANT_MAX_INDEX = ILLUMINANT_F2
ILLUMINANT_TUNGSTEN = ILLUMINANT_A
ILLUMINANT_DAYLIGHT = ILLUMINANT_C
ILLUMINANT_FLUORESCENT = ILLUMINANT_F2
ILLUMINANT_NTSC = ILLUMINANT_C
# Generated by h2py from \msvcnt\include\wingdi.h
# hacked and split manually by mhammond.
FONTMAPPER_MAX = 10
ENHMETA_SIGNATURE = 1179469088
ENHMETA_STOCK_OBJECT = -2147483648
EMR_HEADER = 1
EMR_POLYBEZIER = 2
EMR_POLYGON = 3
EMR_POLYLINE = 4
EMR_POLYBEZIERTO = 5
EMR_POLYLINETO = 6
EMR_POLYPOLYLINE = 7
EMR_POLYPOLYGON = 8
EMR_SETWINDOWEXTEX = 9
EMR_SETWINDOWORGEX = 10
EMR_SETVIEWPORTEXTEX = 11
EMR_SETVIEWPORTORGEX = 12
EMR_SETBRUSHORGEX = 13
EMR_EOF = 14
EMR_SETPIXELV = 15
EMR_SETMAPPERFLAGS = 16
EMR_SETMAPMODE = 17
EMR_SETBKMODE = 18
EMR_SETPOLYFILLMODE = 19
EMR_SETROP2 = 20
EMR_SETSTRETCHBLTMODE = 21
EMR_SETTEXTALIGN = 22
EMR_SETCOLORADJUSTMENT = 23
EMR_SETTEXTCOLOR = 24
EMR_SETBKCOLOR = 25
EMR_OFFSETCLIPRGN = 26
EMR_MOVETOEX = 27
EMR_SETMETARGN = 28
EMR_EXCLUDECLIPRECT = 29
EMR_INTERSECTCLIPRECT = 30
EMR_SCALEVIEWPORTEXTEX = 31
EMR_SCALEWINDOWEXTEX = 32
EMR_SAVEDC = 33
EMR_RESTOREDC = 34
EMR_SETWORLDTRANSFORM = 35
EMR_MODIFYWORLDTRANSFORM = 36
EMR_SELECTOBJECT = 37
EMR_CREATEPEN = 38
EMR_CREATEBRUSHINDIRECT = 39
EMR_DELETEOBJECT = 40
EMR_ANGLEARC = 41
EMR_ELLIPSE = 42
EMR_RECTANGLE = 43
EMR_ROUNDRECT = 44
EMR_ARC = 45
EMR_CHORD = 46
EMR_PIE = 47
EMR_SELECTPALETTE = 48
EMR_CREATEPALETTE = 49
EMR_SETPALETTEENTRIES = 50
EMR_RESIZEPALETTE = 51
EMR_REALIZEPALETTE = 52
EMR_EXTFLOODFILL = 53
EMR_LINETO = 54
EMR_ARCTO = 55
EMR_POLYDRAW = 56
EMR_SETARCDIRECTION = 57
EMR_SETMITERLIMIT = 58
EMR_BEGINPATH = 59
EMR_ENDPATH = 60
EMR_CLOSEFIGURE = 61
EMR_FILLPATH = 62
EMR_STROKEANDFILLPATH = 63
EMR_STROKEPATH = 64
EMR_FLATTENPATH = 65
EMR_WIDENPATH = 66
EMR_SELECTCLIPPATH = 67
EMR_ABORTPATH = 68
EMR_GDICOMMENT = 70
EMR_FILLRGN = 71
EMR_FRAMERGN = 72
EMR_INVERTRGN = 73
EMR_PAINTRGN = 74
EMR_EXTSELECTCLIPRGN = 75
EMR_BITBLT = 76
EMR_STRETCHBLT = 77
EMR_MASKBLT = 78
EMR_PLGBLT = 79
EMR_SETDIBITSTODEVICE = 80
EMR_STRETCHDIBITS = 81
EMR_EXTCREATEFONTINDIRECTW = 82
EMR_EXTTEXTOUTA = 83
EMR_EXTTEXTOUTW = 84
EMR_POLYBEZIER16 = 85
EMR_POLYGON16 = 86
EMR_POLYLINE16 = 87
EMR_POLYBEZIERTO16 = 88
EMR_POLYLINETO16 = 89
EMR_POLYPOLYLINE16 = 90
EMR_POLYPOLYGON16 = 91
EMR_POLYDRAW16 = 92
EMR_CREATEMONOBRUSH = 93
EMR_CREATEDIBPATTERNBRUSHPT = 94
EMR_EXTCREATEPEN = 95
EMR_POLYTEXTOUTA = 96
EMR_POLYTEXTOUTW = 97
EMR_MIN = 1
EMR_MAX = 97
# Generated by h2py from \msvcnt\include\wingdi.h
# hacked and split manually by mhammond.
PANOSE_COUNT = 10
PAN_FAMILYTYPE_INDEX = 0
PAN_SERIFSTYLE_INDEX = 1
PAN_WEIGHT_INDEX = 2
PAN_PROPORTION_INDEX = 3
PAN_CONTRAST_INDEX = 4
PAN_STROKEVARIATION_INDEX = 5
PAN_ARMSTYLE_INDEX = 6
PAN_LETTERFORM_INDEX = 7
PAN_MIDLINE_INDEX = 8
PAN_XHEIGHT_INDEX = 9
PAN_CULTURE_LATIN = 0
PAN_ANY = 0
PAN_NO_FIT = 1
PAN_FAMILY_TEXT_DISPLAY = 2
PAN_FAMILY_SCRIPT = 3
PAN_FAMILY_DECORATIVE = 4
PAN_FAMILY_PICTORIAL = 5
PAN_SERIF_COVE = 2
PAN_SERIF_OBTUSE_COVE = 3
PAN_SERIF_SQUARE_COVE = 4
PAN_SERIF_OBTUSE_SQUARE_COVE = 5
PAN_SERIF_SQUARE = 6
PAN_SERIF_THIN = 7
PAN_SERIF_BONE = 8
PAN_SERIF_EXAGGERATED = 9
PAN_SERIF_TRIANGLE = 10
PAN_SERIF_NORMAL_SANS = 11
PAN_SERIF_OBTUSE_SANS = 12
PAN_SERIF_PERP_SANS = 13
PAN_SERIF_FLARED = 14
PAN_SERIF_ROUNDED = 15
PAN_WEIGHT_VERY_LIGHT = 2
PAN_WEIGHT_LIGHT = 3
PAN_WEIGHT_THIN = 4
PAN_WEIGHT_BOOK = 5
PAN_WEIGHT_MEDIUM = 6
PAN_WEIGHT_DEMI = 7
PAN_WEIGHT_BOLD = 8
PAN_WEIGHT_HEAVY = 9
PAN_WEIGHT_BLACK = 10
PAN_WEIGHT_NORD = 11
PAN_PROP_OLD_STYLE = 2
PAN_PROP_MODERN = 3
PAN_PROP_EVEN_WIDTH = 4
PAN_PROP_EXPANDED = 5
PAN_PROP_CONDENSED = 6
PAN_PROP_VERY_EXPANDED = 7
PAN_PROP_VERY_CONDENSED = 8
PAN_PROP_MONOSPACED = 9
PAN_CONTRAST_NONE = 2
PAN_CONTRAST_VERY_LOW = 3
PAN_CONTRAST_LOW = 4
PAN_CONTRAST_MEDIUM_LOW = 5
PAN_CONTRAST_MEDIUM = 6
PAN_CONTRAST_MEDIUM_HIGH = 7
PAN_CONTRAST_HIGH = 8
PAN_CONTRAST_VERY_HIGH = 9
PAN_STROKE_GRADUAL_DIAG = 2
PAN_STROKE_GRADUAL_TRAN = 3
PAN_STROKE_GRADUAL_VERT = 4
PAN_STROKE_GRADUAL_HORZ = 5
PAN_STROKE_RAPID_VERT = 6
PAN_STROKE_RAPID_HORZ = 7
PAN_STROKE_INSTANT_VERT = 8
PAN_STRAIGHT_ARMS_HORZ = 2
PAN_STRAIGHT_ARMS_WEDGE = 3
PAN_STRAIGHT_ARMS_VERT = 4
PAN_STRAIGHT_ARMS_SINGLE_SERIF = 5
PAN_STRAIGHT_ARMS_DOUBLE_SERIF = 6
PAN_BENT_ARMS_HORZ = 7
PAN_BENT_ARMS_WEDGE = 8
PAN_BENT_ARMS_VERT = 9
PAN_BENT_ARMS_SINGLE_SERIF = 10
PAN_BENT_ARMS_DOUBLE_SERIF = 11
PAN_LETT_NORMAL_CONTACT = 2
PAN_LETT_NORMAL_WEIGHTED = 3
PAN_LETT_NORMAL_BOXED = 4
PAN_LETT_NORMAL_FLATTENED = 5
PAN_LETT_NORMAL_ROUNDED = 6
PAN_LETT_NORMAL_OFF_CENTER = 7
PAN_LETT_NORMAL_SQUARE = 8
PAN_LETT_OBLIQUE_CONTACT = 9
PAN_LETT_OBLIQUE_WEIGHTED = 10
PAN_LETT_OBLIQUE_BOXED = 11
PAN_LETT_OBLIQUE_FLATTENED = 12
PAN_LETT_OBLIQUE_ROUNDED = 13
PAN_LETT_OBLIQUE_OFF_CENTER = 14
PAN_LETT_OBLIQUE_SQUARE = 15
PAN_MIDLINE_STANDARD_TRIMMED = 2
PAN_MIDLINE_STANDARD_POINTED = 3
PAN_MIDLINE_STANDARD_SERIFED = 4
PAN_MIDLINE_HIGH_TRIMMED = 5
PAN_MIDLINE_HIGH_POINTED = 6
PAN_MIDLINE_HIGH_SERIFED = 7
PAN_MIDLINE_CONSTANT_TRIMMED = 8
PAN_MIDLINE_CONSTANT_POINTED = 9
PAN_MIDLINE_CONSTANT_SERIFED = 10
PAN_MIDLINE_LOW_TRIMMED = 11
PAN_MIDLINE_LOW_POINTED = 12
PAN_MIDLINE_LOW_SERIFED = 13
PAN_XHEIGHT_CONSTANT_SMALL = 2
PAN_XHEIGHT_CONSTANT_STD = 3
PAN_XHEIGHT_CONSTANT_LARGE = 4
PAN_XHEIGHT_DUCKING_SMALL = 5
PAN_XHEIGHT_DUCKING_STD = 6
PAN_XHEIGHT_DUCKING_LARGE = 7
ELF_VENDOR_SIZE = 4
ELF_VERSION = 0
ELF_CULTURE_LATIN = 0
RASTER_FONTTYPE = 1
DEVICE_FONTTYPE = 2
TRUETYPE_FONTTYPE = 4
def PALETTEINDEX(i): return ((16777216 | (i)))
PC_RESERVED = 1
PC_EXPLICIT = 2
PC_NOCOLLAPSE = 4
def GetRValue(rgb): return rgb & 0xff
def GetGValue(rgb): return (rgb >> 8) & 0xff
def GetBValue(rgb): return (rgb >> 16) & 0xff
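
# GetRValue/GetGValue/GetBValue above unpack a COLORREF (red in the low
# byte).  The inverse packer is not defined in this module; an
# illustrative sketch following the same layout:
def _RGB(r, g, b):
    return (r & 0xff) | ((g & 0xff) << 8) | ((b & 0xff) << 16)
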
TRANSPARENT = 1
OPAQUE = 2
BKMODE_LAST = 2
GM_COMPATIBLE = 1
GM_ADVANCED = 2
GM_LAST = 2
PT_CLOSEFIGURE = 1
PT_LINETO = 2
PT_BEZIERTO = 4
PT_MOVETO = 6
MM_TEXT = 1
MM_LOMETRIC = 2
MM_HIMETRIC = 3
MM_LOENGLISH = 4
MM_HIENGLISH = 5
MM_TWIPS = 6
MM_ISOTROPIC = 7
MM_ANISOTROPIC = 8
MM_MIN = MM_TEXT
MM_MAX = MM_ANISOTROPIC
MM_MAX_FIXEDSCALE = MM_TWIPS
ABSOLUTE = 1
RELATIVE = 2
WHITE_BRUSH = 0
LTGRAY_BRUSH = 1
GRAY_BRUSH = 2
DKGRAY_BRUSH = 3
BLACK_BRUSH = 4
NULL_BRUSH = 5
HOLLOW_BRUSH = NULL_BRUSH
WHITE_PEN = 6
BLACK_PEN = 7
NULL_PEN = 8
OEM_FIXED_FONT = 10
ANSI_FIXED_FONT = 11
ANSI_VAR_FONT = 12
SYSTEM_FONT = 13
DEVICE_DEFAULT_FONT = 14
DEFAULT_PALETTE = 15
SYSTEM_FIXED_FONT = 16
STOCK_LAST = 16
CLR_INVALID = -1
DC_BRUSH = 18
DC_PEN = 19
# Exception/Status codes from winuser.h and winnt.h
STATUS_WAIT_0 = 0
STATUS_ABANDONED_WAIT_0 = 128
STATUS_USER_APC = 192
STATUS_TIMEOUT = 258
STATUS_PENDING = 259
STATUS_SEGMENT_NOTIFICATION = 1073741829
STATUS_GUARD_PAGE_VIOLATION = -2147483647
STATUS_DATATYPE_MISALIGNMENT = -2147483646
STATUS_BREAKPOINT = -2147483645
STATUS_SINGLE_STEP = -2147483644
STATUS_ACCESS_VIOLATION = -1073741819
STATUS_IN_PAGE_ERROR = -1073741818
STATUS_INVALID_HANDLE = -1073741816
STATUS_NO_MEMORY = -1073741801
STATUS_ILLEGAL_INSTRUCTION = -1073741795
STATUS_NONCONTINUABLE_EXCEPTION = -1073741787
STATUS_INVALID_DISPOSITION = -1073741786
STATUS_ARRAY_BOUNDS_EXCEEDED = -1073741684
STATUS_FLOAT_DENORMAL_OPERAND = -1073741683
STATUS_FLOAT_DIVIDE_BY_ZERO = -1073741682
STATUS_FLOAT_INEXACT_RESULT = -1073741681
STATUS_FLOAT_INVALID_OPERATION = -1073741680
STATUS_FLOAT_OVERFLOW = -1073741679
STATUS_FLOAT_STACK_CHECK = -1073741678
STATUS_FLOAT_UNDERFLOW = -1073741677
STATUS_INTEGER_DIVIDE_BY_ZERO = -1073741676
STATUS_INTEGER_OVERFLOW = -1073741675
STATUS_PRIVILEGED_INSTRUCTION = -1073741674
STATUS_STACK_OVERFLOW = -1073741571
STATUS_CONTROL_C_EXIT = -1073741510
WAIT_FAILED = -1
WAIT_OBJECT_0 = STATUS_WAIT_0 + 0
WAIT_ABANDONED = STATUS_ABANDONED_WAIT_0 + 0
WAIT_ABANDONED_0 = STATUS_ABANDONED_WAIT_0 + 0
WAIT_TIMEOUT = STATUS_TIMEOUT
WAIT_IO_COMPLETION = STATUS_USER_APC
STILL_ACTIVE = STATUS_PENDING
EXCEPTION_ACCESS_VIOLATION = STATUS_ACCESS_VIOLATION
EXCEPTION_DATATYPE_MISALIGNMENT = STATUS_DATATYPE_MISALIGNMENT
EXCEPTION_BREAKPOINT = STATUS_BREAKPOINT
EXCEPTION_SINGLE_STEP = STATUS_SINGLE_STEP
EXCEPTION_ARRAY_BOUNDS_EXCEEDED = STATUS_ARRAY_BOUNDS_EXCEEDED
EXCEPTION_FLT_DENORMAL_OPERAND = STATUS_FLOAT_DENORMAL_OPERAND
EXCEPTION_FLT_DIVIDE_BY_ZERO = STATUS_FLOAT_DIVIDE_BY_ZERO
EXCEPTION_FLT_INEXACT_RESULT = STATUS_FLOAT_INEXACT_RESULT
EXCEPTION_FLT_INVALID_OPERATION = STATUS_FLOAT_INVALID_OPERATION
EXCEPTION_FLT_OVERFLOW = STATUS_FLOAT_OVERFLOW
EXCEPTION_FLT_STACK_CHECK = STATUS_FLOAT_STACK_CHECK
EXCEPTION_FLT_UNDERFLOW = STATUS_FLOAT_UNDERFLOW
EXCEPTION_INT_DIVIDE_BY_ZERO = STATUS_INTEGER_DIVIDE_BY_ZERO
EXCEPTION_INT_OVERFLOW = STATUS_INTEGER_OVERFLOW
EXCEPTION_PRIV_INSTRUCTION = STATUS_PRIVILEGED_INSTRUCTION
EXCEPTION_IN_PAGE_ERROR = STATUS_IN_PAGE_ERROR
EXCEPTION_ILLEGAL_INSTRUCTION = STATUS_ILLEGAL_INSTRUCTION
EXCEPTION_NONCONTINUABLE_EXCEPTION = STATUS_NONCONTINUABLE_EXCEPTION
EXCEPTION_STACK_OVERFLOW = STATUS_STACK_OVERFLOW
EXCEPTION_INVALID_DISPOSITION = STATUS_INVALID_DISPOSITION
EXCEPTION_GUARD_PAGE = STATUS_GUARD_PAGE_VIOLATION
EXCEPTION_INVALID_HANDLE = STATUS_INVALID_HANDLE
CONTROL_C_EXIT = STATUS_CONTROL_C_EXIT
# winuser.h line 8594
# constants used with SystemParametersInfo
SPI_GETBEEP = 1
SPI_SETBEEP = 2
SPI_GETMOUSE = 3
SPI_SETMOUSE = 4
SPI_GETBORDER = 5
SPI_SETBORDER = 6
SPI_GETKEYBOARDSPEED = 10
SPI_SETKEYBOARDSPEED = 11
SPI_LANGDRIVER = 12
SPI_ICONHORIZONTALSPACING = 13
SPI_GETSCREENSAVETIMEOUT = 14
SPI_SETSCREENSAVETIMEOUT = 15
SPI_GETSCREENSAVEACTIVE = 16
SPI_SETSCREENSAVEACTIVE = 17
SPI_GETGRIDGRANULARITY = 18
SPI_SETGRIDGRANULARITY = 19
SPI_SETDESKWALLPAPER = 20
SPI_SETDESKPATTERN = 21
SPI_GETKEYBOARDDELAY = 22
SPI_SETKEYBOARDDELAY = 23
SPI_ICONVERTICALSPACING = 24
SPI_GETICONTITLEWRAP = 25
SPI_SETICONTITLEWRAP = 26
SPI_GETMENUDROPALIGNMENT = 27
SPI_SETMENUDROPALIGNMENT = 28
SPI_SETDOUBLECLKWIDTH = 29
SPI_SETDOUBLECLKHEIGHT = 30
SPI_GETICONTITLELOGFONT = 31
SPI_SETDOUBLECLICKTIME = 32
SPI_SETMOUSEBUTTONSWAP = 33
SPI_SETICONTITLELOGFONT = 34
SPI_GETFASTTASKSWITCH = 35
SPI_SETFASTTASKSWITCH = 36
SPI_SETDRAGFULLWINDOWS = 37
SPI_GETDRAGFULLWINDOWS = 38
SPI_GETNONCLIENTMETRICS = 41
SPI_SETNONCLIENTMETRICS = 42
SPI_GETMINIMIZEDMETRICS = 43
SPI_SETMINIMIZEDMETRICS = 44
SPI_GETICONMETRICS = 45
SPI_SETICONMETRICS = 46
SPI_SETWORKAREA = 47
SPI_GETWORKAREA = 48
SPI_SETPENWINDOWS = 49
SPI_GETFILTERKEYS = 50
SPI_SETFILTERKEYS = 51
SPI_GETTOGGLEKEYS = 52
SPI_SETTOGGLEKEYS = 53
SPI_GETMOUSEKEYS = 54
SPI_SETMOUSEKEYS = 55
SPI_GETSHOWSOUNDS = 56
SPI_SETSHOWSOUNDS = 57
SPI_GETSTICKYKEYS = 58
SPI_SETSTICKYKEYS = 59
SPI_GETACCESSTIMEOUT = 60
SPI_SETACCESSTIMEOUT = 61
SPI_GETSERIALKEYS = 62
SPI_SETSERIALKEYS = 63
SPI_GETSOUNDSENTRY = 64
SPI_SETSOUNDSENTRY = 65
SPI_GETHIGHCONTRAST = 66
SPI_SETHIGHCONTRAST = 67
SPI_GETKEYBOARDPREF = 68
SPI_SETKEYBOARDPREF = 69
SPI_GETSCREENREADER = 70
SPI_SETSCREENREADER = 71
SPI_GETANIMATION = 72
SPI_SETANIMATION = 73
SPI_GETFONTSMOOTHING = 74
SPI_SETFONTSMOOTHING = 75
SPI_SETDRAGWIDTH = 76
SPI_SETDRAGHEIGHT = 77
SPI_SETHANDHELD = 78
SPI_GETLOWPOWERTIMEOUT = 79
SPI_GETPOWEROFFTIMEOUT = 80
SPI_SETLOWPOWERTIMEOUT = 81
SPI_SETPOWEROFFTIMEOUT = 82
SPI_GETLOWPOWERACTIVE = 83
SPI_GETPOWEROFFACTIVE = 84
SPI_SETLOWPOWERACTIVE = 85
SPI_SETPOWEROFFACTIVE = 86
SPI_SETCURSORS = 87
SPI_SETICONS = 88
SPI_GETDEFAULTINPUTLANG = 89
SPI_SETDEFAULTINPUTLANG = 90
SPI_SETLANGTOGGLE = 91
SPI_GETWINDOWSEXTENSION = 92
SPI_SETMOUSETRAILS = 93
SPI_GETMOUSETRAILS = 94
SPI_GETSNAPTODEFBUTTON = 95
SPI_SETSNAPTODEFBUTTON = 96
SPI_SETSCREENSAVERRUNNING = 97
SPI_SCREENSAVERRUNNING = SPI_SETSCREENSAVERRUNNING
SPI_GETMOUSEHOVERWIDTH = 98
SPI_SETMOUSEHOVERWIDTH = 99
SPI_GETMOUSEHOVERHEIGHT = 100
SPI_SETMOUSEHOVERHEIGHT = 101
SPI_GETMOUSEHOVERTIME = 102
SPI_SETMOUSEHOVERTIME = 103
SPI_GETWHEELSCROLLLINES = 104
SPI_SETWHEELSCROLLLINES = 105
SPI_GETMENUSHOWDELAY = 106
SPI_SETMENUSHOWDELAY = 107
SPI_GETSHOWIMEUI = 110
SPI_SETSHOWIMEUI = 111
SPI_GETMOUSESPEED = 112
SPI_SETMOUSESPEED = 113
SPI_GETSCREENSAVERRUNNING = 114
SPI_GETDESKWALLPAPER = 115
SPI_GETACTIVEWINDOWTRACKING = 4096
SPI_SETACTIVEWINDOWTRACKING = 4097
SPI_GETMENUANIMATION = 4098
SPI_SETMENUANIMATION = 4099
SPI_GETCOMBOBOXANIMATION = 4100
SPI_SETCOMBOBOXANIMATION = 4101
SPI_GETLISTBOXSMOOTHSCROLLING = 4102
SPI_SETLISTBOXSMOOTHSCROLLING = 4103
SPI_GETGRADIENTCAPTIONS = 4104
SPI_SETGRADIENTCAPTIONS = 4105
SPI_GETKEYBOARDCUES = 4106
SPI_SETKEYBOARDCUES = 4107
SPI_GETMENUUNDERLINES = 4106
SPI_SETMENUUNDERLINES = 4107
SPI_GETACTIVEWNDTRKZORDER = 4108
SPI_SETACTIVEWNDTRKZORDER = 4109
SPI_GETHOTTRACKING = 4110
SPI_SETHOTTRACKING = 4111
SPI_GETMENUFADE = 4114
SPI_SETMENUFADE = 4115
SPI_GETSELECTIONFADE = 4116
SPI_SETSELECTIONFADE = 4117
SPI_GETTOOLTIPANIMATION = 4118
SPI_SETTOOLTIPANIMATION = 4119
SPI_GETTOOLTIPFADE = 4120
SPI_SETTOOLTIPFADE = 4121
SPI_GETCURSORSHADOW = 4122
SPI_SETCURSORSHADOW = 4123
SPI_GETMOUSESONAR = 4124
SPI_SETMOUSESONAR = 4125
SPI_GETMOUSECLICKLOCK = 4126
SPI_SETMOUSECLICKLOCK = 4127
SPI_GETMOUSEVANISH = 4128
SPI_SETMOUSEVANISH = 4129
SPI_GETFLATMENU = 4130
SPI_SETFLATMENU = 4131
SPI_GETDROPSHADOW = 4132
SPI_SETDROPSHADOW = 4133
SPI_GETBLOCKSENDINPUTRESETS = 4134
SPI_SETBLOCKSENDINPUTRESETS = 4135
SPI_GETUIEFFECTS = 4158
SPI_SETUIEFFECTS = 4159
SPI_GETFOREGROUNDLOCKTIMEOUT = 8192
SPI_SETFOREGROUNDLOCKTIMEOUT = 8193
SPI_GETACTIVEWNDTRKTIMEOUT = 8194
SPI_SETACTIVEWNDTRKTIMEOUT = 8195
SPI_GETFOREGROUNDFLASHCOUNT = 8196
SPI_SETFOREGROUNDFLASHCOUNT = 8197
SPI_GETCARETWIDTH = 8198
SPI_SETCARETWIDTH = 8199
SPI_GETMOUSECLICKLOCKTIME = 8200
SPI_SETMOUSECLICKLOCKTIME = 8201
SPI_GETFONTSMOOTHINGTYPE = 8202
SPI_SETFONTSMOOTHINGTYPE = 8203
SPI_GETFONTSMOOTHINGCONTRAST = 8204
SPI_SETFONTSMOOTHINGCONTRAST = 8205
SPI_GETFOCUSBORDERWIDTH = 8206
SPI_SETFOCUSBORDERWIDTH = 8207
SPI_GETFOCUSBORDERHEIGHT = 8208
SPI_SETFOCUSBORDERHEIGHT = 8209
SPI_GETFONTSMOOTHINGORIENTATION = 8210
SPI_SETFONTSMOOTHINGORIENTATION = 8211
# fWinIni flags for SystemParametersInfo
SPIF_UPDATEINIFILE = 1
SPIF_SENDWININICHANGE = 2
SPIF_SENDCHANGE = SPIF_SENDWININICHANGE
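
# SPI_* selects the setting and the SPIF_* flags control persistence and
# broadcast when writing.  A read-only sketch, assuming a Windows
# environment with ctypes:
def _get_wheel_scroll_lines():
    import ctypes
    lines = ctypes.c_uint(0)
    ctypes.windll.user32.SystemParametersInfoW(
        SPI_GETWHEELSCROLLLINES, 0, ctypes.byref(lines), 0)
    return lines.value
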
# used with SystemParametersInfo and SPI_GETFONTSMOOTHINGTYPE/SPI_SETFONTSMOOTHINGTYPE
FE_FONTSMOOTHINGSTANDARD = 1
FE_FONTSMOOTHINGCLEARTYPE = 2
FE_FONTSMOOTHINGDOCKING = 32768
METRICS_USEDEFAULT = -1
ARW_BOTTOMLEFT = 0
ARW_BOTTOMRIGHT = 1
ARW_TOPLEFT = 2
ARW_TOPRIGHT = 3
ARW_STARTMASK = 3
ARW_STARTRIGHT = 1
ARW_STARTTOP = 2
ARW_LEFT = 0
ARW_RIGHT = 0
ARW_UP = 4
ARW_DOWN = 4
ARW_HIDE = 8
#ARW_VALID = 0x000F
SERKF_SERIALKEYSON = 1
SERKF_AVAILABLE = 2
SERKF_INDICATOR = 4
HCF_HIGHCONTRASTON = 1
HCF_AVAILABLE = 2
HCF_HOTKEYACTIVE = 4
HCF_CONFIRMHOTKEY = 8
HCF_HOTKEYSOUND = 16
HCF_INDICATOR = 32
HCF_HOTKEYAVAILABLE = 64
CDS_UPDATEREGISTRY = 1
CDS_TEST = 2
CDS_FULLSCREEN = 4
CDS_GLOBAL = 8
CDS_SET_PRIMARY = 16
CDS_RESET = 1073741824
CDS_SETRECT = 536870912
CDS_NORESET = 268435456
# return values from ChangeDisplaySettings and ChangeDisplaySettingsEx
DISP_CHANGE_SUCCESSFUL = 0
DISP_CHANGE_RESTART = 1
DISP_CHANGE_FAILED = -1
DISP_CHANGE_BADMODE = -2
DISP_CHANGE_NOTUPDATED = -3
DISP_CHANGE_BADFLAGS = -4
DISP_CHANGE_BADPARAM = -5
DISP_CHANGE_BADDUALVIEW = -6
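
# An illustrative mapping from the DISP_CHANGE_* return codes above to
# readable messages (_DISP_CHANGE_MESSAGES is not part of the headers):
_DISP_CHANGE_MESSAGES = {
    DISP_CHANGE_SUCCESSFUL: 'The settings change was successful.',
    DISP_CHANGE_RESTART: 'A restart is required for the change to take effect.',
    DISP_CHANGE_FAILED: 'The display driver failed the requested mode.',
    DISP_CHANGE_BADMODE: 'The requested graphics mode is not supported.',
    DISP_CHANGE_NOTUPDATED: 'Unable to write the settings to the registry.',
    DISP_CHANGE_BADFLAGS: 'An invalid set of CDS_* flags was passed.',
    DISP_CHANGE_BADPARAM: 'An invalid parameter was passed.',
    DISP_CHANGE_BADDUALVIEW: 'The change failed on a DualView-capable system.',
}
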
ENUM_CURRENT_SETTINGS = -1
ENUM_REGISTRY_SETTINGS = -2
FKF_FILTERKEYSON = 1
FKF_AVAILABLE = 2
FKF_HOTKEYACTIVE = 4
FKF_CONFIRMHOTKEY = 8
FKF_HOTKEYSOUND = 16
FKF_INDICATOR = 32
FKF_CLICKON = 64
SKF_STICKYKEYSON = 1
SKF_AVAILABLE = 2
SKF_HOTKEYACTIVE = 4
SKF_CONFIRMHOTKEY = 8
SKF_HOTKEYSOUND = 16
SKF_INDICATOR = 32
SKF_AUDIBLEFEEDBACK = 64
SKF_TRISTATE = 128
SKF_TWOKEYSOFF = 256
SKF_LALTLATCHED = 268435456
SKF_LCTLLATCHED = 67108864
SKF_LSHIFTLATCHED = 16777216
SKF_RALTLATCHED = 536870912
SKF_RCTLLATCHED = 134217728
SKF_RSHIFTLATCHED = 33554432
SKF_LWINLATCHED = 1073741824
SKF_RWINLATCHED = -2147483648
SKF_LALTLOCKED = 1048576
SKF_LCTLLOCKED = 262144
SKF_LSHIFTLOCKED = 65536
SKF_RALTLOCKED = 2097152
SKF_RCTLLOCKED = 524288
SKF_RSHIFTLOCKED = 131072
SKF_LWINLOCKED = 4194304
SKF_RWINLOCKED = 8388608
MKF_MOUSEKEYSON = 1
MKF_AVAILABLE = 2
MKF_HOTKEYACTIVE = 4
MKF_CONFIRMHOTKEY = 8
MKF_HOTKEYSOUND = 16
MKF_INDICATOR = 32
MKF_MODIFIERS = 64
MKF_REPLACENUMBERS = 128
MKF_LEFTBUTTONSEL = 268435456
MKF_RIGHTBUTTONSEL = 536870912
MKF_LEFTBUTTONDOWN = 16777216
MKF_RIGHTBUTTONDOWN = 33554432
MKF_MOUSEMODE = -2147483648
ATF_TIMEOUTON = 1
ATF_ONOFFFEEDBACK = 2
SSGF_NONE = 0
SSGF_DISPLAY = 3
SSTF_NONE = 0
SSTF_CHARS = 1
SSTF_BORDER = 2
SSTF_DISPLAY = 3
SSWF_NONE = 0
SSWF_TITLE = 1
SSWF_WINDOW = 2
SSWF_DISPLAY = 3
SSWF_CUSTOM = 4
SSF_SOUNDSENTRYON = 1
SSF_AVAILABLE = 2
SSF_INDICATOR = 4
TKF_TOGGLEKEYSON = 1
TKF_AVAILABLE = 2
TKF_HOTKEYACTIVE = 4
TKF_CONFIRMHOTKEY = 8
TKF_HOTKEYSOUND = 16
TKF_INDICATOR = 32
SLE_ERROR = 1
SLE_MINORERROR = 2
SLE_WARNING = 3
MONITOR_DEFAULTTONULL = 0
MONITOR_DEFAULTTOPRIMARY = 1
MONITOR_DEFAULTTONEAREST = 2
MONITORINFOF_PRIMARY = 1
CCHDEVICENAME = 32
CHILDID_SELF = 0
INDEXID_OBJECT = 0
INDEXID_CONTAINER = 0
OBJID_WINDOW = 0
OBJID_SYSMENU = -1
OBJID_TITLEBAR = -2
OBJID_MENU = -3
OBJID_CLIENT = -4
OBJID_VSCROLL = -5
OBJID_HSCROLL = -6
OBJID_SIZEGRIP = -7
OBJID_CARET = -8
OBJID_CURSOR = -9
OBJID_ALERT = -10
OBJID_SOUND = -11
EVENT_MIN = 1
EVENT_MAX = 2147483647
EVENT_SYSTEM_SOUND = 1
EVENT_SYSTEM_ALERT = 2
EVENT_SYSTEM_FOREGROUND = 3
EVENT_SYSTEM_MENUSTART = 4
EVENT_SYSTEM_MENUEND = 5
EVENT_SYSTEM_MENUPOPUPSTART = 6
EVENT_SYSTEM_MENUPOPUPEND = 7
EVENT_SYSTEM_CAPTURESTART = 8
EVENT_SYSTEM_CAPTUREEND = 9
EVENT_SYSTEM_MOVESIZESTART = 10
EVENT_SYSTEM_MOVESIZEEND = 11
EVENT_SYSTEM_CONTEXTHELPSTART = 12
EVENT_SYSTEM_CONTEXTHELPEND = 13
EVENT_SYSTEM_DRAGDROPSTART = 14
EVENT_SYSTEM_DRAGDROPEND = 15
EVENT_SYSTEM_DIALOGSTART = 16
EVENT_SYSTEM_DIALOGEND = 17
EVENT_SYSTEM_SCROLLINGSTART = 18
EVENT_SYSTEM_SCROLLINGEND = 19
EVENT_SYSTEM_SWITCHSTART = 20
EVENT_SYSTEM_SWITCHEND = 21
EVENT_SYSTEM_MINIMIZESTART = 22
EVENT_SYSTEM_MINIMIZEEND = 23
EVENT_OBJECT_CREATE = 32768
EVENT_OBJECT_DESTROY = 32769
EVENT_OBJECT_SHOW = 32770
EVENT_OBJECT_HIDE = 32771
EVENT_OBJECT_REORDER = 32772
EVENT_OBJECT_FOCUS = 32773
EVENT_OBJECT_SELECTION = 32774
EVENT_OBJECT_SELECTIONADD = 32775
EVENT_OBJECT_SELECTIONREMOVE = 32776
EVENT_OBJECT_SELECTIONWITHIN = 32777
EVENT_OBJECT_STATECHANGE = 32778
EVENT_OBJECT_LOCATIONCHANGE = 32779
EVENT_OBJECT_NAMECHANGE = 32780
EVENT_OBJECT_DESCRIPTIONCHANGE = 32781
EVENT_OBJECT_VALUECHANGE = 32782
EVENT_OBJECT_PARENTCHANGE = 32783
EVENT_OBJECT_HELPCHANGE = 32784
EVENT_OBJECT_DEFACTIONCHANGE = 32785
EVENT_OBJECT_ACCELERATORCHANGE = 32786
SOUND_SYSTEM_STARTUP = 1
SOUND_SYSTEM_SHUTDOWN = 2
SOUND_SYSTEM_BEEP = 3
SOUND_SYSTEM_ERROR = 4
SOUND_SYSTEM_QUESTION = 5
SOUND_SYSTEM_WARNING = 6
SOUND_SYSTEM_INFORMATION = 7
SOUND_SYSTEM_MAXIMIZE = 8
SOUND_SYSTEM_MINIMIZE = 9
SOUND_SYSTEM_RESTOREUP = 10
SOUND_SYSTEM_RESTOREDOWN = 11
SOUND_SYSTEM_APPSTART = 12
SOUND_SYSTEM_FAULT = 13
SOUND_SYSTEM_APPEND = 14
SOUND_SYSTEM_MENUCOMMAND = 15
SOUND_SYSTEM_MENUPOPUP = 16
CSOUND_SYSTEM = 16
ALERT_SYSTEM_INFORMATIONAL = 1
ALERT_SYSTEM_WARNING = 2
ALERT_SYSTEM_ERROR = 3
ALERT_SYSTEM_QUERY = 4
ALERT_SYSTEM_CRITICAL = 5
CALERT_SYSTEM = 6
WINEVENT_OUTOFCONTEXT = 0
WINEVENT_SKIPOWNTHREAD = 1
WINEVENT_SKIPOWNPROCESS = 2
WINEVENT_INCONTEXT = 4
GUI_CARETBLINKING = 1
GUI_INMOVESIZE = 2
GUI_INMENUMODE = 4
GUI_SYSTEMMENUMODE = 8
GUI_POPUPMENUMODE = 16
STATE_SYSTEM_UNAVAILABLE = 1
STATE_SYSTEM_SELECTED = 2
STATE_SYSTEM_FOCUSED = 4
STATE_SYSTEM_PRESSED = 8
STATE_SYSTEM_CHECKED = 16
STATE_SYSTEM_MIXED = 32
STATE_SYSTEM_READONLY = 64
STATE_SYSTEM_HOTTRACKED = 128
STATE_SYSTEM_DEFAULT = 256
STATE_SYSTEM_EXPANDED = 512
STATE_SYSTEM_COLLAPSED = 1024
STATE_SYSTEM_BUSY = 2048
STATE_SYSTEM_FLOATING = 4096
STATE_SYSTEM_MARQUEED = 8192
STATE_SYSTEM_ANIMATED = 16384
STATE_SYSTEM_INVISIBLE = 32768
STATE_SYSTEM_OFFSCREEN = 65536
STATE_SYSTEM_SIZEABLE = 131072
STATE_SYSTEM_MOVEABLE = 262144
STATE_SYSTEM_SELFVOICING = 524288
STATE_SYSTEM_FOCUSABLE = 1048576
STATE_SYSTEM_SELECTABLE = 2097152
STATE_SYSTEM_LINKED = 4194304
STATE_SYSTEM_TRAVERSED = 8388608
STATE_SYSTEM_MULTISELECTABLE = 16777216
STATE_SYSTEM_EXTSELECTABLE = 33554432
STATE_SYSTEM_ALERT_LOW = 67108864
STATE_SYSTEM_ALERT_MEDIUM = 134217728
STATE_SYSTEM_ALERT_HIGH = 268435456
STATE_SYSTEM_VALID = 536870911
CCHILDREN_TITLEBAR = 5
CCHILDREN_SCROLLBAR = 5
CURSOR_SHOWING = 1
WS_ACTIVECAPTION = 1
GA_MIC = 1
GA_PARENT = 1
GA_ROOT = 2
GA_ROOTOWNER = 3
GA_MAC = 4
# winuser.h line 1979
BF_LEFT = 1
BF_TOP = 2
BF_RIGHT = 4
BF_BOTTOM = 8
BF_TOPLEFT = (BF_TOP | BF_LEFT)
BF_TOPRIGHT = (BF_TOP | BF_RIGHT)
BF_BOTTOMLEFT = (BF_BOTTOM | BF_LEFT)
BF_BOTTOMRIGHT = (BF_BOTTOM | BF_RIGHT)
BF_RECT = (BF_LEFT | BF_TOP | BF_RIGHT | BF_BOTTOM)
BF_DIAGONAL = 16
BF_DIAGONAL_ENDTOPRIGHT = (BF_DIAGONAL | BF_TOP | BF_RIGHT)
BF_DIAGONAL_ENDTOPLEFT = (BF_DIAGONAL | BF_TOP | BF_LEFT)
BF_DIAGONAL_ENDBOTTOMLEFT = (BF_DIAGONAL | BF_BOTTOM | BF_LEFT)
BF_DIAGONAL_ENDBOTTOMRIGHT = (BF_DIAGONAL | BF_BOTTOM | BF_RIGHT)
BF_MIDDLE = 2048
BF_SOFT = 4096
BF_ADJUST = 8192
BF_FLAT = 16384
BF_MONO = 32768
DFC_CAPTION = 1
DFC_MENU = 2
DFC_SCROLL = 3
DFC_BUTTON = 4
DFC_POPUPMENU = 5
DFCS_CAPTIONCLOSE = 0
DFCS_CAPTIONMIN = 1
DFCS_CAPTIONMAX = 2
DFCS_CAPTIONRESTORE = 3
DFCS_CAPTIONHELP = 4
DFCS_MENUARROW = 0
DFCS_MENUCHECK = 1
DFCS_MENUBULLET = 2
DFCS_MENUARROWRIGHT = 4
DFCS_SCROLLUP = 0
DFCS_SCROLLDOWN = 1
DFCS_SCROLLLEFT = 2
DFCS_SCROLLRIGHT = 3
DFCS_SCROLLCOMBOBOX = 5
DFCS_SCROLLSIZEGRIP = 8
DFCS_SCROLLSIZEGRIPRIGHT = 16
DFCS_BUTTONCHECK = 0
DFCS_BUTTONRADIOIMAGE = 1
DFCS_BUTTONRADIOMASK = 2
DFCS_BUTTONRADIO = 4
DFCS_BUTTON3STATE = 8
DFCS_BUTTONPUSH = 16
DFCS_INACTIVE = 256
DFCS_PUSHED = 512
DFCS_CHECKED = 1024
DFCS_TRANSPARENT = 2048
DFCS_HOT = 4096
DFCS_ADJUSTRECT = 8192
DFCS_FLAT = 16384
DFCS_MONO = 32768
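
# DFC_* picks the control class and DFCS_* the part/state for
# DrawFrameControl.  A sketch assuming a Windows environment with ctypes,
# where hdc is a device context and rect a ctypes.wintypes.RECT:
def _draw_pushed_close_button(hdc, rect):
    import ctypes
    return ctypes.windll.user32.DrawFrameControl(
        hdc, ctypes.byref(rect), DFC_CAPTION, DFCS_CAPTIONCLOSE | DFCS_PUSHED)
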
DC_ACTIVE = 1
DC_SMALLCAP = 2
DC_ICON = 4
DC_TEXT = 8
DC_INBUTTON = 16
DC_GRADIENT = 32
IDANI_OPEN = 1
IDANI_CLOSE = 2
IDANI_CAPTION = 3
CF_TEXT = 1
CF_BITMAP = 2
CF_METAFILEPICT = 3
CF_SYLK = 4
CF_DIF = 5
CF_TIFF = 6
CF_OEMTEXT = 7
CF_DIB = 8
CF_PALETTE = 9
CF_PENDATA = 10
CF_RIFF = 11
CF_WAVE = 12
CF_UNICODETEXT = 13
CF_ENHMETAFILE = 14
CF_HDROP = 15
CF_LOCALE = 16
CF_DIBV5 = 17
CF_MAX = 18
CF_OWNERDISPLAY = 128
CF_DSPTEXT = 129
CF_DSPBITMAP = 130
CF_DSPMETAFILEPICT = 131
CF_DSPENHMETAFILE = 142
CF_PRIVATEFIRST = 512
CF_PRIVATELAST = 767
CF_GDIOBJFIRST = 768
CF_GDIOBJLAST = 1023
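
# The CF_* values above identify clipboard formats.  A minimal check,
# assuming a Windows environment with ctypes:
def _has_unicode_text_on_clipboard():
    import ctypes
    return bool(ctypes.windll.user32.IsClipboardFormatAvailable(CF_UNICODETEXT))
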
FVIRTKEY = 1
FNOINVERT = 2
FSHIFT = 4
FCONTROL = 8
FALT = 16
WPF_SETMINPOSITION = 1
WPF_RESTORETOMAXIMIZED = 2
ODT_MENU = 1
ODT_LISTBOX = 2
ODT_COMBOBOX = 3
ODT_BUTTON = 4
ODT_STATIC = 5
ODA_DRAWENTIRE = 1
ODA_SELECT = 2
ODA_FOCUS = 4
ODS_SELECTED = 1
ODS_GRAYED = 2
ODS_DISABLED = 4
ODS_CHECKED = 8
ODS_FOCUS = 16
ODS_DEFAULT = 32
ODS_COMBOBOXEDIT = 4096
ODS_HOTLIGHT = 64
ODS_INACTIVE = 128
PM_NOREMOVE = 0
PM_REMOVE = 1
PM_NOYIELD = 2
MOD_ALT = 1
MOD_CONTROL = 2
MOD_SHIFT = 4
MOD_WIN = 8
IDHOT_SNAPWINDOW = (-1)
IDHOT_SNAPDESKTOP = (-2)
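
# The MOD_* modifiers combine with a virtual-key code in RegisterHotKey.
# A sketch assuming a Windows environment with ctypes; 0x42 is the
# virtual-key code for 'B' (an illustrative choice):
def _register_ctrl_alt_hotkey(hotkey_id, vk=0x42):
    import ctypes
    return ctypes.windll.user32.RegisterHotKey(
        None, hotkey_id, MOD_CONTROL | MOD_ALT, vk)
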
#EW_RESTARTWINDOWS = 0x0042
#EW_REBOOTSYSTEM = 0x0043
#EW_EXITANDEXECAPP = 0x0044
ENDSESSION_LOGOFF = -2147483648
EWX_LOGOFF = 0
EWX_SHUTDOWN = 1
EWX_REBOOT = 2
EWX_FORCE = 4
EWX_POWEROFF = 8
EWX_FORCEIFHUNG = 16
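
# The EWX_* flags combine for ExitWindowsEx.  A sketch assuming a Windows
# environment with ctypes; shutting down also requires the caller to hold
# the SE_SHUTDOWN_NAME privilege defined earlier in this module:
def _request_reboot(force_if_hung=False):
    import ctypes
    flags = EWX_REBOOT | (EWX_FORCEIFHUNG if force_if_hung else 0)
    return ctypes.windll.user32.ExitWindowsEx(flags, 0)
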
BSM_ALLCOMPONENTS = 0
BSM_VXDS = 1
BSM_NETDRIVER = 2
BSM_INSTALLABLEDRIVERS = 4
BSM_APPLICATIONS = 8
BSM_ALLDESKTOPS = 16
BSF_QUERY = 1
BSF_IGNORECURRENTTASK = 2
BSF_FLUSHDISK = 4
BSF_NOHANG = 8
BSF_POSTMESSAGE = 16
BSF_FORCEIFHUNG = 32
BSF_NOTIMEOUTIFNOTHUNG = 64
BROADCAST_QUERY_DENY = 1112363332 # Return this value to deny a query.
DBWF_LPARAMPOINTER = 32768
# winuser.h line 3232
SWP_NOSIZE = 1
SWP_NOMOVE = 2
SWP_NOZORDER = 4
SWP_NOREDRAW = 8
SWP_NOACTIVATE = 16
SWP_FRAMECHANGED = 32
SWP_SHOWWINDOW = 64
SWP_HIDEWINDOW = 128
SWP_NOCOPYBITS = 256
SWP_NOOWNERZORDER = 512
SWP_NOSENDCHANGING = 1024
SWP_DRAWFRAME = SWP_FRAMECHANGED
SWP_NOREPOSITION = SWP_NOOWNERZORDER
SWP_DEFERERASE = 8192
SWP_ASYNCWINDOWPOS = 16384
DLGWINDOWEXTRA = 30
# winuser.h line 4249
KEYEVENTF_EXTENDEDKEY = 1
KEYEVENTF_KEYUP = 2
MOUSEEVENTF_MOVE = 1
MOUSEEVENTF_LEFTDOWN = 2
MOUSEEVENTF_LEFTUP = 4
MOUSEEVENTF_RIGHTDOWN = 8
MOUSEEVENTF_RIGHTUP = 16
MOUSEEVENTF_MIDDLEDOWN = 32
MOUSEEVENTF_MIDDLEUP = 64
MOUSEEVENTF_ABSOLUTE = 32768
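
# The MOUSEEVENTF_* flags drive the legacy mouse_event API (SendInput is
# the modern replacement).  A sketch assuming a Windows environment:
def _click_left_button():
    import ctypes
    ctypes.windll.user32.mouse_event(MOUSEEVENTF_LEFTDOWN, 0, 0, 0, 0)
    ctypes.windll.user32.mouse_event(MOUSEEVENTF_LEFTUP, 0, 0, 0, 0)
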
INPUT_MOUSE = 0
INPUT_KEYBOARD = 1
INPUT_HARDWARE = 2
MWMO_WAITALL = 1
MWMO_ALERTABLE = 2
MWMO_INPUTAVAILABLE = 4
QS_KEY = 1
QS_MOUSEMOVE = 2
QS_MOUSEBUTTON = 4
QS_POSTMESSAGE = 8
QS_TIMER = 16
QS_PAINT = 32
QS_SENDMESSAGE = 64
QS_HOTKEY = 128
QS_MOUSE = (QS_MOUSEMOVE | \
QS_MOUSEBUTTON)
QS_INPUT = (QS_MOUSE | \
QS_KEY)
QS_ALLEVENTS = (QS_INPUT | \
QS_POSTMESSAGE | \
QS_TIMER | \
QS_PAINT | \
QS_HOTKEY)
QS_ALLINPUT = (QS_INPUT | \
QS_POSTMESSAGE | \
QS_TIMER | \
QS_PAINT | \
QS_HOTKEY | \
QS_SENDMESSAGE)
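
# The QS_* masks tell MsgWaitForMultipleObjects which queue events may end
# the wait.  A sketch assuming a Windows environment with ctypes:
def _wait_for_handle_or_input(handle, timeout_ms):
    import ctypes
    handles = (ctypes.c_void_p * 1)(handle)
    return ctypes.windll.user32.MsgWaitForMultipleObjects(
        1, handles, 0, timeout_ms, QS_ALLINPUT)
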
IMN_CLOSESTATUSWINDOW = 1
IMN_OPENSTATUSWINDOW = 2
IMN_CHANGECANDIDATE = 3
IMN_CLOSECANDIDATE = 4
IMN_OPENCANDIDATE = 5
IMN_SETCONVERSIONMODE = 6
IMN_SETSENTENCEMODE = 7
IMN_SETOPENSTATUS = 8
IMN_SETCANDIDATEPOS = 9
IMN_SETCOMPOSITIONFONT = 10
IMN_SETCOMPOSITIONWINDOW = 11
IMN_SETSTATUSWINDOWPOS = 12
IMN_GUIDELINE = 13
IMN_PRIVATE = 14
# winuser.h line 8518
HELP_CONTEXT = 1
HELP_QUIT = 2
HELP_INDEX = 3
HELP_CONTENTS = 3
HELP_HELPONHELP = 4
HELP_SETINDEX = 5
HELP_SETCONTENTS = 5
HELP_CONTEXTPOPUP = 8
HELP_FORCEFILE = 9
HELP_KEY = 257
HELP_COMMAND = 258
HELP_PARTIALKEY = 261
HELP_MULTIKEY = 513
HELP_SETWINPOS = 515
HELP_CONTEXTMENU = 10
HELP_FINDER = 11
HELP_WM_HELP = 12
HELP_SETPOPUP_POS = 13
HELP_TCARD = 32768
HELP_TCARD_DATA = 16
HELP_TCARD_OTHER_CALLER = 17
IDH_NO_HELP = 28440
IDH_MISSING_CONTEXT = 28441 # Control doesn't have matching help context
IDH_GENERIC_HELP_BUTTON = 28442 # Property sheet help button
IDH_OK = 28443
IDH_CANCEL = 28444
IDH_HELP = 28445
GR_GDIOBJECTS = 0 # Count of GDI objects
GR_USEROBJECTS = 1 # Count of USER objects
# Generated by h2py from \msvcnt\include\wingdi.h
# manually added (missed by generation somehow!)
SRCCOPY = 13369376 # dest = source
SRCPAINT = 15597702 # dest = source OR dest
SRCAND = 8913094 # dest = source AND dest
SRCINVERT = 6684742 # dest = source XOR dest
SRCERASE = 4457256 # dest = source AND (NOT dest )
NOTSRCCOPY = 3342344 # dest = (NOT source)
NOTSRCERASE = 1114278 # dest = (NOT src) AND (NOT dest)
MERGECOPY = 12583114 # dest = (source AND pattern)
MERGEPAINT = 12255782 # dest = (NOT source) OR dest
PATCOPY = 15728673 # dest = pattern
PATPAINT = 16452105 # dest = DPSnoo
PATINVERT = 5898313 # dest = pattern XOR dest
DSTINVERT = 5570569 # dest = (NOT dest)
BLACKNESS = 66 # dest = BLACK
WHITENESS = 16711778 # dest = WHITE
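
# The raster-operation codes above are the final `rop` argument to GDI
# blits.  A sketch assuming a Windows environment with ctypes and two
# valid device contexts:
def _bitblt_copy(hdc_dest, x, y, width, height, hdc_src, x_src=0, y_src=0):
    import ctypes
    return ctypes.windll.gdi32.BitBlt(
        hdc_dest, x, y, width, height, hdc_src, x_src, y_src, SRCCOPY)
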
# hacked and split manually by mhammond.
R2_BLACK = 1
R2_NOTMERGEPEN = 2
R2_MASKNOTPEN = 3
R2_NOTCOPYPEN = 4
R2_MASKPENNOT = 5
R2_NOT = 6
R2_XORPEN = 7
R2_NOTMASKPEN = 8
R2_MASKPEN = 9
R2_NOTXORPEN = 10
R2_NOP = 11
R2_MERGENOTPEN = 12
R2_COPYPEN = 13
R2_MERGEPENNOT = 14
R2_MERGEPEN = 15
R2_WHITE = 16
R2_LAST = 16
GDI_ERROR = (-1)
ERROR = 0
NULLREGION = 1
SIMPLEREGION = 2
COMPLEXREGION = 3
RGN_ERROR = ERROR
RGN_AND = 1
RGN_OR = 2
RGN_XOR = 3
RGN_DIFF = 4
RGN_COPY = 5
RGN_MIN = RGN_AND
RGN_MAX = RGN_COPY
## Stretching modes used with Get/SetStretchBltMode
BLACKONWHITE = 1
WHITEONBLACK = 2
COLORONCOLOR = 3
HALFTONE = 4
MAXSTRETCHBLTMODE = 4
STRETCH_ANDSCANS = BLACKONWHITE
STRETCH_ORSCANS = WHITEONBLACK
STRETCH_DELETESCANS = COLORONCOLOR
STRETCH_HALFTONE = HALFTONE
ALTERNATE = 1
WINDING = 2
POLYFILL_LAST = 2
## flags used with SetLayout
LAYOUT_RTL = 1
LAYOUT_BTT = 2
LAYOUT_VBH = 4
LAYOUT_ORIENTATIONMASK = LAYOUT_RTL|LAYOUT_BTT|LAYOUT_VBH
LAYOUT_BITMAPORIENTATIONPRESERVED = 8
TA_NOUPDATECP = 0
TA_UPDATECP = 1
TA_LEFT = 0
TA_RIGHT = 2
TA_CENTER = 6
TA_TOP = 0
TA_BOTTOM = 8
TA_BASELINE = 24
TA_MASK = (TA_BASELINE+TA_CENTER+TA_UPDATECP)
VTA_BASELINE = TA_BASELINE
VTA_LEFT = TA_BOTTOM
VTA_RIGHT = TA_TOP
VTA_CENTER = TA_CENTER
VTA_BOTTOM = TA_RIGHT
VTA_TOP = TA_LEFT
ETO_GRAYED = 1
ETO_OPAQUE = 2
ETO_CLIPPED = 4
ASPECT_FILTERING = 1
DCB_RESET = 1
DCB_ACCUMULATE = 2
DCB_DIRTY = DCB_ACCUMULATE
DCB_SET = (DCB_RESET | DCB_ACCUMULATE)
DCB_ENABLE = 4
DCB_DISABLE = 8
META_SETBKCOLOR = 513
META_SETBKMODE = 258
META_SETMAPMODE = 259
META_SETROP2 = 260
META_SETRELABS = 261
META_SETPOLYFILLMODE = 262
META_SETSTRETCHBLTMODE = 263
META_SETTEXTCHAREXTRA = 264
META_SETTEXTCOLOR = 521
META_SETTEXTJUSTIFICATION = 522
META_SETWINDOWORG = 523
META_SETWINDOWEXT = 524
META_SETVIEWPORTORG = 525
META_SETVIEWPORTEXT = 526
META_OFFSETWINDOWORG = 527
META_SCALEWINDOWEXT = 1040
META_OFFSETVIEWPORTORG = 529
META_SCALEVIEWPORTEXT = 1042
META_LINETO = 531
META_MOVETO = 532
META_EXCLUDECLIPRECT = 1045
META_INTERSECTCLIPRECT = 1046
META_ARC = 2071
META_ELLIPSE = 1048
META_FLOODFILL = 1049
META_PIE = 2074
META_RECTANGLE = 1051
META_ROUNDRECT = 1564
META_PATBLT = 1565
META_SAVEDC = 30
META_SETPIXEL = 1055
META_OFFSETCLIPRGN = 544
META_TEXTOUT = 1313
META_BITBLT = 2338
META_STRETCHBLT = 2851
META_POLYGON = 804
META_POLYLINE = 805
META_ESCAPE = 1574
META_RESTOREDC = 295
META_FILLREGION = 552
META_FRAMEREGION = 1065
META_INVERTREGION = 298
META_PAINTREGION = 299
META_SELECTCLIPREGION = 300
META_SELECTOBJECT = 301
META_SETTEXTALIGN = 302
META_CHORD = 2096
META_SETMAPPERFLAGS = 561
META_EXTTEXTOUT = 2610
META_SETDIBTODEV = 3379
META_SELECTPALETTE = 564
META_REALIZEPALETTE = 53
META_ANIMATEPALETTE = 1078
META_SETPALENTRIES = 55
META_POLYPOLYGON = 1336
META_RESIZEPALETTE = 313
META_DIBBITBLT = 2368
META_DIBSTRETCHBLT = 2881
META_DIBCREATEPATTERNBRUSH = 322
META_STRETCHDIB = 3907
META_EXTFLOODFILL = 1352
META_DELETEOBJECT = 496
META_CREATEPALETTE = 247
META_CREATEPATTERNBRUSH = 505
META_CREATEPENINDIRECT = 762
META_CREATEFONTINDIRECT = 763
META_CREATEBRUSHINDIRECT = 764
META_CREATEREGION = 1791
FILE_BEGIN = 0
FILE_CURRENT = 1
FILE_END = 2
FILE_FLAG_WRITE_THROUGH = -2147483648
FILE_FLAG_OVERLAPPED = 1073741824
FILE_FLAG_NO_BUFFERING = 536870912
FILE_FLAG_RANDOM_ACCESS = 268435456
FILE_FLAG_SEQUENTIAL_SCAN = 134217728
FILE_FLAG_DELETE_ON_CLOSE = 67108864
FILE_FLAG_BACKUP_SEMANTICS = 33554432
FILE_FLAG_POSIX_SEMANTICS = 16777216
CREATE_NEW = 1
CREATE_ALWAYS = 2
OPEN_EXISTING = 3
OPEN_ALWAYS = 4
TRUNCATE_EXISTING = 5
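
# CREATE_NEW through TRUNCATE_EXISTING are CreateFileW's creation
# disposition.  A sketch assuming a Windows environment with ctypes;
# GENERIC_READ is not defined in this module (standard value 0x80000000):
def _open_for_read(path):
    import ctypes
    GENERIC_READ = 0x80000000
    CreateFileW = ctypes.windll.kernel32.CreateFileW
    CreateFileW.restype = ctypes.c_void_p  # HANDLE; avoids truncation on 64-bit
    return CreateFileW(path, GENERIC_READ, 0, None, OPEN_EXISTING, 0, None)
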
PIPE_ACCESS_INBOUND = 1
PIPE_ACCESS_OUTBOUND = 2
PIPE_ACCESS_DUPLEX = 3
PIPE_CLIENT_END = 0
PIPE_SERVER_END = 1
PIPE_WAIT = 0
PIPE_NOWAIT = 1
PIPE_READMODE_BYTE = 0
PIPE_READMODE_MESSAGE = 2
PIPE_TYPE_BYTE = 0
PIPE_TYPE_MESSAGE = 4
PIPE_UNLIMITED_INSTANCES = 255
SECURITY_CONTEXT_TRACKING = 262144
SECURITY_EFFECTIVE_ONLY = 524288
SECURITY_SQOS_PRESENT = 1048576
SECURITY_VALID_SQOS_FLAGS = 2031616
DTR_CONTROL_DISABLE = 0
DTR_CONTROL_ENABLE = 1
DTR_CONTROL_HANDSHAKE = 2
RTS_CONTROL_DISABLE = 0
RTS_CONTROL_ENABLE = 1
RTS_CONTROL_HANDSHAKE = 2
RTS_CONTROL_TOGGLE = 3
GMEM_FIXED = 0
GMEM_MOVEABLE = 2
GMEM_NOCOMPACT = 16
GMEM_NODISCARD = 32
GMEM_ZEROINIT = 64
GMEM_MODIFY = 128
GMEM_DISCARDABLE = 256
GMEM_NOT_BANKED = 4096
GMEM_SHARE = 8192
GMEM_DDESHARE = 8192
GMEM_NOTIFY = 16384
GMEM_LOWER = GMEM_NOT_BANKED
GMEM_VALID_FLAGS = 32626
GMEM_INVALID_HANDLE = 32768
GHND = (GMEM_MOVEABLE | GMEM_ZEROINIT)
GPTR = (GMEM_FIXED | GMEM_ZEROINIT)
GMEM_DISCARDED = 16384
GMEM_LOCKCOUNT = 255
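
# The GMEM_* flags (and the GHND/GPTR composites) are GlobalAlloc's first
# argument.  A sketch assuming a Windows environment with ctypes:
def _alloc_moveable_zeroed(nbytes):
    import ctypes
    GlobalAlloc = ctypes.windll.kernel32.GlobalAlloc
    GlobalAlloc.restype = ctypes.c_void_p  # HGLOBAL
    return GlobalAlloc(GHND, nbytes)
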
LMEM_FIXED = 0
LMEM_MOVEABLE = 2
LMEM_NOCOMPACT = 16
LMEM_NODISCARD = 32
LMEM_ZEROINIT = 64
LMEM_MODIFY = 128
LMEM_DISCARDABLE = 3840
LMEM_VALID_FLAGS = 3954
LMEM_INVALID_HANDLE = 32768
LHND = (LMEM_MOVEABLE | LMEM_ZEROINIT)
LPTR = (LMEM_FIXED | LMEM_ZEROINIT)
NONZEROLHND = (LMEM_MOVEABLE)
NONZEROLPTR = (LMEM_FIXED)
LMEM_DISCARDED = 16384
LMEM_LOCKCOUNT = 255
DEBUG_PROCESS = 1
DEBUG_ONLY_THIS_PROCESS = 2
CREATE_SUSPENDED = 4
DETACHED_PROCESS = 8
CREATE_NEW_CONSOLE = 16
NORMAL_PRIORITY_CLASS = 32
IDLE_PRIORITY_CLASS = 64
HIGH_PRIORITY_CLASS = 128
REALTIME_PRIORITY_CLASS = 256
CREATE_NEW_PROCESS_GROUP = 512
CREATE_UNICODE_ENVIRONMENT = 1024
CREATE_SEPARATE_WOW_VDM = 2048
CREATE_SHARED_WOW_VDM = 4096
CREATE_DEFAULT_ERROR_MODE = 67108864
CREATE_NO_WINDOW = 134217728
PROFILE_USER = 268435456
PROFILE_KERNEL = 536870912
PROFILE_SERVER = 1073741824
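
# The *_PRIORITY_CLASS values above can also be applied after creation via
# SetPriorityClass.  A sketch assuming a Windows environment with ctypes:
def _raise_own_priority():
    import ctypes
    proc = ctypes.windll.kernel32.GetCurrentProcess()
    return ctypes.windll.kernel32.SetPriorityClass(proc, HIGH_PRIORITY_CLASS)
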
THREAD_BASE_PRIORITY_LOWRT = 15
THREAD_BASE_PRIORITY_MAX = 2
THREAD_BASE_PRIORITY_MIN = -2
THREAD_BASE_PRIORITY_IDLE = -15
THREAD_PRIORITY_LOWEST = THREAD_BASE_PRIORITY_MIN
THREAD_PRIORITY_BELOW_NORMAL = THREAD_PRIORITY_LOWEST+1
THREAD_PRIORITY_HIGHEST = THREAD_BASE_PRIORITY_MAX
THREAD_PRIORITY_ABOVE_NORMAL = THREAD_PRIORITY_HIGHEST-1
THREAD_PRIORITY_ERROR_RETURN = MAXLONG
THREAD_PRIORITY_TIME_CRITICAL = THREAD_BASE_PRIORITY_LOWRT
THREAD_PRIORITY_IDLE = THREAD_BASE_PRIORITY_IDLE
THREAD_PRIORITY_NORMAL = 0
THREAD_MODE_BACKGROUND_BEGIN = 0x00010000
THREAD_MODE_BACKGROUND_END = 0x00020000
EXCEPTION_DEBUG_EVENT = 1
CREATE_THREAD_DEBUG_EVENT = 2
CREATE_PROCESS_DEBUG_EVENT = 3
EXIT_THREAD_DEBUG_EVENT = 4
EXIT_PROCESS_DEBUG_EVENT = 5
LOAD_DLL_DEBUG_EVENT = 6
UNLOAD_DLL_DEBUG_EVENT = 7
OUTPUT_DEBUG_STRING_EVENT = 8
RIP_EVENT = 9
DRIVE_UNKNOWN = 0
DRIVE_NO_ROOT_DIR = 1
DRIVE_REMOVABLE = 2
DRIVE_FIXED = 3
DRIVE_REMOTE = 4
DRIVE_CDROM = 5
DRIVE_RAMDISK = 6
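
# GetDriveTypeW returns one of the DRIVE_* values above.  A sketch
# assuming a Windows environment with ctypes:
def _drive_type(root):
    # root is a unicode root path such as u'C:\\'
    import ctypes
    return ctypes.windll.kernel32.GetDriveTypeW(root)
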
FILE_TYPE_UNKNOWN = 0
FILE_TYPE_DISK = 1
FILE_TYPE_CHAR = 2
FILE_TYPE_PIPE = 3
FILE_TYPE_REMOTE = 32768
NOPARITY = 0
ODDPARITY = 1
EVENPARITY = 2
MARKPARITY = 3
SPACEPARITY = 4
ONESTOPBIT = 0
ONE5STOPBITS = 1
TWOSTOPBITS = 2
CBR_110 = 110
CBR_300 = 300
CBR_600 = 600
CBR_1200 = 1200
CBR_2400 = 2400
CBR_4800 = 4800
CBR_9600 = 9600
CBR_14400 = 14400
CBR_19200 = 19200
CBR_38400 = 38400
CBR_56000 = 56000
CBR_57600 = 57600
CBR_115200 = 115200
CBR_128000 = 128000
CBR_256000 = 256000
S_QUEUEEMPTY = 0
S_THRESHOLD = 1
S_ALLTHRESHOLD = 2
S_NORMAL = 0
S_LEGATO = 1
S_STACCATO = 2
NMPWAIT_WAIT_FOREVER = -1
NMPWAIT_NOWAIT = 1
NMPWAIT_USE_DEFAULT_WAIT = 0
OF_READ = 0
OF_WRITE = 1
OF_READWRITE = 2
OF_SHARE_COMPAT = 0
OF_SHARE_EXCLUSIVE = 16
OF_SHARE_DENY_WRITE = 32
OF_SHARE_DENY_READ = 48
OF_SHARE_DENY_NONE = 64
OF_PARSE = 256
OF_DELETE = 512
OF_VERIFY = 1024
OF_CANCEL = 2048
OF_CREATE = 4096
OF_PROMPT = 8192
OF_EXIST = 16384
OF_REOPEN = 32768
OFS_MAXPATHNAME = 128
MAXINTATOM = 49152
# winbase.h
PROCESS_HEAP_REGION = 1
PROCESS_HEAP_UNCOMMITTED_RANGE = 2
PROCESS_HEAP_ENTRY_BUSY = 4
PROCESS_HEAP_ENTRY_MOVEABLE = 16
PROCESS_HEAP_ENTRY_DDESHARE = 32
SCS_32BIT_BINARY = 0
SCS_DOS_BINARY = 1
SCS_WOW_BINARY = 2
SCS_PIF_BINARY = 3
SCS_POSIX_BINARY = 4
SCS_OS216_BINARY = 5
SEM_FAILCRITICALERRORS = 1
SEM_NOGPFAULTERRORBOX = 2
SEM_NOALIGNMENTFAULTEXCEPT = 4
SEM_NOOPENFILEERRORBOX = 32768
LOCKFILE_FAIL_IMMEDIATELY = 1
LOCKFILE_EXCLUSIVE_LOCK = 2
HANDLE_FLAG_INHERIT = 1
HANDLE_FLAG_PROTECT_FROM_CLOSE = 2
HINSTANCE_ERROR = 32
GET_TAPE_MEDIA_INFORMATION = 0
GET_TAPE_DRIVE_INFORMATION = 1
SET_TAPE_MEDIA_INFORMATION = 0
SET_TAPE_DRIVE_INFORMATION = 1
FORMAT_MESSAGE_ALLOCATE_BUFFER = 256
FORMAT_MESSAGE_IGNORE_INSERTS = 512
FORMAT_MESSAGE_FROM_STRING = 1024
FORMAT_MESSAGE_FROM_HMODULE = 2048
FORMAT_MESSAGE_FROM_SYSTEM = 4096
FORMAT_MESSAGE_ARGUMENT_ARRAY = 8192
FORMAT_MESSAGE_MAX_WIDTH_MASK = 255
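
# The FORMAT_MESSAGE_* flags select FormatMessageW's source and formatting
# behaviour.  A sketch that renders a system error code as text, assuming
# a Windows environment with ctypes:
def _format_system_error(code):
    import ctypes
    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.kernel32.FormatMessageW(
        FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS,
        None, code, 0, buf, len(buf), None)
    return buf.value.strip()
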
BACKUP_INVALID = 0
BACKUP_DATA = 1
BACKUP_EA_DATA = 2
BACKUP_SECURITY_DATA = 3
BACKUP_ALTERNATE_DATA = 4
BACKUP_LINK = 5
BACKUP_PROPERTY_DATA = 6
BACKUP_OBJECT_ID = 7
BACKUP_REPARSE_DATA = 8
BACKUP_SPARSE_BLOCK = 9
STREAM_NORMAL_ATTRIBUTE = 0
STREAM_MODIFIED_WHEN_READ = 1
STREAM_CONTAINS_SECURITY = 2
STREAM_CONTAINS_PROPERTIES = 4
STARTF_USESHOWWINDOW = 1
STARTF_USESIZE = 2
STARTF_USEPOSITION = 4
STARTF_USECOUNTCHARS = 8
STARTF_USEFILLATTRIBUTE = 16
STARTF_FORCEONFEEDBACK = 64
STARTF_FORCEOFFFEEDBACK = 128
STARTF_USESTDHANDLES = 256
STARTF_USEHOTKEY = 512
SHUTDOWN_NORETRY = 1
DONT_RESOLVE_DLL_REFERENCES = 1
LOAD_LIBRARY_AS_DATAFILE = 2
LOAD_WITH_ALTERED_SEARCH_PATH = 8
DDD_RAW_TARGET_PATH = 1
DDD_REMOVE_DEFINITION = 2
DDD_EXACT_MATCH_ON_REMOVE = 4
MOVEFILE_REPLACE_EXISTING = 1
MOVEFILE_COPY_ALLOWED = 2
MOVEFILE_DELAY_UNTIL_REBOOT = 4
MAX_COMPUTERNAME_LENGTH = 15
LOGON32_LOGON_INTERACTIVE = 2
LOGON32_LOGON_BATCH = 4
LOGON32_LOGON_SERVICE = 5
LOGON32_PROVIDER_DEFAULT = 0
LOGON32_PROVIDER_WINNT35 = 1
VER_PLATFORM_WIN32s = 0
VER_PLATFORM_WIN32_WINDOWS = 1
VER_PLATFORM_WIN32_NT = 2
TC_NORMAL = 0
TC_HARDERR = 1
TC_GP_TRAP = 2
TC_SIGNAL = 3
AC_LINE_OFFLINE = 0
AC_LINE_ONLINE = 1
AC_LINE_BACKUP_POWER = 2
AC_LINE_UNKNOWN = 255
BATTERY_FLAG_HIGH = 1
BATTERY_FLAG_LOW = 2
BATTERY_FLAG_CRITICAL = 4
BATTERY_FLAG_CHARGING = 8
BATTERY_FLAG_NO_BATTERY = 128
BATTERY_FLAG_UNKNOWN = 255
BATTERY_PERCENTAGE_UNKNOWN = 255
BATTERY_LIFE_UNKNOWN = -1
# Generated by h2py from d:\msdev\include\richedit.h
cchTextLimitDefault = 32767
WM_CONTEXTMENU = 123
WM_PRINTCLIENT = 792
EN_MSGFILTER = 1792
EN_REQUESTRESIZE = 1793
EN_SELCHANGE = 1794
EN_DROPFILES = 1795
EN_PROTECTED = 1796
EN_CORRECTTEXT = 1797
EN_STOPNOUNDO = 1798
EN_IMECHANGE = 1799
EN_SAVECLIPBOARD = 1800
EN_OLEOPFAILED = 1801
ENM_NONE = 0
ENM_CHANGE = 1
ENM_UPDATE = 2
ENM_SCROLL = 4
ENM_KEYEVENTS = 65536
ENM_MOUSEEVENTS = 131072
ENM_REQUESTRESIZE = 262144
ENM_SELCHANGE = 524288
ENM_DROPFILES = 1048576
ENM_PROTECTED = 2097152
ENM_CORRECTTEXT = 4194304
ENM_IMECHANGE = 8388608
ES_SAVESEL = 32768
ES_SUNKEN = 16384
ES_DISABLENOSCROLL = 8192
ES_SELECTIONBAR = 16777216
ES_EX_NOCALLOLEINIT = 16777216
ES_VERTICAL = 4194304
ES_NOIME = 524288
ES_SELFIME = 262144
ECO_AUTOWORDSELECTION = 1
ECO_AUTOVSCROLL = 64
ECO_AUTOHSCROLL = 128
ECO_NOHIDESEL = 256
ECO_READONLY = 2048
ECO_WANTRETURN = 4096
ECO_SAVESEL = 32768
ECO_SELECTIONBAR = 16777216
ECO_VERTICAL = 4194304
ECOOP_SET = 1
ECOOP_OR = 2
ECOOP_AND = 3
ECOOP_XOR = 4
WB_CLASSIFY = 3
WB_MOVEWORDLEFT = 4
WB_MOVEWORDRIGHT = 5
WB_LEFTBREAK = 6
WB_RIGHTBREAK = 7
WB_MOVEWORDPREV = 4
WB_MOVEWORDNEXT = 5
WB_PREVBREAK = 6
WB_NEXTBREAK = 7
PC_FOLLOWING = 1
PC_LEADING = 2
PC_OVERFLOW = 3
PC_DELIMITER = 4
WBF_WORDWRAP = 16
WBF_WORDBREAK = 32
WBF_OVERFLOW = 64
WBF_LEVEL1 = 128
WBF_LEVEL2 = 256
WBF_CUSTOM = 512
CFM_BOLD = 1
CFM_ITALIC = 2
CFM_UNDERLINE = 4
CFM_STRIKEOUT = 8
CFM_PROTECTED = 16
CFM_SIZE = -2147483648
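# Note: -2147483648 is 0x80000000 (bit 31) expressed as a signed 32-bit value.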
CFM_COLOR = 1073741824
CFM_FACE = 536870912
CFM_OFFSET = 268435456
CFM_CHARSET = 134217728
CFE_BOLD = 1
CFE_ITALIC = 2
CFE_UNDERLINE = 4
CFE_STRIKEOUT = 8
CFE_PROTECTED = 16
CFE_AUTOCOLOR = 1073741824
yHeightCharPtsMost = 1638
SCF_SELECTION = 1
SCF_WORD = 2
SF_TEXT = 1
SF_RTF = 2
SF_RTFNOOBJS = 3
SF_TEXTIZED = 4
SFF_SELECTION = 32768
SFF_PLAINRTF = 16384
MAX_TAB_STOPS = 32
lDefaultTab = 720
PFM_STARTINDENT = 1
PFM_RIGHTINDENT = 2
PFM_OFFSET = 4
PFM_ALIGNMENT = 8
PFM_TABSTOPS = 16
PFM_NUMBERING = 32
PFM_OFFSETINDENT = -2147483648
PFN_BULLET = 1
PFA_LEFT = 1
PFA_RIGHT = 2
PFA_CENTER = 3
WM_NOTIFY = 78
SEL_EMPTY = 0
SEL_TEXT = 1
SEL_OBJECT = 2
SEL_MULTICHAR = 4
SEL_MULTIOBJECT = 8
OLEOP_DOVERB = 1
CF_RTF = "Rich Text Format"
CF_RTFNOOBJS = "Rich Text Format Without Objects"
CF_RETEXTOBJ = "RichEdit Text and Objects"
# From wincon.h
RIGHT_ALT_PRESSED = 1 # the right alt key is pressed.
LEFT_ALT_PRESSED = 2 # the left alt key is pressed.
RIGHT_CTRL_PRESSED = 4 # the right ctrl key is pressed.
LEFT_CTRL_PRESSED = 8 # the left ctrl key is pressed.
SHIFT_PRESSED = 16 # the shift key is pressed.
NUMLOCK_ON = 32 # the numlock light is on.
SCROLLLOCK_ON = 64 # the scrolllock light is on.
CAPSLOCK_ON = 128 # the capslock light is on.
ENHANCED_KEY = 256 # the key is enhanced.
NLS_DBCSCHAR = 65536 # DBCS for JPN: SBCS/DBCS mode.
NLS_ALPHANUMERIC = 0 # DBCS for JPN: Alphanumeric mode.
NLS_KATAKANA = 131072 # DBCS for JPN: Katakana mode.
NLS_HIRAGANA = 262144 # DBCS for JPN: Hiragana mode.
NLS_ROMAN = 4194304 # DBCS for JPN: Roman/Noroman mode.
NLS_IME_CONVERSION = 8388608 # DBCS for JPN: IME conversion.
NLS_IME_DISABLE = 536870912 # DBCS for JPN: IME enable/disable.
FROM_LEFT_1ST_BUTTON_PRESSED = 1
RIGHTMOST_BUTTON_PRESSED = 2
FROM_LEFT_2ND_BUTTON_PRESSED = 4
FROM_LEFT_3RD_BUTTON_PRESSED = 8
FROM_LEFT_4TH_BUTTON_PRESSED = 16
CTRL_C_EVENT = 0
CTRL_BREAK_EVENT = 1
CTRL_CLOSE_EVENT = 2
CTRL_LOGOFF_EVENT = 5
CTRL_SHUTDOWN_EVENT = 6
MOUSE_MOVED = 1
DOUBLE_CLICK = 2
MOUSE_WHEELED = 4
#property sheet window messages from prsht.h
PSM_SETCURSEL = (WM_USER + 101)
PSM_REMOVEPAGE = (WM_USER + 102)
PSM_ADDPAGE = (WM_USER + 103)
PSM_CHANGED = (WM_USER + 104)
PSM_RESTARTWINDOWS = (WM_USER + 105)
PSM_REBOOTSYSTEM = (WM_USER + 106)
PSM_CANCELTOCLOSE = (WM_USER + 107)
PSM_QUERYSIBLINGS = (WM_USER + 108)
PSM_UNCHANGED = (WM_USER + 109)
PSM_APPLY = (WM_USER + 110)
PSM_SETTITLEA = (WM_USER + 111)
PSM_SETTITLEW = (WM_USER + 120)
PSM_SETWIZBUTTONS = (WM_USER + 112)
PSM_PRESSBUTTON = (WM_USER + 113)
PSM_SETCURSELID = (WM_USER + 114)
PSM_SETFINISHTEXTA = (WM_USER + 115)
PSM_SETFINISHTEXTW = (WM_USER + 121)
PSM_GETTABCONTROL = (WM_USER + 116)
PSM_ISDIALOGMESSAGE = (WM_USER + 117)
PSM_GETCURRENTPAGEHWND = (WM_USER + 118)
PSM_INSERTPAGE = (WM_USER + 119)
PSM_SETHEADERTITLEA = (WM_USER + 125)
PSM_SETHEADERTITLEW = (WM_USER + 126)
PSM_SETHEADERSUBTITLEA = (WM_USER + 127)
PSM_SETHEADERSUBTITLEW = (WM_USER + 128)
PSM_HWNDTOINDEX = (WM_USER + 129)
PSM_INDEXTOHWND = (WM_USER + 130)
PSM_PAGETOINDEX = (WM_USER + 131)
PSM_INDEXTOPAGE = (WM_USER + 132)
PSM_IDTOINDEX = (WM_USER + 133)
PSM_INDEXTOID = (WM_USER + 134)
PSM_GETRESULT = (WM_USER + 135)
PSM_RECALCPAGESIZES = (WM_USER + 136)
# GetUserNameEx/GetComputerNameEx
NameUnknown = 0
NameFullyQualifiedDN = 1
NameSamCompatible = 2
NameDisplay = 3
NameUniqueId = 6
NameCanonical = 7
NameUserPrincipal = 8
NameCanonicalEx = 9
NameServicePrincipal = 10
NameDnsDomain = 12
ComputerNameNetBIOS = 0
ComputerNameDnsHostname = 1
ComputerNameDnsDomain = 2
ComputerNameDnsFullyQualified = 3
ComputerNamePhysicalNetBIOS = 4
ComputerNamePhysicalDnsHostname = 5
ComputerNamePhysicalDnsDomain = 6
ComputerNamePhysicalDnsFullyQualified = 7
LWA_COLORKEY = 0x00000001
LWA_ALPHA = 0x00000002
ULW_COLORKEY = 0x00000001
ULW_ALPHA = 0x00000002
ULW_OPAQUE = 0x00000004
# WinDef.h
TRUE = 1
FALSE = 0
MAX_PATH = 260
# WinGDI.h
AC_SRC_OVER = 0
AC_SRC_ALPHA = 1
GRADIENT_FILL_RECT_H = 0
GRADIENT_FILL_RECT_V = 1
GRADIENT_FILL_TRIANGLE = 2
GRADIENT_FILL_OP_FLAG = 255
## flags used with Get/SetSystemFileCacheSize
MM_WORKING_SET_MAX_HARD_ENABLE = 1
MM_WORKING_SET_MAX_HARD_DISABLE = 2
MM_WORKING_SET_MIN_HARD_ENABLE = 4
MM_WORKING_SET_MIN_HARD_DISABLE = 8
## Flags for GetFinalPathNameByHandle
VOLUME_NAME_DOS = 0
VOLUME_NAME_GUID = 1
VOLUME_NAME_NT = 2
VOLUME_NAME_NONE = 4
FILE_NAME_NORMALIZED = 0
FILE_NAME_OPENED = 8
DEVICE_NOTIFY_WINDOW_HANDLE = 0x00000000
DEVICE_NOTIFY_SERVICE_HANDLE = 0x00000001
# From Dbt.h
# Generated by h2py from Dbt.h
WM_DEVICECHANGE = 0x0219
BSF_QUERY = 0x00000001
BSF_IGNORECURRENTTASK = 0x00000002
BSF_FLUSHDISK = 0x00000004
BSF_NOHANG = 0x00000008
BSF_POSTMESSAGE = 0x00000010
BSF_FORCEIFHUNG = 0x00000020
BSF_NOTIMEOUTIFNOTHUNG = 0x00000040
BSF_MSGSRV32ISOK = (-2147483648)
BSF_MSGSRV32ISOK_BIT = 31
BSM_ALLCOMPONENTS = 0x00000000
BSM_VXDS = 0x00000001
BSM_NETDRIVER = 0x00000002
BSM_INSTALLABLEDRIVERS = 0x00000004
BSM_APPLICATIONS = 0x00000008
DBT_APPYBEGIN = 0x0000
DBT_APPYEND = 0x0001
DBT_DEVNODES_CHANGED = 0x0007
DBT_QUERYCHANGECONFIG = 0x0017
DBT_CONFIGCHANGED = 0x0018
DBT_CONFIGCHANGECANCELED = 0x0019
DBT_MONITORCHANGE = 0x001B
DBT_SHELLLOGGEDON = 0x0020
DBT_CONFIGMGAPI32 = 0x0022
DBT_VXDINITCOMPLETE = 0x0023
DBT_VOLLOCKQUERYLOCK = 0x8041
DBT_VOLLOCKLOCKTAKEN = 0x8042
DBT_VOLLOCKLOCKFAILED = 0x8043
DBT_VOLLOCKQUERYUNLOCK = 0x8044
DBT_VOLLOCKLOCKRELEASED = 0x8045
DBT_VOLLOCKUNLOCKFAILED = 0x8046
LOCKP_ALLOW_WRITES = 0x01
LOCKP_FAIL_WRITES = 0x00
LOCKP_FAIL_MEM_MAPPING = 0x02
LOCKP_ALLOW_MEM_MAPPING = 0x00
LOCKP_USER_MASK = 0x03
LOCKP_LOCK_FOR_FORMAT = 0x04
LOCKF_LOGICAL_LOCK = 0x00
LOCKF_PHYSICAL_LOCK = 0x01
DBT_NO_DISK_SPACE = 0x0047
DBT_LOW_DISK_SPACE = 0x0048
DBT_CONFIGMGPRIVATE = 0x7FFF
DBT_DEVICEARRIVAL = 0x8000
DBT_DEVICEQUERYREMOVE = 0x8001
DBT_DEVICEQUERYREMOVEFAILED = 0x8002
DBT_DEVICEREMOVEPENDING = 0x8003
DBT_DEVICEREMOVECOMPLETE = 0x8004
DBT_DEVICETYPESPECIFIC = 0x8005
DBT_CUSTOMEVENT = 0x8006
DBT_DEVTYP_OEM = 0x00000000
DBT_DEVTYP_DEVNODE = 0x00000001
DBT_DEVTYP_VOLUME = 0x00000002
DBT_DEVTYP_PORT = 0x00000003
DBT_DEVTYP_NET = 0x00000004
DBT_DEVTYP_DEVICEINTERFACE = 0x00000005
DBT_DEVTYP_HANDLE = 0x00000006
DBTF_MEDIA = 0x0001
DBTF_NET = 0x0002
DBTF_RESOURCE = 0x00000001
DBTF_XPORT = 0x00000002
DBTF_SLOWNET = 0x00000004
DBT_VPOWERDAPI = 0x8100
DBT_USERDEFINED = 0xFFFF
TCIF_TEXT = 1
TCIF_IMAGE = 2
TCIF_RTLREADING = 4
TCIF_PARAM = 8
TCM_FIRST = 0x1300
TCM_INSERTITEMA = (TCM_FIRST+7)
TCM_INSERTITEMW = (TCM_FIRST+62)
TCM_INSERTITEM = TCM_INSERTITEMA
TCM_ADJUSTRECT = (TCM_FIRST+40)
TCM_GETCURSEL = (TCM_FIRST+11)
TCM_SETCURSEL = (TCM_FIRST+12)
TCM_GETITEMA = (TCM_FIRST+5)
TCM_GETITEMW = (TCM_FIRST+60)
TCM_GETITEM = TCM_GETITEMA
CCM_FIRST = 0x2000 # Common control shared messages
CCM_SETBKCOLOR = (CCM_FIRST + 1)
PBM_SETRANGE = (WM_USER+1)
PBM_SETPOS = (WM_USER+2)
PBM_DELTAPOS = (WM_USER+3)
PBM_SETSTEP = (WM_USER+4)
PBM_STEPIT = (WM_USER+5)
PBM_SETRANGE32 = (WM_USER+6)
PBM_GETRANGE = (WM_USER+7)
PBM_GETPOS = (WM_USER+8)
PBM_SETBARCOLOR = (WM_USER+9)
PBM_SETBKCOLOR = CCM_SETBKCOLOR
HCURSOR = ctypes.wintypes.HICON
WNDPROC = ctypes.WINFUNCTYPE(ctypes.c_long, ctypes.c_int, ctypes.c_uint, ctypes.c_int, ctypes.c_int)  # WINFUNCTYPE lives in ctypes, not ctypes.wintypes
class PAINTSTRUCT(ctypes.Structure):
_fields_ = [("hdc", ctypes.wintypes.HDC),
("fErase", ctypes.wintypes.BOOL),
("rcPaint", ctypes.wintypes.RECT),
("fRestore", ctypes.wintypes.BOOL),
("fIncUpdate", ctypes.wintypes.BOOL),
("rgbReserved", ctypes.wintypes.LPCSTR * 32)]
class TCITEM(ctypes.Structure):
_fields_ = [("mask", ctypes.wintypes.UINT),
("dwState", ctypes.wintypes.DWORD),
("dwStateMask", ctypes.wintypes.DWORD),
("pszText", ctypes.wintypes.LPWSTR),
("cchTextMax", INT),
("iImage", INT),
("lParam", ctypes.wintypes.LPARAM)]
class WNDCLASSEX(ctypes.Structure):
_fields_ = [("cbSize", ctypes.c_uint),
('style', ctypes.c_uint),
('lpfnWndProc', WNDPROC),
('cbClsExtra', ctypes.c_int),
('cbWndExtra', ctypes.c_int),
('hInstance', ctypes.c_int),
('hIcon', ctypes.c_int),
('hCursor', ctypes.c_int),
('hbrBackground', ctypes.c_int),
('lpszMenuName', ctypes.c_wchar_p),
('lpszClassName', ctypes.c_wchar_p),
("hIconSm", ctypes.c_int)]
def __init__(self,
wndProc,
className,
style=None,
clsExtra=0,
wndExtra=0,
menuName="",
instance=None,
icon=None,
icon_sm=None,
cursor=None,
background=None,
):
if style is None:
style = CS_HREDRAW | CS_VREDRAW
        if not instance:
            instance = ctypes.windll.kernel32.GetModuleHandleW(ctypes.c_int(NULL))
        if not icon:
            icon = ctypes.windll.user32.LoadIconW(ctypes.c_int(NULL), ctypes.c_int(IDI_APPLICATION))
        if not icon_sm:
            icon_sm = icon
        if not cursor:
            cursor = ctypes.windll.user32.LoadCursorW(ctypes.c_int(NULL), ctypes.c_int(IDC_ARROW))
        if not background:
            background = COLOR_WINDOW + 1  # hbrBackground takes a system color index + 1 (a stock brush, e.g. GetStockObject(WHITE_BRUSH), would also work)
self.cbSize = ctypes.sizeof(self)
self.lpfnWndProc = WNDPROC(wndProc)
self.style = style
self.cbClsExtra = clsExtra
self.cbWndExtra = wndExtra
self.hInstance = instance
self.hIcon = icon
self.hIconSm = icon_sm
self.hCursor = cursor
self.hbrBackground = background
self.lpszMenuName = unicode(menuName)
self.lpszClassName = unicode(className)
def ErrorIfZero(handle):
if handle == 0:
        raise ctypes.WinError()  # WinError lives in ctypes, not ctypes.wintypes
else:
return handle
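# Using ErrorIfZero as the restype makes ctypes run it on every return value,
# turning NULL handles into raised WinErrors (the older restype-callable
# protocol; an errcheck callback is the modern equivalent).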
CreateWindowEx = ctypes.windll.user32.CreateWindowExW
CreateWindowEx.restype = ErrorIfZero
CreateWindowEx.argtypes = [ctypes.c_int,
ctypes.c_wchar_p,
ctypes.c_wchar_p,
ctypes.c_int,
ctypes.c_int,
ctypes.c_int,
ctypes.c_int,
ctypes.c_int,
ctypes.c_int,
ctypes.c_int,
ctypes.c_int,
ctypes.c_int]
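# Minimal usage sketch (illustrative only; assumes a window class was registered
# beforehand and that the WS_*/CW_* constants are defined elsewhere in this module):
#   hwnd = CreateWindowEx(0, u"MyClass", u"My Window", WS_OVERLAPPEDWINDOW,
#                         CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT,
#                         NULL, NULL, instance, NULL)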
SELF_HWND = object() # on instantiation the value has to be replaced with self._hwnd
PARENT_HWND = object() # on instantiation the value has to be replaced with self.parent._hwnd
APPLICATION_HINSTANCE = object() # on instantiation the value has to be replaced with self.application._hinstance
def RGB(r,g,b):
return r | (g<<8) | (b<<16)
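# e.g. RGB(255, 0, 0) == 0xFF: red occupies the low byte, giving the Windows
# COLORREF 0x00bbggrr layout.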
class LOGFONT(ctypes.Structure):
_fields_ = [("lfHeight", ctypes.wintypes.LONG),
("lfWidth", ctypes.wintypes.LONG),
("lfEscapement", ctypes.wintypes.LONG),
("lfOrientation", ctypes.wintypes.LONG),
("lfWeight", ctypes.wintypes.LONG),
("lfItalic", ctypes.wintypes.BYTE),
("lfUnderline", ctypes.wintypes.BYTE),
("lfStrikeOut", ctypes.wintypes.BYTE),
("lfCharSet", ctypes.wintypes.BYTE),
("lfOutPrecision", ctypes.wintypes.BYTE),
("lfClipPrecision", ctypes.wintypes.BYTE),
("lfQuality", ctypes.wintypes.BYTE),
("lfPitchAndFamily", ctypes.wintypes.BYTE),
("lfFaceName", ctypes.c_wchar_p * LF_FACESIZE)]
class LUID(ctypes.Structure):
_fields_ = [
# C:/PROGRA~1/gccxml/bin/Vc6/Include/winnt.h 394
('LowPart', ctypes.wintypes.DWORD),
('HighPart', ctypes.wintypes.LONG),
]
class LUID_AND_ATTRIBUTES(ctypes.Structure):
_fields_ = [
# C:/PROGRA~1/gccxml/bin/Vc6/Include/winnt.h 3241
('Luid', LUID),
('Attributes', ctypes.wintypes.DWORD),
]
class TOKEN_PRIVILEGES(ctypes.Structure):
_fields_ = [
# C:/PROGRA~1/gccxml/bin/Vc6/Include/winnt.h 4188
('PrivilegeCount', ctypes.wintypes.DWORD),
('Privileges', LUID_AND_ATTRIBUTES * 1),
]
class LARGE_INTEGER(ctypes.Structure):
_fields_ = [
('QuadPart', ctypes.c_longlong),
]
CreateFileW = ctypes.windll.kernel32.CreateFileW
CloseHandle = ctypes.windll.kernel32.CloseHandle
SetFilePointerEx = ctypes.windll.kernel32.SetFilePointerEx
SetEndOfFile = ctypes.windll.kernel32.SetEndOfFile
GetVersion = ctypes.windll.kernel32.GetVersion
WriteFile = ctypes.windll.kernel32.WriteFile
OpenProcessToken = ctypes.windll.Advapi32.OpenProcessToken
GetCurrentProcess = ctypes.windll.kernel32.GetCurrentProcess
LookupPrivilegeValue = ctypes.windll.Advapi32.LookupPrivilegeValueW
AdjustTokenPrivileges = ctypes.windll.Advapi32.AdjustTokenPrivileges
SE_MANAGE_VOLUME_NAME = u"SeManageVolumePrivilege"
| gpl-2.0 | 595,423,973,199,668,400 | 23.155288 | 112 | 0.723723 | false |
ric2b/Vivaldi-browser | chromium/build/android/gyp/find_sun_tools_jar.py | 10 | 1568 | #!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This finds the java distribution's tools.jar and copies it somewhere.
"""
import argparse
import os
import re
import shutil
import sys
from util import build_utils
RT_JAR_FINDER = re.compile(r'\[Opened (.*)/jre/lib/rt.jar\]')
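# Example of a line this matches in `java -verbose` output (path is illustrative):
#   [Opened /usr/lib/jvm/java-7-openjdk-amd64/jre/lib/rt.jar]
# group(1) then yields the JDK home, from which lib/tools.jar is resolved below.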
def main():
parser = argparse.ArgumentParser(description='Find Sun Tools Jar')
parser.add_argument('--depfile',
help='Path to depfile. This must be specified as the '
'action\'s first output.')
parser.add_argument('--output', required=True)
args = parser.parse_args()
sun_tools_jar_path = FindSunToolsJarPath()
if sun_tools_jar_path is None:
raise Exception("Couldn\'t find tools.jar")
# Using copyfile instead of copy() because copy() calls copymode()
# We don't want the locked mode because we may copy over this file again
shutil.copyfile(sun_tools_jar_path, args.output)
if args.depfile:
build_utils.WriteDepfile(args.depfile, args.output, [sun_tools_jar_path])
def FindSunToolsJarPath():
# This works with at least openjdk 1.6, 1.7 and sun java 1.6, 1.7
stdout = build_utils.CheckOutput(
["java", "-verbose", "-version"], print_stderr=False)
for ln in stdout.splitlines():
match = RT_JAR_FINDER.match(ln)
if match:
return os.path.join(match.group(1), 'lib', 'tools.jar')
return None
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | -1,896,300,772,895,182,600 | 28.037037 | 77 | 0.678571 | false |
saycel/saycel | documentation/resources/setup.py | 1 | 1956 | #!/usr/bin/fab -f
"""
Rhizomatica BTS Toolkit
Automate maintenance on the BTS
"""
import sys
from fabric.api import env, run, task
def ssh():
env.user = 'root'
env.password = ''
@task
def setup():
ssh()
run('sbts2050-util sbts2050-pwr-enable 1 1 0')
run('sed -i s/NO_START=0/NO_START=1/ /etc/default/osmo-nitb')
run('mv /etc/rc5.d/S90gprs.sh /home/root/ || true')
run('mv /etc/rc5.d/S30osmo-bsc /etc/rc5.d/K30osmo-bsc || true')
run('mv /etc/rc5.d/S30osmo-bsc-mgcp /etc/rc5.d/K30osmo-bsc-mgcp || true')
run('mv /etc/rc5.d/S30osmo-nitb /etc/rc5.d/K30osmo-nitb || true')
run('sed -i -e "s/sysmobts-2050\/201208\//sysmobts-2050\/201208-testing\//g" /etc/opkg/*')
run('opkg remove openggsn osmo-sgsn lcr')
run('opkg update')
run('opkg upgrade || true')
run('opkg upgrade')
trx_nr = int(run('sysmobts-util trx-nr'))
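    # sysmobts-util reports the unit's TRX number: 0 on the master and nonzero on
    # the slave (see network() below); it also becomes the OML unit id in osmo().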
osmo(trx_nr)
network(trx_nr)
def osmo(trx_nr):
etc_osmo_bts = """
!
! OsmoBTS () configuration saved from vty
!!
!
log stderr
logging color 0
logging timestamp 0
logging level all everything
logging level rsl info
logging level oml info
logging level rll notice
logging level rr notice
logging level meas notice
logging level pag info
logging level l1c info
logging level l1p info
logging level dsp debug
logging level abis notice
!
line vty
no login
!
bts 0
band 900
ipa unit-id 1000 %(trx_nr)d
oml remote-ip 172.16.0.1
""" % {'trx_nr': trx_nr}
run("echo '%s' > /etc/osmocom/osmo-bts.cfg" % (etc_osmo_bts,))
def network(trx_nr):
if trx_nr == 0:
ip = "172.16.0.11" # master
else:
ip = "172.16.0.12" # slave
interfaces = """
auto lo
iface lo inet loopback
auto eth0
iface eth0 inet static
address %s
netmask 255.255.255.0
""" % (ip,)
run("echo '%s' > /etc/network/interfaces" % (interfaces,))
@task(default=True)
def help():
print "%s -H bts_ip setup" % (sys.argv[0],)
| agpl-3.0 | 3,793,276,768,294,078,000 | 20.977528 | 94 | 0.630879 | false |
jldodds/s2n | tests/integration/s2n_handshake_test_gnutls.py | 1 | 8844 | #
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
#
"""
Simple handshake tests using gnutls-cli
"""
import argparse
import os
import sys
import ssl
import socket
import subprocess
import itertools
import multiprocessing
from os import environ
from multiprocessing.pool import ThreadPool
from s2n_test_constants import *
def try_gnutls_handshake(endpoint, port, priority_str, mfl_extension_test, enter_fips_mode=False):
# Fire up s2nd
s2nd_cmd = ["../../bin/s2nd", str(endpoint), str(port)]
s2nd_ciphers = "test_all"
if enter_fips_mode == True:
s2nd_ciphers = "test_all_fips"
s2nd_cmd.append("--enter-fips-mode")
s2nd_cmd.append("-c")
s2nd_cmd.append(s2nd_ciphers)
if mfl_extension_test:
s2nd_cmd.append("--enable-mfl")
s2nd = subprocess.Popen(s2nd_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
# Make sure it's running
s2nd.stdout.readline()
gnutls_cmd = ["gnutls-cli", "--priority=" + priority_str,"--insecure", "-p " + str(port), str(endpoint)]
if mfl_extension_test:
gnutls_cmd.append("--recordsize=" + str(mfl_extension_test))
# Fire up gnutls-cli, use insecure since s2nd is using a dummy cert
gnutls_cli = subprocess.Popen(gnutls_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
    # Write the priority str towards s2nd. Prepend the 's2n' string to make sure we don't accidentally match something
    # in the gnutls-cli handshake output
written_str = "s2n" + priority_str
gnutls_cli.stdin.write((written_str + "\n").encode("utf-8"))
gnutls_cli.stdin.flush()
# Read it
found = 0
for line in range(0, 50):
output = s2nd.stdout.readline().decode("utf-8")
if output.strip() == written_str:
found = 1
break
if found == 0:
return -1
# Write the cipher name from s2n
s2nd.stdin.write((written_str + "\n").encode("utf-8"))
s2nd.stdin.flush()
found = 0
for line in range(0, 50):
output = gnutls_cli.stdout.readline().decode("utf-8")
if output.strip() == written_str:
found = 1
break
if found == 0:
return -1
gnutls_cli.kill()
gnutls_cli.wait()
s2nd.kill()
s2nd.wait()
return 0
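# Note: rather than parsing gnutls-cli's handshake banner, try_gnutls_handshake()
# only reports success if a sentinel string round-trips through both directions
# of the negotiated TLS channel (client -> s2nd, then s2nd -> client).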
def handshake(endpoint, port, cipher_name, ssl_version, priority_str, digests, mfl_extension_test, fips_mode):
ret = try_gnutls_handshake(endpoint, port, priority_str, mfl_extension_test, fips_mode)
prefix = ""
if mfl_extension_test:
prefix = "MFL: %-10s Cipher: %-10s Vers: %-10s ... " % (mfl_extension_test, cipher_name, S2N_PROTO_VERS_TO_STR[ssl_version])
elif len(digests) == 0:
prefix = "Cipher: %-30s Vers: %-10s ... " % (cipher_name, S2N_PROTO_VERS_TO_STR[ssl_version])
else:
# strip the first nine bytes from each name ("RSA-SIGN-")
digest_string = ':'.join([x[9:] for x in digests])
prefix = "Digests: %-40s Vers: %-10s ... " % (digest_string, S2N_PROTO_VERS_TO_STR[ssl_version])
suffix = ""
if ret == 0:
if sys.stdout.isatty():
suffix = "\033[32;1mPASSED\033[0m"
else:
suffix = "PASSED"
else:
if sys.stdout.isatty():
suffix = "\033[31;1mFAILED\033[0m"
else:
suffix = "FAILED"
print(prefix + suffix)
return ret
def create_thread_pool():
threadpool_size = multiprocessing.cpu_count() * 2 #Multiply by 2 since performance improves slightly if CPU has hyperthreading
print("\n\tCreating ThreadPool of size: " + str(threadpool_size))
threadpool = ThreadPool(processes=threadpool_size)
return threadpool
def main():
parser = argparse.ArgumentParser(description='Runs TLS server integration tests against s2nd using gnutls-cli')
parser.add_argument('host', help='The host for s2nd to bind to')
parser.add_argument('port', type=int, help='The port for s2nd to bind to')
parser.add_argument('--libcrypto', default='openssl-1.1.0', choices=['openssl-1.0.2', 'openssl-1.0.2-fips', 'openssl-1.1.0', 'openssl-1.1.x-master', 'libressl'],
help="""The Libcrypto that s2n was built with. s2n supports different cipher suites depending on
libcrypto version. Defaults to openssl-1.1.0.""")
args = parser.parse_args()
# Retrieve the test ciphers to use based on the libcrypto version s2n was built with
test_ciphers = S2N_LIBCRYPTO_TO_TEST_CIPHERS[args.libcrypto]
host = args.host
port = args.port
fips_mode = False
if environ.get("S2N_TEST_IN_FIPS_MODE") is not None:
fips_mode = True
print("\nRunning s2nd in FIPS mode.")
print("\nRunning GnuTLS handshake tests with: " + os.popen('gnutls-cli --version | grep -w gnutls-cli').read())
for ssl_version in [S2N_SSLv3, S2N_TLS10, S2N_TLS11, S2N_TLS12]:
if ssl_version == S2N_SSLv3 and fips_mode == True:
# FIPS does not permit the use of SSLv3
continue
print("\n\tTesting ciphers using client version: " + S2N_PROTO_VERS_TO_STR[ssl_version])
threadpool = create_thread_pool()
port_offset = 0
results = []
for cipher in test_ciphers:
# Use the Openssl name for printing
cipher_name = cipher.openssl_name
cipher_priority_str = cipher.gnutls_priority_str
cipher_vers = cipher.min_tls_vers
if ssl_version < cipher_vers:
continue
# Add the SSL version to make the cipher priority string fully qualified
complete_priority_str = cipher_priority_str + ":+" + S2N_PROTO_VERS_TO_GNUTLS[ssl_version] + ":+SIGN-ALL"
async_result = threadpool.apply_async(handshake, (host, port + port_offset, cipher_name, ssl_version, complete_priority_str, [], 0, fips_mode))
port_offset += 1
results.append(async_result)
threadpool.close()
threadpool.join()
for async_result in results:
if async_result.get() != 0:
return -1
    # Produce permutations of every accepted signature algorithm in every possible order
signatures = ["SIGN-RSA-SHA1", "SIGN-RSA-SHA224", "SIGN-RSA-SHA256", "SIGN-RSA-SHA384", "SIGN-RSA-SHA512"];
for size in range(1, len(signatures) + 1):
print("\n\tTesting ciphers using signature preferences of size: " + str(size))
threadpool = create_thread_pool()
port_offset = 0
results = []
for permutation in itertools.permutations(signatures, size):
# Try an ECDHE cipher suite and a DHE one
for cipher in filter(lambda x: x.openssl_name == "ECDHE-RSA-AES128-GCM-SHA256" or x.openssl_name == "DHE-RSA-AES128-GCM-SHA256", ALL_TEST_CIPHERS):
complete_priority_str = cipher.gnutls_priority_str + ":+VERS-TLS1.2:+" + ":+".join(permutation)
async_result = threadpool.apply_async(handshake,(host, port + port_offset, cipher.openssl_name, S2N_TLS12, complete_priority_str, permutation, 0, fips_mode))
port_offset += 1
results.append(async_result)
threadpool.close()
threadpool.join()
for async_result in results:
if async_result.get() != 0:
return -1
print("\n\tTesting handshakes with Max Fragment Length Extension")
for ssl_version in [S2N_TLS10, S2N_TLS11, S2N_TLS12]:
print("\n\tTesting Max Fragment Length Extension using client version: " + S2N_PROTO_VERS_TO_STR[ssl_version])
threadpool = create_thread_pool()
port_offset = 0
results = []
for mfl_extension_test in [512, 1024, 2048, 4096]:
cipher = test_ciphers[0]
complete_priority_str = cipher.gnutls_priority_str + ":+" + S2N_PROTO_VERS_TO_GNUTLS[S2N_TLS10] + ":+" + ":+".join(permutation)
async_result = threadpool.apply_async(handshake,(host, port + port_offset, cipher.openssl_name, ssl_version, complete_priority_str, [], mfl_extension_test, fips_mode))
port_offset += 1
results.append(async_result)
threadpool.close()
threadpool.join()
for async_result in results:
if async_result.get() != 0:
return -1
if __name__ == "__main__":
sys.exit(main())
| apache-2.0 | 4,389,911,156,309,983,000 | 38.837838 | 179 | 0.633424 | false |
pli1988/onsets | scripts/onsetGround_final.py | 1 | 3738 | from __future__ import division
import glob
import os
import librosa
import medleydb as mdb
import numpy as np
import scipy
import itertools
import jams
from onset.util import *
from onset.onsets import *
def main():
# get list of files on medleyDB path and load them
trackList = os.listdir(mdb.AUDIO_PATH)
trackList = [t for t in trackList if t[0]!='.']
mtrack_generator = mdb.load_multitracks(trackList)
# onset parameters
sr = 44100
gainWindow = int(sr*0.25)
temporalThreshold = sr*0.05
loudnessThreshold = -20
baseOutPath = './OnsetAnnotations_truth'
# annotation metadata
g ='Gain Window: ' + str(gainWindow/sr) + 'ms'
t = 'Temporal Threshold: ' + str(temporalThreshold/sr) + 'ms'
l = 'Loudness Threshold: ' + str(loudnessThreshold) + 'dB'
annotationRules = "\n".join([g,t,l])
# iterate through tracks
for track in mtrack_generator:
outPath = os.path.join(baseOutPath,track.track_id+'.jams')
if not os.path.exists(outPath):
# only compute annotations for tracks without bleed
if track.has_bleed == False:
                ### Load Things ###
# data paths
mixedAudioPath = track.mix_path
stemsPathList = track.stem_filepaths()
# audio
mixAudio, stemsAudio = loadAudio(mixedAudioPath,stemsPathList, sr = sr)
# track-level annotations
instList = track.stem_instruments
stemActivations = np.array(track.stem_activations)
### Gain Estimation ###
# estimate gain per stem
gain = estimateGain(mixAudio, stemsAudio, gainWindow, int(gainWindow/2))
# weight stem audio by gain
gainWeightedStem = np.array(gain)*stemsAudio
### Onsets ###
# compute onset strength envelopes
onsetEnvList_stem = [librosa.onset.onset_strength(y=s, sr=sr) for s in gainWeightedStem.T]
onsetEnv_mix = librosa.onset.onset_strength(y=mixAudio, sr=sr)
# find peaks of individual stem onset envelopes
stemPeakList = findPeaksOnStem(onsetEnvList_stem)
# merge stem onset
mergedOnset, sourceList, powerStem, powerMix = mergeOnset_greedy(gainWeightedStem, stemPeakList,
temporalThreshold = temporalThreshold,
loudnessThreshold = loudnessThreshold)
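                # mergedOnset holds onset sample indices pooled across stems;
                # sourceList[i] lists the stem indices (into instList) whose peaks
                # were merged into onset i, i.e. peaks within temporalThreshold
                # samples of each other and above loudnessThreshold dB.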
# computations for annotation
onsetTime = [s/sr for s in mergedOnset]
peakInstrument = [[instList[i] for i in s] for s in sourceList]
polyphony = [sizePolyphony(stemActivations, t) for t in onsetTime]
j = createAnnotation(onsetTime, peakInstrument, polyphony, annotationRules, powerStem, powerMix)
j.file_metadata.artist = track.artist
j.file_metadata.title = track.title
j.file_metadata.duration = len(mixAudio)/sr
metaData = {}
metaData['genre'] = track.genre
metaData['is_instrumental'] = track.is_instrumental
j.sandbox = metaData
#j.save(outPath)
print track.track_id
if __name__ == '__main__':
# TODO: take arguments from command line
main()
| gpl-3.0 | 5,322,627,742,839,144,000 | 31.789474 | 113 | 0.540128 | false |
person142/scipy | scipy/io/matlab/tests/test_pathological.py | 21 | 1059 | """ Test reading of files not conforming to matlab specification
We try and read any file that matlab reads, these files included
"""
from os.path import dirname, join as pjoin
from numpy.testing import assert_
from pytest import raises as assert_raises
from scipy.io.matlab.mio import loadmat
TEST_DATA_PATH = pjoin(dirname(__file__), 'data')
def test_multiple_fieldnames():
# Example provided by Dharhas Pothina
# Extracted using mio5.varmats_from_mat
multi_fname = pjoin(TEST_DATA_PATH, 'nasty_duplicate_fieldnames.mat')
vars = loadmat(multi_fname)
funny_names = vars['Summary'].dtype.names
assert_(set(['_1_Station_Q', '_2_Station_Q',
'_3_Station_Q']).issubset(funny_names))
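    # loadmat has to disambiguate the duplicated 'Station_Q' fieldnames, which it
    # does by prefixing an index, hence the _1_/_2_/_3_ names checked above.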
def test_malformed1():
# Example from gh-6072
# Contains malformed header data, which previously resulted into a
# buffer overflow.
#
# Should raise an exception, not segfault
fname = pjoin(TEST_DATA_PATH, 'malformed1.mat')
with open(fname, 'rb') as f:
assert_raises(ValueError, loadmat, f)
| bsd-3-clause | 3,660,350,532,388,704,000 | 31.090909 | 73 | 0.693107 | false |
RondaStrauch/landlab | landlab/components/stream_power/examples/perturb_sed_flux_dep.py | 6 | 4719 | # -*- coding: utf-8 -*-
from __future__ import print_function
from six.moves import range
from landlab.components.flow_routing import FlowRouter
from landlab.components.stream_power import SedDepEroder
from landlab import ModelParameterDictionary
from landlab.plot import imshow
from landlab.plot.video_out import VideoPlotter
from landlab.plot import channel_profile as prf
from landlab.plot.imshow import imshow_node_grid
from pylab import colorbar, show, plot, loglog, figure, savefig, close, ylim
from landlab import RasterModelGrid
import numpy as np
import pylab
from copy import copy, deepcopy
from time import time
#get the needed properties to build the grid:
input_file = './sed_dep_NMGparams2.txt'
#####remember to change the fixed y-axis dimension in the plots!!
y_max = 200
make_output_plots=True
out_interval=15 #was 15
inputs = ModelParameterDictionary(input_file)
nrows = inputs.read_int('nrows')
ncols = inputs.read_int('ncols')
dx = inputs.read_float('dx')
uplift_rate = inputs.read_float('uplift_rate')
runtime = inputs.read_float('total_time')
dt = inputs.read_float('dt')
nt = int(runtime//dt)
uplift_per_step = uplift_rate * dt
print('uplift per step: ', uplift_per_step)
# check we have a plausible grid
mg = RasterModelGrid(nrows, ncols, dx)
assert mg.number_of_nodes == nrows*ncols
assert mg.node_spacing == dx
# Display a message
print('Running ...')
# instantiate the components:
fr = FlowRouter(mg)
sde = SedDepEroder(mg, input_file)
# don't allow overwriting of these, just in case
try:
x_profiles
except NameError:
x_profiles = []
z_profiles = []
S_profiles = []
A_profiles = []
# plot init conds
if make_output_plots:
mg = fr.route_flow(grid=mg)
pylab.figure('long_profile_anim')
ylim([0, y_max])
prf.analyze_channel_network_and_plot(mg)
savefig('0profile_anim_init.png')
close('long_profile_anim')
(profile_IDs, dists_upstr) = prf.analyze_channel_network_and_plot(mg)
start_node = [profile_IDs[0]]
time_on = time()
#perform the loops:
for i in range(nt):
#print 'loop ', i
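    # uplift is applied to core nodes only, so the boundary nodes stay at base level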
mg.at_node['topographic__elevation'][mg.core_nodes] += uplift_per_step
mg = fr.route_flow()
#mg.calc_grad_across_cell_faces(mg.at_node['topographic__elevation'])
#neighbor_slopes = mg.calc_grad_along_node_links(mg.at_node['topographic__elevation'])
#mean_slope = np.mean(np.fabs(neighbor_slopes),axis=1)
#max_slope = np.max(np.fabs(neighbor_slopes),axis=1)
#mg,_,capacity_out = tl.erode(mg,dt,slopes_at_nodes='topographic__steepest_slope')
#mg,_,capacity_out = tl.erode(mg,dt,slopes_at_nodes=max_slope)
mg_copy = deepcopy(mg)
mg,_ = sde.erode(mg,dt)
#print sde.iterations_in_dt
#print 'capacity ', np.amax(capacity_out[mg.core_nodes])
#print 'rel sed ', np.nanmax(sed_in[mg.core_nodes]/capacity_out[mg.core_nodes])
if i%out_interval == 0:
print('loop ', i)
print('max_slope', np.amax(mg.at_node['topographic__steepest_slope'][mg.core_nodes]))
pylab.figure("long_profiles")
profile_IDs = prf.channel_nodes(mg, mg.at_node['topographic__steepest_slope'],
mg.at_node['drainage_area'], mg.at_node['flow__receiver_node'])
dists_upstr = prf.get_distances_upstream(mg, len(mg.at_node['topographic__steepest_slope']),
profile_IDs, mg.at_node['flow__link_to_receiver_node'])
prf.plot_profiles(dists_upstr, profile_IDs, mg.at_node['topographic__elevation'])
if i%out_interval == 0:
x_profiles.append(dists_upstr)
z_profiles.append(mg.at_node['topographic__elevation'][profile_IDs])
S_profiles.append(mg.at_node['topographic__steepest_slope'][profile_IDs])
A_profiles.append(mg.at_node['drainage_area'][profile_IDs])
if make_output_plots:
pylab.figure('long_profile_anim')
            #prf.plot_profiles(dists_upstr, profile_IDs, mg.at_node['topographic__elevation'])
            plot(dists_upstr, mg.at_node['topographic__elevation'][profile_IDs])
ylim([0,y_max])
if i==0:
savefig('profile_anim_000'+str(i)+'.png')
elif i<100:
savefig('profile_anim_00'+str(i)+'.png')
elif i<1000:
savefig('profile_anim_0'+str(i)+'.png')
else:
savefig('profile_anim_'+str(i)+'.png')
close('long_profile_anim')
#vid.add_frame(mg, 'topographic__elevation')
print('Completed the simulation. Plotting...')
time_off = time()
#Finalize and plot
elev = mg['node']['topographic__elevation']
#imshow.imshow_node_grid(mg, elev)
print('Done.')
print('Time: ', time_off-time_on)
#pylab.show()
#vid.produce_video()
| mit | 1,896,255,785,487,342,600 | 33.955556 | 103 | 0.663064 | false |
Salat-Cx65/python-for-android | python3-alpha/python3-src/Lib/test/test_marshal.py | 48 | 8676 | #!/usr/bin/env python3
from test import support
import marshal
import sys
import unittest
import os
class HelperMixin:
def helper(self, sample, *extra):
new = marshal.loads(marshal.dumps(sample, *extra))
self.assertEqual(sample, new)
try:
with open(support.TESTFN, "wb") as f:
marshal.dump(sample, f, *extra)
with open(support.TESTFN, "rb") as f:
new = marshal.load(f)
self.assertEqual(sample, new)
finally:
support.unlink(support.TESTFN)
class IntTestCase(unittest.TestCase, HelperMixin):
def test_ints(self):
# Test the full range of Python ints.
n = sys.maxsize
while n:
for expected in (-n, n):
self.helper(expected)
n = n >> 1
def test_int64(self):
# Simulate int marshaling on a 64-bit box. This is most interesting if
# we're running the test on a 32-bit box, of course.
def to_little_endian_string(value, nbytes):
b = bytearray()
for i in range(nbytes):
b.append(value & 0xff)
value >>= 8
return b
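        # e.g. to_little_endian_string(1, 8) -> bytearray(b'\x01' + b'\x00' * 7),
        # the little-endian payload that follows the 'I' (64-bit int) type code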
maxint64 = (1 << 63) - 1
minint64 = -maxint64-1
for base in maxint64, minint64, -maxint64, -(minint64 >> 1):
while base:
s = b'I' + to_little_endian_string(base, 8)
got = marshal.loads(s)
self.assertEqual(base, got)
if base == -1: # a fixed-point for shifting right 1
base = 0
else:
base >>= 1
def test_bool(self):
for b in (True, False):
self.helper(b)
class FloatTestCase(unittest.TestCase, HelperMixin):
def test_floats(self):
# Test a few floats
small = 1e-25
n = sys.maxsize * 3.7e250
while n > small:
for expected in (-n, n):
self.helper(float(expected))
n /= 123.4567
f = 0.0
s = marshal.dumps(f, 2)
got = marshal.loads(s)
self.assertEqual(f, got)
# and with version <= 1 (floats marshalled differently then)
s = marshal.dumps(f, 1)
got = marshal.loads(s)
self.assertEqual(f, got)
n = sys.maxsize * 3.7e-250
while n < small:
for expected in (-n, n):
f = float(expected)
self.helper(f)
self.helper(f, 1)
n *= 123.4567
class StringTestCase(unittest.TestCase, HelperMixin):
def test_unicode(self):
for s in ["", "Andr\xe8 Previn", "abc", " "*10000]:
self.helper(marshal.loads(marshal.dumps(s)))
def test_string(self):
for s in ["", "Andr\xe8 Previn", "abc", " "*10000]:
self.helper(s)
def test_bytes(self):
for s in [b"", b"Andr\xe8 Previn", b"abc", b" "*10000]:
self.helper(s)
class ExceptionTestCase(unittest.TestCase):
def test_exceptions(self):
new = marshal.loads(marshal.dumps(StopIteration))
self.assertEqual(StopIteration, new)
class CodeTestCase(unittest.TestCase):
def test_code(self):
co = ExceptionTestCase.test_exceptions.__code__
new = marshal.loads(marshal.dumps(co))
self.assertEqual(co, new)
def test_many_codeobjects(self):
# Issue2957: bad recursion count on code objects
count = 5000 # more than MAX_MARSHAL_STACK_DEPTH
codes = (ExceptionTestCase.test_exceptions.__code__,) * count
marshal.loads(marshal.dumps(codes))
class ContainerTestCase(unittest.TestCase, HelperMixin):
d = {'astring': '[email protected]',
'afloat': 7283.43,
'anint': 2**20,
'ashortlong': 2,
'alist': ['.zyx.41'],
'atuple': ('.zyx.41',)*10,
'aboolean': False,
'aunicode': "Andr\xe8 Previn"
}
def test_dict(self):
self.helper(self.d)
def test_list(self):
self.helper(list(self.d.items()))
def test_tuple(self):
self.helper(tuple(self.d.keys()))
def test_sets(self):
for constructor in (set, frozenset):
self.helper(constructor(self.d.keys()))
class BugsTestCase(unittest.TestCase):
def test_bug_5888452(self):
# Simple-minded check for SF 588452: Debug build crashes
marshal.dumps([128] * 1000)
def test_patch_873224(self):
        self.assertRaises(Exception, marshal.loads, b'0')
        self.assertRaises(Exception, marshal.loads, b'f')
        self.assertRaises(Exception, marshal.loads, marshal.dumps(2**65)[:-1])
def test_version_argument(self):
# Python 2.4.0 crashes for any call to marshal.dumps(x, y)
self.assertEqual(marshal.loads(marshal.dumps(5, 0)), 5)
self.assertEqual(marshal.loads(marshal.dumps(5, 1)), 5)
def test_fuzz(self):
# simple test that it's at least not *totally* trivial to
# crash from bad marshal data
        for c in [bytes([i]) for i in range(256)]:
try:
marshal.loads(c)
except Exception:
pass
def test_loads_recursion(self):
        s = b'c' + (b'X' * 4*4) + b'{' * 2**20  # bytes: marshal.loads rejects str on Python 3
self.assertRaises(ValueError, marshal.loads, s)
def test_recursion_limit(self):
# Create a deeply nested structure.
head = last = []
# The max stack depth should match the value in Python/marshal.c.
if os.name == 'nt' and hasattr(sys, 'gettotalrefcount'):
MAX_MARSHAL_STACK_DEPTH = 1500
else:
MAX_MARSHAL_STACK_DEPTH = 2000
for i in range(MAX_MARSHAL_STACK_DEPTH - 2):
last.append([0])
last = last[-1]
# Verify we don't blow out the stack with dumps/load.
data = marshal.dumps(head)
new_head = marshal.loads(data)
# Don't use == to compare objects, it can exceed the recursion limit.
self.assertEqual(len(new_head), len(head))
self.assertEqual(len(new_head[0]), len(head[0]))
self.assertEqual(len(new_head[-1]), len(head[-1]))
last.append([0])
self.assertRaises(ValueError, marshal.dumps, head)
def test_exact_type_match(self):
# Former bug:
# >>> class Int(int): pass
# >>> type(loads(dumps(Int())))
# <type 'int'>
for typ in (int, float, complex, tuple, list, dict, set, frozenset):
# Note: str subclasses are not tested because they get handled
# by marshal's routines for objects supporting the buffer API.
subtyp = type('subtyp', (typ,), {})
self.assertRaises(ValueError, marshal.dumps, subtyp())
# Issue #1792 introduced a change in how marshal increases the size of its
# internal buffer; this test ensures that the new code is exercised.
def test_large_marshal(self):
size = int(1e6)
testString = 'abc' * size
marshal.dumps(testString)
def test_invalid_longs(self):
# Issue #7019: marshal.loads shouldn't produce unnormalized PyLongs
invalid_string = b'l\x02\x00\x00\x00\x00\x00\x00\x00'
self.assertRaises(ValueError, marshal.loads, invalid_string)
def test_multiple_dumps_and_loads(self):
# Issue 12291: marshal.load() should be callable multiple times
# with interleaved data written by non-marshal code
# Adapted from a patch by Engelbert Gruber.
data = (1, 'abc', b'def', 1.0, (2, 'a', ['b', b'c']))
for interleaved in (b'', b'0123'):
ilen = len(interleaved)
positions = []
try:
with open(support.TESTFN, 'wb') as f:
for d in data:
marshal.dump(d, f)
if ilen:
f.write(interleaved)
positions.append(f.tell())
with open(support.TESTFN, 'rb') as f:
for i, d in enumerate(data):
self.assertEqual(d, marshal.load(f))
if ilen:
f.read(ilen)
self.assertEqual(positions[i], f.tell())
finally:
support.unlink(support.TESTFN)
def test_main():
support.run_unittest(IntTestCase,
FloatTestCase,
StringTestCase,
CodeTestCase,
ContainerTestCase,
ExceptionTestCase,
BugsTestCase)
if __name__ == "__main__":
test_main()
| apache-2.0 | -534,061,942,868,352,000 | 33.843373 | 79 | 0.547372 | false |
CallaJun/hackprince | indico/matplotlib/tests/test_backend_ps.py | 10 | 2166 | # -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import io
import re
import six
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.testing.decorators import cleanup, knownfailureif
needs_ghostscript = knownfailureif(
matplotlib.checkdep_ghostscript()[0] is None,
"This test needs a ghostscript installation")
needs_tex = knownfailureif(
not matplotlib.checkdep_tex(),
"This test needs a TeX installation")
def _test_savefig_to_stringio(format='ps'):
buffers = [
six.moves.StringIO(),
io.StringIO(),
io.BytesIO()]
plt.figure()
plt.plot([0, 1], [0, 1])
plt.title("Déjà vu")
for buffer in buffers:
plt.savefig(buffer, format=format)
values = [x.getvalue() for x in buffers]
if six.PY3:
values = [
values[0].encode('ascii'),
values[1].encode('ascii'),
values[2]]
# Remove comments from the output. This includes things that
# could change from run to run, such as the time.
values = [re.sub(b'%%.*?\n', b'', x) for x in values]
assert values[0] == values[1]
assert values[1] == values[2].replace(b'\r\n', b'\n')
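    # The BytesIO variant may carry platform (CRLF) line endings, so normalize
    # them before comparing against the text-buffer outputs.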
for buffer in buffers:
buffer.close()
@cleanup
def test_savefig_to_stringio():
_test_savefig_to_stringio()
@cleanup
@needs_ghostscript
def test_savefig_to_stringio_with_distiller():
matplotlib.rcParams['ps.usedistiller'] = 'ghostscript'
_test_savefig_to_stringio()
@cleanup
@needs_tex
def test_savefig_to_stringio_with_usetex():
matplotlib.rcParams['text.latex.unicode'] = True
matplotlib.rcParams['text.usetex'] = True
_test_savefig_to_stringio()
@cleanup
def test_savefig_to_stringio_eps():
_test_savefig_to_stringio(format='eps')
@cleanup
@needs_tex
def test_savefig_to_stringio_with_usetex_eps():
matplotlib.rcParams['text.latex.unicode'] = True
matplotlib.rcParams['text.usetex'] = True
_test_savefig_to_stringio(format='eps')
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
| lgpl-3.0 | -1,819,465,758,615,359,000 | 22.78022 | 66 | 0.647874 | false |
gnulinooks/sympy | sympy/printing/tests/test_python.py | 7 | 5924 | # -*- coding: utf-8 -*-
from sympy import Symbol, symbols, oo, limit, Rational, Integral, Derivative
from sympy import log, exp, sqrt, pi, Function, sin, Eq, Le, Gt, Ne
from sympy.printing.python import python
x, y = symbols('xy')
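# Note: symbols('xy') uses the old one-character-per-symbol syntax, returning the
# two Symbols x and y (later sympy versions require symbols('x y') for this).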
th = Symbol('theta')
ph = Symbol('phi')
def test_python_basic():
# Simple numbers/symbols
assert python(-Rational(1)/2) == "e = Rational(-1, 2)"
assert python(-Rational(13)/22) == "e = Rational(-13, 22)"
assert python(oo) == "e = oo"
# Powers
assert python((x**2)) == "x = Symbol(\'x\')\ne = x**2"
assert python(1/x) == "x = Symbol('x')\ne = 1/x"
assert python(y*x**-2) == "y = Symbol('y')\nx = Symbol('x')\ne = y/x**2"
assert python(x**Rational(-5, 2)) == "x = Symbol('x')\ne = x**(Rational(-5, 2))"
# Sums of terms
assert python((x**2 + x + 1)) in [
"x = Symbol('x')\ne = 1 + x + x**2",
"x = Symbol('x')\ne = x + x**2 + 1",
"x = Symbol('x')\ne = x**2 + x + 1",]
assert python(1-x) in [
"x = Symbol('x')\ne = 1 - x",
"x = Symbol('x')\ne = -x + 1"]
assert python(1-2*x) in [
"x = Symbol('x')\ne = 1 - 2*x",
"x = Symbol('x')\ne = -2*x + 1"]
assert python(1-Rational(3,2)*y/x) in [
"y = Symbol('y')\nx = Symbol('x')\ne = 1 - 3/2*y/x",
"y = Symbol('y')\nx = Symbol('x')\ne = -3/2*y/x + 1",
"y = Symbol('y')\nx = Symbol('x')\ne = 1 - 3*y/(2*x)"]
# Multiplication
assert python(x/y) == "x = Symbol('x')\ny = Symbol('y')\ne = x/y"
assert python(-x/y) == "x = Symbol('x')\ny = Symbol('y')\ne = -x/y"
assert python((x+2)/y) in [
"y = Symbol('y')\nx = Symbol('x')\ne = 1/y*(2 + x)",
"y = Symbol('y')\nx = Symbol('x')\ne = 1/y*(x + 2)",
"x = Symbol('x')\ny = Symbol('y')\ne = 1/y*(2 + x)",
"x = Symbol('x')\ny = Symbol('y')\ne = (2 + x)/y"]
assert python((1+x)*y) in [
"y = Symbol('y')\nx = Symbol('x')\ne = y*(1 + x)",
"y = Symbol('y')\nx = Symbol('x')\ne = y*(x + 1)",]
# Check for proper placement of negative sign
assert python(-5*x/(x+10)) == "x = Symbol('x')\ne = -5*x/(10 + x)"
assert python(1 - Rational(3,2)*(x+1)) in [
"x = Symbol('x')\ne = Rational(-1, 2) + Rational(-3, 2)*x",
"x = Symbol('x')\ne = Rational(-1, 2) - 3*x/2",
"x = Symbol('x')\ne = Rational(-1, 2) - 3*x/2"
]
def test_python_relational():
assert python(Eq(x, y)) == "x = Symbol('x')\ny = Symbol('y')\ne = x == y"
assert python(Le(x, y)) == "x = Symbol('x')\ny = Symbol('y')\ne = x <= y"
assert python(Gt(x, y)) == "y = Symbol('y')\nx = Symbol('x')\ne = y < x"
assert python(Ne(x/(y+1), y**2)) in [
"x = Symbol('x')\ny = Symbol('y')\ne = x/(1 + y) != y**2",
"x = Symbol('x')\ny = Symbol('y')\ne = x/(y + 1) != y**2"]
def test_python_functions():
# Simple
assert python((2*x + exp(x))) in "x = Symbol('x')\ne = 2*x + exp(x)"
assert python(sqrt(2)) == 'e = 2**(Half(1, 2))'
assert python(sqrt(2+pi)) == 'e = (2 + pi)**(Half(1, 2))'
assert python(abs(x)) == "x = Symbol('x')\ne = abs(x)"
assert python(abs(x/(x**2+1))) in ["x = Symbol('x')\ne = abs(x/(1 + x**2))",
"x = Symbol('x')\ne = abs(x/(x**2 + 1))"]
# Univariate/Multivariate functions
f = Function('f')
assert python(f(x)) == "x = Symbol('x')\nf = Function('f')\ne = f(x)"
assert python(f(x, y)) == "x = Symbol('x')\ny = Symbol('y')\nf = Function('f')\ne = f(x, y)"
assert python(f(x/(y+1), y)) in [
"x = Symbol('x')\ny = Symbol('y')\nf = Function('f')\ne = f(x/(1 + y), y)",
"x = Symbol('x')\ny = Symbol('y')\nf = Function('f')\ne = f(x/(y + 1), y)"]
# Nesting of square roots
assert python(sqrt((sqrt(x+1))+1)) in [
"x = Symbol('x')\ne = (1 + (1 + x)**(Half(1, 2)))**(Half(1, 2))",
"x = Symbol('x')\ne = ((x + 1)**(Half(1, 2)) + 1)**(Half(1, 2))"]
# Function powers
assert python(sin(x)**2) == "x = Symbol('x')\ne = sin(x)**2"
# Conjugates
a, b = map(Symbol, 'ab')
#assert python( conjugate(a+b*I) ) == '_ _\na - I*b'
#assert python( conjugate(exp(a+b*I)) ) == ' _ _\n a - I*b\ne '
def test_python_derivatives():
# Simple
f_1 = Derivative(log(x), x, evaluate=False)
assert python(f_1) == "x = Symbol('x')\ne = D(log(x), x)"
f_2 = Derivative(log(x), x, evaluate=False) + x
assert python(f_2) == "x = Symbol('x')\ne = x + D(log(x), x)"
# Multiple symbols
f_3 = Derivative(log(x) + x**2, x, y, evaluate=False)
#assert python(f_3) ==
f_4 = Derivative(2*x*y, y, x, evaluate=False) + x**2
assert python(f_4) in [
"x = Symbol('x')\ny = Symbol('y')\ne = x**2 + D(2*x*y, y, x)",
"x = Symbol('x')\ny = Symbol('y')\ne = D(2*x*y, y, x) + x**2"]
def test_python_integrals():
# Simple
f_1 = Integral(log(x), x)
assert python(f_1) == "x = Symbol('x')\ne = Integral(log(x), x)"
f_2 = Integral(x**2, x)
assert python(f_2) == "x = Symbol('x')\ne = Integral(x**2, x)"
# Double nesting of pow
f_3 = Integral(x**(2**x), x)
assert python(f_3) == "x = Symbol('x')\ne = Integral(x**(2**x), x)"
# Definite integrals
f_4 = Integral(x**2, (x,1,2))
assert python(f_4) == "x = Symbol('x')\ne = Integral(x**2, (x, 1, 2))"
f_5 = Integral(x**2, (x,Rational(1,2),10))
assert python(f_5) == "x = Symbol('x')\ne = Integral(x**2, (x, Half(1, 2), 10))"
# Nested integrals
f_6 = Integral(x**2*y**2, x,y)
assert python(f_6) == "x = Symbol('x')\ny = Symbol('y')\ne = Integral(x**2*y**2, x, y)"
# Not implemented yet
#def test_python_matrix():
# p = python(Matrix([[x**2+1, 1], [y, x+y]]))
# s = ''
# assert p == s
def test_python_limits():
assert python(limit(x, x, oo)) == 'e = oo'
assert python(limit(x**2, x, 0)) == 'e = 0'
| bsd-3-clause | -2,367,982,053,965,023,700 | 39.575342 | 96 | 0.478393 | false |
gsnbng/erpnext | erpnext/stock/doctype/stock_reconciliation/stock_reconciliation.py | 1 | 18013 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, erpnext
import frappe.defaults
from frappe import msgprint, _
from frappe.utils import cstr, flt, cint
from erpnext.controllers.stock_controller import StockController
from erpnext.accounts.utils import get_company_default
from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos
from erpnext.stock.utils import get_stock_balance, get_incoming_rate, get_available_serial_nos
from erpnext.stock.doctype.batch.batch import get_batch_qty
class OpeningEntryAccountError(frappe.ValidationError): pass
class EmptyStockReconciliationItemsError(frappe.ValidationError): pass
class StockReconciliation(StockController):
def __init__(self, *args, **kwargs):
super(StockReconciliation, self).__init__(*args, **kwargs)
self.head_row = ["Item Code", "Warehouse", "Quantity", "Valuation Rate"]
def validate(self):
if not self.expense_account:
self.expense_account = frappe.get_cached_value('Company', self.company, "stock_adjustment_account")
if not self.cost_center:
self.cost_center = frappe.get_cached_value('Company', self.company, "cost_center")
self.validate_posting_time()
self.remove_items_with_no_change()
self.validate_data()
self.validate_expense_account()
self.set_total_qty_and_amount()
if self._action=="submit":
self.make_batches('warehouse')
def on_submit(self):
self.update_stock_ledger()
self.make_gl_entries()
from erpnext.stock.doctype.serial_no.serial_no import update_serial_nos_after_submit
update_serial_nos_after_submit(self, "items")
def on_cancel(self):
self.ignore_linked_doctypes = ('GL Entry', 'Stock Ledger Entry')
self.make_sle_on_cancel()
self.make_gl_entries_on_cancel()
def remove_items_with_no_change(self):
"""Remove items if qty or rate is not changed"""
self.difference_amount = 0.0
def _changed(item):
item_dict = get_stock_balance_for(item.item_code, item.warehouse,
self.posting_date, self.posting_time, batch_no=item.batch_no)
if ((item.qty is None or item.qty==item_dict.get("qty")) and
(item.valuation_rate is None or item.valuation_rate==item_dict.get("rate")) and
(not item.serial_no or (item.serial_no == item_dict.get("serial_nos")) )):
return False
else:
# set default as current rates
if item.qty is None:
item.qty = item_dict.get("qty")
if item.valuation_rate is None:
item.valuation_rate = item_dict.get("rate")
if item_dict.get("serial_nos"):
item.current_serial_no = item_dict.get("serial_nos")
item.current_qty = item_dict.get("qty")
item.current_valuation_rate = item_dict.get("rate")
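				# accumulate (new qty * new rate) - (current qty * current rate): the
				# net stock value adjustment this reconciliation will book for the row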
self.difference_amount += (flt(item.qty, item.precision("qty")) * \
flt(item.valuation_rate or item_dict.get("rate"), item.precision("valuation_rate")) \
- flt(item_dict.get("qty"), item.precision("qty")) * flt(item_dict.get("rate"), item.precision("valuation_rate")))
return True
items = list(filter(lambda d: _changed(d), self.items))
if not items:
frappe.throw(_("None of the items have any change in quantity or value."),
EmptyStockReconciliationItemsError)
elif len(items) != len(self.items):
self.items = items
for i, item in enumerate(self.items):
item.idx = i + 1
frappe.msgprint(_("Removed items with no change in quantity or value."))
def validate_data(self):
def _get_msg(row_num, msg):
return _("Row # {0}: ").format(row_num+1) + msg
self.validation_messages = []
item_warehouse_combinations = []
default_currency = frappe.db.get_default("currency")
for row_num, row in enumerate(self.items):
# find duplicates
key = [row.item_code, row.warehouse]
for field in ['serial_no', 'batch_no']:
if row.get(field):
key.append(row.get(field))
if key in item_warehouse_combinations:
self.validation_messages.append(_get_msg(row_num, _("Duplicate entry")))
else:
item_warehouse_combinations.append(key)
self.validate_item(row.item_code, row)
# validate warehouse
if not frappe.db.get_value("Warehouse", row.warehouse):
self.validation_messages.append(_get_msg(row_num, _("Warehouse not found in the system")))
# if both not specified
if row.qty in ["", None] and row.valuation_rate in ["", None]:
self.validation_messages.append(_get_msg(row_num,
_("Please specify either Quantity or Valuation Rate or both")))
# do not allow negative quantity
if flt(row.qty) < 0:
self.validation_messages.append(_get_msg(row_num,
_("Negative Quantity is not allowed")))
# do not allow negative valuation
if flt(row.valuation_rate) < 0:
self.validation_messages.append(_get_msg(row_num,
_("Negative Valuation Rate is not allowed")))
if row.qty and row.valuation_rate in ["", None]:
row.valuation_rate = get_stock_balance(row.item_code, row.warehouse,
self.posting_date, self.posting_time, with_valuation_rate=True)[1]
if not row.valuation_rate:
# try if there is a buying price list in default currency
buying_rate = frappe.db.get_value("Item Price", {"item_code": row.item_code,
"buying": 1, "currency": default_currency}, "price_list_rate")
if buying_rate:
row.valuation_rate = buying_rate
else:
# get valuation rate from Item
row.valuation_rate = frappe.get_value('Item', row.item_code, 'valuation_rate')
# throw all validation messages
if self.validation_messages:
for msg in self.validation_messages:
msgprint(msg)
raise frappe.ValidationError(self.validation_messages)
def validate_item(self, item_code, row):
from erpnext.stock.doctype.item.item import validate_end_of_life, \
validate_is_stock_item, validate_cancelled_item
# using try except to catch all validation msgs and display together
try:
item = frappe.get_doc("Item", item_code)
# end of life and stock item
validate_end_of_life(item_code, item.end_of_life, item.disabled, verbose=0)
validate_is_stock_item(item_code, item.is_stock_item, verbose=0)
# item should not be serialized
if item.has_serial_no and not row.serial_no and not item.serial_no_series:
raise frappe.ValidationError(_("Serial no(s) required for serialized item {0}").format(item_code))
# item managed batch-wise not allowed
if item.has_batch_no and not row.batch_no and not item.create_new_batch:
raise frappe.ValidationError(_("Batch no is required for batched item {0}").format(item_code))
# docstatus should be < 2
validate_cancelled_item(item_code, item.docstatus, verbose=0)
except Exception as e:
self.validation_messages.append(_("Row # ") + ("%d: " % (row.idx)) + cstr(e))
def update_stock_ledger(self):
""" find difference between current and expected entries
and create stock ledger entries based on the difference"""
from erpnext.stock.stock_ledger import get_previous_sle
sl_entries = []
has_serial_no = False
for row in self.items:
item = frappe.get_doc("Item", row.item_code)
if item.has_serial_no or item.has_batch_no:
has_serial_no = True
self.get_sle_for_serialized_items(row, sl_entries)
else:
if row.serial_no or row.batch_no:
frappe.throw(_("Row #{0}: Item {1} is not a Serialized/Batched Item. It cannot have a Serial No/Batch No against it.") \
.format(row.idx, frappe.bold(row.item_code)))
previous_sle = get_previous_sle({
"item_code": row.item_code,
"warehouse": row.warehouse,
"posting_date": self.posting_date,
"posting_time": self.posting_time
})
if previous_sle:
if row.qty in ("", None):
row.qty = previous_sle.get("qty_after_transaction", 0)
if row.valuation_rate in ("", None):
row.valuation_rate = previous_sle.get("valuation_rate", 0)
if row.qty and not row.valuation_rate:
frappe.throw(_("Valuation Rate required for Item {0} at row {1}").format(row.item_code, row.idx))
if ((previous_sle and row.qty == previous_sle.get("qty_after_transaction")
and (row.valuation_rate == previous_sle.get("valuation_rate") or row.qty == 0))
or (not previous_sle and not row.qty)):
continue
sl_entries.append(self.get_sle_for_items(row))
if sl_entries:
if has_serial_no:
sl_entries = self.merge_similar_item_serial_nos(sl_entries)
self.make_sl_entries(sl_entries)
if has_serial_no and sl_entries:
self.update_valuation_rate_for_serial_no()
def get_sle_for_serialized_items(self, row, sl_entries):
from erpnext.stock.stock_ledger import get_previous_sle
serial_nos = get_serial_nos(row.serial_no)
# To issue existing serial nos
if row.current_qty and (row.current_serial_no or row.batch_no):
args = self.get_sle_for_items(row)
args.update({
'actual_qty': -1 * row.current_qty,
'serial_no': row.current_serial_no,
'batch_no': row.batch_no,
'valuation_rate': row.current_valuation_rate
})
if row.current_serial_no:
args.update({
'qty_after_transaction': 0,
})
sl_entries.append(args)
for serial_no in serial_nos:
args = self.get_sle_for_items(row, [serial_no])
previous_sle = get_previous_sle({
"item_code": row.item_code,
"posting_date": self.posting_date,
"posting_time": self.posting_time,
"serial_no": serial_no
})
if previous_sle and row.warehouse != previous_sle.get("warehouse"):
# If serial no exists in different warehouse
new_args = args.copy()
new_args.update({
'actual_qty': -1,
'qty_after_transaction': cint(previous_sle.get('qty_after_transaction')) - 1,
'warehouse': previous_sle.get("warehouse", '') or row.warehouse,
'valuation_rate': previous_sle.get("valuation_rate")
})
sl_entries.append(new_args)
if row.qty:
args = self.get_sle_for_items(row)
args.update({
'actual_qty': row.qty,
'incoming_rate': row.valuation_rate,
'valuation_rate': row.valuation_rate
})
sl_entries.append(args)
if serial_nos == get_serial_nos(row.current_serial_no):
# update valuation rate
self.update_valuation_rate_for_serial_nos(row, serial_nos)
def update_valuation_rate_for_serial_no(self):
for d in self.items:
if not d.serial_no: continue
serial_nos = get_serial_nos(d.serial_no)
self.update_valuation_rate_for_serial_nos(d, serial_nos)
def update_valuation_rate_for_serial_nos(self, row, serial_nos):
valuation_rate = row.valuation_rate if self.docstatus == 1 else row.current_valuation_rate
if valuation_rate is None:
return
for d in serial_nos:
frappe.db.set_value("Serial No", d, 'purchase_rate', valuation_rate)
def get_sle_for_items(self, row, serial_nos=None):
"""Insert Stock Ledger Entries"""
if not serial_nos and row.serial_no:
serial_nos = get_serial_nos(row.serial_no)
data = frappe._dict({
"doctype": "Stock Ledger Entry",
"item_code": row.item_code,
"warehouse": row.warehouse,
"posting_date": self.posting_date,
"posting_time": self.posting_time,
"voucher_type": self.doctype,
"voucher_no": self.name,
"voucher_detail_no": row.name,
"company": self.company,
"stock_uom": frappe.db.get_value("Item", row.item_code, "stock_uom"),
"is_cancelled": 1 if self.docstatus == 2 else 0,
"serial_no": '\n'.join(serial_nos) if serial_nos else '',
"batch_no": row.batch_no,
"valuation_rate": flt(row.valuation_rate, row.precision("valuation_rate"))
})
if not row.batch_no:
data.qty_after_transaction = flt(row.qty, row.precision("qty"))
if self.docstatus == 2 and not row.batch_no:
if row.current_qty:
data.actual_qty = -1 * row.current_qty
data.qty_after_transaction = flt(row.current_qty)
data.valuation_rate = flt(row.current_valuation_rate)
data.stock_value = data.qty_after_transaction * data.valuation_rate
data.stock_value_difference = -1 * flt(row.amount_difference)
else:
data.actual_qty = row.qty
data.qty_after_transaction = 0.0
data.valuation_rate = flt(row.valuation_rate)
data.stock_value_difference = -1 * flt(row.amount_difference)
return data
def make_sle_on_cancel(self):
sl_entries = []
has_serial_no = False
for row in self.items:
if row.serial_no or row.batch_no or row.current_serial_no:
has_serial_no = True
serial_nos = ''
if row.current_serial_no:
serial_nos = get_serial_nos(row.current_serial_no)
sl_entries.append(self.get_sle_for_items(row, serial_nos))
else:
sl_entries.append(self.get_sle_for_items(row))
if sl_entries:
if has_serial_no:
sl_entries = self.merge_similar_item_serial_nos(sl_entries)
sl_entries.reverse()
allow_negative_stock = frappe.db.get_value("Stock Settings", None, "allow_negative_stock")
self.make_sl_entries(sl_entries, allow_negative_stock=allow_negative_stock)
def merge_similar_item_serial_nos(self, sl_entries):
		# If the user has put the same item in multiple rows with different serial nos
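		# Sketch of the intended merge (hypothetical rows): two reconciliation
		# rows for the same (item_code, warehouse) carrying serial nos SN-001
		# and SN-002 collapse into one entry whose actual_qty is summed and
		# whose serial_no becomes "SN-001\nSN-002"; rows with negative qty
		# (issues) are never merged.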
new_sl_entries = []
merge_similar_entries = {}
for d in sl_entries:
if not d.serial_no or d.actual_qty < 0:
new_sl_entries.append(d)
continue
key = (d.item_code, d.warehouse)
if key not in merge_similar_entries:
merge_similar_entries[key] = d
elif d.serial_no:
data = merge_similar_entries[key]
data.actual_qty += d.actual_qty
data.qty_after_transaction += d.qty_after_transaction
data.valuation_rate = (data.valuation_rate + d.valuation_rate) / data.actual_qty
data.serial_no += '\n' + d.serial_no
if data.incoming_rate:
data.incoming_rate = (data.incoming_rate + d.incoming_rate) / data.actual_qty
for key, value in merge_similar_entries.items():
new_sl_entries.append(value)
return new_sl_entries
def get_gl_entries(self, warehouse_account=None):
if not self.cost_center:
msgprint(_("Please enter Cost Center"), raise_exception=1)
return super(StockReconciliation, self).get_gl_entries(warehouse_account,
self.expense_account, self.cost_center)
def validate_expense_account(self):
if not cint(erpnext.is_perpetual_inventory_enabled(self.company)):
return
if not self.expense_account:
frappe.throw(_("Please enter Expense Account"))
elif self.purpose == "Opening Stock" or not frappe.db.sql("""select name from `tabStock Ledger Entry` limit 1"""):
if frappe.db.get_value("Account", self.expense_account, "report_type") == "Profit and Loss":
frappe.throw(_("Difference Account must be a Asset/Liability type account, since this Stock Reconciliation is an Opening Entry"), OpeningEntryAccountError)
def set_total_qty_and_amount(self):
for d in self.get("items"):
d.amount = flt(d.qty, d.precision("qty")) * flt(d.valuation_rate, d.precision("valuation_rate"))
d.current_amount = (flt(d.current_qty,
d.precision("current_qty")) * flt(d.current_valuation_rate, d.precision("current_valuation_rate")))
d.quantity_difference = flt(d.qty) - flt(d.current_qty)
d.amount_difference = flt(d.amount) - flt(d.current_amount)
def get_items_for(self, warehouse):
self.items = []
for item in get_items(warehouse, self.posting_date, self.posting_time, self.company):
self.append("items", item)
def submit(self):
if len(self.items) > 100:
msgprint(_("The task has been enqueued as a background job. In case there is any issue on processing in background, the system will add a comment about the error on this Stock Reconciliation and revert to the Draft stage"))
self.queue_action('submit')
else:
self._submit()
@frappe.whitelist()
def get_items(warehouse, posting_date, posting_time, company):
lft, rgt = frappe.db.get_value("Warehouse", warehouse, ["lft", "rgt"])
items = frappe.db.sql("""
select i.name, i.item_name, bin.warehouse
from tabBin bin, tabItem i
where i.name=bin.item_code and i.disabled=0 and i.is_stock_item = 1
and i.has_variants = 0 and i.has_serial_no = 0 and i.has_batch_no = 0
and exists(select name from `tabWarehouse` where lft >= %s and rgt <= %s and name=bin.warehouse)
""", (lft, rgt))
items += frappe.db.sql("""
select i.name, i.item_name, id.default_warehouse
from tabItem i, `tabItem Default` id
where i.name = id.parent
and exists(select name from `tabWarehouse` where lft >= %s and rgt <= %s and name=id.default_warehouse)
and i.is_stock_item = 1 and i.has_serial_no = 0 and i.has_batch_no = 0
and i.has_variants = 0 and i.disabled = 0 and id.company=%s
group by i.name
""", (lft, rgt, company))
res = []
for d in set(items):
stock_bal = get_stock_balance(d[0], d[2], posting_date, posting_time,
with_valuation_rate=True)
if frappe.db.get_value("Item", d[0], "disabled") == 0:
res.append({
"item_code": d[0],
"warehouse": d[2],
"qty": stock_bal[0],
"item_name": d[1],
"valuation_rate": stock_bal[1],
"current_qty": stock_bal[0],
"current_valuation_rate": stock_bal[1]
})
return res
@frappe.whitelist()
def get_stock_balance_for(item_code, warehouse,
	posting_date, posting_time, batch_no=None, with_valuation_rate=True):
	frappe.has_permission("Stock Reconciliation", "write", throw=True)
item_dict = frappe.db.get_value("Item", item_code,
["has_serial_no", "has_batch_no"], as_dict=1)
serial_nos = ""
	with_serial_no = bool(item_dict.get("has_serial_no"))
data = get_stock_balance(item_code, warehouse, posting_date, posting_time,
with_valuation_rate=with_valuation_rate, with_serial_no=with_serial_no)
if with_serial_no:
qty, rate, serial_nos = data
else:
qty, rate = data
if item_dict.get("has_batch_no"):
qty = get_batch_qty(batch_no, warehouse) or 0
return {
'qty': qty,
'rate': rate,
'serial_nos': serial_nos
}
@frappe.whitelist()
def get_difference_account(purpose, company):
if purpose == 'Stock Reconciliation':
account = get_company_default(company, "stock_adjustment_account")
else:
account = frappe.db.get_value('Account', {'is_group': 0,
'company': company, 'account_type': 'Temporary'}, 'name')
return account | agpl-3.0 | 1,155,120,755,609,143,600 | 34.183594 | 226 | 0.689058 | false |
matthew-morrison/discord-bot | bot.py | 1 | 5032 | import discord
import asyncio
import os
import json
import sqlite3
import urllib.parse
import psycopg2
import sys
from discord.ext import commands
import sys # used to inspect function calls
# relevent libraries and commands are listed in the README.md
#sys.stdout = open('botlog.txt', 'w') #redirect stdout to file
bot = commands.Bot(command_prefix='!', description="Statman the Testing Bot")
#bot.conn = sqlite3.connect("sample.db") # testing on local sqlite
configFile = "config.json" # you need to provide your own config if you wish to run the bot
if os.path.isfile("config.json"):
file = open("config.json")
conf = json.load(file)
discord_token = conf["discord_bot_token"]
postgres_pw = conf["postgres"]
bot.mashape_key = conf['mashape']
pg_ip = conf["pg_ip"]
pg_pw = conf["pg_pw"]
bot.imgur_id = conf['imgur_id']
bot.lang_url = conf['lang_url']
else:
print("Uh... no config file. Gonna explode now.")
urllib.parse.uses_netloc.append("postgres")
bot.conn_wc = psycopg2.connect("port='5432' user='ivpbuumx' host='hard-plum.db.elephantsql.com' password='"+postgres_pw+"'")
#bot.conn_wc = psycopg2.connect("port='5432' dbname='postgres' user='nos' host='"+pg_ip+"' password='"+pg_pw+"'")
#bot.conn_wc = psycopg2.connect("port='5432' dbname='postgres' user='henry' host='192.168.0.28' password='eggplantsarecoolandsoishenry'")
@bot.event
async def on_ready():
print('Logged in as')
print(bot.user.name)
print(bot.user.id)
print('------')
#@bot.command()
#async def play(word:str):
# await bot.change_presence(game=discord.Game(name=word))
#@bot.command()
#async def help():
# await bot.say("wow")
@bot.command(pass_context=True)
async def cogs(ctx):
"""Lists currently loaded cogs"""
cogs = list(bot.cogs.keys())
#await bot.say("\n".join(cogs))
cogs.sort()
await bot.embed_this_for_me("\n".join(cogs), ctx)
@bot.event
async def on_message(message):
    await bot.process_commands(message)  # hand the message on to the command framework
async def embed_this_for_me(text, ctx):
callingframe = sys._getframe(1)
em = discord.Embed(colour=0xfff)
em.add_field(name="Results from "+callingframe.f_code.co_name, value=text)
#em.set_footer(text="Written by Nos", icon_url="https://cdn.discordapp.com/avatars/173177975045488640/61d53ada7449ce4a3e1fdc13dc0ee21e.png")
#em.set_author(name='nos', icon_url=ctx.message.author.avatar_url)
#em.set_thumbnail(url=ctx.message.author.avatar_url)
print("Length of msg: ", len(text))
await bot.send_message(ctx.message.channel, embed=em)
def fit_msg(msg, maxlen:int=2000):
"""Split a long message up to fit within discord's limits.
Makes divisions as natural as possible in order:
newline > space > any char
"""
msgs = []
while len(msg) >= maxlen:
print("msg remaining:\n---------------------\n", msg)
if '\n' in msg[:maxlen]:
print("through newline")
idx = msg[:maxlen].rfind('\n')
print("idx is: ",idx)
msgs.append(msg[:idx])
print("added to msgs: \n -----------------\n", msg[:idx])
msg = msg[idx+1:]
elif ' ' in msg[:maxlen]:
print("through space")
idx = msg[:maxlen].rfind(' ')
print("idx is: ", idx)
msgs.append(msg[:idx])
msg = msg[idx+1:]
        else:
            print("\nelse\n")
            # no newline or space to break on; hard-split at the limit
            msgs.append(msg[:maxlen])
            msg = msg[maxlen:]
msgs.append(msg)
print("this is the msgs\n---------------------\n", msgs)
return msgs
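# A minimal usage sketch (the long string is hypothetical) of fit_msg: a
# 5000-character report comes back as chunks under Discord's 2000-character
# default, each split at the last newline or space before the limit when one
# exists.
#
#     for chunk in fit_msg(long_report):
#         await bot.send_message(channel, chunk)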
bot.embed_this_for_me = embed_this_for_me # attach to bot object so cogs don't need to import main
bot.fit_msg = fit_msg
## load cogs
bot.load_extension("spellcheck")
bot.load_extension("poem")
bot.load_extension("wiki")
bot.load_extension("dictionary")
bot.load_extension("wordart")
bot.load_extension("urbandict")
bot.load_extension("youtubesearch")
bot.load_extension("duck")
bot.load_extension("tunes")
bot.load_extension("imgur")
bot.load_extension("sfusearch")
bot.load_extension("memes")
bot.load_extension("translate")
bot.load_extension("superhelp")
bot.load_extension("remindme")
bot.load_extension("modtools")
bot.run(discord_token)
#####################
#scopes: bot, messages.read,
# References
# https://discordapp.com/developers/docs/intro
# https://discordapp.com/developers/applications/me/304829848613027852?success=created
# https://github.com/hammerandchisel/discord-api-docs
# https://www.dictionaryapi.com/
# https://github.com/thundercomb/poetrydb/blob/master/README.md
# https://github.com/Rapptz/discord.py
# https://en.wikipedia.org/api/rest_v1/#!/Page_content/get_page_summary_title
# use this to add to new server?
# https://discordapp.com/oauth2/authorize?client_id=304829848613027852&scope=bot
##
| mit | 4,504,220,183,950,953,000 | 31.25641 | 144 | 0.633943 | false |
VisualComputingInstitute/TrackR-CNN | datasets/MOT/MOT17.py | 1 | 2964 | import numpy as np
from datasets.MOT.MOT_common import MOTDetectionDataset, MOTDataset
from datasets.Loader import register_dataset
from datasets.util.Util import username
# MOT17 uses the same sequences as MOT16 but with improved ground-truth annotations
NAME = "MOT17"
NAME_DETECTION = "MOT17_detection"
DEFAULT_PATH = "/fastwork/" + username() + "/mywork/data/MOT17/"
NUM_CLASSES = 3 # background, car, pedestrian
N_MAX_DETECTIONS = 100
# TODO This is from savitar1, any papers we can take this split from?
SEQ_IDS_TRAIN = ["MOT17-%02d-DPM" % idx for idx in [2, 5, 10, 13]]
SEQ_IDS_VAL = ["MOT17-%02d-DPM" % idx for idx in [4, 9, 11]]
# used for detection on individual images
@register_dataset(NAME_DETECTION)
class MOT17DetectionDataset(MOTDetectionDataset):
def __init__(self, config, subset):
super().__init__(config, subset, NAME, DEFAULT_PATH, SEQ_IDS_TRAIN, SEQ_IDS_VAL, NUM_CLASSES)
def get_data_arrays_for_file(self, img_filename, img_h, img_w):
return mot17_get_data_arrays_for_file(self.gt_data, self.cat_to_class, self.visibility_threshold,
img_filename, img_h, img_w)
# used for training on chunks of video
@register_dataset(NAME)
class MOT17Dataset(MOTDataset):
def __init__(self, config, subset):
super().__init__(config, subset, NAME, DEFAULT_PATH, SEQ_IDS_TRAIN, SEQ_IDS_VAL, NUM_CLASSES)
def get_data_arrays_for_file(self, img_filename, img_h, img_w):
return mot17_get_data_arrays_for_file(self.gt_data, self.cat_to_class, self.visibility_threshold,
img_filename, img_h, img_w)
def mot17_get_data_arrays_for_file(gt_data, cat_to_class, visibility_threshold, img_filename, img_h, img_w):
img_filename = img_filename.decode('utf-8')
seq = img_filename.split("/")[-3]
img_id = int(img_filename.split("/")[-1][:-4])
all_anns = gt_data[seq][0]
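  # MOT ground-truth columns used below (gt.txt layout): 0 frame, 1 track id,
  # 2 bb_left, 3 bb_top, 4 bb_width, 5 bb_height, 6 consider-entry flag,
  # 7 class id, 8 visibility ratio.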
anns_for_img = all_anns[all_anns[:, 0] == img_id, :]
assert (len(anns_for_img) <= N_MAX_DETECTIONS)
# they need to be padded to N_MAX_DETECTIONS
bboxes = np.zeros((N_MAX_DETECTIONS, 4), dtype="float32")
ids = np.zeros(N_MAX_DETECTIONS, dtype="int32")
classes = np.zeros(N_MAX_DETECTIONS, dtype="int32")
is_crowd = np.zeros(N_MAX_DETECTIONS, dtype="int32")
for idx, ann in enumerate(anns_for_img):
x1 = ann[2]
y1 = ann[3]
box_width = ann[4]
box_height = ann[5]
x2 = x1 + box_width
y2 = y1 + box_height
# clip box
x1 = np.clip(x1, 0, img_w - 1)
x2 = np.clip(x2, 0, img_w - 1)
y1 = np.clip(y1, 0, img_h - 1)
y2 = np.clip(y2, 0, img_h - 1)
bboxes[idx] = [y1, x1, y2, x2]
ids[idx] = ann[1]
non_object_class = True
if ann[7] in cat_to_class:
classes[idx] = cat_to_class[ann[7]]
non_object_class = False
if ann[8] < visibility_threshold or ann[6] == 0 or non_object_class: # ann[6]==0 means ignore in MOT17
is_crowd[idx] = 1
classes[idx] = 0
return bboxes, ids, classes, is_crowd | mit | -7,313,223,581,082,771,000 | 36.531646 | 108 | 0.650135 | false |
gammu/python-gammu | gammu/asyncworker.py | 1 | 5280 | """Async extensions for gammu."""
import asyncio
import gammu
import gammu.worker
class GammuAsyncThread(gammu.worker.GammuThread):
"""Thread for phone communication."""
def __init__(self, queue, config, callback):
"""Initialize thread."""
super().__init__(queue, config, callback)
def _do_command(self, future, cmd, params, percentage=100):
"""Execute single command on phone."""
func = getattr(self._sm, cmd)
result = None
try:
if params is None:
result = func()
elif isinstance(params, dict):
result = func(**params)
else:
result = func(*params)
except gammu.GSMError as info:
errcode = info.args[0]["Code"]
error = gammu.ErrorNumbers[errcode]
self._callback(future, result, error, percentage)
except Exception as exception: # pylint: disable=broad-except
self._callback(future, None, exception, percentage)
else:
self._callback(future, result, None, percentage)
class GammuAsyncWorker(gammu.worker.GammuWorker):
"""Extend gammu worker class for async operations."""
def worker_callback(self, name, result, error, percents):
"""Execute command from the thread worker."""
future = None
if name == "Init" and self._init_future is not None:
future = self._init_future
elif name == "Terminate" and self._terminate_future is not None:
# Set _kill to true on the base class to avoid waiting for termination
self._thread._kill = True # pylint: disable=protected-access
future = self._terminate_future
elif hasattr(name, "set_result"):
future = name
if future is not None:
if error is None:
self._loop.call_soon_threadsafe(future.set_result, result)
else:
exception = error
if not isinstance(error, Exception):
exception = gammu.GSMError(error)
self._loop.call_soon_threadsafe(future.set_exception, exception)
def __init__(self):
"""Initialize the worker class.
@param callback: See L{GammuThread.__init__} for description.
"""
super().__init__(self.worker_callback)
self._loop = asyncio.get_event_loop()
self._init_future = None
self._terminate_future = None
self._thread = None
async def init_async(self):
"""Connect to phone."""
self._init_future = self._loop.create_future()
self._thread = GammuAsyncThread(self._queue, self._config, self._callback)
self._thread.start()
await self._init_future
self._init_future = None
async def get_imei_async(self):
"""Get the IMEI of the device."""
future = self._loop.create_future()
self.enqueue(future, commands=[("GetIMEI", ())])
return await future
async def get_network_info_async(self):
"""Get the network info in the device."""
future = self._loop.create_future()
self.enqueue(future, commands=[("GetNetworkInfo", ())])
return await future
async def get_manufacturer_async(self):
"""Get the manufacturer of the device."""
future = self._loop.create_future()
self.enqueue(future, commands=[("GetManufacturer", ())])
return await future
async def get_model_async(self):
"""Get the model of the device."""
future = self._loop.create_future()
self.enqueue(future, commands=[("GetModel", ())])
return await future
async def get_firmware_async(self):
"""Get the firmware version of the device."""
future = self._loop.create_future()
self.enqueue(future, commands=[("GetFirmware", ())])
return await future
async def get_signal_quality_async(self):
"""Get signal quality from phone."""
future = self._loop.create_future()
self.enqueue(future, commands=[("GetSignalQuality", ())])
result = await future
return result
async def send_sms_async(self, message):
"""Send sms message via the phone."""
future = self._loop.create_future()
self.enqueue(future, commands=[("SendSMS", [message])])
result = await future
return result
async def set_incoming_callback_async(self, callback):
"""Set the callback to call from phone."""
future = self._loop.create_future()
self.enqueue(future, commands=[("SetIncomingCallback", [callback])])
result = await future
return result
async def set_incoming_sms_async(self):
"""Activate SMS notifications from phone."""
future = self._loop.create_future()
self.enqueue(future, commands=[("SetIncomingSMS", ())])
result = await future
return result
async def terminate_async(self):
"""Terminate phone communication."""
self._terminate_future = self._loop.create_future()
self.enqueue("Terminate")
await self._terminate_future
while self._thread.is_alive():
await asyncio.sleep(5)
self._thread = None
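# A minimal usage sketch (device settings are assumptions) built on the
# methods above; configure() comes from the GammuWorker base class:
#
#     worker = GammuAsyncWorker()
#     worker.configure({"Device": "/dev/ttyUSB0", "Connection": "at"})
#     await worker.init_async()
#     print(await worker.get_imei_async())
#     await worker.terminate_async()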
| gpl-2.0 | 8,637,560,298,412,838,000 | 35.413793 | 82 | 0.596023 | false |
ya7lelkom/googleads-python-lib | examples/adwords/v201506/campaign_management/set_ad_parameters.py | 3 | 3468 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example adds a text ad with ad parameters.
To get ad groups, run get_ad_groups.py. To get keywords, run add_keywords.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: AdGroupAdService.mutate, AdParamService.mutate
Api: AdWordsOnly
"""
__author__ = ('[email protected] (Kevin Winter)'
'Joseph DiLallo')
from googleads import adwords
AD_GROUP_ID = 'INSERT_AD_GROUP_ID_HERE'
CRITERION_ID = 'INSERT_KEYWORD_CRITERION_ID_HERE'
def main(client, ad_group_id, criterion_id):
# Initialize appropriate service.
ad_group_ad_service = client.GetService('AdGroupAdService', version='v201506')
ad_param_service = client.GetService('AdParamService', version='v201506')
# Construct operations for adding text ad object and add to an ad group.
operations = [{
'operator': 'ADD',
'operand': {
'xsi_type': 'AdGroupAd',
'adGroupId': ad_group_id,
'ad': {
'xsi_type': 'TextAd',
'finalUrls': {
'urls': ['http://www.example.com']
},
'displayUrl': 'example.com',
'description1': 'Low-gravity fun for {param1:cheap}.',
'description2': 'Only {param2:a few} seats left!',
'headline': 'Luxury Mars Cruises'
},
'status': 'ENABLED'
}
}]
ads = ad_group_ad_service.mutate(operations)['value']
# Display results.
for ad in ads:
print ('Text ad with id \'%s\' was successfully added to an ad group with '
'id \'%s\'.' % (ad['adGroupId'], ad['ad']['id']))
# Construct operations for setting ad parameters.
operations = [
{
'operator': 'SET',
'operand': {
'adGroupId': ad_group_id,
'criterionId': criterion_id,
'insertionText': u'£100',
'paramIndex': '1'
}
},
{
'operator': 'SET',
'operand': {
'adGroupId': ad_group_id,
'criterionId': criterion_id,
'insertionText': '50',
'paramIndex': '2'
}
}
]
ad_params = ad_param_service.mutate(operations)
# Display results.
for ad_param in ad_params:
print ('Ad parameter with text \'%s\' was successfully set for criterion '
'with id \'%s\' and ad group id \'%s\'.'
% (ad_param['insertionText'], ad_param['criterionId'],
ad_param['adGroupId']))
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, AD_GROUP_ID, CRITERION_ID)
| apache-2.0 | 8,112,226,728,531,167,000 | 31.401869 | 80 | 0.61321 | false |
EvanK/ansible | lib/ansible/cli/inventory.py | 14 | 15431 | # Copyright: (c) 2017, Brian Coca <[email protected]>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import optparse
from operator import attrgetter
from ansible import constants as C
from ansible import context
from ansible.cli import CLI
from ansible.cli.arguments import optparse_helpers as opt_help
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.inventory.host import Host
from ansible.plugins.loader import vars_loader
from ansible.utils.vars import combine_vars
from ansible.utils.display import Display
display = Display()
INTERNAL_VARS = frozenset(['ansible_diff_mode',
'ansible_facts',
'ansible_forks',
'ansible_inventory_sources',
'ansible_limit',
'ansible_playbook_python',
'ansible_run_tags',
'ansible_skip_tags',
'ansible_verbosity',
'ansible_version',
'inventory_dir',
'inventory_file',
'inventory_hostname',
'inventory_hostname_short',
'groups',
'group_names',
'omit',
'playbook_dir', ])
class InventoryCLI(CLI):
''' used to display or dump the configured inventory as Ansible sees it '''
ARGUMENTS = {'host': 'The name of a host to match in the inventory, relevant when using --list',
'group': 'The name of a group in the inventory, relevant when using --graph', }
def __init__(self, args):
super(InventoryCLI, self).__init__(args)
self.vm = None
self.loader = None
self.inventory = None
def init_parser(self):
super(InventoryCLI, self).init_parser(
usage='usage: %prog [options] [host|group]',
epilog='Show Ansible inventory information, by default it uses the inventory script JSON format')
opt_help.add_inventory_options(self.parser)
opt_help.add_vault_options(self.parser)
opt_help.add_basedir_options(self.parser)
# remove unused default options
self.parser.remove_option('--limit')
self.parser.remove_option('--list-hosts')
# Actions
action_group = optparse.OptionGroup(self.parser, "Actions", "One of following must be used on invocation, ONLY ONE!")
action_group.add_option("--list", action="store_true", default=False, dest='list', help='Output all hosts info, works as inventory script')
action_group.add_option("--host", action="store", default=None, dest='host', help='Output specific host info, works as inventory script')
action_group.add_option("--graph", action="store_true", default=False, dest='graph',
help='create inventory graph, if supplying pattern it must be a valid group name')
self.parser.add_option_group(action_group)
# graph
self.parser.add_option("-y", "--yaml", action="store_true", default=False, dest='yaml',
help='Use YAML format instead of default JSON, ignored for --graph')
self.parser.add_option('--toml', action='store_true', default=False, dest='toml',
help='Use TOML format instead of default JSON, ignored for --graph')
self.parser.add_option("--vars", action="store_true", default=False, dest='show_vars',
help='Add vars to graph display, ignored unless used with --graph')
# list
self.parser.add_option("--export", action="store_true", default=C.INVENTORY_EXPORT, dest='export',
help="When doing an --list, represent in a way that is optimized for export,"
"not as an accurate representation of how Ansible has processed it")
# self.parser.add_option("--ignore-vars-plugins", action="store_true", default=False, dest='ignore_vars_plugins',
# help="When doing an --list, skip vars data from vars plugins, by default, this would include group_vars/ and host_vars/")
def post_process_args(self, options, args):
options, args = super(InventoryCLI, self).post_process_args(options, args)
display.verbosity = options.verbosity
self.validate_conflicts(options, vault_opts=True)
# there can be only one! and, at least, one!
used = 0
for opt in (options.list, options.host, options.graph):
if opt:
used += 1
if used == 0:
raise AnsibleOptionsError("No action selected, at least one of --host, --graph or --list needs to be specified.")
elif used > 1:
raise AnsibleOptionsError("Conflicting options used, only one of --host, --graph or --list can be used at the same time.")
# set host pattern to default if not supplied
if len(args) > 0:
options.pattern = args[0]
else:
options.pattern = 'all'
return options, args
def run(self):
super(InventoryCLI, self).run()
# Initialize needed objects
self.loader, self.inventory, self.vm = self._play_prereqs()
results = None
if context.CLIARGS['host']:
hosts = self.inventory.get_hosts(context.CLIARGS['host'])
if len(hosts) != 1:
raise AnsibleOptionsError("You must pass a single valid host to --host parameter")
myvars = self._get_host_variables(host=hosts[0])
self._remove_internal(myvars)
# FIXME: should we template first?
results = self.dump(myvars)
elif context.CLIARGS['graph']:
results = self.inventory_graph()
elif context.CLIARGS['list']:
top = self._get_group('all')
if context.CLIARGS['yaml']:
results = self.yaml_inventory(top)
elif context.CLIARGS['toml']:
results = self.toml_inventory(top)
else:
results = self.json_inventory(top)
results = self.dump(results)
if results:
# FIXME: pager?
display.display(results)
exit(0)
exit(1)
@staticmethod
def dump(stuff):
if context.CLIARGS['yaml']:
import yaml
from ansible.parsing.yaml.dumper import AnsibleDumper
results = yaml.dump(stuff, Dumper=AnsibleDumper, default_flow_style=False)
elif context.CLIARGS['toml']:
from ansible.plugins.inventory.toml import toml_dumps, HAS_TOML
if not HAS_TOML:
raise AnsibleError(
'The python "toml" library is required when using the TOML output format'
)
results = toml_dumps(stuff)
else:
import json
from ansible.parsing.ajson import AnsibleJSONEncoder
results = json.dumps(stuff, cls=AnsibleJSONEncoder, sort_keys=True, indent=4)
return results
# FIXME: refactor to use same for VM
def get_plugin_vars(self, path, entity):
data = {}
def _get_plugin_vars(plugin, path, entities):
data = {}
try:
data = plugin.get_vars(self.loader, path, entity)
except AttributeError:
try:
if isinstance(entity, Host):
data = combine_vars(data, plugin.get_host_vars(entity.name))
else:
data = combine_vars(data, plugin.get_group_vars(entity.name))
except AttributeError:
if hasattr(plugin, 'run'):
raise AnsibleError("Cannot use v1 type vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
else:
raise AnsibleError("Invalid vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
return data
for plugin in vars_loader.all():
data = combine_vars(data, _get_plugin_vars(plugin, path, entity))
return data
def _get_group_variables(self, group):
# get info from inventory source
res = group.get_vars()
# FIXME: add switch to skip vars plugins, add vars plugin info
for inventory_dir in self.inventory._sources:
res = combine_vars(res, self.get_plugin_vars(inventory_dir, group))
if group.priority != 1:
res['ansible_group_priority'] = group.priority
return res
def _get_host_variables(self, host):
if context.CLIARGS['export']:
hostvars = host.get_vars()
# FIXME: add switch to skip vars plugins
# add vars plugin info
for inventory_dir in self.inventory._sources:
hostvars = combine_vars(hostvars, self.get_plugin_vars(inventory_dir, host))
else:
hostvars = self.vm.get_vars(host=host, include_hostvars=False)
return hostvars
def _get_group(self, gname):
group = self.inventory.groups.get(gname)
return group
@staticmethod
def _remove_internal(dump):
for internal in INTERNAL_VARS:
if internal in dump:
del dump[internal]
@staticmethod
def _remove_empty(dump):
# remove empty keys
for x in ('hosts', 'vars', 'children'):
if x in dump and not dump[x]:
del dump[x]
@staticmethod
def _show_vars(dump, depth):
result = []
InventoryCLI._remove_internal(dump)
if context.CLIARGS['show_vars']:
for (name, val) in sorted(dump.items()):
result.append(InventoryCLI._graph_name('{%s = %s}' % (name, val), depth))
return result
@staticmethod
def _graph_name(name, depth=0):
if depth:
name = " |" * (depth) + "--%s" % name
return name
def _graph_group(self, group, depth=0):
result = [self._graph_name('@%s:' % group.name, depth)]
depth = depth + 1
for kid in sorted(group.child_groups, key=attrgetter('name')):
result.extend(self._graph_group(kid, depth))
if group.name != 'all':
for host in sorted(group.hosts, key=attrgetter('name')):
result.append(self._graph_name(host.name, depth))
result.extend(self._show_vars(host.get_vars(), depth + 1))
result.extend(self._show_vars(self._get_group_variables(group), depth))
return result
def inventory_graph(self):
start_at = self._get_group(context.CLIARGS['pattern'])
if start_at:
return '\n'.join(self._graph_group(start_at))
else:
raise AnsibleOptionsError("Pattern must be valid group name when using --graph")
def json_inventory(self, top):
seen = set()
def format_group(group):
results = {}
results[group.name] = {}
if group.name != 'all':
results[group.name]['hosts'] = [h.name for h in sorted(group.hosts, key=attrgetter('name'))]
results[group.name]['children'] = []
for subgroup in sorted(group.child_groups, key=attrgetter('name')):
results[group.name]['children'].append(subgroup.name)
if subgroup.name not in seen:
results.update(format_group(subgroup))
seen.add(subgroup.name)
if context.CLIARGS['export']:
results[group.name]['vars'] = self._get_group_variables(group)
self._remove_empty(results[group.name])
if not results[group.name]:
del results[group.name]
return results
results = format_group(top)
# populate meta
results['_meta'] = {'hostvars': {}}
hosts = self.inventory.get_hosts()
for host in hosts:
hvars = self._get_host_variables(host)
if hvars:
self._remove_internal(hvars)
results['_meta']['hostvars'][host.name] = hvars
return results
def yaml_inventory(self, top):
seen = []
def format_group(group):
results = {}
# initialize group + vars
results[group.name] = {}
# subgroups
results[group.name]['children'] = {}
for subgroup in sorted(group.child_groups, key=attrgetter('name')):
if subgroup.name != 'all':
results[group.name]['children'].update(format_group(subgroup))
# hosts for group
results[group.name]['hosts'] = {}
if group.name != 'all':
for h in sorted(group.hosts, key=attrgetter('name')):
myvars = {}
if h.name not in seen: # avoid defining host vars more than once
seen.append(h.name)
myvars = self._get_host_variables(host=h)
self._remove_internal(myvars)
results[group.name]['hosts'][h.name] = myvars
if context.CLIARGS['export']:
gvars = self._get_group_variables(group)
if gvars:
results[group.name]['vars'] = gvars
self._remove_empty(results[group.name])
return results
return format_group(top)
def toml_inventory(self, top):
seen = set()
has_ungrouped = bool(next(g.hosts for g in top.child_groups if g.name == 'ungrouped'))
def format_group(group):
results = {}
results[group.name] = {}
results[group.name]['children'] = []
for subgroup in sorted(group.child_groups, key=attrgetter('name')):
if subgroup.name == 'ungrouped' and not has_ungrouped:
continue
if group.name != 'all':
results[group.name]['children'].append(subgroup.name)
results.update(format_group(subgroup))
if group.name != 'all':
for host in sorted(group.hosts, key=attrgetter('name')):
if host.name not in seen:
seen.add(host.name)
host_vars = self._get_host_variables(host=host)
self._remove_internal(host_vars)
else:
host_vars = {}
try:
results[group.name]['hosts'][host.name] = host_vars
except KeyError:
results[group.name]['hosts'] = {host.name: host_vars}
if context.CLIARGS['export']:
results[group.name]['vars'] = self._get_group_variables(group)
self._remove_empty(results[group.name])
if not results[group.name]:
del results[group.name]
return results
results = format_group(top)
return results
| gpl-3.0 | 7,486,966,400,832,373,000 | 37.385572 | 153 | 0.555181 | false |
dennisss/sympy | sympy/solvers/inequalities.py | 1 | 14981 | """Tools for solving inequalities and systems of inequalities. """
from __future__ import print_function, division
from sympy.core import Symbol
from sympy.sets import Interval
from sympy.core.relational import Relational, Eq, Ge, Lt
from sympy.sets.sets import FiniteSet, Union
from sympy.core.singleton import S
from sympy.assumptions import ask, AppliedPredicate, Q
from sympy.functions import re, im, Abs
from sympy.logic import And
from sympy.polys import Poly, PolynomialError, parallel_poly_from_expr
from sympy.simplify import simplify
def solve_poly_inequality(poly, rel):
"""Solve a polynomial inequality with rational coefficients.
Examples
========
>>> from sympy import Poly
>>> from sympy.abc import x
>>> from sympy.solvers.inequalities import solve_poly_inequality
>>> solve_poly_inequality(Poly(x, x, domain='ZZ'), '==')
[{0}]
>>> solve_poly_inequality(Poly(x**2 - 1, x, domain='ZZ'), '!=')
[(-oo, -1), (-1, 1), (1, oo)]
>>> solve_poly_inequality(Poly(x**2 - 1, x, domain='ZZ'), '==')
[{-1}, {1}]
See Also
========
solve_poly_inequalities
"""
reals, intervals = poly.real_roots(multiple=False), []
if rel == '==':
for root, _ in reals:
interval = Interval(root, root)
intervals.append(interval)
elif rel == '!=':
left = S.NegativeInfinity
for right, _ in reals + [(S.Infinity, 1)]:
interval = Interval(left, right, True, True)
intervals.append(interval)
left = right
else:
if poly.LC() > 0:
sign = +1
else:
sign = -1
eq_sign, equal = None, False
if rel == '>':
eq_sign = +1
elif rel == '<':
eq_sign = -1
elif rel == '>=':
eq_sign, equal = +1, True
elif rel == '<=':
eq_sign, equal = -1, True
else:
raise ValueError("'%s' is not a valid relation" % rel)
right, right_open = S.Infinity, True
reals.sort(key=lambda w: w[0], reverse=True)
for left, multiplicity in reals:
if multiplicity % 2:
if sign == eq_sign:
intervals.insert(
0, Interval(left, right, not equal, right_open))
sign, right, right_open = -sign, left, not equal
else:
if sign == eq_sign and not equal:
intervals.insert(
0, Interval(left, right, True, right_open))
right, right_open = left, True
elif sign != eq_sign and equal:
intervals.insert(0, Interval(left, left))
if sign == eq_sign:
intervals.insert(
0, Interval(S.NegativeInfinity, right, True, right_open))
return intervals
def solve_poly_inequalities(polys):
"""Solve polynomial inequalities with rational coefficients.
Examples
========
>>> from sympy.solvers.inequalities import solve_poly_inequalities
>>> from sympy.polys import Poly
>>> from sympy.abc import x
>>> solve_poly_inequalities(((
... Poly(x**2 - 3), ">"), (
... Poly(-x**2 + 1), ">")))
(-oo, -sqrt(3)) U (-1, 1) U (sqrt(3), oo)
"""
from sympy import Union
return Union(*[solve_poly_inequality(*p) for p in polys])
def solve_rational_inequalities(eqs):
"""Solve a system of rational inequalities with rational coefficients.
Examples
========
>>> from sympy.abc import x
>>> from sympy import Poly
>>> from sympy.solvers.inequalities import solve_rational_inequalities
>>> solve_rational_inequalities([[
... ((Poly(-x + 1), Poly(1, x)), '>='),
... ((Poly(-x + 1), Poly(1, x)), '<=')]])
{1}
>>> solve_rational_inequalities([[
... ((Poly(x), Poly(1, x)), '!='),
... ((Poly(-x + 1), Poly(1, x)), '>=')]])
(-oo, 0) U (0, 1]
See Also
========
solve_poly_inequality
"""
result = S.EmptySet
for _eqs in eqs:
global_intervals = None
for (numer, denom), rel in _eqs:
numer_intervals = solve_poly_inequality(numer*denom, rel)
denom_intervals = solve_poly_inequality(denom, '==')
if global_intervals is None:
global_intervals = numer_intervals
else:
intervals = []
for numer_interval in numer_intervals:
for global_interval in global_intervals:
interval = numer_interval.intersect(global_interval)
if interval is not S.EmptySet:
intervals.append(interval)
global_intervals = intervals
intervals = []
for global_interval in global_intervals:
for denom_interval in denom_intervals:
global_interval -= denom_interval
if global_interval is not S.EmptySet:
intervals.append(global_interval)
global_intervals = intervals
if not global_intervals:
break
for interval in global_intervals:
result = result.union(interval)
return result
def reduce_rational_inequalities(exprs, gen, assume=True, relational=True):
"""Reduce a system of rational inequalities with rational coefficients.
Examples
========
>>> from sympy import Poly, Symbol
>>> from sympy.solvers.inequalities import reduce_rational_inequalities
>>> x = Symbol('x', real=True)
>>> reduce_rational_inequalities([[x**2 <= 0]], x)
x == 0
>>> reduce_rational_inequalities([[x + 2 > 0]], x)
x > -2
>>> reduce_rational_inequalities([[(x + 2, ">")]], x)
x > -2
>>> reduce_rational_inequalities([[x + 2]], x)
x == -2
"""
exact = True
eqs = []
for _exprs in exprs:
_eqs = []
for expr in _exprs:
if isinstance(expr, tuple):
expr, rel = expr
else:
if expr.is_Relational:
expr, rel = expr.lhs - expr.rhs, expr.rel_op
else:
expr, rel = expr, '=='
try:
(numer, denom), opt = parallel_poly_from_expr(
expr.together().as_numer_denom(), gen)
except PolynomialError:
raise PolynomialError("only polynomials and "
"rational functions are supported in this context")
if not opt.domain.is_Exact:
numer, denom, exact = numer.to_exact(), denom.to_exact(), False
domain = opt.domain.get_exact()
if not (domain.is_ZZ or domain.is_QQ):
raise NotImplementedError(
"inequality solving is not supported over %s" % opt.domain)
_eqs.append(((numer, denom), rel))
eqs.append(_eqs)
solution = solve_rational_inequalities(eqs)
if not exact:
solution = solution.evalf()
if not relational:
return solution
real = ask(Q.real(gen), assumptions=assume)
if not real:
result = And(solution.as_relational(re(gen)), Eq(im(gen), 0))
else:
result = solution.as_relational(gen)
return result
def reduce_abs_inequality(expr, rel, gen, assume=True):
"""Reduce an inequality with nested absolute values.
Examples
========
>>> from sympy import Q, Abs
>>> from sympy.abc import x
>>> from sympy.solvers.inequalities import reduce_abs_inequality
>>> reduce_abs_inequality(Abs(x - 5) - 3, '<', x, assume=Q.real(x))
And(2 < x, x < 8)
>>> reduce_abs_inequality(Abs(x + 2)*3 - 13, '<', x, assume=Q.real(x))
And(-19/3 < x, x < 7/3)
See Also
========
reduce_abs_inequalities
"""
if not ask(Q.real(gen), assumptions=assume):
raise NotImplementedError("can't solve inequalities with absolute "
"values of a complex variable")
def _bottom_up_scan(expr):
exprs = []
if expr.is_Add or expr.is_Mul:
op = expr.__class__
for arg in expr.args:
_exprs = _bottom_up_scan(arg)
if not exprs:
exprs = _exprs
else:
args = []
for expr, conds in exprs:
for _expr, _conds in _exprs:
args.append((op(expr, _expr), conds + _conds))
exprs = args
elif expr.is_Pow:
n = expr.exp
if not n.is_Integer or n < 0:
raise ValueError(
"only non-negative integer powers are allowed")
_exprs = _bottom_up_scan(expr.base)
for expr, conds in _exprs:
exprs.append((expr**n, conds))
elif isinstance(expr, Abs):
_exprs = _bottom_up_scan(expr.args[0])
for expr, conds in _exprs:
exprs.append(( expr, conds + [Ge(expr, 0)]))
exprs.append((-expr, conds + [Lt(expr, 0)]))
else:
exprs = [(expr, [])]
return exprs
exprs = _bottom_up_scan(expr)
mapping = {'<': '>', '<=': '>='}
inequalities = []
for expr, conds in exprs:
        if rel not in mapping:
expr = Relational( expr, 0, rel)
else:
expr = Relational(-expr, 0, mapping[rel])
inequalities.append([expr] + conds)
return reduce_rational_inequalities(inequalities, gen, assume)
def reduce_abs_inequalities(exprs, gen, assume=True):
"""Reduce a system of inequalities with nested absolute values.
Examples
========
>>> from sympy import Q, Abs
>>> from sympy.abc import x
>>> from sympy.solvers.inequalities import reduce_abs_inequalities
>>> reduce_abs_inequalities([(Abs(3*x - 5) - 7, '<'),
... (Abs(x + 25) - 13, '>')], x, assume=Q.real(x))
And(-2/3 < x, Or(x < -38, x > -12), x < 4)
>>> reduce_abs_inequalities([(Abs(x - 4) + Abs(3*x - 5) - 7, '<')], x,
... assume=Q.real(x))
And(1/2 < x, x < 4)
See Also
========
reduce_abs_inequality
"""
return And(*[ reduce_abs_inequality(expr, rel, gen, assume)
for expr, rel in exprs ])
def solve_univariate_inequality(expr, gen, assume=True, relational=True):
"""Solves a real univariate inequality.
Examples
========
>>> from sympy.solvers.inequalities import solve_univariate_inequality
>>> from sympy.core.symbol import Symbol
>>> x = Symbol('x', real=True)
>>> solve_univariate_inequality(x**2 >= 4, x)
Or(x <= -2, x >= 2)
>>> solve_univariate_inequality(x**2 >= 4, x, relational=False)
(-oo, -2] U [2, oo)
"""
    # Implementation for continuous functions
from sympy.solvers.solvers import solve
solns = solve(expr.lhs - expr.rhs, gen, assume=assume)
oo = S.Infinity
start = -oo
sol_sets = [S.EmptySet]
for x in sorted(s for s in solns if s.is_real):
end = x
if simplify(expr.subs(gen, (start + end)/2 if start != -oo else end - 1)):
sol_sets.append(Interval(start, end, True, True))
if simplify(expr.subs(gen, x)):
sol_sets.append(FiniteSet(x))
start = end
end = oo
if simplify(expr.subs(gen, start + 1)):
sol_sets.append(Interval(start, end, True, True))
rv = Union(*sol_sets)
return rv if not relational else rv.as_relational(gen)
def _solve_inequality(ie, s, assume=True):
""" A hacky replacement for solve, since the latter only works for
univariate inequalities. """
    if ie.rel_op not in ('>', '>=', '<', '<='):
raise NotImplementedError
expr = ie.lhs - ie.rhs
try:
p = Poly(expr, s)
if p.degree() != 1:
raise NotImplementedError
except (PolynomialError, NotImplementedError):
try:
n, d = expr.as_numer_denom()
return reduce_rational_inequalities([[ie]], s, assume=assume)
except PolynomialError:
return solve_univariate_inequality(ie, s, assume=assume)
a, b = p.all_coeffs()
if a.is_positive:
return ie.func(s, -b/a)
elif a.is_negative:
return ie.func(-b/a, s)
else:
raise NotImplementedError
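# A small worked check (assumed real symbol x) of the linear path above:
# for 2*x + 3 < 7, expr = 2*x - 4, so the Poly coefficients are a=2, b=-4;
# a is positive, so the result is ie.func(x, -b/a), i.e. x < 2.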
def reduce_inequalities(inequalities, assume=True, symbols=[]):
"""Reduce a system of inequalities with rational coefficients.
Examples
========
>>> from sympy import Q, sympify as S
>>> from sympy.abc import x, y
>>> from sympy.solvers.inequalities import reduce_inequalities
>>> reduce_inequalities(S(0) <= x + 3, Q.real(x), [])
x >= -3
>>> reduce_inequalities(S(0) <= x + y*2 - 1, True, [x])
-2*y + 1 <= x
"""
if not hasattr(inequalities, '__iter__'):
inequalities = [inequalities]
if len(inequalities) == 1 and len(symbols) == 1 \
and inequalities[0].is_Relational:
try:
return _solve_inequality(inequalities[0], symbols[0],
assume=assume)
except NotImplementedError:
pass
poly_part, abs_part, extra_assume = {}, {}, []
for inequality in inequalities:
if inequality == True:
continue
elif inequality == False:
return False
if isinstance(inequality, AppliedPredicate):
extra_assume.append(inequality)
continue
if inequality.is_Relational:
expr, rel = inequality.lhs - inequality.rhs, inequality.rel_op
else:
expr, rel = inequality, '=='
gens = expr.free_symbols
if not gens:
return False
elif len(gens) == 1:
gen = gens.pop()
else:
raise NotImplementedError(
"only univariate inequalities are supported")
components = expr.find(lambda u: u.is_Function)
if not components:
if gen in poly_part:
poly_part[gen].append((expr, rel))
else:
poly_part[gen] = [(expr, rel)]
else:
if all(isinstance(comp, Abs) for comp in components):
if gen in abs_part:
abs_part[gen].append((expr, rel))
else:
abs_part[gen] = [(expr, rel)]
else:
raise NotImplementedError("can't reduce %s" % inequalities)
extra_assume = And(*extra_assume)
if assume is not None:
assume = And(assume, extra_assume)
else:
assume = extra_assume
poly_reduced = []
abs_reduced = []
for gen, exprs in poly_part.items():
poly_reduced.append(reduce_rational_inequalities([exprs], gen, assume))
for gen, exprs in abs_part.items():
abs_reduced.append(reduce_abs_inequalities(exprs, gen, assume))
return And(*(poly_reduced + abs_reduced))
| bsd-3-clause | 6,071,974,645,913,519,000 | 27.699234 | 82 | 0.544289 | false |
rwl/PyCIM | CIM15/CDPSM/Connectivity/IEC61970/Wires/Jumper.py | 1 | 1708 | # Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM15.CDPSM.Connectivity.IEC61970.Wires.Switch import Switch
class Jumper(Switch):
"""A short section of conductor with negligible impedance which can be manually removed and replaced if the circuit is de-energized. Note that zero-impedance branches can be modelled by an ACLineSegment with a zero impedance ConductorType
"""
def __init__(self, *args, **kw_args):
"""Initialises a new 'Jumper' instance.
"""
super(Jumper, self).__init__(*args, **kw_args)
_attrs = []
_attr_types = {}
_defaults = {}
_enums = {}
_refs = []
_many_refs = []
| mit | -2,513,537,015,504,231,000 | 42.794872 | 242 | 0.730679 | false |
DataONEorg/d1_python | lib_scimeta/src/d1_scimeta/schema_prepare.py | 1 | 14551 | #!/usr/bin/env python
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Prepare DataONE Science Metadata schema files for use with lxml.
The `d1_scimeta` validator is based on the `lxml` library. `lxml` cannot easily be
blocked from following HTTP URLs referenced in `schemaLocation` attributes in
`xs:include` and `xs:import` elements in XSD schemas used while validating an XML doc.
As outgoing network connections and associated delays are not acceptable in many
validation scenarios, this script rewrites URLs in the schemas to point to existing
local XSD files. Or, where there are no existing local XSDs, downloads them to a local
cache, and rewrites the `schemaLocation` attributes to reference them in the cache.
The general procedure is as follows:
- Each set of schema files that corresponds with a single `formatId` are handled
separately.
- A dictionary is created that maps schema file names to schema file positions.
- Each schema file is searched for xs:include and xs:import elements holding
`schemaLocation` attributes.
- Whenever a `schemaLocation` holding a URL is found, it is replaced with the relative
path to a local XSD file with the same name, selected from the XSD files that share
the same `formatId`.
- If a local XSD cannot be found, it is downloaded from the `schemaLocation` and stored
in a local cache. The `schemaLocation` is then rewritten to reference the file in the
cache.
- Files downloaded to the cache are themselves rewritten and their dependencies are
downloaded to the cache recursively until there are no remaining HTTP `schemaLocation`
attributes.
See the README.md in this directory for more information about how to use this script.
"""
import logging
import os
import re
import shutil
import subprocess
import requests
import d1_scimeta.util
import d1_common.utils.filesystem
import d1_common.utils.ulog
import d1_client.command_line
import d1_test.pycharm
log = logging.getLogger(__name__)
def main():
d1_client.command_line.log_setup(is_debug=False)
add_log_file()
d1_common.utils.filesystem.create_missing_directories_for_dir(
d1_scimeta.util.SCHEMA_ROOT_PATH
)
cache_dir_path = os.path.join(d1_scimeta.util.SCHEMA_ROOT_PATH, "_cache")
d1_common.utils.filesystem.create_missing_directories_for_dir(cache_dir_path)
for format_id in d1_scimeta.util.get_supported_format_id_list():
prepare_schema_for_format_id(format_id, cache_dir_path)
cache_rewrite(cache_dir_path)
def add_log_file():
"""Add logging to file so that changes that were made to the schema files are
automatically captured."""
log_file_name = d1_common.utils.filesystem.abs_path(
"./ext/{}.log".format(os.path.splitext(__file__)[0])
)
if os.path.exists(log_file_name):
os.unlink(log_file_name)
file_handler = logging.FileHandler(log_file_name)
log_formatter = logging.Formatter("%(levelname)-8s %(message)s", None)
file_handler.setFormatter(log_formatter)
log.addHandler(file_handler)
# Download, rewrite and cache schema dependencies that are not included directly in the
# DataONE schema set.
def cache_rewrite(cache_dir_path):
"""Rewrite XSDs downloaded to cache dir.
Since rewriting the XSDs can cause more XSDs to be downloaded, this calls
rewrite_xsd() repeatedly until all XSDs have been downloaded and processed.
"""
done_xsd_path_set = set()
while True:
xsd_path_set = (
set(d1_scimeta.util.gen_abs_xsd_path_list(cache_dir_path))
- done_xsd_path_set
)
files_modified = cache_rewrite_all_xsd(cache_dir_path, sorted(xsd_path_set))
if not files_modified:
break
done_xsd_path_set.update(xsd_path_set)
def cache_rewrite_all_xsd(cache_dir_path, xsd_path_list):
log.info("#" * 100)
log.info(cache_dir_path)
files_modified = False
for xsd_path in xsd_path_list:
files_modified |= cache_rewrite_single_xsd(cache_dir_path, xsd_path)
return files_modified
def cache_rewrite_single_xsd(cache_dir_path, xsd_path):
# create_from_original(xsd_path)
try:
xsd_tree = d1_scimeta.util.load_xml_file_to_tree(xsd_path)
except d1_scimeta.util.SciMetaError:
return False
if not has_http_schema_locations(xsd_tree):
return False
log.info("-" * 100)
log.info(xsd_path)
files_modified = False
for loc_el in xsd_tree.xpath(
"//xs:include|xs:import", namespaces=d1_scimeta.util.NS_MAP
):
try:
files_modified |= cache_rewrite_uri(cache_dir_path, xsd_path, loc_el)
except SchemaRewriteError as e:
log.error("Unable to rewrite: {}".format(e))
if files_modified:
create_original(xsd_path)
d1_scimeta.util.save_tree_to_file(xsd_tree, xsd_path)
# show_diff(get_original_path(xsd_path), xsd_path)
return files_modified
def cache_rewrite_uri(cache_dir_path, xsd_path, loc_el):
uri = loc_el.attrib["schemaLocation"]
if not d1_scimeta.util.is_url(uri):
return False
cache_rewrite_to_cache(cache_dir_path, xsd_path, loc_el, uri)
return True
def cache_rewrite_to_cache(cache_dir_path, xsd_path, loc_el, download_url):
child_xsd_tree = download_xsd(download_url)
rel_to_abs_include_import(download_url, child_xsd_tree)
xsd_name = gen_cache_name(download_url)
cache_path = os.path.join(cache_dir_path, xsd_name)
d1_scimeta.util.save_tree_to_file(child_xsd_tree, cache_path)
log.info("Wrote XSD to: {}".format(cache_path))
rel_path = d1_scimeta.util.get_rel_path(xsd_path, cache_path)
loc_el.attrib["schemaLocation"] = rel_path
log.info("Rewrite ok: {} -> {}".format(download_url, rel_path))
# DataONE Science Metadata schema set rewrite
def prepare_schema_for_format_id(format_id, cache_dir_path):
"""Prepare all XSD files for a given format_id.
"""
log.info("#" * 100)
branch_path = d1_scimeta.util.get_abs_schema_branch_path(format_id)
xsd_path_list = d1_scimeta.util.gen_abs_xsd_path_list(branch_path)
xsd_name_dict = d1_scimeta.util.gen_xsd_name_dict(branch_path, xsd_path_list)
log.info("Schema branch: {}".format(branch_path))
log.info("Number of XSD: {}".format(len(xsd_path_list)))
# d1_scimeta.util.dump(xsd_path_list, "xsd_path_list")
# d1_scimeta.util.dump(xsd_name_dict, "xsd_name_list")
schema_is_modified = False
for xsd_path in xsd_path_list:
schema_is_modified |= prepare_single_xsd(
format_id, xsd_name_dict, xsd_path, cache_dir_path
)
return schema_is_modified
def prepare_single_xsd(format_id, xsd_name_dict, xsd_path, cache_dir_path):
log.info("-" * 100)
log.info("XSD: {}".format(xsd_path))
xsd_tree = load_xsd_file(xsd_path)
xsd_is_modified = False
xslt_path = gen_schema_transform_xslt_path(format_id, xsd_path)
if xslt_path:
log.info("Applying XSLT: {}".format(xslt_path))
xsd_tree = d1_scimeta.util.apply_xslt_transform(xsd_tree, xslt_path)
xsd_is_modified = True
if has_http_schema_locations(xsd_tree):
xsd_is_modified |= rewrite_single_xsd(
xsd_path, xsd_tree, xsd_name_dict, cache_dir_path
)
if xsd_is_modified:
save_xsd_file(xsd_path, xsd_tree)
# show_diff(get_original_path(xsd_path), xsd_path)
return xsd_is_modified
def load_xsd_file(xsd_path):
create_from_original(xsd_path)
return d1_scimeta.util.load_xml_file_to_tree(xsd_path)
def save_xsd_file(xsd_path, xsd_tree):
create_original(xsd_path)
d1_scimeta.util.save_tree_to_file(xsd_tree, xsd_path)
def rewrite_single_xsd(xsd_path, xsd_tree, xsd_name_dict, cache_dir_path):
"""Modifify `schemaLocation` URIs in xs:include and xs:import elements to relative
paths pointing to local files instead of to the web in a single XSD file.
E.g.:
`schemaLocation` URI = http://standards.iso.org/ittf/PubliclyAvailableStandards/ISO_19139_Schemas/gmd/gmd.xsd
-> ../gmd/gmd.xsd
Args:
xsd_path: str
Abs path to XSD file to rewrite
Returns:
True if any files were modified
"""
files_modified = False
for loc_el in xsd_tree.xpath(
"//xs:include|xs:import", namespaces=d1_scimeta.util.NS_MAP
):
try:
files_modified |= rewrite_uri(
xsd_path, loc_el, xsd_name_dict, cache_dir_path
)
except SchemaRewriteError as e:
log.error("Unable to rewrite: {}".format(e))
return files_modified
def rewrite_uri(xsd_path, loc_el, xsd_name_dict, cache_dir_path):
"""Rewrite the `schemaLocation` in a single xs:include or xs:import element.
Args:
xsd_path: str
Abs path to the XML file to which the element belongs.
loc_el: Element
xs:include or xs:import element holding a `schemaLocation` URI.
xsd_name_dict:
cache_dir_path:
Returns:
True if the `schemaLocation` was rewritten.
"""
uri = loc_el.attrib["schemaLocation"]
if not d1_scimeta.util.is_url(uri):
return False
# uri = os.path.join(xsd_path, uri)
try:
abs_trans_path = d1_scimeta.util.get_xsd_path(xsd_name_dict, uri)
rel_trans_path = d1_scimeta.util.get_rel_path(xsd_path, abs_trans_path)
loc_el.attrib["schemaLocation"] = rel_trans_path
log.info("Rewrite ok: {} -> {}".format(uri, rel_trans_path))
except d1_scimeta.util.SciMetaError:
# An XSD with the required name was not found. Download it to cache.
rewrite_to_cache(xsd_path, loc_el, uri, cache_dir_path)
return True
def rewrite_to_cache(xsd_path, loc_el, download_url, cache_dir_path):
"""Download XSD which does not exist locally and rewrite to it.
Args:
xsd_path: str
Abs path to XSD file that has the `schemaLocation`.
download_url: str
URL from which to download the XSD.
cache_dir_path:
Abs path to dir in which to store the downloaded XSD.
loc_el: Element
xs:include or xs:import element holding a `schemaLocation` URI.
"""
xsd_name = gen_cache_name(download_url)
cache_path = os.path.join(cache_dir_path, xsd_name)
if os.path.exists(cache_path):
log.info("Skipped download: Already exists: {}".format(cache_path))
else:
child_xsd_tree = download_xsd(download_url)
rel_to_abs_include_import(download_url, child_xsd_tree)
d1_scimeta.util.save_tree_to_file(child_xsd_tree, cache_path)
log.info("Downloaded XSD: {} -> {}".format(download_url, cache_path))
rel_path = d1_scimeta.util.get_rel_path(xsd_path, cache_path)
loc_el.attrib["schemaLocation"] = rel_path
log.info("Rewrite ok: {} -> {}".format(download_url, rel_path))
def rel_to_abs_include_import(download_url, xsd_tree):
for loc_el in xsd_tree.xpath(
"//xs:include|xs:import", namespaces=d1_scimeta.util.NS_MAP
):
loc_el.attrib["schemaLocation"] = d1_scimeta.util.gen_abs_uri(
download_url, loc_el.attrib["schemaLocation"]
)
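# Illustrative example (hypothetical values, assuming gen_abs_uri resolves
# like urllib.parse.urljoin): with download_url 'http://example.com/a/b.xsd',
# a child schemaLocation of '../c/d.xsd' becomes 'http://example.com/c/d.xsd'.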
def download_xsd(url):
"""Download XSD and check that it's well formed XML.
Args: url: str URL from which to download the XSD.
"""
response = requests.get(url)
if response.status_code != 200:
raise SchemaRewriteError(
'Download error. url="{}" code={}'.format(url, response.status_code)
)
return d1_scimeta.util.parse_xml_bytes(response.content, url)
def gen_cache_name(uri):
"""Generate a local filename for an XSD that will be saved in the cache.
"""
path, file_name = os.path.split(uri)
name_str = "{}__{}".format(path, file_name)
return re.sub(r"[^a-z0-9_\-.]+", "_", name_str.lower())
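# Illustrative example (not from the original source):
#   gen_cache_name('http://example.com/schemas/gmd/gmd.xsd')
#   returns 'http_example.com_schemas_gmd__gmd.xsd'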
def has_http_schema_locations(xsd_tree):
"""Return True if there is at least one `schemaLocation` in the doc which contains a
http or https URI."""
for uri in xsd_tree.xpath("//*/@schemaLocation", namespaces=d1_scimeta.util.NS_MAP):
if d1_scimeta.util.is_url(uri):
return True
return False
def gen_original_path(xml_path):
"""Generate the path to the original version of the XML doc at xml_path."""
return "{}.ORIGINAL{}".format(*os.path.splitext(xml_path))
def create_from_original(xsd_path):
"""If xsd has been updated before, use the original as source."""
original_path = gen_original_path(xsd_path)
if os.path.exists(original_path):
shutil.copy(original_path, xsd_path)
def create_original(xsd_path):
"""Copy file to original path.
If an original file does not exist for the XSD at `xsd_path`, copy the XSD to the
original file location.
"""
original_path = gen_original_path(xsd_path)
if not os.path.exists(original_path):
shutil.copy(xsd_path, original_path)
def gen_schema_transform_xslt_path(format_id, xsd_path):
"""Get the path to any XSLT file that needs to be applied to this XSD file. If no
XSLT file has been provided, return None."""
orig_base, orig_ext = os.path.splitext(
d1_scimeta.util.get_abs_root_xsd_path(format_id)
)
xsd_base_name = os.path.splitext(os.path.split(xsd_path)[1])[0]
xslt_path = "{}.{}.xslt".format(orig_base, xsd_base_name)
log.info("Checking for XSLT at: {}".format(xslt_path))
return xslt_path if os.path.isfile(xslt_path) else None
def show_diff(original_path, rewritten_path):
"""Open the PyCharm diff viewer."""
try:
d1_test.pycharm.diff(original_path, rewritten_path)
except subprocess.CalledProcessError:
pass
class SchemaRewriteError(Exception):
pass
if __name__ == "__main__":
main()
| apache-2.0 | -5,827,841,012,437,342,000 | 31.407572 | 117 | 0.672462 | false |
CodeNameGhost/shiva | thirdparty/scapy/contrib/tacacs.py | 1 | 14481 | #!/usr/bin/env python
'''
Copyright (C) 2017 Francois Contat <[email protected]>
This program is published under a GPLv2 license
Based on tacacs+ v6 draft https://tools.ietf.org/html/draft-ietf-opsawg-tacacs-06
'''
import struct
import hashlib
from scapy.packet import Packet, bind_layers
from scapy.fields import ByteEnumField, ByteField, IntField
from scapy.fields import FieldListField
from scapy.fields import FieldLenField, ConditionalField, StrLenField
from scapy.layers.inet import TCP
from scapy.config import conf
SECRET = 'test'
def obfuscate(pay, secret, session_id, version, seq):
'''
Obfuscation methodology from section 3.7
https://tools.ietf.org/html/draft-ietf-opsawg-tacacs-06#section-3.7
'''
pad = ''
curr_pad = ''
# pad length must equal the payload to obfuscate.
# pad = {MD5_1 [,MD5_2 [ ... ,MD5_n]]}
while len(pad) < len(pay):
msg = hashlib.md5()
msg.update(struct.pack('!I', session_id))
msg.update(secret)
msg.update(struct.pack('!BB', version, seq))
msg.update(curr_pad)
curr_pad = msg.digest()
pad += curr_pad
# Obf/Unobfuscation via XOR operation between plaintext and pad
new_payload = (struct.pack('B', ord(pad[i]) ^ ord(pay[i])) for i in xrange(len(pay)))
return "".join(new_payload)
TACACSPRIVLEVEL = {15:'Root',
1:'User',
0:'Minimum'}
##########################
# Authentication Packets #
##########################
TACACSVERSION = {1:'Tacacs',
192:'Tacacs+'}
TACACSTYPE = {1:'Authentication',
2:'Authorization',
3:'Accounting'}
TACACSFLAGS = {1:'Unencrypted',
4:'Single Connection'}
TACACSAUTHENACTION = {1:'Login',
2:'Change Pass',
4:'Send Authentication'}
TACACSAUTHENTYPE = {1:'ASCII',
2:'PAP',
3:'CHAP',
4:'ARAP', #Deprecated
5:'MSCHAP',
6:'MSCHAPv2'}
TACACSAUTHENSERVICE = {0:'None',
1:'Login',
2:'Enable',
3:'PPP',
4:'ARAP',
5:'PT',
6:'RCMD',
7:'X25',
8:'NASI',
9:'FwProxy'}
TACACSREPLYPASS = {1:'PASS',
2:'FAIL',
3:'GETDATA',
4:'GETUSER',
5:'GETPASS',
6:'RESTART',
7:'ERROR',
21:'FOLLOW'}
TACACSREPLYFLAGS = {1:'NOECHO'}
TACACSCONTINUEFLAGS = {1:'ABORT'}
class TacacsAuthenticationStart(Packet):
'''
Tacacs authentication start body from section 4.1
https://tools.ietf.org/html/draft-ietf-opsawg-tacacs-06#section-4.1
'''
name = 'Tacacs Authentication Start Body'
fields_desc = [ByteEnumField('action', 1, TACACSAUTHENACTION),
ByteEnumField('priv_lvl', 1, TACACSPRIVLEVEL),
ByteEnumField('authen_type', 1, TACACSAUTHENTYPE),
ByteEnumField('authen_service', 1, TACACSAUTHENSERVICE),
FieldLenField('user_len', None, fmt='!B', length_of='user'),
FieldLenField('port_len', None, fmt='!B', length_of='port'),
FieldLenField('rem_addr_len', None, fmt='!B', length_of='rem_addr'),
FieldLenField('data_len', None, fmt='!B', length_of='data'),
ConditionalField(StrLenField('user', '', length_from=lambda x: x.user_len),
lambda x: x != ''),
StrLenField('port', '', length_from=lambda x: x.port_len),
StrLenField('rem_addr', '', length_from=lambda x: x.rem_addr_len),
StrLenField('data', '', length_from=lambda x: x.data_len)]
class TacacsAuthenticationReply(Packet):
'''
Tacacs authentication reply body from section 4.2
https://tools.ietf.org/html/draft-ietf-opsawg-tacacs-06#section-4.2
'''
name = 'Tacacs Authentication Reply Body'
fields_desc = [ByteEnumField('status', 1, TACACSREPLYPASS),
ByteEnumField('flags', 0, TACACSREPLYFLAGS),
FieldLenField('server_msg_len', None, fmt='!H', length_of='server_msg'),
FieldLenField('data_len', None, fmt='!H', length_of='data'),
StrLenField('server_msg', '', length_from=lambda x: x.server_msg_len),
StrLenField('data', '', length_from=lambda x: x.data_len)]
class TacacsAuthenticationContinue(Packet):
'''
Tacacs authentication continue body from section 4.3
https://tools.ietf.org/html/draft-ietf-opsawg-tacacs-06#section-4.3
'''
name = 'Tacacs Authentication Continue Body'
fields_desc = [FieldLenField('user_msg_len', None, fmt='!H', length_of='user_msg'),
FieldLenField('data_len', None, fmt='!H', length_of='data'),
ByteEnumField('flags', 1, TACACSCONTINUEFLAGS),
StrLenField('user_msg', '', length_from=lambda x: x.user_msg_len),
StrLenField('data', '', length_from=lambda x: x.data_len)]
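# Hypothetical construction sketch (invented field values): a request is
# built by stacking a body over a header; Scapy fills the FieldLenField
# length fields (user_len etc.) automatically at build time, and post_build
# obfuscates the body when flags == 0, e.g.:
#   pkt = TacacsHeader(session_id=1) / TacacsAuthenticationStart(user='bob')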
#########################
# Authorization Packets #
#########################
TACACSAUTHORTYPE = {0:'Not Set',
1:'None',
2:'Kerberos 5',
3:'Line',
4:'Enable',
5:'Local',
6:'Tacacs+',
8:'Guest',
16:'Radius',
17:'Kerberos 4',
32:'RCMD'}
TACACSAUTHORSTATUS = {1:'Pass Add',
2:'Pass repl',
16:'Fail',
17:'Error',
33:'Follow'}
class TacacsAuthorizationRequest(Packet):
'''
Tacacs authorization request body from section 5.1
https://tools.ietf.org/html/draft-ietf-opsawg-tacacs-06#section-5.1
'''
name = 'Tacacs Authorization Request Body'
fields_desc = [ByteEnumField('authen_method', 0, TACACSAUTHORTYPE),
ByteEnumField('priv_lvl', 1, TACACSPRIVLEVEL),
ByteEnumField('authen_type', 1, TACACSAUTHENTYPE),
ByteEnumField('authen_service', 1, TACACSAUTHENSERVICE),
FieldLenField('user_len', None, fmt='!B', length_of='user'),
FieldLenField('port_len', None, fmt='!B', length_of='port'),
FieldLenField('rem_addr_len', None, fmt='!B', length_of='rem_addr'),
FieldLenField('arg_cnt', None, fmt='!B', count_of='arg_len_list'),
FieldListField('arg_len_list', [], ByteField('', 0),
length_from=lambda pkt: pkt.arg_cnt),
StrLenField('user', '', length_from=lambda x: x.user_len),
StrLenField('port', '', length_from=lambda x: x.port_len),
StrLenField('rem_addr', '', length_from=lambda x: x.rem_addr_len)]
def guess_payload_class(self, pay):
if self.arg_cnt > 0:
return TacacsPacketArguments
return conf.padding_layer
class TacacsAuthorizationReply(Packet):
'''
Tacacs authorization reply body from section 5.2
https://tools.ietf.org/html/draft-ietf-opsawg-tacacs-06#section-5.2
'''
name = 'Tacacs Authorization Reply Body'
fields_desc = [ByteEnumField('status', 0, TACACSAUTHORSTATUS),
FieldLenField('arg_cnt', None, fmt='!B', count_of='arg_len_list'),
FieldLenField('server_msg_len', None, fmt='!H', length_of='server_msg'),
FieldLenField('data_len', None, fmt='!H', length_of='data'),
FieldListField('arg_len_list', [], ByteField('', 0),
length_from=lambda pkt: pkt.arg_cnt),
StrLenField('server_msg', '', length_from=lambda x: x.server_msg_len),
StrLenField('data', '', length_from=lambda x: x.data_len)]
def guess_payload_class(self, pay):
if self.arg_cnt > 0:
return TacacsPacketArguments
return conf.padding_layer
######################
# Accounting Packets #
######################
TACACSACNTFLAGS = {2:'Start',
4:'Stop',
8:'Watchdog'}
TACACSACNTSTATUS = {1:'Success',
2:'Error',
33:'Follow'}
class TacacsAccountingRequest(Packet):
'''
Tacacs accounting request body from section 6.1
https://tools.ietf.org/html/draft-ietf-opsawg-tacacs-06#section-6.1
'''
name = 'Tacacs Accounting Request Body'
fields_desc = [ByteEnumField('flags', 0, TACACSACNTFLAGS),
ByteEnumField('authen_method', 0, TACACSAUTHORTYPE),
ByteEnumField('priv_lvl', 1, TACACSPRIVLEVEL),
ByteEnumField('authen_type', 1, TACACSAUTHENTYPE),
ByteEnumField('authen_service', 1, TACACSAUTHENSERVICE),
FieldLenField('user_len', None, fmt='!B', length_of='user'),
FieldLenField('port_len', None, fmt='!B', length_of='port'),
FieldLenField('rem_addr_len', None, fmt='!B', length_of='rem_addr'),
FieldLenField('arg_cnt', None, fmt='!B', count_of='arg_len_list'),
FieldListField('arg_len_list', [], ByteField('', 0),
length_from=lambda pkt: pkt.arg_cnt),
StrLenField('user', '', length_from=lambda x: x.user_len),
StrLenField('port', '', length_from=lambda x: x.port_len),
StrLenField('rem_addr', '', length_from=lambda x: x.rem_addr_len)]
def guess_payload_class(self, pay):
if self.arg_cnt > 0:
return TacacsPacketArguments
return conf.padding_layer
class TacacsAccountingReply(Packet):
'''
Tacacs accounting reply body from section 6.2
https://tools.ietf.org/html/draft-ietf-opsawg-tacacs-06#section-6.2
'''
name = 'Tacacs Accounting Reply Body'
fields_desc = [FieldLenField('server_msg_len', None, fmt='!H', length_of='server_msg'),
FieldLenField('data_len', None, fmt='!H', length_of='data'),
ByteEnumField('status', None, TACACSACNTSTATUS),
StrLenField('server_msg', '', length_from=lambda x: x.server_msg_len),
StrLenField('data', '', length_from=lambda x: x.data_len)]
class TacacsPacketArguments(Packet):
'''
Class defined to handle the arguments listed at the end of tacacs+
Authorization and Accounting packets.
'''
__slots__ = ['_len']
name = 'Arguments in Tacacs+ packet'
fields_desc = [StrLenField('data', '', length_from=lambda pkt: pkt._len)]
def pre_dissect(self, s):
cur = self.underlayer
i = 0
        # Search this element's position in the layer stack to look up its length
while isinstance(cur, TacacsPacketArguments):
cur = cur.underlayer
i += 1
self._len = cur.arg_len_list[i]
return s
def guess_payload_class(self, pay):
cur = self.underlayer
i = 0
        # Guess whether another Argument packet follows. Nothing else is encapsulated via tacacs+
while isinstance(cur, TacacsPacketArguments):
cur = cur.underlayer
i += 1
if i+1 < cur.arg_cnt:
return TacacsPacketArguments
return conf.padding_layer
class TacacsClientPacket(Packet):
'''
    Base class for TACACS+ client packets, responsible for deobfuscating their payloads.
Obfuscation methodology from section 3.7
https://tools.ietf.org/html/draft-ietf-opsawg-tacacs-06#section-3.7
'''
def post_dissect(self, pay):
if self.flags == 0:
pay = obfuscate(pay, SECRET, self.session_id, self.version, self.seq)
return pay
class TacacsHeader(TacacsClientPacket):
'''
Tacacs Header packet from section 3.8
https://tools.ietf.org/html/draft-ietf-opsawg-tacacs-06#section-3.8
'''
name = 'Tacacs Header'
fields_desc = [ByteEnumField('version', 192, TACACSVERSION),
ByteEnumField('type', 1, TACACSTYPE),
ByteField('seq', 1),
ByteEnumField('flags', 0, TACACSFLAGS),
IntField('session_id', 0),
IntField('length', None)]
def guess_payload_class(self, payload):
# Guessing packet type from type and seq values
# Authentication packet - type 1
if self.type == 1:
if self.seq % 2 == 0:
return TacacsAuthenticationReply
if sum(struct.unpack('bbbb', payload[4:8])) == len(payload[8:]):
return TacacsAuthenticationStart
elif sum(struct.unpack('!hh', payload[:4])) == len(payload[5:]):
return TacacsAuthenticationContinue
# Authorization packet - type 2
if self.type == 2:
if self.seq % 2 == 0:
return TacacsAuthorizationReply
return TacacsAuthorizationRequest
# Accounting packet - type 3
if self.type == 3:
if self.seq % 2 == 0:
return TacacsAccountingReply
return TacacsAccountingRequest
return conf.raw_layer
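    # Heuristic note (editorial, not from the draft): for authentication
    # bodies, a Start packet carries four one-byte length fields at
    # payload[4:8] whose sum equals the variable-length tail, while a
    # Continue packet carries two two-byte lengths at payload[:4] followed by
    # a flags byte; matching these sums is how the two are told apart above.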
def post_build(self, p, pay):
# Setting length of packet to obfuscate if not filled by user
if self.length is None and pay:
p = p[:-4] + struct.pack('!I', len(pay))
if self.flags == 0:
pay = obfuscate(pay, SECRET, self.session_id, self.version, self.seq)
return p + pay
return p
def hashret(self):
return struct.pack('I', self.session_id)
def answers(self, other):
return (isinstance(other, TacacsHeader) and
self.seq == other.seq + 1 and
self.type == other.type and
self.session_id == other.session_id)
bind_layers(TCP, TacacsHeader, dport=49)
bind_layers(TCP, TacacsHeader, sport=49)
bind_layers(TacacsHeader, TacacsAuthenticationStart, type=1, dport=49)
bind_layers(TacacsHeader, TacacsAuthenticationReply, type=1, sport=49)
if __name__ == '__main__':
from scapy.main import interact
interact(mydict=globals(), mybanner='tacacs+')
| mit | -7,428,636,829,584,146,000 | 32.520833 | 94 | 0.551965 | false |
saimn/glue | glue/viewers/common/qt/mpl_toolbar.py | 2 | 5119 | from __future__ import absolute_import, division, print_function
from qtpy import QtCore
from qtpy import PYQT5
from glue.icons.qt import get_icon
from glue.utils import nonpartial
from glue.viewers.common.qt.tool import CheckableTool, Tool
from glue.viewers.common.qt.mouse_mode import MouseMode
from glue.viewers.common.qt.toolbar import BasicToolbar
if PYQT5:
from matplotlib.backends.backend_qt5 import NavigationToolbar2QT
else:
from matplotlib.backends.backend_qt4 import NavigationToolbar2QT
__all__ = ['HomeTool', 'SaveTool', 'BackTool', 'ForwardTool', 'PanTool',
'ZoomTool', 'MatplotlibViewerToolbar']
class HomeTool(Tool):
def __init__(self, viewer, toolbar=None):
super(HomeTool, self).__init__(viewer=viewer)
self.tool_id = 'mpl:home'
self.icon = get_icon('glue_home')
self.action_text = 'Home'
self.tool_tip = 'Reset original zoom'
self.shortcut = 'H'
self.checkable = False
self.toolbar = toolbar
def activate(self):
self.toolbar.home()
class SaveTool(Tool):
def __init__(self, viewer, toolbar=None):
super(SaveTool, self).__init__(viewer=viewer)
self.tool_id = 'mpl:save'
self.icon = get_icon('glue_filesave')
self.action_text = 'Save'
self.tool_tip = 'Save the figure'
self.shortcut = 'Ctrl+Shift+S'
self.toolbar = toolbar
def activate(self):
self.toolbar.save_figure()
class BackTool(Tool):
def __init__(self, viewer, toolbar=None):
super(BackTool, self).__init__(viewer=viewer)
self.tool_id = 'mpl:back'
self.icon = get_icon('glue_back')
self.action_text = 'Back'
self.tool_tip = 'Back to previous view'
self.toolbar = toolbar
def activate(self):
self.toolbar.back()
class ForwardTool(Tool):
def __init__(self, viewer, toolbar=None):
super(ForwardTool, self).__init__(viewer=viewer)
self.tool_id = 'mpl:forward'
self.icon = get_icon('glue_forward')
self.action_text = 'Forward'
self.tool_tip = 'Forward to next view'
self.toolbar = toolbar
def activate(self):
self.toolbar.forward()
class PanTool(CheckableTool):
def __init__(self, viewer, toolbar=None):
super(PanTool, self).__init__(viewer=viewer)
self.tool_id = 'mpl:pan'
self.icon = get_icon('glue_move')
self.action_text = 'Pan'
self.tool_tip = 'Pan axes with left mouse, zoom with right'
self.shortcut = 'M'
self.toolbar = toolbar
def activate(self):
self.toolbar.pan()
def deactivate(self):
self.toolbar.pan()
class ZoomTool(CheckableTool):
def __init__(self, viewer, toolbar=None):
super(ZoomTool, self).__init__(viewer=viewer)
self.tool_id = 'mpl:zoom'
self.icon = get_icon('glue_zoom_to_rect')
self.action_text = 'Zoom'
self.tool_tip = 'Zoom to rectangle'
self.shortcut = 'Z'
self.toolbar = toolbar
def activate(self):
self.toolbar.zoom()
def deactivate(self):
self.toolbar.zoom()
class MatplotlibViewerToolbar(BasicToolbar):
pan_begin = QtCore.Signal()
pan_end = QtCore.Signal()
def __init__(self, parent):
self.canvas = parent.central_widget.canvas
# Set up virtual Matplotlib navigation toolbar (don't show it)
self._mpl_nav = NavigationToolbar2QT(self.canvas, parent)
self._mpl_nav.hide()
BasicToolbar.__init__(self, parent)
def setup_default_modes(self):
# Set up default Matplotlib Tools - this gets called by the __init__
# call to the parent class above.
home_mode = HomeTool(self.parent(), toolbar=self._mpl_nav)
self.add_tool(home_mode)
save_mode = SaveTool(self.parent(), toolbar=self._mpl_nav)
self.add_tool(save_mode)
back_mode = BackTool(self.parent(), toolbar=self._mpl_nav)
self.add_tool(back_mode)
forward_mode = ForwardTool(self.parent(), toolbar=self._mpl_nav)
self.add_tool(forward_mode)
pan_mode = PanTool(self.parent(), toolbar=self._mpl_nav)
self.add_tool(pan_mode)
zoom_mode = ZoomTool(self.parent(), toolbar=self._mpl_nav)
self.add_tool(zoom_mode)
self._connections = []
def activate_tool(self, mode):
if isinstance(mode, MouseMode):
self._connections.append(self.canvas.mpl_connect('button_press_event', mode.press))
self._connections.append(self.canvas.mpl_connect('motion_notify_event', mode.move))
self._connections.append(self.canvas.mpl_connect('button_release_event', mode.release))
self._connections.append(self.canvas.mpl_connect('key_press_event', mode.key))
super(MatplotlibViewerToolbar, self).activate_tool(mode)
def deactivate_tool(self, mode):
for connection in self._connections:
self.canvas.mpl_disconnect(connection)
self._connections = []
super(MatplotlibViewerToolbar, self).deactivate_tool(mode)
| bsd-3-clause | -5,870,537,988,625,900,000 | 29.470238 | 99 | 0.632155 | false |
khardix/udiskie | udiskie/automount.py | 2 | 1590 | """
Automount utility.
"""
__all__ = ['AutoMounter']
class AutoMounter(object):
"""
Automount utility.
Being connected to the udiskie daemon, this component automatically
mounts newly discovered external devices. Instances are constructed with
a Mounter object, like so:
>>> AutoMounter(Mounter(udisks=Daemon()))
"""
def __init__(self, mounter):
"""
Store mounter as member variable and connect to the underlying udisks.
:param Mounter mounter: mounter object
"""
self._mounter = mounter
mounter.udisks.connect_all(self)
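    # Hypothetical wiring sketch (stub names, not part of udiskie): any
    # object exposing is_handleable()/add() plus a `udisks` attribute with
    # connect_all() can drive this class, e.g.:
    #
    #     class StubUdisks(object):
    #         def connect_all(self, obj): pass
    #
    #     class StubMounter(object):
    #         udisks = StubUdisks()
    #         def is_handleable(self, device): return True
    #         def add(self, device): print('would mount %r' % (device,))
    #
    #     AutoMounter(StubMounter())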
def device_added(self, device):
"""
Mount newly added devices.
:param Device device: newly added device
"""
if self._mounter.is_handleable(device):
self._mounter.add(device)
def media_added(self, device):
"""
Mount newly added media.
:param Device device: device with newly added media
"""
if self._mounter.is_handleable(device):
self._mounter.add(device)
def device_changed(self, old_state, new_state):
"""
Mount newly mountable devices.
:param Device old_state: before change
:param Device new_state: after change
"""
# udisks2 sometimes adds empty devices and later updates them which
# makes is_external become true not at device_added time:
if (self._mounter.is_handleable(new_state)
and not self._mounter.is_handleable(old_state)):
self._mounter.add(new_state)
| mit | -2,716,067,139,691,777,000 | 26.413793 | 78 | 0.613208 | false |
dianchen96/gym | gym/envs/__init__.py | 1 | 19225 | from gym.envs.registration import registry, register, make, spec
# Algorithmic
# ----------------------------------------
register(
id='Copy-v0',
entry_point='gym.envs.algorithmic:CopyEnv',
max_episode_steps=200,
reward_threshold=25.0,
)
register(
id='RepeatCopy-v0',
entry_point='gym.envs.algorithmic:RepeatCopyEnv',
max_episode_steps=200,
reward_threshold=75.0,
)
register(
id='ReversedAddition-v0',
entry_point='gym.envs.algorithmic:ReversedAdditionEnv',
kwargs={'rows' : 2},
max_episode_steps=200,
reward_threshold=25.0,
)
register(
id='ReversedAddition3-v0',
entry_point='gym.envs.algorithmic:ReversedAdditionEnv',
kwargs={'rows' : 3},
max_episode_steps=200,
reward_threshold=25.0,
)
register(
id='DuplicatedInput-v0',
entry_point='gym.envs.algorithmic:DuplicatedInputEnv',
max_episode_steps=200,
reward_threshold=9.0,
)
register(
id='Reverse-v0',
entry_point='gym.envs.algorithmic:ReverseEnv',
max_episode_steps=200,
reward_threshold=25.0,
)
# Classic
# ----------------------------------------
register(
id='CartPole-v0',
entry_point='gym.envs.classic_control:CartPoleEnv',
max_episode_steps=200,
reward_threshold=195.0,
)
register(
id='CartPole-v1',
entry_point='gym.envs.classic_control:CartPoleEnv',
max_episode_steps=500,
reward_threshold=475.0,
)
register(
id='MountainCar-v0',
entry_point='gym.envs.classic_control:MountainCarEnv',
max_episode_steps=200,
reward_threshold=-110.0,
)
register(
id='MountainCarContinuous-v0',
entry_point='gym.envs.classic_control:Continuous_MountainCarEnv',
max_episode_steps=999,
reward_threshold=90.0,
)
register(
id='Pendulum-v0',
entry_point='gym.envs.classic_control:PendulumEnv',
max_episode_steps=200,
)
register(
id='Acrobot-v1',
entry_point='gym.envs.classic_control:AcrobotEnv',
max_episode_steps=500,
)
# Box2d
# ----------------------------------------
register(
id='LunarLander-v2',
entry_point='gym.envs.box2d:LunarLander',
max_episode_steps=1000,
reward_threshold=200,
)
register(
id='LunarLanderContinuous-v2',
entry_point='gym.envs.box2d:LunarLanderContinuous',
max_episode_steps=1000,
reward_threshold=200,
)
register(
id='BipedalWalker-v2',
entry_point='gym.envs.box2d:BipedalWalker',
max_episode_steps=1600,
reward_threshold=300,
)
register(
id='BipedalWalkerHardcore-v2',
entry_point='gym.envs.box2d:BipedalWalkerHardcore',
max_episode_steps=2000,
reward_threshold=300,
)
register(
id='CarRacing-v0',
entry_point='gym.envs.box2d:CarRacing',
max_episode_steps=1000,
reward_threshold=900,
)
# Toy Text
# ----------------------------------------
register(
id='Blackjack-v0',
entry_point='gym.envs.toy_text:BlackjackEnv',
)
register(
id='FrozenLake-v0',
entry_point='gym.envs.toy_text:FrozenLakeEnv',
kwargs={'map_name' : '4x4'},
max_episode_steps=100,
reward_threshold=0.78, # optimum = .8196
)
register(
id='FrozenLake8x8-v0',
entry_point='gym.envs.toy_text:FrozenLakeEnv',
kwargs={'map_name' : '8x8'},
max_episode_steps=200,
reward_threshold=0.99, # optimum = 1
)
register(
id='NChain-v0',
entry_point='gym.envs.toy_text:NChainEnv',
max_episode_steps=1000,
)
register(
id='Roulette-v0',
entry_point='gym.envs.toy_text:RouletteEnv',
max_episode_steps=100,
)
register(
id='Taxi-v2',
entry_point='gym.envs.toy_text.taxi:TaxiEnv',
reward_threshold=8, # optimum = 8.46
max_episode_steps=200,
)
register(
id='GuessingGame-v0',
entry_point='gym.envs.toy_text.guessing_game:GuessingGame',
max_episode_steps=200,
)
register(
id='HotterColder-v0',
entry_point='gym.envs.toy_text.hotter_colder:HotterColder',
max_episode_steps=200,
)
# Mujoco
# ----------------------------------------
# 2D
register(
id='Reacher-v1',
entry_point='gym.envs.mujoco:ReacherEnv',
max_episode_steps=50,
reward_threshold=-3.75,
)
register(
id='InvertedPendulum-v1',
entry_point='gym.envs.mujoco:InvertedPendulumEnv',
max_episode_steps=1000,
reward_threshold=950.0,
)
register(
id='InvertedDoublePendulum-v1',
entry_point='gym.envs.mujoco:InvertedDoublePendulumEnv',
max_episode_steps=1000,
reward_threshold=9100.0,
)
register(
id='HalfCheetah-v1',
entry_point='gym.envs.mujoco:HalfCheetahEnv',
max_episode_steps=1000,
reward_threshold=4800.0,
)
register(
id='Hopper-v1',
entry_point='gym.envs.mujoco:HopperEnv',
max_episode_steps=1000,
reward_threshold=3800.0,
)
register(
id='Swimmer-v1',
entry_point='gym.envs.mujoco:SwimmerEnv',
max_episode_steps=1000,
reward_threshold=360.0,
)
register(
id='Walker2d-v1',
max_episode_steps=1000,
entry_point='gym.envs.mujoco:Walker2dEnv',
)
register(
id='Ant-v1',
entry_point='gym.envs.mujoco:AntEnv',
max_episode_steps=1000,
reward_threshold=6000.0,
)
register(
id='Humanoid-v1',
entry_point='gym.envs.mujoco:HumanoidEnv',
max_episode_steps=1000,
)
register(
id='HumanoidStandup-v1',
entry_point='gym.envs.mujoco:HumanoidStandupEnv',
max_episode_steps=1000,
)
# Custom Mujoco
# ----------------------------------------
## V0: reach reward 0.4, grey
register(
id="Box3dReachPixel-v0",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGrey",
max_episode_steps=200,
)
## V1: reach reward 0.1, grey
register(
id="Box3dReachPixel-v1",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGreyHarder",
max_episode_steps=200,
)
## V2: no reward, 6 boxes, grey
register(
id="Box3dReachPixel-v2",
entry_point="gym.envs.mujoco:Box3dMulMulObjConAvoidPixelGreyEnv",
max_episode_steps=1000,
)
## V3: reach rew 0.1 with 4 obs
register(
id="Box3dReachPixel-v3",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGreyHarderMulAct",
max_episode_steps=100,
)
## V4: Two cam, 1 box, 0.1 reward
register(
id="Box3dReachPixel-v4",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGreyHarderTwoCam",
max_episode_steps=200,
)
## V5: Two cam, 1 box, 0.1 reward, 4 step
register(
id="Box3dReachPixel-v5",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGreyHarderTwoCamMulAct",
max_episode_steps=200,
)
## V6: Two cam, 1 box, 0.1 reward, 2 step
register(
id="Box3dReachPixel-v6",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGreyHarderTwoCamMulActLess",
max_episode_steps=200,
)
## V7: Two cam, 6 box, no reward, 2 step
register(
id="Box3dReachPixel-v7",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGreyMulMulTwoCamMulActLess",
max_episode_steps=1000,
)
## V8: Two cam, 6 box, no reward, 4 step
register(
id="Box3dReachPixel-v8",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGreyMulMulTwoCamMulAct",
max_episode_steps=1000,
)
## V9: Two cam, 6 box, contact reward, 2 step
register(
id="Box3dReachPixel-v9",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGreyMulMulContactTwoCamMulActLess",
max_episode_steps=200,
)
## V10: Two cam, 1 box, reach reward, 4 step but 2 obs
register(
id="Box3dReachPixel-v10",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGreyHarderTwoCamMulActLessRepeatTwo",
max_episode_steps=200,
)
## V11: Two cam, 6 box, contact reward, 4 step
register(
id="Box3dReachPixel-v11",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGreyMulMulContactTwoCamMulAct",
max_episode_steps=200,
)
# V17: Two cam, 1 box, contact reward, 2 step
register(
id="Box3dReachPixel-v17",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGreyMulMulContactTwoCamAct",
max_episode_steps=200,
)
## V12: Two cam, 6 box, contact reward, 4 step, env_info output joint pos (key: joint_pos)
register(
id="Box3dReachPixel-v12",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGreyMulMulContactTwoCamMulActFusion",
max_episode_steps=200,
)
## V13: Two cam, 6 box, contact reward, 4 step, no mass for objects
register(
id="Box3dReachPixel-v13",
entry_point="gym.envs.mujoco:Box3dFixedReachEnvPixelGreyMulMulContactTwoCamMulActNoMas",
max_episode_steps=200,
)
## V14: Two cam, 1 box, contact reward
register(
id="Box3dReachPixel-v14",
entry_point="gym.envs.mujoco:Box3dFixedReachPixelMulMulObjConAvoidEnvOne",
max_episode_steps=200,
)
# V15: Two cam, 1 box, 0.4 reach reward, 4 step
register(
id="Box3dReachPixel-v15",
entry_point="gym.envs.mujoco:Box3dFixedReachPixelMulMulObjConAvoidEnvOneEasy",
max_episode_steps=200,
)
# V16: Two cam, 1 box, 0.4 reach reward, 2 step
register(
id="Box3dReachPixel-v16",
entry_point="gym.envs.mujoco:Box3dFixedReachPixelMulObjConAvoidEnvOneEasy",
max_episode_steps=200,
)
# ========= UP: PIXEL = ## = DOWN: STATE ======== #
## V18: contact reward, 10 step
register(
id="Box3dReach-v18",
entry_point="gym.envs.mujoco:Box3dFixedReachSixBoxEnvMulContactTwoCam10Step",
max_episode_steps=1000,
)
## V16: contact reward, no box velocities
register(
id="Box3dReach-v16",
entry_point="gym.envs.mujoco:Box3dFixedReachSixBoxEnvMulContactTwoCamNoBoxVel",
max_episode_steps=1000,
)
## V17: contact reward, 4 action repeat
register(
id="Box3dReach-v17",
entry_point="gym.envs.mujoco:Box3dFixedReachSixBoxEnvMulContactTwoCam4Step",
max_episode_steps=1000,
)
## V12: contact reward
register(
id="Box3dReach-v12",
entry_point="gym.envs.mujoco:Box3dFixedReachSixBoxEnvMulContactTwoCam",
max_episode_steps=1000,
)
## V11: contact reward, 1 box
register(
id="Box3dReach-v11",
entry_point="gym.envs.mujoco:Box3dFixedReachMulMulObjConAvoidEnvOne",
max_episode_steps=200,
)
## V10: no reward, 6 boxes with small random init
register(
id="Box3dReach-v10",
entry_point="gym.envs.mujoco:Box3dFixedReachMulMulObjConAvoidEnv",
max_episode_steps=1000,
)
## V9: no reward, 3 boxes with large random init
register(
id="Box3dReach-v9",
entry_point="gym.envs.mujoco:Box3dFixedReachMulObjConAvoidMoreEnv",
max_episode_steps=1000,
)
## V8: no reward, 3 boxes with previous frame velocity as input
register(
id="Box3dReach-v8",
entry_point="gym.envs.mujoco:Box3dFixedReachMulObjPrevVelEnv",
max_episode_steps=1000,
)
## V7: no reward, 3 boxes with contact checking
register(
id="Box3dReach-v7",
entry_point="gym.envs.mujoco:Box3dFixedReachMulObjConAvoidEnv",
max_episode_steps=1000,
)
## V6: no reward, 3 boxes
register(
id="Box3dReach-v6",
entry_point="gym.envs.mujoco:Box3dFixedReachMulObjEnv",
max_episode_steps=1000,
)
## test: 0.1 reach reward, 1 box, 10/4/2 step
register(
id="Box3dReach-v13",
entry_point="gym.envs.mujoco:Box3dFixedReachHarderEnv4Step",
max_episode_steps=200,
)
register(
id="Box3dReach-v14",
entry_point="gym.envs.mujoco:Box3dFixedReachHarderEnv2Step",
max_episode_steps=200,
)
## V4: 0.1 reach reward, 1 box
register(
id="Box3dReach-v4",
entry_point="gym.envs.mujoco:Box3dFixedReachHarderEnv",
max_episode_steps=200,
)
register(
id="Box3dReach-v5",
entry_point="gym.envs.mujoco:Box3dFixedReachHardestEnv",
max_episode_steps=200,
)
register(
id="Box3dReach-v3",
entry_point="gym.envs.mujoco:Box3dContactReachEnv",
max_episode_steps=200,
)
register(
id="Box3dReach-v2",
entry_point="gym.envs.mujoco:Box3dFixedReachEnv",
max_episode_steps=200,
)
register(
id='Box3dReach-v0',
entry_point='gym.envs.mujoco:Box3dReachPosEnv',
max_episode_steps=100,
)
register(
id='Box3dReach-v1',
entry_point='gym.envs.mujoco:Box3dReachEnv',
max_episode_steps=100,
)
register(
id='Box3dGrasp-v0',
entry_point='gym.envs.mujoco:Box3dGraspEnv',
max_episode_steps=1000,
)
register(
id='Box3dNoReward-v0',
entry_point='gym.envs.mujoco:Box3dNoRewardEnv',
max_episode_steps=200,
)
# Atari
# ----------------------------------------
# # print ', '.join(["'{}'".format(name.split('.')[0]) for name in atari_py.list_games()])
for game in ['air_raid', 'alien', 'amidar', 'assault', 'asterix', 'asteroids', 'atlantis',
'bank_heist', 'battle_zone', 'beam_rider', 'berzerk', 'bowling', 'boxing', 'breakout', 'carnival',
'centipede', 'chopper_command', 'crazy_climber', 'demon_attack', 'double_dunk',
'elevator_action', 'enduro', 'fishing_derby', 'freeway', 'frostbite', 'gopher', 'gravitar',
'ice_hockey', 'jamesbond', 'journey_escape', 'kangaroo', 'krull', 'kung_fu_master',
'montezuma_revenge', 'ms_pacman', 'name_this_game', 'phoenix', 'pitfall', 'pong', 'pooyan',
'private_eye', 'qbert', 'riverraid', 'road_runner', 'robotank', 'seaquest', 'skiing',
'solaris', 'space_invaders', 'star_gunner', 'tennis', 'time_pilot', 'tutankham', 'up_n_down',
'venture', 'video_pinball', 'wizard_of_wor', 'yars_revenge', 'zaxxon']:
for obs_type in ['image', 'ram']:
# space_invaders should yield SpaceInvaders-v0 and SpaceInvaders-ram-v0
name = ''.join([g.capitalize() for g in game.split('_')])
if obs_type == 'ram':
name = '{}-ram'.format(name)
nondeterministic = False
if game == 'elevator_action' and obs_type == 'ram':
# ElevatorAction-ram-v0 seems to yield slightly
# non-deterministic observations about 10% of the time. We
# should track this down eventually, but for now we just
# mark it as nondeterministic.
nondeterministic = True
register(
id='{}-v0'.format(name),
entry_point='gym.envs.atari:AtariEnv',
kwargs={'game': game, 'obs_type': obs_type, 'repeat_action_probability': 0.25},
max_episode_steps=10000,
nondeterministic=nondeterministic,
)
register(
id='{}-v3'.format(name),
entry_point='gym.envs.atari:AtariEnv',
kwargs={'game': game, 'obs_type': obs_type},
max_episode_steps=100000,
nondeterministic=nondeterministic,
)
# Standard Deterministic (as in the original DeepMind paper)
if game == 'space_invaders':
frameskip = 3
else:
frameskip = 4
# Use a deterministic frame skip.
register(
id='{}Deterministic-v0'.format(name),
entry_point='gym.envs.atari:AtariEnv',
kwargs={'game': game, 'obs_type': obs_type, 'frameskip': frameskip, 'repeat_action_probability': 0.25},
max_episode_steps=100000,
nondeterministic=nondeterministic,
)
register(
id='{}Deterministic-v3'.format(name),
entry_point='gym.envs.atari:AtariEnv',
kwargs={'game': game, 'obs_type': obs_type, 'frameskip': frameskip},
max_episode_steps=100000,
nondeterministic=nondeterministic,
)
register(
id='{}NoFrameskip-v0'.format(name),
entry_point='gym.envs.atari:AtariEnv',
kwargs={'game': game, 'obs_type': obs_type, 'frameskip': 1, 'repeat_action_probability': 0.25}, # A frameskip of 1 means we get every frame
max_episode_steps=frameskip * 100000,
nondeterministic=nondeterministic,
)
# No frameskip. (Atari has no entropy source, so these are
# deterministic environments.)
register(
id='{}NoFrameskip-v3'.format(name),
entry_point='gym.envs.atari:AtariEnv',
kwargs={'game': game, 'obs_type': obs_type, 'frameskip': 1}, # A frameskip of 1 means we get every frame
max_episode_steps=frameskip * 100000,
nondeterministic=nondeterministic,
)
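# Editorial illustration (not executed): the loop above registers ids such as
# 'SpaceInvaders-v0', 'SpaceInvaders-ram-v0',
# 'SpaceInvadersDeterministic-v0' (fixed frameskip of 3 for this game) and
# 'SpaceInvadersNoFrameskip-v3' (frameskip of 1, i.e. every frame), each of
# which can later be constructed via make(id).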
# Board games
# ----------------------------------------
register(
id='Go9x9-v0',
entry_point='gym.envs.board_game:GoEnv',
kwargs={
'player_color': 'black',
'opponent': 'pachi:uct:_2400',
'observation_type': 'image3c',
'illegal_move_mode': 'lose',
'board_size': 9,
},
    # The pachi player seems not to be deterministic given a fixed seed.
# (Reproduce by running 'import gym; h = gym.make('Go9x9-v0'); h.seed(1); h.reset(); h.step(15); h.step(16); h.step(17)' a few times.)
#
# This is probably due to a computation time limit.
nondeterministic=True,
)
register(
id='Go19x19-v0',
entry_point='gym.envs.board_game:GoEnv',
kwargs={
'player_color': 'black',
'opponent': 'pachi:uct:_2400',
'observation_type': 'image3c',
'illegal_move_mode': 'lose',
'board_size': 19,
},
nondeterministic=True,
)
register(
id='Hex9x9-v0',
entry_point='gym.envs.board_game:HexEnv',
kwargs={
'player_color': 'black',
'opponent': 'random',
'observation_type': 'numpy3c',
'illegal_move_mode': 'lose',
'board_size': 9,
},
)
# Debugging
# ----------------------------------------
register(
id='OneRoundDeterministicReward-v0',
entry_point='gym.envs.debugging:OneRoundDeterministicRewardEnv',
local_only=True
)
register(
id='TwoRoundDeterministicReward-v0',
entry_point='gym.envs.debugging:TwoRoundDeterministicRewardEnv',
local_only=True
)
register(
id='OneRoundNondeterministicReward-v0',
entry_point='gym.envs.debugging:OneRoundNondeterministicRewardEnv',
local_only=True
)
register(
id='TwoRoundNondeterministicReward-v0',
entry_point='gym.envs.debugging:TwoRoundNondeterministicRewardEnv',
local_only=True,
)
# Parameter tuning
# ----------------------------------------
register(
id='ConvergenceControl-v0',
entry_point='gym.envs.parameter_tuning:ConvergenceControl',
)
register(
id='CNNClassifierTraining-v0',
entry_point='gym.envs.parameter_tuning:CNNClassifierTraining',
)
# Safety
# ----------------------------------------
# interpretability envs
register(
id='PredictActionsCartpole-v0',
entry_point='gym.envs.safety:PredictActionsCartpoleEnv',
max_episode_steps=200,
)
register(
id='PredictObsCartpole-v0',
entry_point='gym.envs.safety:PredictObsCartpoleEnv',
max_episode_steps=200,
)
# semi_supervised envs
# probably the easiest:
register(
id='SemisuperPendulumNoise-v0',
entry_point='gym.envs.safety:SemisuperPendulumNoiseEnv',
max_episode_steps=200,
)
# somewhat harder because of higher variance:
register(
id='SemisuperPendulumRandom-v0',
entry_point='gym.envs.safety:SemisuperPendulumRandomEnv',
max_episode_steps=200,
)
# probably the hardest because you only get a constant number of rewards in total:
register(
id='SemisuperPendulumDecay-v0',
entry_point='gym.envs.safety:SemisuperPendulumDecayEnv',
max_episode_steps=200,
)
# off_switch envs
register(
id='OffSwitchCartpole-v0',
entry_point='gym.envs.safety:OffSwitchCartpoleEnv',
max_episode_steps=200,
)
register(
id='OffSwitchCartpoleProb-v0',
entry_point='gym.envs.safety:OffSwitchCartpoleProbEnv',
max_episode_steps=200,
)
| mit | 3,449,843,786,167,522,300 | 23.870634 | 151 | 0.654616 | false |
robotcator/gensim | gensim/test/test_ldavowpalwabbit_wrapper.py | 4 | 8833 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <[email protected]>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Tests for Vowpal Wabbit LDA wrapper.
Will not be run unless the environment variable 'VOWPAL_WABBIT_PATH' is set
and points to the `vw` executable.
"""
import logging
import unittest
import os
import os.path
import tempfile
from collections import defaultdict
import six
from gensim.corpora import Dictionary
import gensim.models.wrappers.ldavowpalwabbit as ldavowpalwabbit
from gensim.models.wrappers.ldavowpalwabbit import LdaVowpalWabbit
module_path = os.path.dirname(__file__) # needed because sample data files are located in the same folder
datapath = lambda fname: os.path.join(module_path, 'test_data', fname)
# set up vars used in testing ("Deerwester" from the web tutorial)
TOPIC_WORDS = [
'cat lion leopard mouse jaguar lynx cheetah tiger kitten puppy'.split(),
'engine car wheel brakes tyre motor suspension cylinder exhaust clutch'.split(),
'alice bob robert tim sue rachel dave harry alex jim'.split(),
'c cplusplus go python haskell scala java ruby csharp erlang'.split(),
'eggs ham mushrooms cereal coffee beans tea juice sausages bacon'.split()
]
def get_corpus():
text_path = datapath('ldavowpalwabbit.txt')
dict_path = datapath('ldavowpalwabbit.dict.txt')
dictionary = Dictionary.load_from_text(dict_path)
with open(text_path) as fhandle:
corpus = [dictionary.doc2bow(l.strip().split()) for l in fhandle]
return corpus, dictionary
class TestLdaVowpalWabbit(unittest.TestCase):
def setUp(self):
vw_path = os.environ.get('VOWPAL_WABBIT_PATH', None)
if not vw_path:
msg = "Environment variable 'VOWPAL_WABBIT_PATH' not specified, skipping tests"
try:
raise unittest.SkipTest(msg)
except AttributeError:
# couldn't find a way of skipping tests in python 2.6
self.vw_path = None
corpus, dictionary = get_corpus()
self.vw_path = vw_path
self.corpus = corpus
self.dictionary = dictionary
def test_save_load(self):
"""Test loading/saving LdaVowpalWabbit model."""
if not self.vw_path: # for python 2.6
return
lda = LdaVowpalWabbit(self.vw_path,
corpus=self.corpus,
passes=10,
chunksize=256,
id2word=self.dictionary,
cleanup_files=True,
alpha=0.1,
eta=0.1,
num_topics=len(TOPIC_WORDS),
random_seed=1)
with tempfile.NamedTemporaryFile() as fhandle:
lda.save(fhandle.name)
lda2 = LdaVowpalWabbit.load(fhandle.name)
# ensure public fields are saved/loaded correctly
saved_fields = [lda.alpha, lda.chunksize, lda.cleanup_files,
lda.decay, lda.eta, lda.gamma_threshold,
lda.id2word, lda.num_terms, lda.num_topics,
lda.passes, lda.random_seed, lda.vw_path]
loaded_fields = [lda2.alpha, lda2.chunksize, lda2.cleanup_files,
lda2.decay, lda2.eta, lda2.gamma_threshold,
lda2.id2word, lda2.num_terms, lda2.num_topics,
lda2.passes, lda2.random_seed, lda2.vw_path]
self.assertEqual(saved_fields, loaded_fields)
# ensure topic matrices are saved/loaded correctly
saved_topics = lda.show_topics(num_topics=5, num_words=10)
loaded_topics = lda2.show_topics(num_topics=5, num_words=10)
self.assertEqual(loaded_topics, saved_topics)
def test_model_update(self):
"""Test updating existing LdaVowpalWabbit model."""
if not self.vw_path: # for python 2.6
return
lda = LdaVowpalWabbit(self.vw_path,
corpus=[self.corpus[0]],
passes=10,
chunksize=256,
id2word=self.dictionary,
cleanup_files=True,
alpha=0.1,
eta=0.1,
num_topics=len(TOPIC_WORDS),
random_seed=1)
lda.update(self.corpus[1:])
result = lda.log_perplexity(self.corpus)
self.assertTrue(result < -1)
self.assertTrue(result > -5)
def test_perplexity(self):
"""Test LdaVowpalWabbit perplexity is within expected range."""
if not self.vw_path: # for python 2.6
return
lda = LdaVowpalWabbit(self.vw_path,
corpus=self.corpus,
passes=10,
chunksize=256,
id2word=self.dictionary,
cleanup_files=True,
alpha=0.1,
eta=0.1,
num_topics=len(TOPIC_WORDS),
random_seed=1)
# varies, but should be between -1 and -5
result = lda.log_perplexity(self.corpus)
self.assertTrue(result < -1)
self.assertTrue(result > -5)
def test_topic_coherence(self):
"""Test LdaVowpalWabbit topic coherence."""
if not self.vw_path: # for python 2.6
return
corpus, dictionary = get_corpus()
lda = LdaVowpalWabbit(self.vw_path,
corpus=corpus,
passes=10,
chunksize=256,
id2word=dictionary,
cleanup_files=True,
alpha=0.1,
eta=0.1,
num_topics=len(TOPIC_WORDS),
random_seed=1)
lda.print_topics(5, 10)
# map words in known topic to an ID
topic_map = {}
for i, words in enumerate(TOPIC_WORDS):
topic_map[frozenset(words)] = i
n_coherent = 0
for topic_id in range(lda.num_topics):
topic = lda.show_topic(topic_id, topn=20)
# get all words from LDA topic
topic_words = [w[1] for w in topic]
# get list of original topics that each word actually belongs to
ids = []
for word in topic_words:
for src_topic_words, src_topic_id in six.iteritems(topic_map):
if word in src_topic_words:
ids.append(src_topic_id)
# count the number of times each original topic appears
counts = defaultdict(int)
for found_topic_id in ids:
counts[found_topic_id] += 1
# if at least 6/10 words assigned to same topic, consider it coherent
max_count = 0
for count in six.itervalues(counts):
max_count = max(max_count, count)
if max_count >= 6:
n_coherent += 1
# not 100% deterministic, but should always get 3+ coherent topics
self.assertTrue(n_coherent >= 3)
def test_corpus_to_vw(self):
"""Test corpus to Vowpal Wabbit format conversion."""
if not self.vw_path: # for python 2.6
return
corpus = [[(0, 5), (7, 1), (5, 3), (0, 2)],
[(7, 2), (2, 1), (3, 11)],
[(1, 1)],
[],
[(5, 2), (0, 1)]]
expected = """
| 0:5 7:1 5:3 0:2
| 7:2 2:1 3:11
| 1:1
|
| 5:2 0:1
""".strip()
result = '\n'.join(ldavowpalwabbit.corpus_to_vw(corpus))
self.assertEqual(result, expected)
def testvwmodel2ldamodel(self):
"""Test copying of VWModel to LdaModel"""
if not self.vw_path:
return
tm1 = LdaVowpalWabbit(vw_path=self.vw_path, corpus=self.corpus, num_topics=2, id2word=self.dictionary)
tm2 = ldavowpalwabbit.vwmodel2ldamodel(tm1)
for document in self.corpus:
element1_1, element1_2 = tm1[document][0]
element2_1, element2_2 = tm2[document][0]
self.assertAlmostEqual(element1_1, element2_1)
self.assertAlmostEqual(element1_2, element2_2, 5)
logging.debug('%d %d', element1_1, element2_1)
logging.debug('%d %d', element1_2, element2_2)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
| lgpl-2.1 | -6,241,659,100,305,212,000 | 36.909871 | 110 | 0.540926 | false |
jrbourbeau/cr-composition | processing/legacy/anisotropy/ks_test_multipart/save_teststat.py | 1 | 2766 | #!/usr/bin/env python
import os
import argparse
import numpy as np
import healpy as hp
from scipy.stats import ks_2samp
import pandas as pd
import comptools as comp
import comptools.anisotropy.anisotropy as anisotropy
if __name__ == "__main__":
p = argparse.ArgumentParser(
        description='Computes KS test statistics between two samples of projected relative-intensity maps')
p.add_argument('--infiles_sample_0', dest='infiles_sample_0', nargs='*',
help='Input reference map files')
p.add_argument('--infiles_sample_1', dest='infiles_sample_1', nargs='*',
help='Input reference map files')
p.add_argument('--outfile', dest='outfile',
help='Output DataFrame file')
p.add_argument('--overwrite', dest='overwrite',
default=False, action='store_true',
help='Option to overwrite reference map file, '
'if it alreadu exists')
args = p.parse_args()
if args.infiles_sample_0 is None or args.infiles_sample_1 is None:
raise ValueError('Input files must be specified')
elif len(args.infiles_sample_0) != len(args.infiles_sample_1):
raise ValueError('Both samples of input files must be the same length')
if args.outfile is None:
raise ValueError('Outfile must be specified')
else:
comp.check_output_dir(args.outfile)
data_dict = {'ks_statistic': [], 'pval': []}
# Read in all the input maps
kwargs_relint = {'smooth': 20, 'scale': None, 'decmax': -55}
for file_0, file_1 in zip(args.infiles_sample_0, args.infiles_sample_1):
relint_0 = anisotropy.get_map(files=file_0, name='relint', **kwargs_relint)
relint_1 = anisotropy.get_map(files=file_1, name='relint', **kwargs_relint)
relerr_0 = anisotropy.get_map(files=file_0, name='relerr', **kwargs_relint)
relerr_1 = anisotropy.get_map(files=file_1, name='relerr', **kwargs_relint)
ri_0, ri_err_0, ra, ra_err = anisotropy.get_proj_relint(relint_0, relerr_0, n_bins=100)
ri_1, ri_err_1, ra, ra_err = anisotropy.get_proj_relint(relint_1, relerr_1, n_bins=100)
print('Comparing:')
print('ri_0 = {}'.format(ri_0))
print('ri_1 = {}\n'.format(ri_1))
ks_statistic, pval = ks_2samp(ri_0, ri_1)
print('ks_statistic = {}'.format(ks_statistic))
print('pval = {}\n\n'.format(pval))
data_dict['ks_statistic'].append(ks_statistic)
data_dict['pval'].append(pval)
data_dict['chi2'] = np.sum((ri_0-ri_1)**2/(ri_err_0**2+ri_err_1**2))
with pd.HDFStore(args.outfile) as output_store:
dataframe = pd.DataFrame(data_dict)
output_store.put('dataframe', dataframe, format='table', data_columns=True)
| mit | 6,565,205,285,209,626,000 | 40.283582 | 95 | 0.624006 | false |
GaussDing/django | tests/annotations/models.py | 33 | 2630 | # coding: utf-8
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Author(models.Model):
name = models.CharField(max_length=100)
age = models.IntegerField()
friends = models.ManyToManyField('self', blank=True)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Publisher(models.Model):
name = models.CharField(max_length=255)
num_awards = models.IntegerField()
def __str__(self):
return self.name
@python_2_unicode_compatible
class Book(models.Model):
isbn = models.CharField(max_length=9)
name = models.CharField(max_length=255)
pages = models.IntegerField()
rating = models.FloatField()
price = models.DecimalField(decimal_places=2, max_digits=6)
authors = models.ManyToManyField(Author)
contact = models.ForeignKey(Author, related_name='book_contact_set')
publisher = models.ForeignKey(Publisher)
pubdate = models.DateField()
def __str__(self):
return self.name
@python_2_unicode_compatible
class Store(models.Model):
name = models.CharField(max_length=255)
books = models.ManyToManyField(Book)
original_opening = models.DateTimeField()
friday_night_closing = models.TimeField()
def __str__(self):
return self.name
@python_2_unicode_compatible
class DepartmentStore(Store):
chain = models.CharField(max_length=255)
def __str__(self):
return '%s - %s ' % (self.chain, self.name)
@python_2_unicode_compatible
class Employee(models.Model):
# The order of these fields matter, do not change. Certain backends
# rely on field ordering to perform database conversions, and this
# model helps to test that.
first_name = models.CharField(max_length=20)
manager = models.BooleanField(default=False)
last_name = models.CharField(max_length=20)
store = models.ForeignKey(Store)
age = models.IntegerField()
salary = models.DecimalField(max_digits=8, decimal_places=2)
def __str__(self):
return '%s %s' % (self.first_name, self.last_name)
@python_2_unicode_compatible
class Company(models.Model):
name = models.CharField(max_length=200)
motto = models.CharField(max_length=200, null=True, blank=True)
ticker_name = models.CharField(max_length=10, null=True, blank=True)
description = models.CharField(max_length=200, null=True, blank=True)
def __str__(self):
return ('Company(name=%s, motto=%s, ticker_name=%s, description=%s)'
% (self.name, self.motto, self.ticker_name, self.description)
)
| bsd-3-clause | 2,172,695,178,926,418,200 | 29.581395 | 76 | 0.690114 | false |
rebeling/spaCy | tests/parser/test_base_nps.py | 4 | 1273 | from __future__ import unicode_literals
import pytest
@pytest.mark.models
def test_nsubj(EN):
sent = EN(u'A base phrase should be recognized.')
base_nps = list(sent.noun_chunks)
assert len(base_nps) == 1
assert base_nps[0].string == 'A base phrase '
@pytest.mark.models
def test_coord(EN):
sent = EN(u'A base phrase and a good phrase are often the same.')
base_nps = list(sent.noun_chunks)
assert len(base_nps) == 2
assert base_nps[0].string == 'A base phrase '
assert base_nps[1].string == 'a good phrase '
@pytest.mark.models
def test_pp(EN):
sent = EN(u'A phrase with another phrase occurs')
base_nps = list(sent.noun_chunks)
assert len(base_nps) == 2
assert base_nps[0].string == 'A phrase '
assert base_nps[1].string == 'another phrase '
@pytest.mark.models
def test_merge_pp(EN):
sent = EN(u'A phrase with another phrase occurs')
nps = [(np[0].idx, np[-1].idx + len(np[-1]), np.lemma_, np[0].ent_type_) for np in sent.noun_chunks]
for start, end, lemma, ent_type in nps:
sent.merge(start, end, u'NP', lemma, ent_type)
assert sent[0].string == 'A phrase '
assert sent[1].string == 'with '
assert sent[2].string == 'another phrase '
assert sent[3].string == 'occurs'
| mit | -2,201,390,057,216,463,400 | 30.04878 | 104 | 0.64022 | false |
ZenithDK/mopidy | mopidy/models/fields.py | 5 | 5166 | from __future__ import absolute_import, unicode_literals
from mopidy import compat
class Field(object):
"""
Base field for use in
:class:`~mopidy.models.immutable.ValidatedImmutableObject`. These fields
are responsible for type checking and other data sanitation in our models.
For simplicity fields use the Python descriptor protocol to store the
values in the instance dictionary. Also note that fields are mutable if
the object they are attached to allow it.
Default values will be validated with the exception of :class:`None`.
:param default: default value for field
:param type: if set the field value must be of this type
:param choices: if set the field value must be one of these
"""
def __init__(self, default=None, type=None, choices=None):
self._name = None # Set by ValidatedImmutableObjectMeta
self._choices = choices
self._default = default
self._type = type
if self._default is not None:
self.validate(self._default)
def validate(self, value):
"""Validate and possibly modify the field value before assignment"""
if self._type and not isinstance(value, self._type):
raise TypeError('Expected %s to be a %s, not %r' %
(self._name, self._type, value))
if self._choices and value not in self._choices:
raise TypeError('Expected %s to be a one of %s, not %r' %
(self._name, self._choices, value))
return value
def __get__(self, instance, owner):
if not instance:
return self
return getattr(instance, '_' + self._name, self._default)
def __set__(self, instance, value):
if value is not None:
value = self.validate(value)
if value is None or value == self._default:
self.__delete__(instance)
else:
setattr(instance, '_' + self._name, value)
def __delete__(self, instance):
if hasattr(instance, '_' + self._name):
delattr(instance, '_' + self._name)
class String(Field):
"""
Specialized :class:`Field` which is wired up for bytes and unicode.
:param default: default value for field
"""
def __init__(self, default=None):
# TODO: normalize to unicode?
# TODO: only allow unicode?
# TODO: disallow empty strings?
super(String, self).__init__(type=compat.string_types, default=default)
class Date(String):
"""
:class:`Field` for storing ISO 8601 dates as a string.
Supported formats are ``YYYY-MM-DD``, ``YYYY-MM`` and ``YYYY``, currently
not validated.
:param default: default value for field
"""
pass # TODO: make this check for YYYY-MM-DD, YYYY-MM, YYYY using strptime.
class Identifier(String):
"""
:class:`Field` for storing ASCII values such as GUIDs or other identifiers.
Values will be interned.
:param default: default value for field
"""
def validate(self, value):
return compat.intern(str(super(Identifier, self).validate(value)))
class URI(Identifier):
"""
:class:`Field` for storing URIs
Values will be interned, currently not validated.
:param default: default value for field
"""
pass # TODO: validate URIs?
class Integer(Field):
"""
:class:`Field` for storing integer numbers.
:param default: default value for field
:param min: field value must be larger or equal to this value when set
:param max: field value must be smaller or equal to this value when set
"""
def __init__(self, default=None, min=None, max=None):
self._min = min
self._max = max
super(Integer, self).__init__(
type=compat.integer_types, default=default)
def validate(self, value):
value = super(Integer, self).validate(value)
if self._min is not None and value < self._min:
raise ValueError('Expected %s to be at least %d, not %d' %
(self._name, self._min, value))
if self._max is not None and value > self._max:
raise ValueError('Expected %s to be at most %d, not %d' %
(self._name, self._max, value))
return value
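# Hypothetical usage sketch (not part of mopidy): once a metaclass has filled
# in _name -- done by hand below for illustration -- a Field validates
# assignments through the descriptor protocol.
class _DemoModel(object):
    age = Integer(min=0, max=150)

_DemoModel.age._name = 'age'
_demo = _DemoModel()
_demo.age = 30          # type- and range-checked on assignment
assert _demo.age == 30  # stored as _age in the instance dict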
class Collection(Field):
"""
:class:`Field` for storing collections of a given type.
:param type: all items stored in the collection must be of this type
:param container: the type to store the items in
"""
def __init__(self, type, container=tuple):
super(Collection, self).__init__(type=type, default=container())
def validate(self, value):
if isinstance(value, compat.string_types):
raise TypeError('Expected %s to be a collection of %s, not %r'
% (self._name, self._type.__name__, value))
for v in value:
if not isinstance(v, self._type):
raise TypeError('Expected %s to be a collection of %s, not %r'
% (self._name, self._type.__name__, value))
return self._default.__class__(value) or None
| apache-2.0 | 8,499,307,627,970,788,000 | 31.904459 | 79 | 0.60782 | false |
jookies/jasmin-api | jasmin_api/rest_api/views/filters.py | 1 | 5880 | from collections import OrderedDict
from django.conf import settings
from django.http import JsonResponse
from django.utils.datastructures import MultiValueDictKeyError
from rest_framework.viewsets import ViewSet
from rest_framework.decorators import list_route
from rest_api.tools import set_ikeys, split_cols
from rest_api.exceptions import (JasminSyntaxError, JasminError,
UnknownError, MissingKeyError,
MutipleValuesRequiredKeyError, ObjectNotFoundError)
STANDARD_PROMPT = settings.STANDARD_PROMPT
INTERACTIVE_PROMPT = settings.INTERACTIVE_PROMPT
class FiltersViewSet(ViewSet):
"Viewset for managing Filters"
lookup_field = 'fid'
def _list(self, telnet):
"List Filters as python dict"
telnet.sendline('filter -l')
telnet.expect([r'(.+)\n' + STANDARD_PROMPT])
result = telnet.match.group(0).strip().replace("\r", '').split("\n")
if len(result) < 3:
return {'filters': []}
results = [l.replace(', ', ',').replace('(!)', '')
for l in result[2:-2] if l]
filters = split_cols(results)
return {
'filters':
[
{
'fid': f[0].strip().lstrip('#'),
'type': f[1],
'routes': f[2] + ' ' + f[3],
'description': ' '.join(f[4:])
} for f in filters
]
}
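    # Illustrative return shape (hypothetical values, not from a live jcli
    # session):
    #   {'filters': [{'fid': 'f1', 'type': 'TransparentFilter',
    #                 'routes': 'MO MT', 'description': 'TransparentFilter'}]}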
def list(self, request):
"List Filters. No parameters"
return JsonResponse(self._list(request.telnet))
def get_filter(self, telnet, fid):
"Return data for one filter as Python dict"
filters = self._list(telnet)['filters']
try:
return {'filter':
next((m for m in filters if m['fid'] == fid), None)
}
except StopIteration:
raise ObjectNotFoundError('No Filter with fid: %s' % fid)
def retrieve(self, request, fid):
"Details for one Filter by fid (integer)"
return JsonResponse(self.get_filter(request.telnet, fid))
def create(self, request):
"""Create Filter.
Required parameters: type, fid, parameters
---
# YAML
omit_serializer: true
parameters:
- name: type
description: One of TransparentFilter, ConnectorFilter, UserFilter, GroupFilter, SourceAddrFilter, DestinationAddrFilter, ShortMessageFilter, DateIntervalFilter, TimeIntervalFilter, TagFilter, EvalPyFilter
required: true
type: string
paramType: form
- name: fid
description: Filter id, used to identify filter
required: true
type: string
paramType: form
- name: parameter
description: Parameter
required: false
type: string
paramType: form
"""
telnet = request.telnet
data = request.data
try:
ftype, fid = data['type'], data['fid']
        except KeyError:  # covers MultiValueDictKeyError raised by QueryDicts
raise MissingKeyError(
'Missing parameter: type or fid required')
ftype = ftype.lower()
telnet.sendline('filter -a')
telnet.expect(r'Adding a new Filter(.+)\n' + INTERACTIVE_PROMPT)
ikeys = OrderedDict({'type': ftype, 'fid': fid})
if ftype != 'transparentfilter':
try:
parameter = data['parameter']
except MultiValueDictKeyError:
raise MissingKeyError('%s filter requires parameter' % ftype)
if ftype == 'connectorfilter':
ikeys['cid'] = parameter
elif ftype == 'userfilter':
ikeys['uid'] = parameter
elif ftype == 'groupfilter':
ikeys['gid'] = parameter
elif ftype == 'sourceaddrfilter':
ikeys['source_addr'] = parameter
elif ftype == 'destinationaddrfilter':
ikeys['destination_addr'] = parameter
elif ftype == 'shortmessagefilter':
ikeys['short_message'] = parameter
elif ftype == 'dateintervalfilter':
ikeys['dateInterval'] = parameter
elif ftype == 'timeintervalfilter':
ikeys['timeInterval'] = parameter
elif ftype == 'tagfilter':
ikeys['tag'] = parameter
elif ftype == 'evalpyfilter':
ikeys['pyCode'] = parameter
set_ikeys(telnet, ikeys)
telnet.sendline('persist\n')
telnet.expect(r'.*' + STANDARD_PROMPT)
return JsonResponse({'filter': self.get_filter(telnet, fid)})
def simple_filter_action(self, telnet, action, fid, return_filter=True):
telnet.sendline('filter -%s %s' % (action, fid))
matched_index = telnet.expect([
r'.+Successfully(.+)' + STANDARD_PROMPT,
r'.+Unknown Filter: (.+)' + STANDARD_PROMPT,
r'.+(.*)' + STANDARD_PROMPT,
])
if matched_index == 0:
telnet.sendline('persist\n')
if return_filter:
telnet.expect(r'.*' + STANDARD_PROMPT)
return JsonResponse({'filter': self.get_filter(telnet, fid)})
else:
return JsonResponse({'fid': fid})
elif matched_index == 1:
raise UnknownError(detail='No filter:' + fid)
else:
raise JasminError(telnet.match.group(1))
def destroy(self, request, fid):
"""Delete a filter. One parameter required, the filter identifier (a string)
HTTP codes indicate result as follows
- 200: successful deletion
- 404: nonexistent filter
- 400: other error
"""
return self.simple_filter_action(
request.telnet, 'r', fid, return_filter=False)
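# Example interaction with this viewset (a sketch only: the actual URL
# prefix comes from the project's router registration, so the paths below
# are assumptions):
#
#   POST   /filters/  {"type": "UserFilter", "fid": "f1",
#                      "parameter": "user_23"}    -> create()
#   GET    /filters/                              -> list()
#   GET    /filters/f1/                           -> retrieve()
#   DELETE /filters/f1/                           -> destroy()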
| apache-2.0 | 8,424,079,596,901,886,000 | 36.692308 | 215 | 0.558673 | false |
odoousers2014/LibrERP | c2c_sequence_fy/__init__.py | 2 | 1255 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 Camptocamp (<http://www.camptocamp.at>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_fiscalyear
import ir_sequence_type
import ir_sequence
import account
import account_move
import ir_sequence_installer
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 3,776,277,859,974,741,500 | 43.821429 | 79 | 0.63745 | false |
dingliumath/quant-econ | examples/clt3d.py | 7 | 2222 | r"""
Origin: QE by John Stachurski and Thomas J. Sargent
Filename: clt3d.py
Visual illustration of the central limit theorem. Produces a 3D figure
showing the density of the scaled sample mean \sqrt{n} \bar X_n plotted
against n.
"""
import numpy as np
from scipy.stats import beta, gaussian_kde
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.collections import PolyCollection
import matplotlib.pyplot as plt
beta_dist = beta(2, 2)
def gen_x_draws(k):
"""
Returns a flat array containing k independent draws from the
distribution of X, the underlying random variable. This distribution is
itself a convex combination of three beta distributions.
"""
bdraws = beta_dist.rvs((3, k))
# == Transform rows, so each represents a different distribution == #
bdraws[0, :] -= 0.5
bdraws[1, :] += 0.6
bdraws[2, :] -= 1.1
# == Set X[i] = bdraws[j, i], where j is a random draw from {0, 1, 2} == #
js = np.random.random_integers(0, 2, size=k)
X = bdraws[js, np.arange(k)]
# == Rescale, so that the random variable is zero mean == #
m, sigma = X.mean(), X.std()
return (X - m) / sigma
nmax = 5
reps = 100000
ns = list(range(1, nmax + 1))
# == Form a matrix Z such that each column is reps independent draws of X == #
Z = np.empty((reps, nmax))
for i in range(nmax):
Z[:, i] = gen_x_draws(reps)
# == Take cumulative sum across columns == #
S = Z.cumsum(axis=1)
# == Divide j-th column by sqrt(j) to get the scaled sample mean == #
Y = (1 / np.sqrt(ns)) * S
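# == The CLT identity behind the plot (sketch): the X_i are iid with == #
# == mean 0 and variance 1 after gen_x_draws' rescaling, so == #
# ==     Y[:, j-1] = S_j / sqrt(j) = sqrt(j) * Xbar_j, == #
# == which converges in distribution to N(0, 1) as j grows. == #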
# == Plot == #
fig = plt.figure()
ax = fig.gca(projection='3d')
a, b = -3, 3
gs = 100
xs = np.linspace(a, b, gs)
# == Build verts == #
greys = np.linspace(0.3, 0.7, nmax)
verts = []
for n in ns:
density = gaussian_kde(Y[:, n-1])
ys = density(xs)
verts.append(list(zip(xs, ys)))
poly = PolyCollection(verts, facecolors=[str(g) for g in greys])
poly.set_alpha(0.85)
ax.add_collection3d(poly, zs=ns, zdir='x')
# ax.text(np.mean(rhos), a-1.4, -0.02, r'$\beta$', fontsize=16)
# ax.text(np.max(rhos)+0.016, (a+b)/2, -0.02, r'$\log(y)$', fontsize=16)
ax.set_xlim3d(1, nmax)
ax.set_xticks(ns)
ax.set_xlabel("n")
ax.set_yticks((-3, 0, 3))
ax.set_ylim3d(a, b)
ax.set_zlim3d(0, 0.4)
ax.set_zticks((0.2, 0.4))
plt.show()
| bsd-3-clause | 3,552,866,793,791,686,700 | 26.775 | 78 | 0.638614 | false |
alex-dow/pybuilder | src/integrationtest/python/should_write_manifest_file_tests.py | 7 | 2264 | # -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from integrationtest_support import IntegrationTestSupport
class Test(IntegrationTestSupport):
def test(self):
self.write_build_file("""
from pybuilder.core import use_plugin, init
use_plugin('python.core')
use_plugin('python.distutils')
name = 'integration-test'
default_task = 'publish'
@init
def init (project):
project.include_file('spam', 'eggs')
project.install_file('spam_dir', 'more_spam')
project.install_file('eggs_dir', 'more_eggs')
""")
self.create_directory("src/main/python/spam")
self.write_file("src/main/python/spam/eggs", "")
self.write_file("src/main/python/more_spam", "")
self.write_file("src/main/python/more_eggs", "")
reactor = self.prepare_reactor()
reactor.build()
self.assert_directory_exists(
"target/dist/integration-test-1.0.dev0")
self.assert_directory_exists(
"target/dist/integration-test-1.0.dev0/spam")
self.assert_file_empty(
"target/dist/integration-test-1.0.dev0/spam/eggs")
self.assert_file_empty(
"target/dist/integration-test-1.0.dev0/more_spam")
self.assert_file_empty(
"target/dist/integration-test-1.0.dev0/more_eggs")
manifest_in = "target/dist/integration-test-1.0.dev0/MANIFEST.in"
self.assert_file_exists(manifest_in)
self.assert_file_permissions(0o664, manifest_in)
self.assert_file_content(manifest_in, """include spam/eggs
include more_spam
include more_eggs
""")
if __name__ == "__main__":
unittest.main()
| apache-2.0 | -3,180,325,561,569,549,000 | 30.887324 | 76 | 0.670936 | false |
Edraak/edraak-platform | lms/lib/utils.py | 32 | 1410 | """
Helper methods for the LMS.
"""
def get_parent_unit(xblock):
"""
Finds xblock's parent unit if it exists.
To find an xblock's parent unit, we traverse up the xblock's
family tree until we find an xblock whose parent is a
sequential xblock, which guarantees that the xblock is a unit.
The `get_parent()` call on both the xblock and the parent block
ensure that we don't accidentally return that a unit is its own
parent unit.
Returns:
xblock: Returns the parent unit xblock if it exists.
If no parent unit exists, returns None
"""
while xblock:
parent = xblock.get_parent()
if parent is None:
return None
grandparent = parent.get_parent()
if grandparent is None:
return None
if parent.category == "vertical" and grandparent.category == "sequential":
return parent
xblock = parent
def is_unit(xblock):
"""
Checks whether the xblock is a unit.
    get_parent_unit() returns None if the current xblock either does
not have a parent unit or is itself a unit.
To make sure that get_parent_unit() isn't returning None because
the xblock is an orphan, we check that the xblock has a parent.
Returns:
True if the xblock is itself a unit, False otherwise.
"""
    return xblock.get_parent() is not None and get_parent_unit(xblock) is None
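# Illustrative sketch (comments only; the course tree is hypothetical):
# for a tree  chapter -> sequential -> vertical -> html_block,
#   get_parent_unit(html_block) -> the vertical (its enclosing unit)
#   get_parent_unit(vertical)   -> None, so is_unit(vertical) is True
#   is_unit(html_block)         -> False (its parent unit is the vertical)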
| agpl-3.0 | -5,053,668,647,061,971,000 | 29.652174 | 82 | 0.655319 | false |
martinschaef/coinstuff | historic.py | 1 | 5454 | import pandas as pd
import pandas_datareader as pdr
import datetime
import matplotlib.pyplot as plt
import pylab
"""
This one looks like a more reasonable tutorial using Yahoo data and pandas.
https://ntguardian.wordpress.com/2016/09/19/introduction-stock-market-data-python-1/
"""
from matplotlib.dates import DateFormatter, WeekdayLocator,\
DayLocator, MONDAY
from matplotlib.finance import candlestick_ohlc
import matplotlib.dates as mdates
def pandas_candlestick_ohlc(dat, stick = "day", otherseries = None):
"""
:param dat: pandas DataFrame object with datetime64 index, and float columns "Open", "High", "Low", and "Close", likely created via DataReader from "yahoo"
:param stick: A string or number indicating the period of time covered by a single candlestick. Valid string inputs include "day", "week", "month", and "year", ("day" default), and any numeric input indicates the number of trading days included in a period
:param otherseries: An iterable that will be coerced into a list, containing the columns of dat that hold other series to be plotted as lines
This will show a Japanese candlestick plot for stock data stored in dat, also plotting other series if passed.
"""
mondays = WeekdayLocator(MONDAY) # major ticks on the mondays
alldays = DayLocator() # minor ticks on the days
dayFormatter = DateFormatter('%d') # e.g., 12
# Create a new DataFrame which includes OHLC data for each period specified by stick input
transdat = dat.loc[:,["Open", "High", "Low", "Close"]]
if (type(stick) == str):
if stick == "day":
plotdat = transdat
stick = 1 # Used for plotting
elif stick in ["week", "month", "year"]:
if stick == "week":
transdat["week"] = pd.to_datetime(transdat.index).map(lambda x: x.isocalendar()[1]) # Identify weeks
elif stick == "month":
transdat["month"] = pd.to_datetime(transdat.index).map(lambda x: x.month) # Identify months
transdat["year"] = pd.to_datetime(transdat.index).map(lambda x: x.isocalendar()[0]) # Identify years
grouped = transdat.groupby(list(set(["year",stick]))) # Group by year and other appropriate variable
plotdat = pd.DataFrame({"Open": [], "High": [], "Low": [], "Close": []}) # Create empty data frame containing what will be plotted
for name, group in grouped:
plotdat = plotdat.append(pd.DataFrame({"Open": group.iloc[0,0],
"High": max(group.High),
"Low": min(group.Low),
"Close": group.iloc[-1,3]},
index = [group.index[0]]))
if stick == "week": stick = 5
elif stick == "month": stick = 30
elif stick == "year": stick = 365
elif (type(stick) == int and stick >= 1):
transdat["stick"] = [np.floor(i / stick) for i in range(len(transdat.index))]
grouped = transdat.groupby("stick")
plotdat = pd.DataFrame({"Open": [], "High": [], "Low": [], "Close": []}) # Create empty data frame containing what will be plotted
for name, group in grouped:
plotdat = plotdat.append(pd.DataFrame({"Open": group.iloc[0,0],
"High": max(group.High),
"Low": min(group.Low),
"Close": group.iloc[-1,3]},
index = [group.index[0]]))
else:
raise ValueError('Valid inputs to argument "stick" include the strings "day", "week", "month", "year", or a positive integer')
# Set plot parameters, including the axis object ax used for plotting
fig, ax = plt.subplots()
fig.subplots_adjust(bottom=0.2)
if plotdat.index[-1] - plotdat.index[0] < pd.Timedelta('730 days'):
weekFormatter = DateFormatter('%b %d') # e.g., Jan 12
ax.xaxis.set_major_locator(mondays)
ax.xaxis.set_minor_locator(alldays)
else:
weekFormatter = DateFormatter('%b %d, %Y')
ax.xaxis.set_major_formatter(weekFormatter)
ax.grid(True)
# Create the candelstick chart
candlestick_ohlc(ax, list(zip(list(mdates.date2num(plotdat.index.tolist())), plotdat["Open"].tolist(), plotdat["High"].tolist(),
plotdat["Low"].tolist(), plotdat["Close"].tolist())),
colorup = "black", colordown = "red", width = stick * .4)
# Plot other series (such as moving averages) as lines
    if otherseries is not None:
if type(otherseries) != list:
otherseries = [otherseries]
dat.loc[:,otherseries].plot(ax = ax, lw = 1.3, grid = True)
ax.xaxis_date()
ax.autoscale_view()
plt.setp(plt.gca().get_xticklabels(), rotation=45, horizontalalignment='right')
plt.show()
def show_candlestick_for(yahoo_data, start_date, end_date):
    # Fetch daily OHLC data for the requested ticker (e.g. "BTC-USD") from
    # Yahoo! Finance between start_date and end_date, then plot candlesticks
btc = pdr.get_data_yahoo(yahoo_data, start_date, end_date)
print btc.head()
pandas_candlestick_ohlc(btc)
show_candelstick_for("BTC-USD", datetime.datetime(2017,10,1), datetime.date.today())
| mit | 8,654,233,575,508,818,000 | 48.581818 | 260 | 0.605977 | false |
RussTedrake/director | src/python/ddapp/camerabookmarks.py | 6 | 3149 | from ddapp import applogic
from ddapp import cameracontrol
from PythonQt import QtCore, QtGui
class CameraBookmarks(object):
def __init__(self, view):
self.bookmarks = {}
self.view = view
self.flyer = cameracontrol.Flyer(view)
self.flyer.flyTime = 1.0
def storeCameraBookmark(self, key):
camera = self.view.camera()
focal, position = camera.GetFocalPoint(), camera.GetPosition()
self.bookmarks[key] = (focal, position)
def clear(self):
self.bookmarks = {}
def getBookmark(self, key):
return self.bookmarks.get(key)
    def flyToBookmark(self, key):
        bookmark = self.getBookmark(key)
        if bookmark is None:
            return  # nothing stored under this key yet
        focal, position = bookmark
        self.flyer.zoomTo(focal, position)
class CameraBookmarkWidget(object):
def __init__(self, view):
self.bookmarks = CameraBookmarks(view)
self.widget = QtGui.QScrollArea()
self.widget.setWindowTitle('Camera Bookmarks')
self.storeMapper = QtCore.QSignalMapper()
self.flyMapper = QtCore.QSignalMapper()
self.storeMapper.connect('mapped(QObject*)', self.onStoreCamera)
self.flyMapper.connect('mapped(QObject*)', self.onFlyToCamera)
self.numberOfBookmarks = 8
self.updateLayout()
def updateLayout(self):
self.storeButtons = []
self.flyButtons = []
w = QtGui.QWidget()
l = QtGui.QGridLayout(w)
for i in xrange(self.numberOfBookmarks):
storeButton = QtGui.QPushButton('set')
flyButton = QtGui.QPushButton('fly')
textEdit = QtGui.QLineEdit('camera %d' % i)
storeButton.connect('clicked()', self.storeMapper, 'map()')
flyButton.connect('clicked()', self.flyMapper, 'map()')
self.storeMapper.setMapping(storeButton, storeButton)
self.flyMapper.setMapping(flyButton, flyButton)
self.storeButtons.append(storeButton)
self.flyButtons.append(flyButton)
l.addWidget(storeButton, i, 0)
l.addWidget(flyButton, i, 1)
l.addWidget(textEdit, i, 2)
flyButton.setEnabled(False)
self.flySpeedSpinner = QtGui.QDoubleSpinBox()
self.flySpeedSpinner.setMinimum(0)
self.flySpeedSpinner.setMaximum(60)
self.flySpeedSpinner.setDecimals(1)
self.flySpeedSpinner.setSingleStep(0.5)
self.flySpeedSpinner.setSuffix(' seconds')
self.flySpeedSpinner.setValue(1.0)
l.addWidget(QtGui.QLabel('Fly speed:'), i+1, 0, 2)
l.addWidget(self.flySpeedSpinner, i+1, 2)
self.widget.setWidget(w)
def onStoreCamera(self, button):
index = self.storeButtons.index(button)
self.bookmarks.storeCameraBookmark(index)
self.flyButtons[index].setEnabled(True)
def onFlyToCamera(self, button):
index = self.flyButtons.index(button)
self.bookmarks.flyer.flyTime = self.flySpeedSpinner.value
self.bookmarks.flyToBookmark(index)
def init(view):
global widget, dock
widget = CameraBookmarkWidget(view)
dock = applogic.addWidgetToDock(widget.widget, action=None)
dock.hide()
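# Example wiring (a sketch; assumes a live ddapp view object):
#   from ddapp import camerabookmarks
#   camerabookmarks.init(view)   # builds the dock widget (hidden by default)
#   camerabookmarks.dock.show()  # reveal the bookmarks panel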
| bsd-3-clause | -7,772,769,358,616,111,000 | 32.5 | 72 | 0.646554 | false |
geodynamics/gale | Underworld/config/SConfig/Project.py | 6 | 2508 | import os
import SCons.Script
import SConfig
class Project(SConfig.Node):
def __init__(self, scons_env, scons_opts, required=False):
SConfig.Node.__init__(self, scons_env, scons_opts, required)
self.checks += [self.check_libs, self.print_results]
def setup_options(self):
self.opts.AddOptions(
SCons.Script.BoolOption('with_debug',
'Generate debugging symbols', 1),
SCons.Script.BoolOption('static_libraries',
'Build static libraries', 1),
SCons.Script.BoolOption('shared_libraries',
'Build shared libraries', 1),
('build_dir', 'Temporary build directory', 'build')
)
def check_libs(self):
if not self.env['static_libraries'] and not self.env['shared_libraries']:
self.ctx.Display(" Both static and shared libraries disabled!\n")
return False
return True
def print_results(self):
self.ctx.Display(" Static libraries: %s\n" % str(bool(self.env['static_libraries'])))
self.ctx.Display(" Shared libraries: %s\n" % str(bool(self.env['shared_libraries'])))
self.ctx.Display(" Using build directory: %s\n" % self.env['build_dir'])
self.ctx.Display(" Debugging symbols: %s\n" % str(bool(self.env['with_debug'])))
return True
def setup(self):
SConfig.Node.setup(self)
modified = []
if self.env['shared_libraries']:
for pkg in self.env.package_list:
if isinstance(pkg, SConfig.Package):
pkg.require_shared = True
modified += [pkg]
return modified
def enable(self, scons_env, old_state=None):
SConfig.Node.enable(self, scons_env, old_state)
# Setup debugging flags.
if self.env['with_debug']:
scons_env.MergeFlags('-g')
# Setup the include paths.
inc_dir = self.env.get_build_path('include')
self.backup_variable(scons_env, 'CPPPATH', old_state)
scons_env.PrependUnique(CPPPATH=[inc_dir])
# Setup LIB_DIR.
lib_dir = self.env.get_build_path('lib')
self.backup_variable(scons_env, 'LIBPATH', old_state)
scons_env.PrependUnique(LIBPATH=[lib_dir])
# Setup the RPATH.
self.backup_variable(scons_env, 'RPATH', old_state)
scons_env.PrependUnique(RPATH=[scons_env.Dir(lib_dir).abspath])
| gpl-2.0 | -2,416,433,304,155,444,700 | 38.809524 | 94 | 0.580941 | false |
cmos3511/cmos_linux | python/pj/misc/stp_gen/stp_gen.py | 1 | 2974 | #! /usr/bin/env python3
### Generate an .stp file from the signals text file provided
import argparse
import os
import re
import collections
import datetime as dt
import jinja2
class REOpter(object):
def __init__(self, re_str):
self.re_str = re_str
def match(self, re_rs):
self.re_result = re.match(re_rs, self.re_str)
return bool(self.re_result)
def search(self, re_rs):
self.re_result = re.search(re_rs, self.re_str)
return bool(self.re_result)
def group(self, i):
return self.re_result.group(i)
def gen_args_top():
parser = argparse.ArgumentParser()
h_str = ("input signals text file")
parser.add_argument('-txt', dest='txt_file', required=True, help=h_str)
return parser.parse_args()
args = gen_args_top()
if not os.path.isfile(args.txt_file):
os.sys.exit("signals text file {0} is NA".format(args.txt_file))
group_dic = collections.OrderedDict()
with open(args.txt_file) as tf:
for line in tf:
line = line.strip()
m = REOpter(line)
if m.match(r'signal:\s+([/\w]+)(?:\[(\d+):(\d+)\])?,\s+\w+:\s+(\d+),'):
group_name = 'Group{0}'.format(m.group(4))
if group_name not in group_dic:
group_dic[group_name] = []
sig_lst = m.group(1).split('/')
new_sig_lst = []
for index, part in enumerate(sig_lst):
if index == len(sig_lst)-1:
new_sig_lst.append(part)
elif part.startswith('u_'):
new_sig_lst.append('{0}:{1}'.format(part[2:], part))
elif part.endswith('_inst'):
new_sig_lst.append('{0}:{1}'.format(part[:-5], part))
new_sig = '|'.join(new_sig_lst)
if m.group(2) and m.group(3):
for i in range(int(m.group(3)), int(m.group(2))+1):
group_dic[group_name].append('{0}[{1}]'.format(new_sig, i))
else:
group_dic[group_name].append(new_sig)
inst_dic_lst = []
for index_org, group in enumerate(group_dic):
offset = int(index_org/50)
index = index_org%50
if index == 0:
inst_dic = {'inst_num': offset,
'inst_time': dt.datetime.now().strftime('%Y/%m/%d %H:%M:%S'),
'group_dic': {}}
inst_dic_lst.append(inst_dic)
inst_dic['group_dic']['{0:04d}'.format(index_org)] = []
for index_sig, signal in enumerate(reversed(group_dic[group])):
inst_dic['group_dic']['{0:04d}'.format(index_org)].append({'index': index_sig, 'sig_name': signal})
dir_name = os.path.dirname(os.path.realpath(__file__))
templateLoader = jinja2.FileSystemLoader(dir_name)
templateEnv = jinja2.Environment(loader=templateLoader)
template = templateEnv.get_template('template.stp')
template_out = template.render({'inst_dic_lst': inst_dic_lst})
stp_file = os.path.splitext(args.txt_file)[0]+'.stp'
with open(stp_file, 'w') as f:
f.write(template_out)
| gpl-3.0 | -5,299,927,862,998,110,000 | 36.175 | 107 | 0.573974 | false |
pancentric/django-cms | cms/management/commands/subcommands/delete_orphaned_plugins.py | 9 | 2268 | from django.core.management.base import NoArgsCommand
from cms.management.commands.subcommands.list import plugin_report
from cms.utils.compat.input import raw_input
class DeleteOrphanedPluginsCommand(NoArgsCommand):
help = "Delete plugins from the CMSPlugins table that should have instances but don't, and ones for which a corresponding plugin model can no longer be found"
def handle_noargs(self, **options):
"""
Obtains a plugin report -
cms.management.commands.subcommands.list.plugin_report - and uses it
to delete orphaned plugins from the database, i.e. ones that are no
longer installed, and ones that have no corresponding saved plugin
instances (as will happen if a plugin is inserted into a placeholder,
but not saved).
"""
self.stdout.write(u"Obtaining plugin report\n")
uninstalled_instances = []
unsaved_instances = []
for plugin in plugin_report():
if not plugin["model"]:
for instance in plugin["instances"]:
uninstalled_instances.append(instance)
for instance in plugin["unsaved_instances"]:
unsaved_instances.append(instance)
if options.get('interactive'):
confirm = raw_input("""
You have requested to delete any instances of uninstalled plugins and unsaved plugin instances.
There are %d uninstalled plugins and %d unsaved plugins.
Are you sure you want to do this?
Type 'yes' to continue, or 'no' to cancel: """ % (len(uninstalled_instances), len(unsaved_instances)))
else:
confirm = 'yes'
if confirm == 'yes':
# delete items whose plugin is uninstalled and items with unsaved instances
self.stdout.write(u"... deleting any instances of uninstalled plugins and unsaved plugin instances\n")
for instance in uninstalled_instances:
instance.delete()
for instance in unsaved_instances:
instance.delete()
self.stdout.write(u"Deleted instances of: \n %s uninstalled plugins \n %s plugins with unsaved instances\n" % (len(uninstalled_instances), len(unsaved_instances)))
self.stdout.write(u"all done\n")
| bsd-3-clause | 7,033,483,344,696,063,000 | 44.36 | 182 | 0.66358 | false |
bgris/ODL_bgris | lib/python3.5/site-packages/spyder/app/mainwindow.py | 1 | 128756 | # -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""
Spyder, the Scientific PYthon Development EnviRonment
=====================================================
Developed and maintained by the Spyder Project
Contributors
Copyright © Spyder Project Contributors
Licensed under the terms of the MIT License
(see spyder/__init__.py for details)
"""
# =============================================================================
# Stdlib imports
# =============================================================================
from __future__ import print_function
import atexit
import errno
import os
import os.path as osp
import re
import shutil
import signal
import socket
import subprocess
import sys
import threading
import traceback
#==============================================================================
# Keeping a reference to the original sys.exit before patching it
#==============================================================================
ORIGINAL_SYS_EXIT = sys.exit
#==============================================================================
# Check requirements
#==============================================================================
from spyder import requirements
requirements.check_path()
requirements.check_qt()
#==============================================================================
# Windows only: support for hiding console window when started with python.exe
#==============================================================================
set_attached_console_visible = None
is_attached_console_visible = None
set_windows_appusermodelid = None
if os.name == 'nt':
from spyder.utils.windows import (set_attached_console_visible,
is_attached_console_visible,
set_windows_appusermodelid)
#==============================================================================
# Workaround: importing rope.base.project here, otherwise this module can't
# be imported if Spyder was executed from another folder than spyder
#==============================================================================
try:
import rope.base.project # analysis:ignore
except ImportError:
pass
#==============================================================================
# Qt imports
#==============================================================================
from qtpy import API, PYQT5
from qtpy.compat import from_qvariant
from qtpy.QtCore import (QByteArray, QCoreApplication, QPoint, QSize, Qt,
QThread, QTimer, QUrl, Signal, Slot)
from qtpy.QtGui import QColor, QDesktopServices, QKeySequence, QPixmap
from qtpy.QtWidgets import (QAction, QApplication, QDockWidget, QMainWindow,
QMenu, QMessageBox, QShortcut, QSplashScreen,
QStyleFactory)
# Avoid a "Cannot mix incompatible Qt library" error on Windows platforms
# when PySide is selected by the QT_API environment variable and when PyQt4
# is also installed (or any other Qt-based application prepending a directory
# containing incompatible Qt DLLs versions in PATH):
from qtpy import QtSvg # analysis:ignore
# Avoid a bug in Qt: https://bugreports.qt.io/browse/QTBUG-46720
from qtpy import QtWebEngineWidgets # analysis:ignore
# To catch font errors in QtAwesome
from qtawesome.iconic_font import FontError
#==============================================================================
# Proper high DPI scaling is available in Qt >= 5.6.0. This attibute must
# be set before creating the application.
#==============================================================================
from spyder.config.main import CONF
if CONF.get('main', 'high_dpi_scaling'):
high_dpi_scaling = True
else:
high_dpi_scaling = False
if hasattr(Qt, 'AA_EnableHighDpiScaling'):
QCoreApplication.setAttribute(Qt.AA_EnableHighDpiScaling, high_dpi_scaling)
#==============================================================================
# Create our QApplication instance here because it's needed to render the
# splash screen created below
#==============================================================================
from spyder.utils.qthelpers import qapplication, MENU_SEPARATOR
MAIN_APP = qapplication()
#==============================================================================
# Create splash screen out of MainWindow to reduce perceived startup time.
#==============================================================================
from spyder.config.base import _, get_image_path, DEV, PYTEST
if not PYTEST:
SPLASH = QSplashScreen(QPixmap(get_image_path('splash.svg')))
SPLASH_FONT = SPLASH.font()
SPLASH_FONT.setPixelSize(10)
SPLASH.setFont(SPLASH_FONT)
SPLASH.show()
SPLASH.showMessage(_("Initializing..."), Qt.AlignBottom | Qt.AlignCenter |
Qt.AlignAbsolute, QColor(Qt.white))
QApplication.processEvents()
else:
SPLASH = None
#==============================================================================
# Local utility imports
#==============================================================================
from spyder import __version__, __project_url__, __forum_url__, get_versions
from spyder.config.base import (get_conf_path, get_module_data_path,
get_module_source_path, STDERR, DEBUG,
debug_print, MAC_APP_NAME, get_home_dir,
running_in_mac_app, get_module_path,
reset_config_files)
from spyder.config.main import OPEN_FILES_PORT
from spyder.config.utils import IMPORT_EXT, is_gtk_desktop
from spyder.app.cli_options import get_options
from spyder import dependencies
from spyder.config.ipython import QTCONSOLE_INSTALLED
from spyder.py3compat import (getcwd, is_text_string, to_text_string,
PY3, qbytearray_to_str, configparser as cp)
from spyder.utils import encoding, programs
from spyder.utils import icon_manager as ima
from spyder.utils.introspection import module_completion
from spyder.utils.programs import is_module_installed
from spyder.utils.misc import select_port
#==============================================================================
# Local gui imports
#==============================================================================
# NOTE: Move (if possible) import's of widgets and plugins exactly where they
# are needed in MainWindow to speed up perceived startup time (i.e. the time
# from clicking the Spyder icon to showing the splash screen).
try:
from spyder.utils.environ import WinUserEnvDialog
except ImportError:
WinUserEnvDialog = None # analysis:ignore
from spyder.utils.qthelpers import (create_action, add_actions, get_icon,
add_shortcut_to_tooltip,
create_module_bookmark_actions,
create_program_action, DialogManager,
create_python_script_action, file_uri)
from spyder.config.gui import get_shortcut
from spyder.otherplugins import get_spyderplugins_mods
from spyder.app import tour
#==============================================================================
# Get the cwd before initializing WorkingDirectory, which sets it to the one
# used in the last session
#==============================================================================
CWD = getcwd()
#==============================================================================
# Spyder's main window widgets utilities
#==============================================================================
def get_python_doc_path():
"""
Return Python documentation path
(Windows: return the PythonXX.chm path if available)
"""
if os.name == 'nt':
doc_path = osp.join(sys.prefix, "Doc")
if not osp.isdir(doc_path):
return
python_chm = [path for path in os.listdir(doc_path)
if re.match(r"(?i)Python[0-9]{3,6}.chm", path)]
if python_chm:
return file_uri(osp.join(doc_path, python_chm[0]))
else:
vinf = sys.version_info
doc_path = '/usr/share/doc/python%d.%d/html' % (vinf[0], vinf[1])
python_doc = osp.join(doc_path, "index.html")
if osp.isfile(python_doc):
return file_uri(python_doc)
def get_focus_python_shell():
"""Extract and return Python shell from widget
Return None if *widget* is not a Python shell (e.g. IPython kernel)"""
widget = QApplication.focusWidget()
from spyder.widgets.shell import PythonShellWidget
from spyder.widgets.externalshell.pythonshell import ExternalPythonShell
if isinstance(widget, PythonShellWidget):
return widget
elif isinstance(widget, ExternalPythonShell):
return widget.shell
#==============================================================================
# Main Window
#==============================================================================
class MainWindow(QMainWindow):
"""Spyder main window"""
DOCKOPTIONS = QMainWindow.AllowTabbedDocks|QMainWindow.AllowNestedDocks
SPYDER_PATH = get_conf_path('path')
BOOKMARKS = (
('numpy', "http://docs.scipy.org/doc/",
_("Numpy and Scipy documentation")),
('matplotlib', "http://matplotlib.sourceforge.net/contents.html",
_("Matplotlib documentation")),
('PyQt4',
"http://pyqt.sourceforge.net/Docs/PyQt4/",
_("PyQt4 Reference Guide")),
('PyQt4',
"http://pyqt.sourceforge.net/Docs/PyQt4/classes.html",
_("PyQt4 API Reference")),
('winpython', "https://winpython.github.io/",
_("WinPython"))
)
# Signals
restore_scrollbar_position = Signal()
all_actions_defined = Signal()
sig_pythonpath_changed = Signal()
sig_open_external_file = Signal(str)
sig_resized = Signal("QResizeEvent") # related to interactive tour
sig_moved = Signal("QMoveEvent") # related to interactive tour
def __init__(self, options=None):
QMainWindow.__init__(self)
qapp = QApplication.instance()
if PYQT5:
# Enabling scaling for high dpi
qapp.setAttribute(Qt.AA_UseHighDpiPixmaps)
self.default_style = str(qapp.style().objectName())
self.dialog_manager = DialogManager()
self.init_workdir = options.working_directory
self.profile = options.profile
self.multithreaded = options.multithreaded
self.new_instance = options.new_instance
self.open_project = options.open_project
self.debug_print("Start of MainWindow constructor")
def signal_handler(signum, frame=None):
"""Handler for signals."""
sys.stdout.write('Handling signal: %s\n' % signum)
sys.stdout.flush()
QApplication.quit()
if os.name == "nt":
try:
import win32api
win32api.SetConsoleCtrlHandler(signal_handler, True)
except ImportError:
pass
else:
signal.signal(signal.SIGTERM, signal_handler)
# Use a custom Qt stylesheet
if sys.platform == 'darwin':
spy_path = get_module_source_path('spyder')
img_path = osp.join(spy_path, 'images')
mac_style = open(osp.join(spy_path, 'app', 'mac_stylesheet.qss')).read()
mac_style = mac_style.replace('$IMAGE_PATH', img_path)
self.setStyleSheet(mac_style)
# Create our TEMPDIR
if not osp.isdir(programs.TEMPDIR):
os.mkdir(programs.TEMPDIR)
# Shortcut management data
self.shortcut_data = []
# Loading Spyder path
self.path = []
self.project_path = []
if osp.isfile(self.SPYDER_PATH):
self.path, _x = encoding.readlines(self.SPYDER_PATH)
self.path = [name for name in self.path if osp.isdir(name)]
self.remove_path_from_sys_path()
self.add_path_to_sys_path()
# Plugins
self.console = None
self.workingdirectory = None
self.editor = None
self.explorer = None
self.help = None
self.onlinehelp = None
self.projects = None
self.outlineexplorer = None
self.historylog = None
self.extconsole = None
self.ipyconsole = None
self.variableexplorer = None
self.findinfiles = None
self.thirdparty_plugins = []
# Tour # TODO: Should I consider it a plugin?? or?
self.tour = None
self.tours_available = None
# Check for updates Thread and Worker, refereces needed to prevent
# segfaulting
self.check_updates_action = None
self.thread_updates = None
self.worker_updates = None
self.give_updates_feedback = True
# Preferences
from spyder.plugins.configdialog import (MainConfigPage,
ColorSchemeConfigPage)
from spyder.plugins.shortcuts import ShortcutsConfigPage
from spyder.plugins.runconfig import RunConfigPage
from spyder.plugins.maininterpreter import MainInterpreterConfigPage
self.general_prefs = [MainConfigPage, ShortcutsConfigPage,
ColorSchemeConfigPage, MainInterpreterConfigPage,
RunConfigPage]
self.prefs_index = None
self.prefs_dialog_size = None
# Quick Layouts and Dialogs
from spyder.plugins.layoutdialog import (LayoutSaveDialog,
LayoutSettingsDialog)
self.dialog_layout_save = LayoutSaveDialog
self.dialog_layout_settings = LayoutSettingsDialog
# Actions
self.lock_dockwidgets_action = None
self.show_toolbars_action = None
self.close_dockwidget_action = None
self.undo_action = None
self.redo_action = None
self.copy_action = None
self.cut_action = None
self.paste_action = None
self.selectall_action = None
self.maximize_action = None
self.fullscreen_action = None
# Menu bars
self.file_menu = None
self.file_menu_actions = []
self.edit_menu = None
self.edit_menu_actions = []
self.search_menu = None
self.search_menu_actions = []
self.source_menu = None
self.source_menu_actions = []
self.run_menu = None
self.run_menu_actions = []
self.debug_menu = None
self.debug_menu_actions = []
self.consoles_menu = None
self.consoles_menu_actions = []
self.projects_menu = None
self.projects_menu_actions = []
self.tools_menu = None
self.tools_menu_actions = []
self.external_tools_menu = None # We must keep a reference to this,
# otherwise the external tools menu is lost after leaving setup method
self.external_tools_menu_actions = []
self.view_menu = None
self.plugins_menu = None
self.plugins_menu_actions = []
self.toolbars_menu = None
self.help_menu = None
self.help_menu_actions = []
# Status bar widgets
self.mem_status = None
self.cpu_status = None
# Toolbars
self.visible_toolbars = []
self.toolbarslist = []
self.main_toolbar = None
self.main_toolbar_actions = []
self.file_toolbar = None
self.file_toolbar_actions = []
self.edit_toolbar = None
self.edit_toolbar_actions = []
self.search_toolbar = None
self.search_toolbar_actions = []
self.source_toolbar = None
self.source_toolbar_actions = []
self.run_toolbar = None
self.run_toolbar_actions = []
self.debug_toolbar = None
self.debug_toolbar_actions = []
self.layout_toolbar = None
self.layout_toolbar_actions = []
# Set Window title and icon
if DEV is not None:
title = "Spyder %s (Python %s.%s)" % (__version__,
sys.version_info[0],
sys.version_info[1])
else:
title = "Spyder (Python %s.%s)" % (sys.version_info[0],
sys.version_info[1])
if DEBUG:
title += " [DEBUG MODE %d]" % DEBUG
if options.window_title is not None:
title += ' -- ' + options.window_title
self.base_title = title
self.update_window_title()
resample = os.name != 'nt'
icon = ima.icon('spyder', resample=resample)
# Resampling SVG icon only on non-Windows platforms (see Issue 1314):
self.setWindowIcon(icon)
        if set_windows_appusermodelid is not None:
res = set_windows_appusermodelid()
debug_print("appusermodelid: " + str(res))
# Setting QTimer if running in travis
test_travis = os.environ.get('TEST_CI_APP', None)
if test_travis is not None:
global MAIN_APP
timer_shutdown_time = 30000
self.timer_shutdown = QTimer(self)
self.timer_shutdown.timeout.connect(MAIN_APP.quit)
self.timer_shutdown.start(timer_shutdown_time)
# Showing splash screen
self.splash = SPLASH
if CONF.get('main', 'current_version', '') != __version__:
CONF.set('main', 'current_version', __version__)
# Execute here the actions to be performed only once after
# each update (there is nothing there for now, but it could
# be useful some day...)
# List of satellite widgets (registered in add_dockwidget):
self.widgetlist = []
# Flags used if closing() is called by the exit() shell command
self.already_closed = False
self.is_starting_up = True
self.is_setting_up = True
self.dockwidgets_locked = CONF.get('main', 'panes_locked')
self.floating_dockwidgets = []
self.window_size = None
self.window_position = None
self.state_before_maximizing = None
self.current_quick_layout = None
self.previous_layout_settings = None # TODO: related to quick layouts
self.last_plugin = None
self.fullscreen_flag = None # isFullscreen does not work as expected
# The following flag remember the maximized state even when
# the window is in fullscreen mode:
self.maximized_flag = None
# Track which console plugin type had last focus
# True: Console plugin
# False: IPython console plugin
self.last_console_plugin_focus_was_python = True
# To keep track of the last focused widget
self.last_focused_widget = None
self.previous_focused_widget = None
# Server to open external files on a single instance
self.open_files_server = socket.socket(socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_TCP)
self.apply_settings()
self.debug_print("End of MainWindow constructor")
def debug_print(self, message):
"""Debug prints"""
debug_print(message)
#---- Window setup
def create_toolbar(self, title, object_name, iconsize=24):
"""Create and return toolbar with *title* and *object_name*"""
toolbar = self.addToolBar(title)
toolbar.setObjectName(object_name)
toolbar.setIconSize(QSize(iconsize, iconsize))
self.toolbarslist.append(toolbar)
return toolbar
def setup(self):
"""Setup main window"""
self.debug_print("*** Start of MainWindow setup ***")
self.debug_print(" ..core actions")
self.close_dockwidget_action = create_action(self,
icon=ima.icon('DialogCloseButton'),
text=_("Close current pane"),
triggered=self.close_current_dockwidget,
context=Qt.ApplicationShortcut)
self.register_shortcut(self.close_dockwidget_action, "_",
"Close pane")
self.lock_dockwidgets_action = create_action(self, _("Lock panes"),
toggled=self.toggle_lock_dockwidgets,
context=Qt.ApplicationShortcut)
self.register_shortcut(self.lock_dockwidgets_action, "_",
"Lock unlock panes")
# custom layouts shortcuts
self.toggle_next_layout_action = create_action(self,
_("Use next layout"),
triggered=self.toggle_next_layout,
context=Qt.ApplicationShortcut)
self.toggle_previous_layout_action = create_action(self,
_("Use previous layout"),
triggered=self.toggle_previous_layout,
context=Qt.ApplicationShortcut)
self.register_shortcut(self.toggle_next_layout_action, "_",
"Use next layout")
self.register_shortcut(self.toggle_previous_layout_action, "_",
"Use previous layout")
def create_edit_action(text, tr_text, icon):
textseq = text.split(' ')
method_name = textseq[0].lower()+"".join(textseq[1:])
action = create_action(self, tr_text,
icon=icon,
triggered=self.global_callback,
data=method_name,
context=Qt.WidgetShortcut)
self.register_shortcut(action, "Editor", text)
return action
self.undo_action = create_edit_action('Undo', _('Undo'),
ima.icon('undo'))
self.redo_action = create_edit_action('Redo', _('Redo'),
ima.icon('redo'))
self.copy_action = create_edit_action('Copy', _('Copy'),
ima.icon('editcopy'))
self.cut_action = create_edit_action('Cut', _('Cut'),
ima.icon('editcut'))
self.paste_action = create_edit_action('Paste', _('Paste'),
ima.icon('editpaste'))
self.selectall_action = create_edit_action("Select All",
_("Select All"),
ima.icon('selectall'))
self.edit_menu_actions = [self.undo_action, self.redo_action,
None, self.cut_action, self.copy_action,
self.paste_action, self.selectall_action]
namespace = None
self.debug_print(" ..toolbars")
# File menu/toolbar
self.file_menu = self.menuBar().addMenu(_("&File"))
self.file_toolbar = self.create_toolbar(_("File toolbar"),
"file_toolbar")
# Edit menu/toolbar
self.edit_menu = self.menuBar().addMenu(_("&Edit"))
self.edit_toolbar = self.create_toolbar(_("Edit toolbar"),
"edit_toolbar")
# Search menu/toolbar
self.search_menu = self.menuBar().addMenu(_("&Search"))
self.search_toolbar = self.create_toolbar(_("Search toolbar"),
"search_toolbar")
# Source menu/toolbar
self.source_menu = self.menuBar().addMenu(_("Sour&ce"))
self.source_toolbar = self.create_toolbar(_("Source toolbar"),
"source_toolbar")
# Run menu/toolbar
self.run_menu = self.menuBar().addMenu(_("&Run"))
self.run_toolbar = self.create_toolbar(_("Run toolbar"),
"run_toolbar")
# Debug menu/toolbar
self.debug_menu = self.menuBar().addMenu(_("&Debug"))
self.debug_toolbar = self.create_toolbar(_("Debug toolbar"),
"debug_toolbar")
# Consoles menu/toolbar
self.consoles_menu = self.menuBar().addMenu(_("C&onsoles"))
# Projects menu
self.projects_menu = self.menuBar().addMenu(_("&Projects"))
# Tools menu
self.tools_menu = self.menuBar().addMenu(_("&Tools"))
# View menu
self.view_menu = self.menuBar().addMenu(_("&View"))
# Help menu
self.help_menu = self.menuBar().addMenu(_("&Help"))
# Status bar
status = self.statusBar()
status.setObjectName("StatusBar")
status.showMessage(_("Welcome to Spyder!"), 5000)
self.debug_print(" ..tools")
# Tools + External Tools
prefs_action = create_action(self, _("Pre&ferences"),
icon=ima.icon('configure'),
triggered=self.edit_preferences,
context=Qt.ApplicationShortcut)
self.register_shortcut(prefs_action, "_", "Preferences",
add_sc_to_tip=True)
spyder_path_action = create_action(self,
_("PYTHONPATH manager"),
None, icon=ima.icon('pythonpath'),
triggered=self.path_manager_callback,
tip=_("Python Path Manager"),
menurole=QAction.ApplicationSpecificRole)
update_modules_action = create_action(self,
_("Update module names list"),
triggered=lambda:
module_completion.reset(),
tip=_("Refresh list of module names "
"available in PYTHONPATH"))
reset_spyder_action = create_action(
self, _("Reset Spyder to factory defaults"),
triggered=self.reset_spyder)
self.tools_menu_actions = [prefs_action, spyder_path_action]
if WinUserEnvDialog is not None:
winenv_action = create_action(self,
_("Current user environment variables..."),
icon='win_env.png',
tip=_("Show and edit current user environment "
"variables in Windows registry "
"(i.e. for all sessions)"),
triggered=self.win_env)
self.tools_menu_actions.append(winenv_action)
self.tools_menu_actions += [reset_spyder_action, MENU_SEPARATOR,
update_modules_action]
# External Tools submenu
self.external_tools_menu = QMenu(_("External Tools"))
self.external_tools_menu_actions = []
# WinPython control panel
self.wp_action = create_action(self, _("WinPython control panel"),
icon=get_icon('winpython.svg'),
triggered=lambda:
programs.run_python_script('winpython', 'controlpanel'))
if os.name == 'nt' and is_module_installed('winpython'):
self.external_tools_menu_actions.append(self.wp_action)
# Qt-related tools
additact = []
for name in ("designer-qt4", "designer"):
qtdact = create_program_action(self, _("Qt Designer"),
name, 'qtdesigner.png')
if qtdact:
break
for name in ("linguist-qt4", "linguist"):
qtlact = create_program_action(self, _("Qt Linguist"),
"linguist", 'qtlinguist.png')
if qtlact:
break
args = ['-no-opengl'] if os.name == 'nt' else []
qteact = create_python_script_action(self,
_("Qt examples"), 'qt.png', "PyQt4",
osp.join("examples", "demos",
"qtdemo", "qtdemo"), args)
for act in (qtdact, qtlact, qteact):
if act:
additact.append(act)
if additact and is_module_installed('winpython'):
self.external_tools_menu_actions += [None] + additact
# Guidata and Sift
self.debug_print(" ..sift?")
gdgq_act = []
# Guidata and Guiqwt don't support PyQt5 yet and they fail
# with an AssertionError when imported using those bindings
# (see issue 2274)
try:
from guidata import configtools
from guidata import config # analysis:ignore
guidata_icon = configtools.get_icon('guidata.svg')
guidata_act = create_python_script_action(self,
_("guidata examples"), guidata_icon,
"guidata",
osp.join("tests", "__init__"))
gdgq_act += [guidata_act]
except (ImportError, AssertionError):
pass
try:
from guidata import configtools
from guiqwt import config # analysis:ignore
guiqwt_icon = configtools.get_icon('guiqwt.svg')
guiqwt_act = create_python_script_action(self,
_("guiqwt examples"), guiqwt_icon, "guiqwt",
osp.join("tests", "__init__"))
if guiqwt_act:
gdgq_act += [guiqwt_act]
sift_icon = configtools.get_icon('sift.svg')
sift_act = create_python_script_action(self, _("Sift"),
sift_icon, "guiqwt", osp.join("tests", "sift"))
if sift_act:
gdgq_act += [sift_act]
except (ImportError, AssertionError):
pass
if gdgq_act:
self.external_tools_menu_actions += [None] + gdgq_act
# ViTables
vitables_act = create_program_action(self, _("ViTables"),
"vitables", 'vitables.png')
if vitables_act:
self.external_tools_menu_actions += [None, vitables_act]
# Maximize current plugin
self.maximize_action = create_action(self, '',
triggered=self.maximize_dockwidget,
context=Qt.ApplicationShortcut)
self.register_shortcut(self.maximize_action, "_", "Maximize pane")
self.__update_maximize_action()
# Fullscreen mode
self.fullscreen_action = create_action(self,
_("Fullscreen mode"),
triggered=self.toggle_fullscreen,
context=Qt.ApplicationShortcut)
self.register_shortcut(self.fullscreen_action, "_",
"Fullscreen mode", add_sc_to_tip=True)
# Main toolbar
self.main_toolbar_actions = [self.maximize_action,
self.fullscreen_action,
None,
prefs_action, spyder_path_action]
self.main_toolbar = self.create_toolbar(_("Main toolbar"),
"main_toolbar")
# Internal console plugin
self.debug_print(" ..plugin: internal console")
from spyder.plugins.console import Console
self.console = Console(self, namespace, exitfunc=self.closing,
profile=self.profile,
multithreaded=self.multithreaded,
message=_("Spyder Internal Console\n\n"
"This console is used to report application\n"
"internal errors and to inspect Spyder\n"
"internals with the following commands:\n"
" spy.app, spy.window, dir(spy)\n\n"
"Please don't use it to run your code\n\n"))
self.console.register_plugin()
# Working directory plugin
self.debug_print(" ..plugin: working directory")
from spyder.plugins.workingdirectory import WorkingDirectory
self.workingdirectory = WorkingDirectory(self, self.init_workdir, main=self)
self.workingdirectory.register_plugin()
self.toolbarslist.append(self.workingdirectory)
# Help plugin
if CONF.get('help', 'enable'):
self.set_splash(_("Loading help..."))
from spyder.plugins.help import Help
self.help = Help(self)
self.help.register_plugin()
# Outline explorer widget
if CONF.get('outline_explorer', 'enable'):
self.set_splash(_("Loading outline explorer..."))
from spyder.plugins.outlineexplorer import OutlineExplorer
fullpath_sorting = CONF.get('editor', 'fullpath_sorting', True)
self.outlineexplorer = OutlineExplorer(self,
fullpath_sorting=fullpath_sorting)
self.outlineexplorer.register_plugin()
# Editor plugin
self.set_splash(_("Loading editor..."))
from spyder.plugins.editor import Editor
self.editor = Editor(self)
self.editor.register_plugin()
# Populating file menu entries
quit_action = create_action(self, _("&Quit"),
icon=ima.icon('exit'),
tip=_("Quit"),
triggered=self.console.quit,
context=Qt.ApplicationShortcut)
self.register_shortcut(quit_action, "_", "Quit")
restart_action = create_action(self, _("&Restart"),
icon=ima.icon('restart'),
tip=_("Restart"),
triggered=self.restart,
context=Qt.ApplicationShortcut)
self.register_shortcut(restart_action, "_", "Restart")
self.file_menu_actions += [None, restart_action, quit_action]
self.set_splash("")
self.debug_print(" ..widgets")
# Find in files
if CONF.get('find_in_files', 'enable'):
from spyder.plugins.findinfiles import FindInFiles
self.findinfiles = FindInFiles(self)
self.findinfiles.register_plugin()
# Explorer
if CONF.get('explorer', 'enable'):
self.set_splash(_("Loading file explorer..."))
from spyder.plugins.explorer import Explorer
self.explorer = Explorer(self)
self.explorer.register_plugin()
# History log widget
if CONF.get('historylog', 'enable'):
self.set_splash(_("Loading history plugin..."))
from spyder.plugins.history import HistoryLog
self.historylog = HistoryLog(self)
self.historylog.register_plugin()
# Online help widget
try: # Qt >= v4.4
from spyder.plugins.onlinehelp import OnlineHelp
except ImportError: # Qt < v4.4
OnlineHelp = None # analysis:ignore
if CONF.get('onlinehelp', 'enable') and OnlineHelp is not None:
self.set_splash(_("Loading online help..."))
self.onlinehelp = OnlineHelp(self)
self.onlinehelp.register_plugin()
# Project explorer widget
self.set_splash(_("Loading project explorer..."))
from spyder.plugins.projects import Projects
self.projects = Projects(self)
self.projects.register_plugin()
self.project_path = self.projects.get_pythonpath(at_start=True)
# External console
self.set_splash(_("Loading external console..."))
from spyder.plugins.externalconsole import ExternalConsole
self.extconsole = ExternalConsole(self)
self.extconsole.register_plugin()
# Namespace browser
self.set_splash(_("Loading namespace browser..."))
from spyder.plugins.variableexplorer import VariableExplorer
self.variableexplorer = VariableExplorer(self)
self.variableexplorer.register_plugin()
# IPython console
if QTCONSOLE_INSTALLED:
self.set_splash(_("Loading IPython console..."))
from spyder.plugins.ipythonconsole import IPythonConsole
self.ipyconsole = IPythonConsole(self)
self.ipyconsole.register_plugin()
self.set_splash(_("Setting up main window..."))
# Help menu
dep_action = create_action(self, _("Dependencies..."),
triggered=self.show_dependencies,
icon=ima.icon('advanced'))
report_action = create_action(self,
_("Report issue..."),
icon=ima.icon('bug'),
triggered=self.report_issue)
support_action = create_action(self,
_("Spyder support..."),
triggered=self.google_group)
self.check_updates_action = create_action(self,
_("Check for updates..."),
triggered=self.check_updates)
# Spyder documentation
doc_path = get_module_data_path('spyder', relpath="doc",
attr_name='DOCPATH')
# * Trying to find the chm doc
spyder_doc = osp.join(doc_path, "Spyderdoc.chm")
if not osp.isfile(spyder_doc):
spyder_doc = osp.join(doc_path, os.pardir, "Spyderdoc.chm")
# * Trying to find the html doc
if not osp.isfile(spyder_doc):
spyder_doc = osp.join(doc_path, "index.html")
# * Trying to find the development-version html doc
if not osp.isfile(spyder_doc):
spyder_doc = osp.join(get_module_source_path('spyder'),
os.pardir, 'build', 'lib', 'spyder',
'doc', "index.html")
# * If we totally fail, point to our web build
if not osp.isfile(spyder_doc):
spyder_doc = 'http://pythonhosted.org/spyder'
else:
spyder_doc = file_uri(spyder_doc)
doc_action = create_action(self, _("Spyder documentation"),
icon=ima.icon('DialogHelpButton'),
triggered=lambda:
programs.start_file(spyder_doc))
self.register_shortcut(doc_action, "_",
"spyder documentation")
if self.help is not None:
tut_action = create_action(self, _("Spyder tutorial"),
triggered=self.help.show_tutorial)
else:
tut_action = None
#----- Tours
self.tour = tour.AnimatedTour(self)
self.tours_menu = QMenu(_("Interactive tours"))
self.tour_menu_actions = []
# TODO: Only show intro tour for now. When we are close to finish
# 3.0, we will finish and show the other tour
self.tours_available = tour.get_tours(0)
for i, tour_available in enumerate(self.tours_available):
self.tours_available[i]['last'] = 0
tour_name = tour_available['name']
def trigger(i=i, self=self): # closure needed!
return lambda: self.show_tour(i)
temp_action = create_action(self, tour_name, tip="",
triggered=trigger())
self.tour_menu_actions += [temp_action]
self.tours_menu.addActions(self.tour_menu_actions)
self.help_menu_actions = [doc_action, tut_action, self.tours_menu,
MENU_SEPARATOR, report_action, dep_action,
self.check_updates_action, support_action,
MENU_SEPARATOR]
# Python documentation
if get_python_doc_path() is not None:
pydoc_act = create_action(self, _("Python documentation"),
triggered=lambda:
programs.start_file(get_python_doc_path()))
self.help_menu_actions.append(pydoc_act)
# IPython documentation
if self.ipyconsole is not None and self.help is not None:
ipython_menu = QMenu(_("IPython documentation"), self)
intro_action = create_action(self, _("Intro to IPython"),
triggered=self.ipyconsole.show_intro)
quickref_action = create_action(self, _("Quick reference"),
triggered=self.ipyconsole.show_quickref)
guiref_action = create_action(self, _("Console help"),
triggered=self.ipyconsole.show_guiref)
add_actions(ipython_menu, (intro_action, guiref_action,
quickref_action))
self.help_menu_actions.append(ipython_menu)
# Windows-only: documentation located in sys.prefix/Doc
ipm_actions = []
def add_ipm_action(text, path):
"""Add installed Python module doc action to help submenu"""
# QAction.triggered works differently for PySide and PyQt
path = file_uri(path)
if not API == 'pyside':
slot=lambda _checked, path=path: programs.start_file(path)
else:
slot=lambda path=path: programs.start_file(path)
action = create_action(self, text,
icon='%s.png' % osp.splitext(path)[1][1:],
triggered=slot)
ipm_actions.append(action)
sysdocpth = osp.join(sys.prefix, 'Doc')
if osp.isdir(sysdocpth): # exists on Windows, except frozen dist.
for docfn in os.listdir(sysdocpth):
pt = r'([a-zA-Z\_]*)(doc)?(-dev)?(-ref)?(-user)?.(chm|pdf)'
match = re.match(pt, docfn)
if match is not None:
pname = match.groups()[0]
if pname not in ('Python', ):
add_ipm_action(pname, osp.join(sysdocpth, docfn))
# Installed Python modules submenu (Windows only)
if ipm_actions:
pymods_menu = QMenu(_("Installed Python modules"), self)
add_actions(pymods_menu, ipm_actions)
self.help_menu_actions.append(pymods_menu)
# Online documentation
web_resources = QMenu(_("Online documentation"))
webres_actions = create_module_bookmark_actions(self,
self.BOOKMARKS)
webres_actions.insert(2, None)
webres_actions.insert(5, None)
add_actions(web_resources, webres_actions)
self.help_menu_actions.append(web_resources)
# Qt assistant link
if sys.platform.startswith('linux') and not PYQT5:
qta_exe = "assistant-qt4"
else:
qta_exe = "assistant"
qta_act = create_program_action(self, _("Qt documentation"),
qta_exe)
if qta_act:
self.help_menu_actions += [qta_act, None]
# About Spyder
about_action = create_action(self,
_("About %s...") % "Spyder",
icon=ima.icon('MessageBoxInformation'),
triggered=self.about)
self.help_menu_actions += [MENU_SEPARATOR, about_action]
# Status bar widgets
from spyder.widgets.status import MemoryStatus, CPUStatus
self.mem_status = MemoryStatus(self, status)
self.cpu_status = CPUStatus(self, status)
self.apply_statusbar_settings()
# Third-party plugins
for mod in get_spyderplugins_mods():
try:
plugin = mod.PLUGIN_CLASS(self)
self.thirdparty_plugins.append(plugin)
plugin.register_plugin()
except Exception as error:
print("%s: %s" % (mod, str(error)), file=STDERR)
traceback.print_exc(file=STDERR)
#----- View
# View menu
self.plugins_menu = QMenu(_("Panes"), self)
self.toolbars_menu = QMenu(_("Toolbars"), self)
self.quick_layout_menu = QMenu(_("Window layouts"), self)
self.quick_layout_set_menu()
self.view_menu.addMenu(self.plugins_menu) # Panes
add_actions(self.view_menu, (self.lock_dockwidgets_action,
self.close_dockwidget_action,
self.maximize_action,
MENU_SEPARATOR))
self.show_toolbars_action = create_action(self,
_("Show toolbars"),
triggered=self.show_toolbars,
context=Qt.ApplicationShortcut)
self.register_shortcut(self.show_toolbars_action, "_",
"Show toolbars")
self.view_menu.addMenu(self.toolbars_menu)
self.view_menu.addAction(self.show_toolbars_action)
add_actions(self.view_menu, (MENU_SEPARATOR,
self.quick_layout_menu,
self.toggle_previous_layout_action,
self.toggle_next_layout_action,
MENU_SEPARATOR,
self.fullscreen_action))
if set_attached_console_visible is not None:
cmd_act = create_action(self,
_("Attached console window (debugging)"),
toggled=set_attached_console_visible)
cmd_act.setChecked(is_attached_console_visible())
add_actions(self.view_menu, (MENU_SEPARATOR, cmd_act))
# Adding external tools action to "Tools" menu
if self.external_tools_menu_actions:
external_tools_act = create_action(self, _("External Tools"))
external_tools_act.setMenu(self.external_tools_menu)
self.tools_menu_actions += [None, external_tools_act]
# Filling out menu/toolbar entries:
add_actions(self.file_menu, self.file_menu_actions)
add_actions(self.edit_menu, self.edit_menu_actions)
add_actions(self.search_menu, self.search_menu_actions)
add_actions(self.source_menu, self.source_menu_actions)
add_actions(self.run_menu, self.run_menu_actions)
add_actions(self.debug_menu, self.debug_menu_actions)
add_actions(self.consoles_menu, self.consoles_menu_actions)
add_actions(self.projects_menu, self.projects_menu_actions)
add_actions(self.tools_menu, self.tools_menu_actions)
add_actions(self.external_tools_menu,
self.external_tools_menu_actions)
add_actions(self.help_menu, self.help_menu_actions)
add_actions(self.main_toolbar, self.main_toolbar_actions)
add_actions(self.file_toolbar, self.file_toolbar_actions)
add_actions(self.edit_toolbar, self.edit_toolbar_actions)
add_actions(self.search_toolbar, self.search_toolbar_actions)
add_actions(self.source_toolbar, self.source_toolbar_actions)
add_actions(self.debug_toolbar, self.debug_toolbar_actions)
add_actions(self.run_toolbar, self.run_toolbar_actions)
# Apply all defined shortcuts (plugins + 3rd-party plugins)
self.apply_shortcuts()
# Emitting the signal notifying plugins that main window menu and
# toolbar actions are all defined:
self.all_actions_defined.emit()
# Window set-up
self.debug_print("Setting up window...")
self.setup_layout(default=False)
# Show and hide shortcuts in menus for Mac.
# This is a workaround because we can't disable shortcuts
# by setting context=Qt.WidgetShortcut there
if sys.platform == 'darwin':
for name in ['file', 'edit', 'search', 'source', 'run', 'debug',
'projects', 'tools', 'plugins']:
menu_object = getattr(self, name + '_menu')
menu_object.aboutToShow.connect(
lambda name=name: self.show_shortcuts(name))
menu_object.aboutToHide.connect(
lambda name=name: self.hide_shortcuts(name))
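                # Note: `name=name` binds the loop variable at definition
                # time; a bare lambda would capture `name` by reference and
                # every menu would end up showing/hiding the shortcuts of
                # the last menu in the list.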
if self.splash is not None:
self.splash.hide()
# Enabling tear off for all menus except help menu
if CONF.get('main', 'tear_off_menus'):
for child in self.menuBar().children():
if isinstance(child, QMenu) and child != self.help_menu:
child.setTearOffEnabled(True)
# Menu about to show
for child in self.menuBar().children():
if isinstance(child, QMenu):
try:
child.aboutToShow.connect(self.update_edit_menu)
except TypeError:
pass
self.debug_print("*** End of MainWindow setup ***")
self.is_starting_up = False
def post_visible_setup(self):
"""Actions to be performed only after the main window's `show` method
was triggered"""
self.restore_scrollbar_position.emit()
# Remove our temporary dir
atexit.register(self.remove_tmpdir)
# [Workaround for Issue 880]
# QDockWidget objects are not painted if restored as floating
# windows, so we must dock them before showing the mainwindow,
# then set them again as floating windows here.
for widget in self.floating_dockwidgets:
widget.setFloating(True)
        # In Mac OS X 10.7 our app is not displayed after being initialized
        # (I don't know why, because this doesn't happen when it's started
        # from the terminal), so we need to resort to this hack to make it
        # appear.
if running_in_mac_app():
idx = __file__.index(MAC_APP_NAME)
app_path = __file__[:idx]
subprocess.call(['open', app_path + MAC_APP_NAME])
# Server to maintain just one Spyder instance and open files in it if
# the user tries to start other instances with
# $ spyder foo.py
if CONF.get('main', 'single_instance') and not self.new_instance:
t = threading.Thread(target=self.start_open_files_server)
t.setDaemon(True)
t.start()
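            # A rough sketch of the client side of this hand-off (names are
            # illustrative, not the exact client code): a second instance
            # connects to the port stored in CONF and sends the file name:
            #     client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            #     client.connect(('127.0.0.1',
            #                     CONF.get('main', 'open_files_port')))
            #     client.sendall(osp.abspath(fname).encode('utf-8'))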
            # Connect the window to the signal emitted by the previous
            # server when it gets a client connected to it
self.sig_open_external_file.connect(self.open_external_file)
# Create Plugins and toolbars submenus
self.create_plugins_menu()
self.create_toolbars_menu()
self.extconsole.setMinimumHeight(0)
# Update toolbar visibility status
self.toolbars_visible = CONF.get('main', 'toolbars_visible')
self.load_last_visible_toolbars()
        # Update lock status of dockwidgets (panes)
self.lock_dockwidgets_action.setChecked(self.dockwidgets_locked)
self.apply_panes_settings()
# Hide Internal Console so that people don't use it instead of
# the External or IPython ones
if self.console.dockwidget.isVisible() and DEV is None:
self.console.toggle_view_action.setChecked(False)
self.console.dockwidget.hide()
# Show Help and Consoles by default
plugins_to_show = []
if self.help is not None:
plugins_to_show.append(self.help)
if self.ipyconsole is not None:
if self.ipyconsole.isvisible:
plugins_to_show += [self.extconsole, self.ipyconsole]
else:
plugins_to_show += [self.ipyconsole, self.extconsole]
else:
plugins_to_show += [self.extconsole]
for plugin in plugins_to_show:
if plugin.dockwidget.isVisible():
plugin.dockwidget.raise_()
# Show history file if no console is visible
ipy_visible = self.ipyconsole is not None and self.ipyconsole.isvisible
if not self.extconsole.isvisible and not ipy_visible:
self.historylog.add_history(get_conf_path('history.py'))
if self.open_project:
self.projects.open_project(self.open_project)
else:
# Load last project if a project was active when Spyder
# was closed
self.projects.reopen_last_project()
# If no project is active, load last session
if self.projects.get_active_project() is None:
self.editor.setup_open_files()
# Check for spyder updates
if DEV is None and CONF.get('main', 'check_updates_on_startup'):
self.give_updates_feedback = False
self.check_updates()
# Show dialog with missing dependencies
self.report_missing_dependencies()
self.is_setting_up = False
def update_window_title(self):
"""Update main spyder window title based on projects."""
title = self.base_title
if self.projects is not None:
path = self.projects.get_active_project_path()
if path:
path = path.replace(get_home_dir(), '~')
title = '{0} - {1}'.format(path, title)
self.setWindowTitle(title)
def report_missing_dependencies(self):
"""Show a QMessageBox with a list of missing hard dependencies"""
missing_deps = dependencies.missing_dependencies()
if missing_deps:
QMessageBox.critical(self, _('Error'),
_("<b>You have missing dependencies!</b>"
"<br><br><tt>%s</tt><br><br>"
"<b>Please install them to avoid this message.</b>"
"<br><br>"
"<i>Note</i>: Spyder could work without some of these "
"dependencies, however to have a smooth experience when "
"using Spyder we <i>strongly</i> recommend you to install "
"all the listed missing dependencies.<br><br>"
"Failing to install these dependencies might result in bugs. "
"Please be sure that any found bugs are not the direct "
"result of missing dependencies, prior to reporting a new "
"issue."
) % missing_deps, QMessageBox.Ok)
def load_window_settings(self, prefix, default=False, section='main'):
"""Load window layout settings from userconfig-based configuration
with *prefix*, under *section*
default: if True, do not restore inner layout"""
get_func = CONF.get_default if default else CONF.get
window_size = get_func(section, prefix+'size')
prefs_dialog_size = get_func(section, prefix+'prefs_dialog_size')
if default:
hexstate = None
else:
hexstate = get_func(section, prefix+'state', None)
pos = get_func(section, prefix+'position')
        # It's necessary to verify that the window position value is valid
        # for the current screen. See issue 3748
width = pos[0]
height = pos[1]
screen_shape = QApplication.desktop().geometry()
current_width = screen_shape.width()
current_height = screen_shape.height()
if current_width < width or current_height < height:
pos = CONF.get_default(section, prefix+'position')
is_maximized = get_func(section, prefix+'is_maximized')
is_fullscreen = get_func(section, prefix+'is_fullscreen')
return hexstate, window_size, prefs_dialog_size, pos, is_maximized, \
is_fullscreen
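        # Typical round trip (see setup_layout below): the tuple returned
        # here is passed straight to set_window_settings, e.g.
        #     settings = self.load_window_settings('window/')
        #     self.set_window_settings(*settings)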
def get_window_settings(self):
"""Return current window settings
        Symmetric to the 'set_window_settings' setter"""
window_size = (self.window_size.width(), self.window_size.height())
is_fullscreen = self.isFullScreen()
if is_fullscreen:
is_maximized = self.maximized_flag
else:
is_maximized = self.isMaximized()
pos = (self.window_position.x(), self.window_position.y())
prefs_dialog_size = (self.prefs_dialog_size.width(),
self.prefs_dialog_size.height())
hexstate = qbytearray_to_str(self.saveState())
return (hexstate, window_size, prefs_dialog_size, pos, is_maximized,
is_fullscreen)
def set_window_settings(self, hexstate, window_size, prefs_dialog_size,
pos, is_maximized, is_fullscreen):
"""Set window settings
        Symmetric to the 'get_window_settings' accessor"""
self.setUpdatesEnabled(False)
self.window_size = QSize(window_size[0], window_size[1]) # width,height
self.prefs_dialog_size = QSize(prefs_dialog_size[0],
prefs_dialog_size[1]) # width,height
self.window_position = QPoint(pos[0], pos[1]) # x,y
self.setWindowState(Qt.WindowNoState)
self.resize(self.window_size)
self.move(self.window_position)
# Window layout
if hexstate:
            self.restoreState(QByteArray().fromHex(
                    str(hexstate).encode('utf-8')))
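            # `hexstate` is the hex-encoded form of QMainWindow.saveState()
            # produced by qbytearray_to_str in save_current_window_settings,
            # so the round trip is: saveState -> hex str in config ->
            # QByteArray().fromHex -> restoreState.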
# [Workaround for Issue 880]
# QDockWidget objects are not painted if restored as floating
# windows, so we must dock them before showing the mainwindow.
for widget in self.children():
if isinstance(widget, QDockWidget) and widget.isFloating():
self.floating_dockwidgets.append(widget)
widget.setFloating(False)
# Is fullscreen?
if is_fullscreen:
self.setWindowState(Qt.WindowFullScreen)
self.__update_fullscreen_action()
# Is maximized?
if is_fullscreen:
self.maximized_flag = is_maximized
elif is_maximized:
self.setWindowState(Qt.WindowMaximized)
self.setUpdatesEnabled(True)
def save_current_window_settings(self, prefix, section='main'):
"""Save current window settings with *prefix* in
the userconfig-based configuration, under *section*"""
win_size = self.window_size
prefs_size = self.prefs_dialog_size
CONF.set(section, prefix+'size', (win_size.width(), win_size.height()))
CONF.set(section, prefix+'prefs_dialog_size',
(prefs_size.width(), prefs_size.height()))
CONF.set(section, prefix+'is_maximized', self.isMaximized())
CONF.set(section, prefix+'is_fullscreen', self.isFullScreen())
pos = self.window_position
CONF.set(section, prefix+'position', (pos.x(), pos.y()))
        self.maximize_dockwidget(restore=True)  # Restore non-maximized layout
qba = self.saveState()
CONF.set(section, prefix+'state', qbytearray_to_str(qba))
CONF.set(section, prefix+'statusbar',
not self.statusBar().isHidden())
def tabify_plugins(self, first, second):
"""Tabify plugin dockwigdets"""
self.tabifyDockWidget(first.dockwidget, second.dockwidget)
# --- Layouts
def setup_layout(self, default=False):
"""Setup window layout"""
prefix = 'window' + '/'
settings = self.load_window_settings(prefix, default)
hexstate = settings[0]
self.first_spyder_run = False
if hexstate is None:
# First Spyder execution:
self.setWindowState(Qt.WindowMaximized)
self.first_spyder_run = True
self.setup_default_layouts('default', settings)
self.extconsole.setMinimumHeight(250)
# Now that the initial setup is done, copy the window settings,
# except for the hexstate in the quick layouts sections for the
# default layouts.
            # Order and names of the default layouts are found in config.py
section = 'quick_layouts'
get_func = CONF.get_default if default else CONF.get
order = get_func(section, 'order')
# restore the original defaults if reset layouts is called
if default:
CONF.set(section, 'active', order)
CONF.set(section, 'order', order)
CONF.set(section, 'names', order)
            for index, name in enumerate(order):
prefix = 'layout_{0}/'.format(index)
self.save_current_window_settings(prefix, section)
CONF.set(section, prefix+'state', None)
# store the initial layout as the default in spyder
prefix = 'layout_default/'
section = 'quick_layouts'
self.save_current_window_settings(prefix, section)
self.current_quick_layout = 'default'
CONF.set(section, prefix+'state', None)
# Regenerate menu
self.quick_layout_set_menu()
self.set_window_settings(*settings)
for plugin in self.widgetlist:
try:
plugin.initialize_plugin_in_mainwindow_layout()
except Exception as error:
print("%s: %s" % (plugin, str(error)), file=STDERR)
traceback.print_exc(file=STDERR)
def setup_default_layouts(self, index, settings):
"""Setup default layouts when run for the first time"""
self.set_window_settings(*settings)
self.setUpdatesEnabled(False)
# IMPORTANT: order has to be the same as defined in the config file
MATLAB, RSTUDIO, VERTICAL, HORIZONTAL = range(4)
# define widgets locally
editor = self.editor
console_ipy = self.ipyconsole
console_ext = self.extconsole
console_int = self.console
outline = self.outlineexplorer
explorer_project = self.projects
explorer_file = self.explorer
explorer_variable = self.variableexplorer
history = self.historylog
finder = self.findinfiles
help_plugin = self.help
helper = self.onlinehelp
plugins = self.thirdparty_plugins
global_hidden_widgets = [finder, console_int, explorer_project,
helper] + plugins
global_hidden_toolbars = [self.source_toolbar, self.edit_toolbar,
self.search_toolbar]
# Layout definition
        # Layouts are organized by columns; each column is organized by rows.
        # Column widths have to add up to 1.0, and the row heights within
        # each column have to add up to 1.0
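        # For instance, a two-column layout with the editor on the left and
        # two stacked rows of plugins on the right would look like this
        # (illustrative values only):
        #     {'widgets': [[[editor]], [[outline], [history]]],
        #      'width fraction': [0.6, 0.4],
        #      'height fraction': [[1.0], [0.5, 0.5]],
        #      'hidden widgets': [], 'hidden toolbars': []}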
# Spyder Default Initial Layout
s_layout = {'widgets': [
# column 0
[[explorer_project]],
# column 1
[[editor]],
# column 2
[[outline]],
# column 3
[[help_plugin, explorer_variable, helper, explorer_file,
finder] + plugins,
[console_int, console_ext, console_ipy, history]]
],
'width fraction': [0.0, # column 0 width
0.55, # column 1 width
0.0, # column 2 width
0.45], # column 3 width
'height fraction': [[1.0], # column 0, row heights
[1.0], # column 1, row heights
[1.0], # column 2, row heights
[0.46, 0.54]], # column 3, row heights
'hidden widgets': [outline],
'hidden toolbars': [],
}
r_layout = {'widgets': [
# column 0
[[editor],
[console_ipy, console_ext, console_int]],
# column 1
[[explorer_variable, history, outline, finder] + plugins,
[explorer_file, explorer_project, help_plugin, helper]]
],
'width fraction': [0.55, # column 0 width
0.45], # column 1 width
'height fraction': [[0.55, 0.45], # column 0, row heights
[0.55, 0.45]], # column 1, row heights
'hidden widgets': [outline],
'hidden toolbars': [],
}
# Matlab
m_layout = {'widgets': [
# column 0
[[explorer_file, explorer_project],
[outline]],
# column 1
[[editor],
[console_ipy, console_ext, console_int]],
# column 2
[[explorer_variable, finder] + plugins,
[history, help_plugin, helper]]
],
'width fraction': [0.20, # column 0 width
0.40, # column 1 width
0.40], # column 2 width
'height fraction': [[0.55, 0.45], # column 0, row heights
[0.55, 0.45], # column 1, row heights
[0.55, 0.45]], # column 2, row heights
'hidden widgets': [],
'hidden toolbars': [],
}
# Vertically split
v_layout = {'widgets': [
# column 0
[[editor],
[console_ipy, console_ext, console_int, explorer_file,
explorer_project, help_plugin, explorer_variable,
history, outline, finder, helper] + plugins]
],
'width fraction': [1.0], # column 0 width
'height fraction': [[0.55, 0.45]], # column 0, row heights
'hidden widgets': [outline],
'hidden toolbars': [],
}
# Horizontally split
h_layout = {'widgets': [
# column 0
[[editor]],
# column 1
[[console_ipy, console_ext, console_int, explorer_file,
explorer_project, help_plugin, explorer_variable,
history, outline, finder, helper] + plugins]
],
'width fraction': [0.55, # column 0 width
0.45], # column 1 width
'height fraction': [[1.0], # column 0, row heights
[1.0]], # column 1, row heights
'hidden widgets': [outline],
'hidden toolbars': []
}
# Layout selection
layouts = {'default': s_layout,
RSTUDIO: r_layout,
MATLAB: m_layout,
VERTICAL: v_layout,
HORIZONTAL: h_layout}
layout = layouts[index]
widgets_layout = layout['widgets']
widgets = []
        for column in widgets_layout:
for row in column:
for widget in row:
if widget is not None:
widgets.append(widget)
# Make every widget visible
for widget in widgets:
widget.toggle_view(True)
action = widget.toggle_view_action
action.setChecked(widget.dockwidget.isVisible())
# Set the widgets horizontally
for i in range(len(widgets) - 1):
first, second = widgets[i], widgets[i+1]
if first is not None and second is not None:
self.splitDockWidget(first.dockwidget, second.dockwidget,
Qt.Horizontal)
# Arrange rows vertically
        for column in widgets_layout:
for i in range(len(column) - 1):
first_row, second_row = column[i], column[i+1]
if first_row is not None and second_row is not None:
self.splitDockWidget(first_row[0].dockwidget,
second_row[0].dockwidget,
Qt.Vertical)
# Tabify
        for column in widgets_layout:
for row in column:
for i in range(len(row) - 1):
first, second = row[i], row[i+1]
if first is not None and second is not None:
self.tabify_plugins(first, second)
# Raise front widget per row
row[0].dockwidget.show()
row[0].dockwidget.raise_()
# Hide toolbars
hidden_toolbars = global_hidden_toolbars + layout['hidden toolbars']
for toolbar in hidden_toolbars:
if toolbar is not None:
toolbar.close()
# Hide widgets
hidden_widgets = global_hidden_widgets + layout['hidden widgets']
for widget in hidden_widgets:
if widget is not None:
widget.dockwidget.close()
# set the width and height
self._layout_widget_info = []
width, height = self.window_size.width(), self.window_size.height()
# fix column width
# for c in range(len(widgets_layout)):
# widget = widgets_layout[c][0][0].dockwidget
# min_width, max_width = widget.minimumWidth(), widget.maximumWidth()
# info = {'widget': widget,
# 'min width': min_width,
# 'max width': max_width}
# self._layout_widget_info.append(info)
# new_width = int(layout['width fraction'][c] * width * 0.95)
# widget.setMinimumWidth(new_width)
# widget.setMaximumWidth(new_width)
# widget.updateGeometry()
# print(c, widgets_layout[c][0][0], new_width)
# fix column height
for c, column in enumerate(widgets_layout):
for r in range(len(column) - 1):
widget = column[r][0]
dockwidget = widget.dockwidget
dock_min_h = dockwidget.minimumHeight()
dock_max_h = dockwidget.maximumHeight()
info = {'widget': widget,
'dock min height': dock_min_h,
'dock max height': dock_max_h}
self._layout_widget_info.append(info)
                # The 0.95 factor is to adjust height based on the useful
                # estimated area in the window
new_height = int(layout['height fraction'][c][r]*height*0.95)
dockwidget.setMinimumHeight(new_height)
dockwidget.setMaximumHeight(new_height)
self._custom_layout_timer = QTimer(self)
self._custom_layout_timer.timeout.connect(self.layout_fix_timer)
self._custom_layout_timer.setSingleShot(True)
self._custom_layout_timer.start(5000)
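        # Pinning min == max height above forces Qt to honor the requested
        # row heights during the initial layout; the single-shot timer then
        # calls layout_fix_timer to restore the stored constraints so the
        # user can resize the docks again.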
def layout_fix_timer(self):
"""Fixes the height of docks after a new layout is set."""
info = self._layout_widget_info
for i in info:
dockwidget = i['widget'].dockwidget
if 'dock min width' in i:
dockwidget.setMinimumWidth(i['dock min width'])
dockwidget.setMaximumWidth(i['dock max width'])
if 'dock min height' in i:
dockwidget.setMinimumHeight(i['dock min height'])
dockwidget.setMaximumHeight(i['dock max height'])
dockwidget.updateGeometry()
self.setUpdatesEnabled(True)
@Slot()
def toggle_previous_layout(self):
""" """
self.toggle_layout('previous')
@Slot()
def toggle_next_layout(self):
""" """
self.toggle_layout('next')
def toggle_layout(self, direction='next'):
""" """
get = CONF.get
names = get('quick_layouts', 'names')
order = get('quick_layouts', 'order')
active = get('quick_layouts', 'active')
if len(active) == 0:
return
layout_index = ['default']
for name in order:
if name in active:
layout_index.append(names.index(name))
current_layout = self.current_quick_layout
dic = {'next': 1, 'previous': -1}
if current_layout is None:
# Start from default
current_layout = 'default'
if current_layout in layout_index:
current_index = layout_index.index(current_layout)
else:
current_index = 0
new_index = (current_index + dic[direction]) % len(layout_index)
self.quick_layout_switch(layout_index[new_index])
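        # Note: `layout_index` mixes the string 'default' with integer
        # indices into `names`; quick_layout_switch accepts either, since
        # layout settings are simply stored under 'layout_{index}/' prefixes
        # in the 'quick_layouts' config section.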
def quick_layout_set_menu(self):
""" """
get = CONF.get
names = get('quick_layouts', 'names')
order = get('quick_layouts', 'order')
active = get('quick_layouts', 'active')
ql_actions = [create_action(self, _('Spyder Default Layout'),
triggered=lambda:
self.quick_layout_switch('default'))]
for name in order:
if name in active:
index = names.index(name)
# closure required so lambda works with the default parameter
def trigger(i=index, self=self):
return lambda: self.quick_layout_switch(i)
qli_act = create_action(self, name, triggered=trigger())
# closure above replaces the following which stopped working
# qli_act = create_action(self, name, triggered=lambda i=index:
# self.quick_layout_switch(i)
ql_actions += [qli_act]
self.ql_save = create_action(self, _("Save current layout"),
triggered=lambda:
self.quick_layout_save(),
context=Qt.ApplicationShortcut)
self.ql_preferences = create_action(self, _("Layout preferences"),
triggered=lambda:
self.quick_layout_settings(),
context=Qt.ApplicationShortcut)
self.ql_reset = create_action(self, _('Reset to spyder default'),
triggered=self.reset_window_layout)
self.register_shortcut(self.ql_save, "_", "Save current layout")
self.register_shortcut(self.ql_preferences, "_", "Layout preferences")
ql_actions += [None]
ql_actions += [self.ql_save, self.ql_preferences, self.ql_reset]
self.quick_layout_menu.clear()
add_actions(self.quick_layout_menu, ql_actions)
        self.ql_preferences.setEnabled(len(order) != 0)
@Slot()
def reset_window_layout(self):
"""Reset window layout to default"""
answer = QMessageBox.warning(self, _("Warning"),
_("Window layout will be reset to default settings: "
"this affects window position, size and dockwidgets.\n"
"Do you want to continue?"),
QMessageBox.Yes | QMessageBox.No)
if answer == QMessageBox.Yes:
self.setup_layout(default=True)
def quick_layout_save(self):
"""Save layout dialog"""
get = CONF.get
set_ = CONF.set
names = get('quick_layouts', 'names')
order = get('quick_layouts', 'order')
active = get('quick_layouts', 'active')
dlg = self.dialog_layout_save(self, names)
if dlg.exec_():
name = dlg.combo_box.currentText()
if name in names:
answer = QMessageBox.warning(self, _("Warning"),
_("Layout <b>%s</b> will be \
overwritten. Do you want to \
continue?") % name,
QMessageBox.Yes | QMessageBox.No)
index = order.index(name)
else:
answer = True
if None in names:
index = names.index(None)
names[index] = name
else:
index = len(names)
names.append(name)
order.append(name)
# Always make active a new layout even if it overwrites an inactive
# layout
if name not in active:
active.append(name)
if answer:
self.save_current_window_settings('layout_{}/'.format(index),
section='quick_layouts')
set_('quick_layouts', 'names', names)
set_('quick_layouts', 'order', order)
set_('quick_layouts', 'active', active)
self.quick_layout_set_menu()
def quick_layout_settings(self):
"""Layout settings dialog"""
get = CONF.get
set_ = CONF.set
section = 'quick_layouts'
names = get(section, 'names')
order = get(section, 'order')
active = get(section, 'active')
dlg = self.dialog_layout_settings(self, names, order, active)
if dlg.exec_():
set_(section, 'names', dlg.names)
set_(section, 'order', dlg.order)
set_(section, 'active', dlg.active)
self.quick_layout_set_menu()
def quick_layout_switch(self, index):
"""Switch to quick layout number *index*"""
section = 'quick_layouts'
try:
settings = self.load_window_settings('layout_{}/'.format(index),
section=section)
(hexstate, window_size, prefs_dialog_size, pos, is_maximized,
is_fullscreen) = settings
            # The default layouts will always be regenerated unless there was
            # an overwrite, either by rewriting with the same name, or by
            # deleting and then creating a new one
if hexstate is None:
self.setup_default_layouts(index, settings)
except cp.NoOptionError:
QMessageBox.critical(self, _("Warning"),
_("Quick switch layout #%s has not yet "
"been defined.") % str(index))
return
# TODO: is there any real use in calling the previous layout
# setting?
# self.previous_layout_settings = self.get_window_settings()
self.set_window_settings(*settings)
self.current_quick_layout = index
# make sure the flags are correctly set for visible panes
for plugin in self.widgetlist:
action = plugin.toggle_view_action
action.setChecked(plugin.dockwidget.isVisible())
# --- Show/Hide toolbars
def _update_show_toolbars_action(self):
"""Update the text displayed in the menu entry."""
if self.toolbars_visible:
text = _("Hide toolbars")
tip = _("Hide toolbars")
else:
text = _("Show toolbars")
tip = _("Show toolbars")
self.show_toolbars_action.setText(text)
self.show_toolbars_action.setToolTip(tip)
def save_visible_toolbars(self):
"""Saves the name of the visible toolbars in the .ini file."""
toolbars = []
for toolbar in self.visible_toolbars:
toolbars.append(toolbar.objectName())
CONF.set('main', 'last_visible_toolbars', toolbars)
def get_visible_toolbars(self):
"""Collects the visible toolbars."""
toolbars = []
for toolbar in self.toolbarslist:
if toolbar.toggleViewAction().isChecked():
toolbars.append(toolbar)
self.visible_toolbars = toolbars
def load_last_visible_toolbars(self):
"""Loads the last visible toolbars from the .ini file."""
toolbars_names = CONF.get('main', 'last_visible_toolbars', default=[])
if toolbars_names:
dic = {}
for toolbar in self.toolbarslist:
dic[toolbar.objectName()] = toolbar
toolbars = []
for name in toolbars_names:
if name in dic:
toolbars.append(dic[name])
self.visible_toolbars = toolbars
else:
self.get_visible_toolbars()
self._update_show_toolbars_action()
@Slot()
def show_toolbars(self):
"""Show/Hides toolbars."""
value = not self.toolbars_visible
CONF.set('main', 'toolbars_visible', value)
if value:
self.save_visible_toolbars()
else:
self.get_visible_toolbars()
for toolbar in self.visible_toolbars:
toolbar.toggleViewAction().setChecked(value)
toolbar.setVisible(value)
self.toolbars_visible = value
self._update_show_toolbars_action()
# --- Other
def plugin_focus_changed(self):
"""Focus has changed from one plugin to another"""
self.update_edit_menu()
self.update_search_menu()
# Now deal with Python shell and IPython plugins
if self.ipyconsole is not None:
focus_client = self.ipyconsole.get_focus_client()
if focus_client is not None:
self.last_console_plugin_focus_was_python = False
else:
shell = get_focus_python_shell()
if shell is not None:
self.last_console_plugin_focus_was_python = True
def show_shortcuts(self, menu):
"""Show action shortcuts in menu"""
for element in getattr(self, menu + '_menu_actions'):
if element and isinstance(element, QAction):
if element._shown_shortcut is not None:
element.setShortcut(element._shown_shortcut)
def hide_shortcuts(self, menu):
"""Hide action shortcuts in menu"""
for element in getattr(self, menu + '_menu_actions'):
if element and isinstance(element, QAction):
if element._shown_shortcut is not None:
element.setShortcut(QKeySequence())
def get_focus_widget_properties(self):
"""Get properties of focus widget
Returns tuple (widget, properties) where properties is a tuple of
booleans: (is_console, not_readonly, readwrite_editor)"""
widget = QApplication.focusWidget()
from spyder.widgets.shell import ShellBaseWidget
from spyder.widgets.editor import TextEditBaseWidget
from spyder.widgets.ipythonconsole import ControlWidget
# if focused widget isn't valid try the last focused
if not isinstance(widget, (ShellBaseWidget, TextEditBaseWidget,
ControlWidget)):
widget = self.previous_focused_widget
textedit_properties = None
if isinstance(widget, (ShellBaseWidget, TextEditBaseWidget,
ControlWidget)):
console = isinstance(widget, (ShellBaseWidget, ControlWidget))
not_readonly = not widget.isReadOnly()
readwrite_editor = not_readonly and not console
textedit_properties = (console, not_readonly, readwrite_editor)
return widget, textedit_properties
def update_edit_menu(self):
"""Update edit menu"""
widget, textedit_properties = self.get_focus_widget_properties()
if textedit_properties is None: # widget is not an editor/console
return
#!!! Below this line, widget is expected to be a QPlainTextEdit instance
console, not_readonly, readwrite_editor = textedit_properties
# Editor has focus and there is no file opened in it
if not console and not_readonly and not self.editor.is_file_opened():
return
# Disabling all actions to begin with
for child in self.edit_menu.actions():
child.setEnabled(False)
self.selectall_action.setEnabled(True)
# Undo, redo
        self.undo_action.setEnabled(readwrite_editor
                                    and widget.document().isUndoAvailable())
        self.redo_action.setEnabled(readwrite_editor
                                    and widget.document().isRedoAvailable())
# Copy, cut, paste, delete
has_selection = widget.has_selected_text()
self.copy_action.setEnabled(has_selection)
self.cut_action.setEnabled(has_selection and not_readonly)
self.paste_action.setEnabled(not_readonly)
# Comment, uncomment, indent, unindent...
if not console and not_readonly:
# This is the editor and current file is writable
for action in self.editor.edit_menu_actions:
action.setEnabled(True)
def update_search_menu(self):
"""Update search menu"""
if self.menuBar().hasFocus():
return
widget, textedit_properties = self.get_focus_widget_properties()
for action in self.editor.search_menu_actions:
try:
action.setEnabled(self.editor.isAncestorOf(widget))
except RuntimeError:
pass
if textedit_properties is None: # widget is not an editor/console
return
#!!! Below this line, widget is expected to be a QPlainTextEdit instance
_x, _y, readwrite_editor = textedit_properties
# Disable the replace action for read-only files
self.search_menu_actions[3].setEnabled(readwrite_editor)
def create_plugins_menu(self):
order = ['editor', 'console', 'ipython_console', 'variable_explorer',
'help', None, 'explorer', 'outline_explorer',
'project_explorer', 'find_in_files', None, 'historylog',
'profiler', 'breakpoints', 'pylint', None,
'onlinehelp', 'internal_console']
for plugin in self.widgetlist:
action = plugin.toggle_view_action
action.setChecked(plugin.dockwidget.isVisible())
try:
name = plugin.CONF_SECTION
pos = order.index(name)
except ValueError:
pos = None
if pos is not None:
order[pos] = action
else:
order.append(action)
actions = order[:]
for action in order:
if type(action) is str:
actions.remove(action)
self.plugins_menu_actions = actions
add_actions(self.plugins_menu, actions)
def create_toolbars_menu(self):
order = ['file_toolbar', 'run_toolbar', 'debug_toolbar',
'main_toolbar', 'Global working directory', None,
'search_toolbar', 'edit_toolbar', 'source_toolbar']
for toolbar in self.toolbarslist:
action = toolbar.toggleViewAction()
name = toolbar.objectName()
try:
pos = order.index(name)
except ValueError:
pos = None
if pos is not None:
order[pos] = action
else:
order.append(action)
add_actions(self.toolbars_menu, order)
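    # createPopupMenu below overrides QMainWindow.createPopupMenu, which Qt
    # calls to build the context menu shown when right-clicking the menu bar,
    # toolbars or dock titlebars; here it is restricted to a few help actions.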
def createPopupMenu(self):
menu = QMenu('', self)
actions = self.help_menu_actions[:3] + \
[None, self.help_menu_actions[-1]]
add_actions(menu, actions)
return menu
def set_splash(self, message):
"""Set splash message"""
if self.splash is None:
return
if message:
self.debug_print(message)
self.splash.show()
self.splash.showMessage(message, Qt.AlignBottom | Qt.AlignCenter |
Qt.AlignAbsolute, QColor(Qt.white))
QApplication.processEvents()
def remove_tmpdir(self):
"""Remove Spyder temporary directory"""
shutil.rmtree(programs.TEMPDIR, ignore_errors=True)
def closeEvent(self, event):
"""closeEvent reimplementation"""
if self.closing(True):
event.accept()
else:
event.ignore()
def resizeEvent(self, event):
"""Reimplement Qt method"""
if not self.isMaximized() and not self.fullscreen_flag:
self.window_size = self.size()
QMainWindow.resizeEvent(self, event)
# To be used by the tour to be able to resize
self.sig_resized.emit(event)
def moveEvent(self, event):
"""Reimplement Qt method"""
if not self.isMaximized() and not self.fullscreen_flag:
self.window_position = self.pos()
QMainWindow.moveEvent(self, event)
# To be used by the tour to be able to move
self.sig_moved.emit(event)
def hideEvent(self, event):
"""Reimplement Qt method"""
for plugin in self.widgetlist:
if plugin.isAncestorOf(self.last_focused_widget):
plugin.visibility_changed(True)
QMainWindow.hideEvent(self, event)
def change_last_focused_widget(self, old, now):
"""To keep track of to the last focused widget"""
if (now is None and QApplication.activeWindow() is not None):
QApplication.activeWindow().setFocus()
self.last_focused_widget = QApplication.focusWidget()
elif now is not None:
self.last_focused_widget = now
self.previous_focused_widget = old
def closing(self, cancelable=False):
"""Exit tasks"""
if self.already_closed or self.is_starting_up:
return True
if cancelable and CONF.get('main', 'prompt_on_exit'):
reply = QMessageBox.critical(self, 'Spyder',
'Do you really want to exit?',
QMessageBox.Yes, QMessageBox.No)
if reply == QMessageBox.No:
return False
prefix = 'window' + '/'
self.save_current_window_settings(prefix)
if CONF.get('main', 'single_instance'):
self.open_files_server.close()
for plugin in self.thirdparty_plugins:
if not plugin.closing_plugin(cancelable):
return False
for widget in self.widgetlist:
if not widget.closing_plugin(cancelable):
return False
self.dialog_manager.close_all()
if self.toolbars_visible:
self.save_visible_toolbars()
self.already_closed = True
return True
def add_dockwidget(self, child):
"""Add QDockWidget and toggleViewAction"""
dockwidget, location = child.create_dockwidget()
if CONF.get('main', 'vertical_dockwidget_titlebars'):
dockwidget.setFeatures(dockwidget.features()|
QDockWidget.DockWidgetVerticalTitleBar)
self.addDockWidget(location, dockwidget)
self.widgetlist.append(child)
@Slot()
def close_current_dockwidget(self):
widget = QApplication.focusWidget()
for plugin in self.widgetlist:
if plugin.isAncestorOf(widget):
plugin.dockwidget.hide()
break
def toggle_lock_dockwidgets(self, value):
"""Lock/Unlock dockwidgets"""
self.dockwidgets_locked = value
self.apply_panes_settings()
CONF.set('main', 'panes_locked', value)
def __update_maximize_action(self):
if self.state_before_maximizing is None:
text = _("Maximize current pane")
tip = _("Maximize current pane")
icon = ima.icon('maximize')
else:
text = _("Restore current pane")
tip = _("Restore pane to its original size")
icon = ima.icon('unmaximize')
self.maximize_action.setText(text)
self.maximize_action.setIcon(icon)
self.maximize_action.setToolTip(tip)
@Slot()
@Slot(bool)
def maximize_dockwidget(self, restore=False):
"""Shortcut: Ctrl+Alt+Shift+M
First call: maximize current dockwidget
Second call (or restore=True): restore original window layout"""
if self.state_before_maximizing is None:
if restore:
return
# No plugin is currently maximized: maximizing focus plugin
self.state_before_maximizing = self.saveState()
focus_widget = QApplication.focusWidget()
for plugin in self.widgetlist:
plugin.dockwidget.hide()
if plugin.isAncestorOf(focus_widget):
self.last_plugin = plugin
self.last_plugin.dockwidget.toggleViewAction().setDisabled(True)
self.setCentralWidget(self.last_plugin)
self.last_plugin.ismaximized = True
# Workaround to solve an issue with editor's outline explorer:
# (otherwise the whole plugin is hidden and so is the outline explorer
# and the latter won't be refreshed if not visible)
self.last_plugin.show()
self.last_plugin.visibility_changed(True)
if self.last_plugin is self.editor:
# Automatically show the outline if the editor was maximized:
self.addDockWidget(Qt.RightDockWidgetArea,
self.outlineexplorer.dockwidget)
self.outlineexplorer.dockwidget.show()
else:
# Restore original layout (before maximizing current dockwidget)
self.last_plugin.dockwidget.setWidget(self.last_plugin)
self.last_plugin.dockwidget.toggleViewAction().setEnabled(True)
self.setCentralWidget(None)
self.last_plugin.ismaximized = False
self.restoreState(self.state_before_maximizing)
self.state_before_maximizing = None
self.last_plugin.get_focus_widget().setFocus()
self.__update_maximize_action()
def __update_fullscreen_action(self):
if self.isFullScreen():
icon = ima.icon('window_nofullscreen')
else:
icon = ima.icon('window_fullscreen')
if is_text_string(icon):
icon = get_icon(icon)
self.fullscreen_action.setIcon(icon)
@Slot()
def toggle_fullscreen(self):
if self.isFullScreen():
self.fullscreen_flag = False
self.showNormal()
if self.maximized_flag:
self.showMaximized()
else:
self.maximized_flag = self.isMaximized()
self.fullscreen_flag = True
self.showFullScreen()
self.__update_fullscreen_action()
def add_to_toolbar(self, toolbar, widget):
"""Add widget actions to toolbar"""
actions = widget.toolbar_actions
if actions is not None:
add_actions(toolbar, actions)
@Slot()
def about(self):
"""About Spyder"""
versions = get_versions()
        # Show git revision for development version
revlink = ''
if versions['revision']:
rev = versions['revision']
revlink = " (<a href='https://github.com/spyder-ide/spyder/"\
"commit/%s'>Commit: %s</a>)" % (rev, rev)
QMessageBox.about(self,
_("About %s") % "Spyder",
"""<b>Spyder %s</b> %s
<br>The Scientific PYthon Development EnviRonment
<br>Copyright © The Spyder Project Contributors
<br>Licensed under the terms of the MIT License
<p>Created by Pierre Raybaut.
<br>Developed and maintained by the
<a href="%s/blob/master/AUTHORS">Spyder Project Contributors</a>.
<br>Many thanks to all the Spyder beta testers and regular users.
<p>For bug reports and feature requests, please go
to our <a href="%s">Github website</a>. For discussions around the
project, please go to our <a href="%s">Google Group</a>
<p>This project is part of a larger effort to promote and
facilitate the use of Python for scientific and engineering
software development. The popular Python distributions
<a href="http://continuum.io/downloads">Anaconda</a>,
<a href="https://winpython.github.io/">WinPython</a> and
<a href="http://python-xy.github.io/">Python(x,y)</a>
also contribute to this plan.
<p>Python %s %dbits, Qt %s, %s %s on %s
<p><small>Most of the icons for the Spyder 2 theme come from the Crystal
Project (© 2006-2007 Everaldo Coelho). Other icons for that
theme come from <a href="http://p.yusukekamiyamane.com/"> Yusuke
Kamiyamane</a> (all rights reserved) and from
<a href="http://www.oxygen-icons.org/">
The Oxygen icon theme</a></small>.
"""
% (versions['spyder'], revlink, __project_url__,
__project_url__, __forum_url__, versions['python'],
versions['bitness'], versions['qt'], versions['qt_api'],
versions['qt_api_ver'], versions['system']))
@Slot()
def show_dependencies(self):
"""Show Spyder's Dependencies dialog box"""
from spyder.widgets.dependencies import DependenciesDialog
dlg = DependenciesDialog(None)
dlg.set_data(dependencies.DEPENDENCIES)
dlg.show()
dlg.exec_()
@Slot()
def report_issue(self):
if PY3:
from urllib.parse import quote
else:
from urllib import quote # analysis:ignore
versions = get_versions()
# Get git revision for development version
revision = ''
if versions['revision']:
revision = versions['revision']
issue_template = """\
## Description
**What steps will reproduce the problem?**
1.
2.
3.
**What is the expected output? What do you see instead?**
**Please provide any additional information below**
## Version and main components
* Spyder Version: %s %s
* Python Version: %s
* Qt Versions: %s, %s %s on %s
## Dependencies
```
%s
```
""" % (versions['spyder'],
revision,
versions['python'],
versions['qt'],
versions['qt_api'],
versions['qt_api_ver'],
versions['system'],
dependencies.status())
url = QUrl("https://github.com/spyder-ide/spyder/issues/new")
if PYQT5:
from qtpy.QtCore import QUrlQuery
query = QUrlQuery()
query.addQueryItem("body", quote(issue_template))
url.setQuery(query)
else:
url.addEncodedQueryItem("body", quote(issue_template))
QDesktopServices.openUrl(url)
@Slot()
def google_group(self):
url = QUrl("http://groups.google.com/group/spyderlib")
QDesktopServices.openUrl(url)
@Slot()
def global_callback(self):
"""Global callback"""
widget = QApplication.focusWidget()
action = self.sender()
callback = from_qvariant(action.data(), to_text_string)
from spyder.widgets.editor import TextEditBaseWidget
        # if focused widget isn't valid try the last focused
if not isinstance(widget, TextEditBaseWidget):
widget = self.previous_focused_widget
if isinstance(widget, TextEditBaseWidget):
getattr(widget, callback)()
def redirect_internalshell_stdio(self, state):
if state:
self.console.shell.interpreter.redirect_stds()
else:
self.console.shell.interpreter.restore_stds()
def open_external_console(self, fname, wdir, args, interact, debug, python,
python_args, systerm, post_mortem=False):
"""Open external console"""
if systerm:
# Running script in an external system terminal
try:
programs.run_python_script_in_terminal(fname, wdir, args,
interact, debug, python_args)
except NotImplementedError:
QMessageBox.critical(self, _("Run"),
_("Running an external system terminal "
"is not supported on platform %s."
) % os.name)
else:
self.extconsole.visibility_changed(True)
self.extconsole.raise_()
self.extconsole.start(
fname=to_text_string(fname), wdir=to_text_string(wdir),
args=to_text_string(args), interact=interact,
debug=debug, python=python, post_mortem=post_mortem,
python_args=to_text_string(python_args) )
def execute_in_external_console(self, lines, focus_to_editor):
"""
        Execute lines in the external or IPython console and optionally set
        focus to the editor
"""
        if self.ipyconsole is None or self.last_console_plugin_focus_was_python:
            console = self.extconsole
        else:
            console = self.ipyconsole
console.visibility_changed(True)
console.raise_()
console.execute_code(lines)
if focus_to_editor:
self.editor.visibility_changed(True)
def open_file(self, fname, external=False):
"""
Open filename with the appropriate application
Redirect to the right widget (txt -> editor, spydata -> workspace, ...)
or open file outside Spyder (if extension is not supported)
"""
fname = to_text_string(fname)
ext = osp.splitext(fname)[1]
if encoding.is_text_file(fname):
self.editor.load(fname)
elif self.variableexplorer is not None and ext in IMPORT_EXT:
self.variableexplorer.import_data(fname)
elif not external:
fname = file_uri(fname)
programs.start_file(fname)
def open_external_file(self, fname):
"""
Open external files that can be handled either by the Editor or the
variable explorer inside Spyder.
"""
fname = encoding.to_unicode_from_fs(fname)
if osp.isfile(fname):
self.open_file(fname, external=True)
elif osp.isfile(osp.join(CWD, fname)):
self.open_file(osp.join(CWD, fname), external=True)
#---- PYTHONPATH management, etc.
def get_spyder_pythonpath(self):
"""Return Spyder PYTHONPATH"""
return self.path+self.project_path
def add_path_to_sys_path(self):
"""Add Spyder path to sys.path"""
for path in reversed(self.get_spyder_pythonpath()):
sys.path.insert(1, path)
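    # Paths are inserted at index 1 (not 0) so that sys.path[0], which
    # Python reserves for the running script's directory, is left untouched;
    # remove_path_from_sys_path below relies on the same convention.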
def remove_path_from_sys_path(self):
"""Remove Spyder path from sys.path"""
sys_path = sys.path
while sys_path[1] in self.get_spyder_pythonpath():
sys_path.pop(1)
@Slot()
def path_manager_callback(self):
"""Spyder path manager"""
from spyder.widgets.pathmanager import PathManager
self.remove_path_from_sys_path()
project_path = self.projects.get_pythonpath()
dialog = PathManager(self, self.path, project_path, sync=True)
dialog.redirect_stdio.connect(self.redirect_internalshell_stdio)
dialog.exec_()
self.add_path_to_sys_path()
encoding.writelines(self.path, self.SPYDER_PATH) # Saving path
self.sig_pythonpath_changed.emit()
def pythonpath_changed(self):
"""Projects PYTHONPATH contribution has changed"""
self.remove_path_from_sys_path()
self.project_path = self.projects.get_pythonpath()
self.add_path_to_sys_path()
self.sig_pythonpath_changed.emit()
@Slot()
def win_env(self):
"""Show Windows current user environment variables"""
self.dialog_manager.show(WinUserEnvDialog(self))
#---- Preferences
def apply_settings(self):
"""Apply settings changed in 'Preferences' dialog box"""
qapp = QApplication.instance()
# Set 'gtk+' as the default theme in Gtk-based desktops
# Fixes Issue 2036
if is_gtk_desktop() and ('GTK+' in QStyleFactory.keys()):
try:
qapp.setStyle('gtk+')
except:
pass
else:
qapp.setStyle(CONF.get('main', 'windows_style',
self.default_style))
default = self.DOCKOPTIONS
if CONF.get('main', 'vertical_tabs'):
default = default|QMainWindow.VerticalTabs
if CONF.get('main', 'animated_docks'):
default = default|QMainWindow.AnimatedDocks
self.setDockOptions(default)
self.apply_panes_settings()
self.apply_statusbar_settings()
def apply_panes_settings(self):
"""Update dockwidgets features settings"""
# Update toggle action on menu
for child in self.widgetlist:
features = child.FEATURES
if CONF.get('main', 'vertical_dockwidget_titlebars'):
features = features | QDockWidget.DockWidgetVerticalTitleBar
if not self.dockwidgets_locked:
features = features | QDockWidget.DockWidgetMovable
child.dockwidget.setFeatures(features)
child.update_margins()
def apply_statusbar_settings(self):
"""Update status bar widgets settings"""
show_status_bar = CONF.get('main', 'show_status_bar')
self.statusBar().setVisible(show_status_bar)
if show_status_bar:
for widget, name in ((self.mem_status, 'memory_usage'),
(self.cpu_status, 'cpu_usage')):
if widget is not None:
widget.setVisible(CONF.get('main', '%s/enable' % name))
widget.set_interval(CONF.get('main', '%s/timeout' % name))
else:
return
@Slot()
def edit_preferences(self):
"""Edit Spyder preferences"""
from spyder.plugins.configdialog import ConfigDialog
dlg = ConfigDialog(self)
dlg.size_change.connect(self.set_prefs_size)
if self.prefs_dialog_size is not None:
dlg.resize(self.prefs_dialog_size)
for PrefPageClass in self.general_prefs:
widget = PrefPageClass(dlg, main=self)
widget.initialize()
dlg.add_page(widget)
for plugin in [self.workingdirectory, self.editor,
self.projects, self.extconsole, self.ipyconsole,
self.historylog, self.help, self.variableexplorer,
self.onlinehelp, self.explorer, self.findinfiles
]+self.thirdparty_plugins:
if plugin is not None:
try:
widget = plugin.create_configwidget(dlg)
if widget is not None:
dlg.add_page(widget)
except Exception:
traceback.print_exc(file=sys.stderr)
if self.prefs_index is not None:
dlg.set_current_index(self.prefs_index)
dlg.show()
dlg.check_all_settings()
dlg.pages_widget.currentChanged.connect(self.__preference_page_changed)
dlg.exec_()
def __preference_page_changed(self, index):
"""Preference page index has changed"""
self.prefs_index = index
def set_prefs_size(self, size):
"""Save preferences dialog size"""
self.prefs_dialog_size = size
#---- Shortcuts
def register_shortcut(self, qaction_or_qshortcut, context, name,
add_sc_to_tip=False):
"""
Register QAction or QShortcut to Spyder main application,
with shortcut (context, name, default)
"""
        self.shortcut_data.append((qaction_or_qshortcut, context,
                                   name, add_sc_to_tip))
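        # A minimal usage sketch (names are illustrative): a plugin creates
        # a QAction and registers it, so that apply_shortcuts below can bind
        # the key sequence stored under (context, name) in the shortcuts
        # configuration:
        #     action = create_action(self, _("Do something"))
        #     self.main.register_shortcut(action, context="editor",
        #                                 name="do something")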
def apply_shortcuts(self):
"""Apply shortcuts settings to all widgets/plugins"""
toberemoved = []
for index, (qobject, context, name,
add_sc_to_tip) in enumerate(self.shortcut_data):
            keyseq = QKeySequence(get_shortcut(context, name))
try:
if isinstance(qobject, QAction):
if sys.platform == 'darwin' and \
qobject._shown_shortcut == 'missing':
qobject._shown_shortcut = keyseq
else:
qobject.setShortcut(keyseq)
if add_sc_to_tip:
add_shortcut_to_tooltip(qobject, context, name)
elif isinstance(qobject, QShortcut):
qobject.setKey(keyseq)
except RuntimeError:
# Object has been deleted
toberemoved.append(index)
for index in sorted(toberemoved, reverse=True):
self.shortcut_data.pop(index)
# -- Open files server
def start_open_files_server(self):
self.open_files_server.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
port = select_port(default_port=OPEN_FILES_PORT)
CONF.set('main', 'open_files_port', port)
self.open_files_server.bind(('127.0.0.1', port))
self.open_files_server.listen(20)
        while 1:  # 1 was faster than True under Python 2
try:
req, dummy = self.open_files_server.accept()
except socket.error as e:
# See Issue 1275 for details on why errno EINTR is
# silently ignored here.
eintr = errno.WSAEINTR if os.name == 'nt' else errno.EINTR
# To avoid a traceback after closing on Windows
if e.args[0] == eintr:
continue
# handle a connection abort on close error
enotsock = (errno.WSAENOTSOCK if os.name == 'nt'
else errno.ENOTSOCK)
if e.args[0] in [errno.ECONNABORTED, enotsock]:
return
raise
fname = req.recv(1024)
fname = fname.decode('utf-8')
self.sig_open_external_file.emit(fname)
req.sendall(b' ')
# ---- Quit and restart, and reset spyder defaults
@Slot()
def reset_spyder(self):
"""
Quit and reset Spyder and then Restart application.
"""
answer = QMessageBox.warning(self, _("Warning"),
_("Spyder will restart and reset to default settings: <br><br>"
"Do you want to continue?"),
QMessageBox.Yes | QMessageBox.No)
if answer == QMessageBox.Yes:
self.restart(reset=True)
@Slot()
def restart(self, reset=False):
"""
Quit and Restart Spyder application.
        If reset=True, Spyder is reset to its default settings on restart.
"""
# Get start path to use in restart script
spyder_start_directory = get_module_path('spyder')
restart_script = osp.join(spyder_start_directory, 'app', 'restart.py')
# Get any initial argument passed when spyder was started
# Note: Variables defined in bootstrap.py and spyder/app/start.py
env = os.environ.copy()
bootstrap_args = env.pop('SPYDER_BOOTSTRAP_ARGS', None)
spyder_args = env.pop('SPYDER_ARGS')
# Get current process and python running spyder
pid = os.getpid()
python = sys.executable
# Check if started with bootstrap.py
if bootstrap_args is not None:
spyder_args = bootstrap_args
is_bootstrap = True
else:
is_bootstrap = False
# Pass variables as environment variables (str) to restarter subprocess
env['SPYDER_ARGS'] = spyder_args
env['SPYDER_PID'] = str(pid)
env['SPYDER_IS_BOOTSTRAP'] = str(is_bootstrap)
env['SPYDER_RESET'] = str(reset)
if DEV:
if os.name == 'nt':
env['PYTHONPATH'] = ';'.join(sys.path)
else:
env['PYTHONPATH'] = ':'.join(sys.path)
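        # restart.py (launched below) is expected to read these SPYDER_*
        # environment variables back to decide how to relaunch: which
        # arguments to pass, whether to go through bootstrap.py, and whether
        # to wipe the configuration first (SPYDER_RESET).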
# Build the command and popen arguments depending on the OS
if os.name == 'nt':
# Hide flashing command prompt
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
shell = False
else:
startupinfo = None
shell = True
command = '"{0}" "{1}"'
command = command.format(python, restart_script)
try:
if self.closing(True):
subprocess.Popen(command, shell=shell, env=env,
startupinfo=startupinfo)
self.console.quit()
except Exception as error:
# If there is an error with subprocess, Spyder should not quit and
# the error can be inspected in the internal console
print(error)
print(command)
# ---- Interactive Tours
def show_tour(self, index):
""" """
frames = self.tours_available[index]
self.tour.set_tour(index, frames, self)
self.tour.start_tour()
# ---- Check for Spyder Updates
def _check_updates_ready(self):
"""Called by WorkerUpdates when ready"""
from spyder.widgets.helperwidgets import MessageCheckBox
        # `feedback` = False is used on startup, so only positive feedback is
        # given. `feedback` = True is used after startup (when using the menu
        # action), and gives feedback whether updates are found or not.
feedback = self.give_updates_feedback
# Get results from worker
update_available = self.worker_updates.update_available
latest_release = self.worker_updates.latest_release
error_msg = self.worker_updates.error
url_r = 'https://github.com/spyder-ide/spyder/releases'
url_i = 'http://pythonhosted.org/spyder/installation.html'
# Define the custom QMessageBox
box = MessageCheckBox()
box.setWindowTitle(_("Spyder updates"))
box.set_checkbox_text(_("Check for updates on startup"))
box.setStandardButtons(QMessageBox.Ok)
box.setDefaultButton(QMessageBox.Ok)
        # The next line is commented out because it freezes the dialog.
        # For now there is thus no info icon. This solves issue #3609.
#box.setIcon(QMessageBox.Information)
# Adjust the checkbox depending on the stored configuration
section, option = 'main', 'check_updates_on_startup'
check_updates = CONF.get(section, option)
box.set_checked(check_updates)
if error_msg is not None:
msg = error_msg
box.setText(msg)
box.set_check_visible(False)
box.exec_()
check_updates = box.is_checked()
else:
if update_available:
msg = _("<b>Spyder %s is available!</b> <br><br>Please use "
"your package manager to update Spyder or go to our "
"<a href=\"%s\">Releases</a> page to download this "
"new version. <br><br>If you are not sure how to "
"proceed to update Spyder please refer to our "
" <a href=\"%s\">Installation</a> instructions."
"") % (latest_release, url_r, url_i)
box.setText(msg)
box.set_check_visible(True)
box.exec_()
check_updates = box.is_checked()
elif feedback:
msg = _("Spyder is up to date.")
box.setText(msg)
box.set_check_visible(False)
box.exec_()
check_updates = box.is_checked()
# Update checkbox based on user interaction
CONF.set(section, option, check_updates)
# Enable check_updates_action after the thread has finished
self.check_updates_action.setDisabled(False)
        # Provide feedback when clicking the menu if check on startup is on
self.give_updates_feedback = True
@Slot()
def check_updates(self):
"""
        Check for Spyder updates on GitHub releases using a QThread.
"""
from spyder.workers.updates import WorkerUpdates
# Disable check_updates_action while the thread is working
self.check_updates_action.setDisabled(True)
if self.thread_updates is not None:
self.thread_updates.terminate()
self.thread_updates = QThread(self)
self.worker_updates = WorkerUpdates(self)
self.worker_updates.sig_ready.connect(self._check_updates_ready)
self.worker_updates.sig_ready.connect(self.thread_updates.quit)
self.worker_updates.moveToThread(self.thread_updates)
self.thread_updates.started.connect(self.worker_updates.start)
self.thread_updates.start()
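        # The worker/thread dance above follows the usual Qt pattern: the
        # worker is moved to a QThread, `started` triggers the blocking
        # network check off the GUI thread, and `sig_ready` both reports the
        # result back on the GUI thread (_check_updates_ready) and quits the
        # thread.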
#==============================================================================
# Utilities to create the 'main' function
#==============================================================================
def initialize():
"""Initialize Qt, patching sys.exit and eventually setting up ETS"""
# This doesn't create our QApplication, just holds a reference to
# MAIN_APP, created above to show our splash screen as early as
# possible
app = qapplication()
#----Monkey patching QApplication
class FakeQApplication(QApplication):
"""Spyder's fake QApplication"""
def __init__(self, args):
self = app # analysis:ignore
@staticmethod
def exec_():
"""Do nothing because the Qt mainloop is already running"""
pass
from qtpy import QtWidgets
QtWidgets.QApplication = FakeQApplication
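    # Rationale: scripts run inside Spyder may create their own QApplication
    # and call exec_(); with this patch __init__ skips creating a second
    # application object and exec_() is a no-op, so no second event loop is
    # started on top of Spyder's.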
# ----Monkey patching sys.exit
def fake_sys_exit(arg=[]):
pass
sys.exit = fake_sys_exit
# ----Monkey patching sys.excepthook to avoid crashes in PyQt 5.5+
if PYQT5:
def spy_excepthook(type_, value, tback):
sys.__excepthook__(type_, value, tback)
sys.excepthook = spy_excepthook
# Removing arguments from sys.argv as in standard Python interpreter
sys.argv = ['']
# Selecting Qt4 backend for Enthought Tool Suite (if installed)
try:
from enthought.etsconfig.api import ETSConfig
ETSConfig.toolkit = 'qt4'
except ImportError:
pass
return app
class Spy(object):
"""
Inspect Spyder internals
Attributes:
app Reference to main QApplication object
window Reference to spyder.MainWindow widget
"""
def __init__(self, app, window):
self.app = app
self.window = window
def __dir__(self):
return list(self.__dict__.keys()) +\
[x for x in dir(self.__class__) if x[0] != '_']
def versions(self):
return get_versions()
def run_spyder(app, options, args):
"""
Create and show Spyder's main window
Start QApplication event loop
"""
#TODO: insert here
# Main window
main = MainWindow(options)
try:
main.setup()
except BaseException:
if main.console is not None:
try:
main.console.shell.exit_interpreter()
except BaseException:
pass
raise
main.show()
main.post_visible_setup()
if main.console:
main.console.shell.interpreter.namespace['spy'] = \
Spy(app=app, window=main)
# Open external files passed as args
if args:
for a in args:
main.open_external_file(a)
# Don't show icons in menus for Mac
if sys.platform == 'darwin':
QCoreApplication.setAttribute(Qt.AA_DontShowIconsInMenus, True)
# Open external files with our Mac app
if running_in_mac_app():
app.sig_open_external_file.connect(main.open_external_file)
# To give focus again to the last focused widget after restoring
# the window
app.focusChanged.connect(main.change_last_focused_widget)
if not PYTEST:
app.exec_()
return main
#==============================================================================
# Main
#==============================================================================
def main():
"""Main function"""
# **** Collect command line options ****
# Note regarding Options:
# It's important to collect options before monkey patching sys.exit,
# otherwise, optparse won't be able to exit if --help option is passed
options, args = get_options()
if set_attached_console_visible is not None:
set_attached_console_visible(DEBUG or options.show_console \
or options.reset_config_files \
or options.reset_to_defaults \
or options.optimize)
app = initialize()
if options.reset_config_files:
# <!> Remove all configuration files!
reset_config_files()
return
elif options.reset_to_defaults:
# Reset Spyder settings to defaults
CONF.reset_to_defaults(save=True)
return
elif options.optimize:
# Optimize the whole Spyder's source code directory
import spyder
programs.run_python_script(module="compileall",
args=[spyder.__path__[0]], p_args=['-O'])
return
# Show crash dialog
if CONF.get('main', 'crash', False) and not DEV:
CONF.set('main', 'crash', False)
if SPLASH is not None:
SPLASH.hide()
QMessageBox.information(None, "Spyder",
"Spyder crashed during last session.<br><br>"
"If Spyder does not start at all and <u>before submitting a "
"bug report</u>, please try to reset settings to defaults by "
"running Spyder with the command line option '--reset':<br>"
"<span style=\'color: #555555\'><b>python spyder --reset"
"</b></span><br><br>"
"<span style=\'color: #ff5555\'><b>Warning:</b></span> "
"this command will remove all your Spyder configuration files "
"located in '%s').<br><br>"
"If restoring the default settings does not help, please take "
"the time to search for <a href=\"%s\">known bugs</a> or "
"<a href=\"%s\">discussions</a> matching your situation before "
"eventually creating a new issue <a href=\"%s\">here</a>. "
"Your feedback will always be greatly appreciated."
"" % (get_conf_path(), __project_url__,
__forum_url__, __project_url__))
# Create main window
mainwindow = None
try:
mainwindow = run_spyder(app, options, args)
except FontError as fontError:
QMessageBox.information(None, "Spyder",
"Spyder was unable to load the <i>Spyder 3</i> "
"icon theme. That's why it's going to fallback to the "
"theme used in Spyder 2.<br><br>"
"For that, please close this window and start Spyder again.")
CONF.set('main', 'icon_theme', 'spyder 2')
except BaseException:
CONF.set('main', 'crash', True)
import traceback
traceback.print_exc(file=STDERR)
traceback.print_exc(file=open('spyder_crash.log', 'w'))
if mainwindow is None:
        # An exception occurred
if SPLASH is not None:
SPLASH.hide()
return
ORIGINAL_SYS_EXIT()
if __name__ == "__main__":
main()
| gpl-3.0 | -2,276,537,805,049,350,000 | 40.605559 | 84 | 0.534298 | false |
haowu4682/gem5 | src/sim/Process.py | 15 | 2870 | # Copyright (c) 2005-2008 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
from m5.SimObject import SimObject
from m5.params import *
from m5.proxy import *
class Process(SimObject):
type = 'Process'
abstract = True
cxx_header = "sim/process.hh"
input = Param.String('cin', "filename for stdin")
output = Param.String('cout', 'filename for stdout')
errout = Param.String('cerr', 'filename for stderr')
system = Param.System(Parent.any, "system process will run on")
max_stack_size = Param.MemorySize('64MB', 'maximum size of the stack')
@classmethod
def export_methods(cls, code):
code('bool map(Addr vaddr, Addr paddr, int size);')
class LiveProcess(Process):
type = 'LiveProcess'
cxx_header = "sim/process.hh"
executable = Param.String('', "executable (overrides cmd[0] if set)")
cmd = VectorParam.String("command line (executable plus arguments)")
env = VectorParam.String([], "environment settings")
cwd = Param.String('', "current working directory")
uid = Param.Int(100, 'user id')
euid = Param.Int(100, 'effective user id')
gid = Param.Int(100, 'group id')
egid = Param.Int(100, 'effective group id')
pid = Param.Int(100, 'process id')
ppid = Param.Int(99, 'parent process id')
simpoint = Param.UInt64(0, 'simulation point at which to start simulation')
| bsd-3-clause | -4,299,719,036,758,281,000 | 46.833333 | 79 | 0.738328 | false |
ashleysommer/sanic-dispatcher | setup.py | 1 | 1832 | # -*- coding: utf-8 -*-
"""
setup
~~~~
A Dispatcher extension for Sanic which also acts as a Sanic-to-WSGI adapter
:copyright: (c) 2017-2021 by Ashley Sommer (based on DispatcherMiddleware in Workzeug).
:license: MIT, see LICENSE for more details.
"""
from setuptools import setup
from os.path import join, dirname
with open(join(dirname(__file__), 'sanic_dispatcher/version.py'), 'r',
encoding='latin-1') as f:
exec(f.read())
with open(join(dirname(__file__), 'requirements.txt'), 'r') as f:
install_requires = f.read().split("\n")
setup(
name='Sanic-Dispatcher',
version=__version__,
url='https://github.com/ashleysommer/sanic-dispatcher',
license='MIT',
author='Ashley Sommer',
author_email='[email protected]',
description="Multi-application dispatcher based on DispatcherMiddleware from the Werkzeug Project.",
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
packages=['sanic_dispatcher'],
zip_safe=False,
include_package_data=True,
platforms='any',
install_requires=install_requires,
tests_require=[
'nose'
],
test_suite='nose.collector',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| mit | 5,688,819,853,862,774,000 | 33.566038 | 104 | 0.641376 | false |
ardi69/pyload-0.4.10 | pyload/plugin/hoster/UpstoreNet.py | 1 | 2409 | # -*- coding: utf-8 -*-
import re
from pyload.plugin.captcha.ReCaptcha import ReCaptcha
from pyload.plugin.internal.SimpleHoster import SimpleHoster
class UpstoreNet(SimpleHoster):
__name = "UpstoreNet"
__type = "hoster"
__version = "0.05"
__pattern = r'https?://(?:www\.)?upstore\.net/'
__config = [("use_premium", "bool", "Use premium account if available", True)]
__description = """Upstore.Net File Download Hoster"""
__license = "GPLv3"
__authors = [("igel", "[email protected]")]
INFO_PATTERN = r'<div class="comment">.*?</div>\s*\n<h2 style="margin:0">(?P<N>.*?)</h2>\s*\n<div class="comment">\s*\n\s*(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
OFFLINE_PATTERN = r'<span class="error">File not found</span>'
WAIT_PATTERN = r'var sec = (\d+)'
CHASH_PATTERN = r'<input type="hidden" name="hash" value="(.+?)">'
LINK_FREE_PATTERN = r'<a href="(https?://.*?)" target="_blank"><b>'
def handle_free(self, pyfile):
# STAGE 1: get link to continue
m = re.search(self.CHASH_PATTERN, self.html)
if m is None:
self.error(_("CHASH_PATTERN not found"))
chash = m.group(1)
self.logDebug("Read hash " + chash)
# continue to stage2
post_data = {'hash': chash, 'free': 'Slow download'}
self.html = self.load(pyfile.url, post=post_data, decode=True)
        # STAGE 2: solve captcha and wait
        # first get the info we need: recaptcha key and wait time
recaptcha = ReCaptcha(self)
# try the captcha 5 times
for _i in xrange(5):
m = re.search(self.WAIT_PATTERN, self.html)
if m is None:
self.error(_("Wait pattern not found"))
wait_time = int(m.group(1))
# then, do the waiting
self.wait(wait_time)
# then, handle the captcha
response, challenge = recaptcha.challenge()
post_data.update({'recaptcha_challenge_field': challenge,
'recaptcha_response_field' : response})
self.html = self.load(pyfile.url, post=post_data, decode=True)
# STAGE 3: get direct link
m = re.search(self.LINK_FREE_PATTERN, self.html, re.S)
if m:
break
if m is None:
self.error(_("Download link not found"))
self.link = m.group(1)
| gpl-3.0 | -3,414,625,318,480,715,000 | 33.414286 | 156 | 0.558323 | false |
zigama/rapidsms-rwanda | apps/poll/models.py | 8 | 5052 | #!/usr/bin/env python
# vim: noet
from django.db import models
from django.contrib.auth import models as auth_models
from django.core.exceptions import ObjectDoesNotExist
from datetime import date
class Respondant(models.Model):
connection = models.CharField(max_length=100, blank=True, null=True)
is_active = models.BooleanField()
def __unicode__(self):
return self.connection.identity
@classmethod
def subscribe(klass, connection, active=True):
created = False
try:
# attempt to reactivate an
# unsubscribed respondant
r = klass.objects.get(connection=connection)
r.is_active = active
r.save()
# no existing respondant, so create
# a new, pre-activated, respondant
except ObjectDoesNotExist:
r = klass.objects.create(connection=connection, is_active=active)
created = True
# always return the object, with a bool
# "created" flat like get_or_create
return (r, created)
@classmethod
def unsubscribe(klass, connection):
# recycle the "subscribe" function to
# create and deactivate the respondant
return klass.subscribe(connection, False)
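	# Example (illustrative): both helpers return an (object, created)
	# tuple, mirroring get_or_create:
	#
	#	respondant, created = Respondant.subscribe(msg.connection)
	#	...
	#	respondant, _ = Respondant.unsubscribe(msg.connection)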
class Message(models.Model):
connection = models.CharField(max_length=100, blank=True, null=True)
time = models.DateTimeField(auto_now_add=True)
text = models.CharField(max_length=160)
is_outgoing = models.BooleanField()
def __unicode__(self):
return self.text
class Question(models.Model):
QUESTION_TYPES = (
('F', 'Free text'),
('B', 'Boolean'),
('M', 'Multiple choice'),
)
start = models.DateField()
end = models.DateField()
text = models.CharField(max_length=160)
type = models.CharField(max_length=1, choices=QUESTION_TYPES)
sent_to = models.IntegerField(blank=True, null=True)
def __unicode__(self):
return self.text
def is_current(self):
'''returns True if this is the current question'''
return (self == Question.current())
def is_past(self):
"""Return True if this question has already ended"""
return (not self.is_current() and (self.end < date.today()))
def is_future(self):
"""Return True if this question has not started yet"""
return (not self.is_current() and (self.start > date.today()))
def answers(self):
"""Return the same data as self.answers_set.all(), with blank
Answers (those with an empty 'choice' or 'text' property removed"""
return [answer for answer in self.answer_set.all().order_by("choice") if answer.text != ""]
def results(self):
"""Return an array of tuples containing each answer for this Question,
and total Entries for each, such as: [(Answer, 10), (Answer, 20)].
We use tuples, rather than a simple Dict, because the order of
answers is sometimes important."""
entries = [entry.text for entry in self.entry_set.filter(is_unparseable=False)]
return [(answer, entries.count(str(answer.choice))) for answer in self.answers()]
@staticmethod
def current():
# delegate to the 'on' method, to find
# the (single!) question active today
return Question.on(date.today())
@staticmethod
def on(day):
# fetch all of the questions with dates spanning 'date'. the
# app should prevent there being more than one question active
# on a single day, but since django 1.0 doesn't have model
# validation, it's entirely possible
active = Question.objects.filter(
start__lte=day,
end__gte=day
).order_by('-end')
# it's okay if nothing is active today
# return None to prompt some other view
if len(active) == 0: return None
		# otherwise, return the first active question.
# todo: warn or fix if multiple Qs are active
else: return active[0]
class Answer(models.Model):
question = models.ForeignKey(Question)
text = models.CharField(max_length=30)
choice = models.CharField(max_length=1)
def __unicode__(self):
return "(%s) %s" % (self.choice, self.text)
class Entry(models.Model):
respondant = models.ForeignKey(Respondant, blank=True, null=True)
question = models.ForeignKey(Question, blank=True, null=True)
message = models.ForeignKey(Message, blank=True, null=True)
time = models.DateTimeField(auto_now_add=True)
text = models.CharField(max_length=160)
is_unparseable = models.BooleanField()
moderated = models.BooleanField()
def __unicode__(self):
return self.text
def meta_data(self):
return "%s - %s %s" % (
self.respondant.phone,
self.time.strftime("%a %b %e"),
self.time.strftime("%I:%M %p"))
def display_text(self):
# assume that the display text is just the text,
# since this is what it is for free text entries
display_text = self.text
# switch the text for boolean/multiple choice entries
if self.question.type == "B":
# TODO proper i18n for this!
if self.text == "0": display_text = "No"
elif self.text == "1": display_text = "Yes"
elif self.question.type == "M":
# get the full answer text
try:
display_text = Answer.objects.get(
question=self.question,
choice=self.text).text
except: pass # TODO something here...
return display_text
class Meta:
verbose_name_plural="Entries"
| lgpl-3.0 | -7,681,831,240,798,307,000 | 28.893491 | 93 | 0.700713 | false |
Ameriks/velo.lv | velo/payment/tasks.py | 1 | 2664 | import celery
import datetime
import os
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.utils import timezone
from velo.payment.bank import close_business_day
from velo.payment.models import Transaction, Payment
from velo.core.utils import log_message
from velo.payment.utils import approve_payment
@celery.task
def check_firstdata_transaction(transaction_id):
transaction = Transaction.objects.get(id=transaction_id)
instance = transaction.channel.get_class(transaction)
if not instance.server_check_transaction():
check_firstdata_transaction.apply_async(args=[transaction_id], countdown=30)
return True
@periodic_task(run_every=crontab(minute="*/10"))
def timeout_old_transactions():
transactions = Transaction.objects.filter(status__in=[Transaction.STATUSES.new, Transaction.STATUSES.pending],
modified__lt=(timezone.now() - datetime.timedelta(minutes=15)))
for t in transactions:
log_message('TIMEOUT Transaction', object=t)
t.status = Transaction.STATUSES.timeout
t.save()
@periodic_task(run_every=crontab(minute="35", hour="0"))
def close_business_day_task():
close_business_day()
@periodic_task(run_every=crontab(minute="*/22"))
def check_transactions():
ok_payments = list(Payment.objects.filter(status=Payment.STATUSES.pending, transaction__status=Transaction.STATUSES.ok))
for ok_payment in ok_payments:
approve_payment(ok_payment)
@celery.task
def update_family_codes(file_name: str):
import xlrd
from velo.payment.models import DiscountCode
campaign_ids = [6, 8]
file_path = os.path.join(settings.MEDIA_ROOT, file_name)
# adding new codes
with xlrd.open_workbook(file_path) as wb:
sheet = wb.sheet_by_name('Pieaugušo kartes')
code_list = []
for row in range(1, sheet.nrows):
active_code = sheet.row_values(row)[0]
d_codes = DiscountCode.objects.filter(code=active_code)
if not d_codes:
for camp_id in campaign_ids:
DiscountCode.objects.create(
campaign_id=camp_id,
code=active_code,
usage_times=0,
)
code_list.append(active_code)
    # disabling codes if they are not in the file
for discount_code in DiscountCode.objects.filter(campaign_id__in=campaign_ids):
if discount_code.code not in code_list:
discount_code.is_active = False
discount_code.save()
os.remove(file_path)
| gpl-3.0 | -749,400,339,838,037,600 | 31.876543 | 124 | 0.666917 | false |
prathamtandon/g4gproblems | Data Structures/longest_balanced_parantheses.py | 1 | 1265 | import unittest
"""
Given a string consisting of opening and closing parentheses, find the length of
the longest valid parentheses substring.
Input: ((()
Output: 2
Input: )()())
Output: 4
"""
"""
Approach:
1. Maintain a stack of indexes, seeded with -1 as a base marker.
2. Scan the string from left to right.
3. If the current character is '(', push its index onto the stack.
4. If the current character is ')', pop from the stack. Now two cases:
   (a) If the stack is now empty, this ')' is unmatched: push its index as the new base.
   (b) Otherwise, a balanced substring ends here; its length is current index - index on top of stack.
"""
def longest_balanced(string):
    stack = [-1]  # base index: one position before the current candidate substring
    max_len = 0
    for i, char in enumerate(string):
        if char == '(':
            stack.append(i)
        else:
            stack.pop()
            if stack:
                max_len = max(max_len, i - stack[-1])
            else:
                # Unmatched ')': its index becomes the new base marker.
                stack.append(i)
    return max_len
class TestLongestBalanced(unittest.TestCase):
def test_longest_balanced(self):
self.assertEqual(longest_balanced('()'), 2)
self.assertEqual(longest_balanced('(((()'), 2)
self.assertEqual(longest_balanced(')()()'), 4)
self.assertEqual(longest_balanced('((()))(()'), 6)
| mit | -6,308,702,715,183,830,000 | 26.5 | 117 | 0.601581 | false |
xuegang/gpdb | src/test/tinc/tincrepo/mpp/gpdb/tests/dispatch/interconnect/udp/test_gp_interconnect_ard_regression.py | 6 | 6832 | """
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import fnmatch
import os
import tinctest
import random
import time
import subprocess
import platform
from tinctest.models.gpdb.sql import SQLTestCase
from tinctest.lib import local_path, PSQL
from gppylib.commands.base import Command
'''
UDP ic Bugs for verification
'''
def runShellCommand( cmdstr, cmdname = 'shell command'):
"""
Executes a given command string using gppylib.Command. Definite candidate for a move to
tinctest.lib.
@param cmdname - Name of the command
@param cmdstr - Command string to be executed
"""
cmd = Command(cmdname, cmdstr)
tinctest.logger.info('Executing command: %s : %s' %(cmdname, cmdstr))
cmd.run()
result = cmd.get_results()
tinctest.logger.info('Finished command execution with return code ' + str(result.rc))
tinctest.logger.debug('stdout: ' + result.stdout)
tinctest.logger.debug('stderr: ' + result.stderr)
if result.rc != 0:
return False
return True
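# Example (illustrative):
#
#   runShellCommand('gpssh -h sdw1 "hostname"', 'get hostnames')
#
# returns True when the command exits with code 0, False otherwise; 'sdw1'
# is a hypothetical segment host name.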
class UDPICARDBFVCases(SQLTestCase):
"""
@product_version gpdb: [4.3-]
"""
# GUC
gp_interconnect_queue_depth = "gp_interconnect_queue_depth"
gp_interconnect_snd_queue_depth = "gp_interconnect_snd_queue_depth"
gp_interconnect_min_retries_before_timeout = "gp_interconnect_min_retries_before_timeout"
gp_interconnect_transmit_timeout = "gp_interconnect_transmit_timeout"
gp_interconnect_cache_future_packets = "gp_interconnect_cache_future_packets"
gp_interconnect_default_rtt = "gp_interconnect_default_rtt"
gp_interconnect_fc_method = "gp_interconnect_fc_method"
gp_interconnect_hash_multiplier = "gp_interconnect_hash_multiplier"
gp_interconnect_min_rto = "gp_interconnect_min_rto"
gp_interconnect_setup_timeout = "gp_interconnect_setup_timeout"
gp_interconnect_timer_checking_period = "gp_interconnect_timer_checking_period"
gp_interconnect_timer_period = "gp_interconnect_timer_period"
gp_interconnect_type = "gp_interconnect_type"
common_sql = 'common/'
hostlist = []
hoststr = ''
log_str = ''
cluster_platform = ''
@classmethod
def setUpClass(cls):
sql = "SELECT hostname FROM pg_catalog.gp_segment_configuration WHERE content > -1" \
"AND status = 'u' GROUP BY hostname ORDER by hostname;"
psql_cmd = "psql -c " + '"' + sql + '"'
psqlProcess = subprocess.Popen(psql_cmd, shell = True, stdout = subprocess.PIPE)
ret = psqlProcess.stdout.read().split('\n')
if (len(ret) < 5):
raise AssertionError('Get segment host list failed')
cls.hoststr = ''.join(['-h %s '%host for host in ret[2:] if host != '' and host.find('(') < 0] )
        if not os.path.exists(local_path('log/')):
os.mkdir(local_path('log/'))
cls.log_str='gpssh ' + cls.hoststr + ' \"sudo cat /proc/ickmlog\"' + '>>' + local_path('log/')
def __init__(self, methodName):
super(UDPICARDBFVCases, self).__init__(methodName)
self.infer_metadata()
(cur_platform,version, state) = platform.linux_distribution()
self.cluster_platform = cur_platform
def infer_metadata(self):
intended_docstring = ""
sql_file = local_path(self.common_sql + str(self._testMethodName) + '.sql')
with open(sql_file, 'r') as f:
for line in f:
line = line.strip()
if line.find('--') != 0:
break
intended_docstring += line[2:].strip()
intended_docstring += "\n"
line = line[2:].strip()
if line.find('@') != 0:
continue
line = line[1:]
(key, value) = line.split(' ', 1)
self._metadata[key] = value
self.gpdb_version = self._metadata.get('gpdb_version', None)
def checkGUC(self, name):
if (len(name) <1):
return -1
cmd = "show " + name
out = PSQL.run_sql_command(cmd)
return out.split('\n')
    # Get the GUC value before changing it
def getGUCvalue(self, name):
if(len(name) < 1):
return -1
cmd = "show " + name
out = PSQL.run_sql_command(cmd)
result = out.split('\n')
return result[3].strip()
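    # For example (illustrative): getGUCvalue('gp_interconnect_fc_method')
    # returns the value line of the "show" command's psql output (the fourth
    # line of the result), e.g. 'LOSS'.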
    # Reset the GUC value after the test case finishes
def setGUCvalue(self, name, value):
if(len(name) < 1):
return -1
cmd = "set " + name + " = " + value
out = PSQL.run_sql_command(cmd)
def test_gp_interconnect_fc_ard_142(self):
if (self.cluster_platform.lower().find('red hat enterprise linux server') < 0):
self.skipTest('Test only applies to RHEL platform.')
try:
out = self.checkGUC(self.gp_interconnect_min_retries_before_timeout)
self.assertTrue(len(out) > 4)
out = self.checkGUC(self.gp_interconnect_transmit_timeout)
self.assertTrue(len(out) > 4)
out = self.checkGUC(self.gp_interconnect_fc_method)
self.assertTrue(len(out) > 4)
except:
self.skipTest("GUC " + self.gp_interconnect_min_retries_before_timeout + " or " + self.gp_interconnect_transmit_timeout + " or " + self.gp_interconnect_fc_method + " not defined")
result = runShellCommand('gpssh ' + self.hoststr + ' \"export PATH=$PATH:/sbin; \
sudo insmod ickm.ko ict_type=0x101 seq_array=2 drop_times=80\"')
self.assertTrue(result)
sql_file = local_path(self.common_sql + str(self._testMethodName) + '.sql');
self.assertTrue(PSQL.run_sql_file(local_path(sql_file)))
out_file = sql_file.replace(".sql",".out")
test_ret = "Failed to send packet (seq 2) to" in open(out_file).read() and "retries in 40 seconds" in open(out_file).read()
ret_log = runShellCommand(self.log_str + self._testMethodName + '.log' )
result = runShellCommand('gpssh ' + self.hoststr + ' \"export PATH=$PATH:/sbin;sudo rmmod ickm.ko \"')
self.assertTrue(result)
self.assertTrue(ret_log)
self.assertTrue(test_ret)
| apache-2.0 | -1,435,075,005,109,995,800 | 39.426036 | 192 | 0.623683 | false |
nonZero/OpenCommunity | src/issues/south_migrations/0002_auto__add_field_issuecomment_uid__add_field_issue_uid__add_field_propo.py | 3 | 15836 | # -*- coding: utf-8 -*-
from django.db import models
from ocd.migration_utils import create_uids
from south.db import db
from south.v2 import SchemaMigration
import datetime
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'IssueComment.uid'
db.add_column(u'issues_issuecomment', 'uid',
self.gf('django.db.models.fields.CharField')(max_length=24, null=True, blank=True),
keep_default=False)
# Adding field 'Issue.uid'
db.add_column(u'issues_issue', 'uid',
self.gf('django.db.models.fields.CharField')(max_length=24, null=True, blank=True),
keep_default=False)
# Adding field 'Proposal.uid'
db.add_column(u'issues_proposal', 'uid',
self.gf('django.db.models.fields.CharField')(max_length=24, null=True, blank=True),
keep_default=False)
if not db.dry_run:
create_uids(orm['issues.Issue'])
create_uids(orm['issues.IssueComment'])
create_uids(orm['issues.Proposal'])
def backwards(self, orm):
# Deleting field 'IssueComment.uid'
db.delete_column(u'issues_issuecomment', 'uid')
# Deleting field 'Issue.uid'
db.delete_column(u'issues_issue', 'uid')
# Deleting field 'Proposal.uid'
db.delete_column(u'issues_proposal', 'uid')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'communities.community': {
'Meta': {'object_name': 'Community'},
'board_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '24', 'null': 'True', 'blank': 'True'}),
'upcoming_meeting_comments': ('ocd.base_models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'upcoming_meeting_guests': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'upcoming_meeting_is_published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'upcoming_meeting_location': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'upcoming_meeting_participants': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'+'", 'blank': 'True', 'to': u"orm['users.OCUser']"}),
'upcoming_meeting_published_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'upcoming_meeting_scheduled_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'upcoming_meeting_started': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'upcoming_meeting_summary': ('ocd.base_models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'upcoming_meeting_version': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'issues.issue': {
'Meta': {'object_name': 'Issue'},
'abstract': ('ocd.base_models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'calculated_score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'closed_at_meeting': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['meetings.Meeting']", 'null': 'True', 'blank': 'True'}),
'community': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'issues'", 'to': u"orm['communities.Community']"}),
'content': ('ocd.base_models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'issues_created'", 'to': u"orm['users.OCUser']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_upcoming_meeting': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'length_in_minutes': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'order_in_upcoming_meeting': ('django.db.models.fields.IntegerField', [], {'default': '9999', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '24', 'null': 'True', 'blank': 'True'})
},
u'issues.issuecomment': {
'Meta': {'ordering': "('created_at',)", 'object_name': 'IssueComment'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'content': ('ocd.base_models.HTMLField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'issue_comments_created'", 'to': u"orm['users.OCUser']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': u"orm['issues.Issue']"}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'issue_comments_last_edited'", 'null': 'True', 'to': u"orm['users.OCUser']"}),
'ordinal': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '24', 'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
u'issues.issuecommentrevision': {
'Meta': {'object_name': 'IssueCommentRevision'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': u"orm['issues.IssueComment']"}),
'content': ('django.db.models.fields.TextField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'issue_comment_versions_created'", 'to': u"orm['users.OCUser']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'version': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'issues.proposal': {
'Meta': {'object_name': 'Proposal'},
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'assigned_to': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'assigned_to_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'proposals_assigned'", 'null': 'True', 'to': u"orm['users.OCUser']"}),
'content': ('ocd.base_models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'proposals_created'", 'to': u"orm['users.OCUser']"}),
'due_by': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'issue': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'proposals'", 'to': u"orm['issues.Issue']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '24', 'null': 'True', 'blank': 'True'}),
'votes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'proposals'", 'blank': 'True', 'through': u"orm['issues.ProposalVote']", 'to': u"orm['users.OCUser']"})
},
u'issues.proposalvote': {
'Meta': {'unique_together': "(('proposal', 'user'),)", 'object_name': 'ProposalVote'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'proposal': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['issues.Proposal']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['users.OCUser']"}),
'value': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'meetings.agendaitem': {
'Meta': {'unique_together': "(('meeting', 'issue'),)", 'object_name': 'AgendaItem'},
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['issues.Issue']"}),
'meeting': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'agenda'", 'to': u"orm['meetings.Meeting']"}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '100'})
},
u'meetings.meeting': {
'Meta': {'ordering': "('-held_at',)", 'object_name': 'Meeting'},
'agenda_items': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['issues.Issue']", 'symmetrical': 'False', 'through': u"orm['meetings.AgendaItem']", 'blank': 'True'}),
'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'community': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'meetings'", 'to': u"orm['communities.Community']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'meetings_created'", 'to': u"orm['users.OCUser']"}),
'guests': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'held_at': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'participants': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'participated_in_meeting'", 'symmetrical': 'False', 'through': u"orm['meetings.MeetingParticipant']", 'to': u"orm['users.OCUser']"}),
'scheduled_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '24', 'null': 'True', 'blank': 'True'})
},
u'meetings.meetingparticipant': {
'Meta': {'unique_together': "(('meeting', 'ordinal'), ('meeting', 'user'))", 'object_name': 'MeetingParticipant'},
'default_group_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'meeting': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'participations'", 'to': u"orm['meetings.Meeting']"}),
'ordinal': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'participations'", 'to': u"orm['users.OCUser']"})
},
u'users.ocuser': {
'Meta': {'object_name': 'OCUser'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
}
}
complete_apps = ['issues'] | bsd-3-clause | 2,559,060,409,468,201,500 | 79.80102 | 235 | 0.561569 | false |
martin-ejdestig/sork | sork/checks/license_header.py | 1 | 14055 | # This file is part of Sork.
#
# Copyright (C) 2016-2019 Martin Ejdestig <[email protected]>
#
# Sork is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sork is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sork. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: GPL-3.0-or-later
import itertools
import glob
import os
import re
import string
from typing import List, Optional, Pattern, Sequence
from .check import Check
from .. import error
from ..config import Config
from ..project import Project
from ..source import SourceFile
NAME = 'license_header'
class License:
def __init__(self, name: str, content_pattern: str, header_lines: List[str]) -> None:
self.name = name
self.content_pattern = content_pattern
self.header_lines = header_lines
_LICENSE_APACHE2 = \
License('apache2',
content_pattern=r"\s*Apache License\s*\n"
r"\s*Version 2.0, January 2004",
header_lines=[
'$copyright',
'',
'Licensed under the Apache License, Version 2.0 (the "License");',
'you may not use this file except in compliance with the License.',
'You may obtain a copy of the License at',
'',
' http://www.apache.org/licenses/LICENSE-2.0',
'',
'Unless required by applicable law or agreed to in writing, software',
'distributed under the License is distributed on an "AS IS" BASIS,',
'WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.',
'See the License for the specific language governing permissions and',
'limitations under the License.',
'',
'SPDX-License-Identifier: Apache-2.0'
])
_LICENSE_GPLV2 = \
License('gplv2',
content_pattern=r"\s*GNU GENERAL PUBLIC LICENSE\s*\n"
r"\s*Version 2, June 1991",
header_lines=[
'This file is part of $project.',
'',
'$copyright',
'',
'$project is free software; you can redistribute it and/or modify',
'it under the terms of the GNU General Public License as published by',
'the Free Software Foundation; either version 2 of the License, or',
'(at your option) any later version.',
'',
'$project is distributed in the hope that it will be useful,',
'but WITHOUT ANY WARRANTY; without even the implied warranty of',
'MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the',
'GNU General Public License for more details.',
'',
'You should have received a copy of the GNU General Public License',
'along with $project. If not, see <http://www.gnu.org/licenses/>.',
'',
'SPDX-License-Identifier: GPL-2.0-or-later'
])
_LICENSE_GPLV3 = \
License('gplv3',
content_pattern=r"\s*GNU GENERAL PUBLIC LICENSE\s*\n"
r"\s*Version 3, 29 June 2007",
header_lines=[
'This file is part of $project.',
'',
'$copyright',
'',
'$project is free software: you can redistribute it and/or modify',
'it under the terms of the GNU General Public License as published by',
'the Free Software Foundation, either version 3 of the License, or',
'(at your option) any later version.',
'',
'$project is distributed in the hope that it will be useful,',
'but WITHOUT ANY WARRANTY; without even the implied warranty of',
'MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the',
'GNU General Public License for more details.',
'',
'You should have received a copy of the GNU General Public License',
'along with $project. If not, see <http://www.gnu.org/licenses/>.',
'',
'SPDX-License-Identifier: GPL-3.0-or-later'
])
_LICENSE_LGPLV2 = \
License('lgplv2',
content_pattern=r"\s*GNU LIBRARY GENERAL PUBLIC LICENSE\s*\n"
r"\s*Version 2, June 1991",
header_lines=[
'This file is part of $project.',
'',
'$copyright',
'',
'$project is free software; you can redistribute it and/or modify',
'it under the terms of the GNU Library General Public License as published by',
'the Free Software Foundation; either version 2 of the License, or',
'(at your option) any later version.',
'',
'$project is distributed in the hope that it will be useful,',
'but WITHOUT ANY WARRANTY; without even the implied warranty of',
'MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the',
'GNU Library General Public License for more details.',
'',
'You should have received a copy of the GNU Library General Public License',
'along with $project. If not, see <http://www.gnu.org/licenses/>.',
'',
'SPDX-License-Identifier: LGPL-2.0-or-later'
])
_LICENSE_LGPLV2_1 = \
License('lgplv2.1',
content_pattern=r"\s*GNU LESSER GENERAL PUBLIC LICENSE\s*\n"
r"\s*Version 2.1, February 1999",
header_lines=[
'This file is part of $project.',
'',
'$copyright',
'',
'$project is free software; you can redistribute it and/or modify',
'it under the terms of the GNU Lesser General Public License as published by',
'the Free Software Foundation; either version 2.1 of the License, or',
'(at your option) any later version.',
'',
'$project is distributed in the hope that it will be useful,',
'but WITHOUT ANY WARRANTY; without even the implied warranty of',
'MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the',
'GNU Lesser General Public License for more details.',
'',
'You should have received a copy of the GNU Lesser General Public License',
'along with $project. If not, see <http://www.gnu.org/licenses/>.',
'',
'SPDX-License-Identifier: LGPL-2.1-or-later'
])
_LICENSE_LGPLV3 = \
License('lgplv3',
content_pattern=r"\s*GNU LESSER GENERAL PUBLIC LICENSE\s*\n"
r"\s*Version 3, 29 June 2007",
header_lines=[
'This file is part of $project.',
'',
'$copyright',
'',
'$project is free software: you can redistribute it and/or modify',
'it under the terms of the GNU Lesser General Public License as published by',
'the Free Software Foundation, either version 3 of the License, or',
'(at your option) any later version.',
'',
'$project is distributed in the hope that it will be useful,',
'but WITHOUT ANY WARRANTY; without even the implied warranty of',
'MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the',
'GNU Lesser General Public License for more details.',
'',
'You should have received a copy of the GNU Lesser General Public License',
'along with $project. If not, see <http://www.gnu.org/licenses/>.',
'',
'SPDX-License-Identifier: LGPL-3.0-or-later'
])
_LICENSE_MPL2 = \
License('mpl2',
content_pattern=r"Mozilla Public License Version 2.0\n"
r"==================================",
header_lines=[
'$copyright',
'',
'This Source Code Form is subject to the terms of the Mozilla Public',
'License, v. 2.0. If a copy of the MPL was not distributed with this',
'file, You can obtain one at https://mozilla.org/MPL/2.0/.',
'',
'SPDX-License-Identifier: MPL-2.0'
])
_LICENSES: List[License] = [
_LICENSE_APACHE2,
_LICENSE_GPLV2,
_LICENSE_GPLV3,
_LICENSE_LGPLV2,
_LICENSE_LGPLV2_1,
_LICENSE_LGPLV3,
_LICENSE_MPL2
]
_LICENSE_BASE_FILE_NAMES = ['COPYING', 'LICENSE']
class Error(error.Error):
pass
def _detect_license(project: Project) -> License:
def ignore_case_if_alpha(char: str) -> str:
return '[{}{}]'.format(char.upper(), char.lower()) if char.isalpha() else char
def pattern_ignore_case(pattern: str) -> str:
return ''.join([ignore_case_if_alpha(char) for char in pattern])
def find_license_paths() -> List[str]:
patterns = [os.path.join(project.path, pattern_ignore_case(n + '*'))
for n in _LICENSE_BASE_FILE_NAMES]
paths = itertools.chain.from_iterable(glob.glob(p) for p in patterns)
return list(paths)
def determine_license_in_file(path: str) -> License:
try:
with open(path) as file:
content = file.read()
except OSError as exception:
raise Error(exception)
for license_ in _LICENSES:
if re.match(license_.content_pattern, content):
return license_
raise Error('Unknown license in {}.'.format(path))
paths = find_license_paths()
if not paths:
        raise Error('Unable to find any license file(s) in \'{}\'.'.format(project.path))
licenses = [determine_license_in_file(path) for path in paths]
if len(licenses) == 1:
return licenses[0]
if len(licenses) == 2 and _LICENSE_GPLV3 in licenses and _LICENSE_LGPLV3 in licenses:
return _LICENSE_LGPLV3
    raise Error('Unable to automatically determine license in \'{}\'.'.format(project.path))
def _get_license_template_str(project: Project, config: Config) -> str:
def get_header_lines() -> Sequence[str]:
name_or_lines = config['license']
if isinstance(name_or_lines, list):
return name_or_lines
if name_or_lines:
name = name_or_lines.lower()
license_ = next((l for l in _LICENSES if l.name == name), None)
if not license_:
raise Error('{} is an unknown license'.format(name_or_lines))
else:
license_ = _detect_license(project)
return license_.header_lines
def join_header_lines(lines: Sequence[str]) -> str:
prefix = config['prefix']
line_prefix = config['line_prefix']
suffix = config['suffix']
def prepend_prefix(line: str) -> str:
return line_prefix + line if line else line_prefix.rstrip()
return ''.join([prefix, '\n'.join(prepend_prefix(l) for l in lines), suffix])
return join_header_lines(get_header_lines())
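# Example (illustrative): with config values prefix='/*\n', line_prefix=' * '
# and suffix='\n */', the header lines ['Copyright (C) 2019 Jane Doe', ''] are
# joined into a C-style block comment (blank lines get the stripped prefix ' *'):
#
#   /*
#    * Copyright (C) 2019 Jane Doe
#    *
#    */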
def _compile_license_regex(config: Config, template_str: str) -> Pattern:
def escape_regex_chars(unescaped: str) -> str:
escaped = unescaped
escaped = re.sub(r'\*', r'\*', escaped)
escaped = re.sub(r'([()])', r'\\\1', escaped)
return escaped
template = string.Template(escape_regex_chars(template_str))
project_regex_str = config['project'] or r"[^$\s]{1}.*"
year_regex_str = r"[0-9]{4}(-[0-9]{4})?"
author_regex_str = r"[^$\s]{1}.*"
copyright_line_regex_str = r'Copyright \(C\) ' + year_regex_str + ' ' + author_regex_str
copyright_regex_str = r"(" + copyright_line_regex_str + r")"
copyright_regex_str += r"(\n" + config['line_prefix'] + copyright_line_regex_str + r")*"
regex_str = template.safe_substitute(project=project_regex_str,
copyright=copyright_regex_str)
try:
return re.compile(regex_str)
except re.error:
raise Error('Failed to compile regular expression for license header')
def create(project: Project) -> Check:
config = project.config['checks.' + NAME]
template_str = _get_license_template_str(project, config)
license_regex = _compile_license_regex(config, template_str)
def error_message() -> str:
strs = ['Invalid license header, must match below template.']
if '$project' in template_str:
strs += ['$project can be any string not starting with space or $.']
if '$copyright' in template_str:
strs += [
'$copyright must match \'Copyright (C) $year $author\' where $year can be a',
'specific year (2019) or a range (2018-2019) and author can be any string not',
'starting with space or $. There can be multiple lines of copyright notices.'
]
return ' '.join(strs) + '\n' + template_str
def run(source_file: SourceFile) -> Optional[str]:
if not license_regex.match(source_file.content):
return '{}:1: error: {}'.format(source_file.path, error_message())
return None
return Check(NAME, run)
| gpl-3.0 | -6,359,019,093,589,417,000 | 39.504323 | 95 | 0.563501 | false |
carwyn/veillance | bot.py | 1 | 1205 | import time
import functools
import httplib
import urllib2
import argparse
# GREEN = "10.42.0.104"
# PURPLE = "10.42.0.188"
class BoundHTTPHandler(urllib2.HTTPHandler):
def __init__(self, source_address=None, debuglevel=0):
urllib2.HTTPHandler.__init__(self, debuglevel)
self.http_class = functools.partial(httplib.HTTPConnection,
source_address=source_address)
def http_open(self, req):
return self.do_open(self.http_class, req)
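# Example (illustrative): binding all outgoing requests to one of the local
# addresses noted above; port 0 lets the OS choose the source port.
#
#   handler = BoundHTTPHandler(source_address=("10.42.0.104", 0))
#   opener = urllib2.build_opener(handler)
#   opener.open("http://news.bbc.co.uk/")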
def main():
parser = argparse.ArgumentParser(description="Veillance Bot")
parser.add_argument("--site", default="http://news.bbc.co.uk/")
parser.add_argument("--source")
parser.add_argument("--sleep", default=60, type=int)
args = parser.parse_args()
if args.source:
print "Using Source IP:", args.source
handler = BoundHTTPHandler(source_address=(args.source, 0))
opener = urllib2.build_opener(handler)
urllib2.install_opener(opener)
while True:
f = urllib2.urlopen(args.site)
content = f.readlines()
print f.getcode(), f.geturl(), "Length: ", len(content)
time.sleep(args.sleep)
if __name__ == "__main__": main()
| bsd-3-clause | 1,782,617,800,748,849,700 | 26.386364 | 67 | 0.642324 | false |
CERNDocumentServer/cds | tests/unit/test_deposit_search.py | 3 | 2580 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2017 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Deposit search tests."""
import json
from time import sleep
from flask import url_for
from invenio_accounts.models import User
from invenio_accounts.testutils import login_user_via_session
from invenio_indexer.api import RecordIndexer
from helpers import new_project
def test_aggregations(api_app, es, cds_jsonresolver, users, location,
db, deposit_metadata, json_headers):
"""Test deposit search aggregations."""
project_1, _, _ = new_project(api_app, es, cds_jsonresolver,
users, location, db, deposit_metadata)
_users = [users[1]]
project_2, _, _ = new_project(api_app, es, cds_jsonresolver,
_users, location, db, deposit_metadata)
RecordIndexer().bulk_index([project_1.id, project_2.id])
RecordIndexer().process_bulk_queue()
sleep(2)
with api_app.test_client() as client:
login_user_via_session(client, email=User.query.get(users[0]).email)
url = url_for('invenio_deposit_rest.project_list', q='')
res = client.get(url, headers=json_headers)
assert res.status_code == 200
data = json.loads(res.data.decode('utf-8'))
assert len(data['aggregations']['created_by']['buckets']) == 1
assert data['aggregations']['created_by']['buckets'][0][
'key'] == users[0]
# Invalid query syntax (Invalid ES syntax)
url = url_for('invenio_deposit_rest.project_list')
res = client.get(
url, headers=json_headers, query_string=dict(q='title/back'))
assert res.status_code == 400
| gpl-2.0 | -1,761,711,066,843,644,700 | 38.692308 | 76 | 0.676357 | false |
Intel-EPID-SDK/epid-sdk | parts-site/configurations/release/clang_posix-any.py | 1 | 2481 | ############################################################################
# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############################################################################
# pylint: disable=locally-disabled, invalid-name, missing-docstring
"""clang compiler configuration for release
"""
from parts.config import ConfigValues, configuration
def map_default_version(env):
return env['CLANG_VERSION']
config = configuration(map_default_version)
config.VersionRange("3-*",
append=ConfigValues(
CCFLAGS=['',
# second level optimization
'-O2',
# treat warnings as errors
'-Werror',
# enable all warnings
'-Wall',
# extra warnings
'-Wextra',
'-Wno-empty-body',
'-Wno-missing-braces',
'-Wno-missing-field-initializers',
'-Wno-unknown-pragmas',
# do not assume strict aliasing
'-fno-strict-aliasing',
'-Wformat',
'-Wformat-security',
'-fstack-protector',
'-fPIC'],
CXXFLAGS=['',
# modern C++ features support
'-std=c++0x'],
CPPDEFINES=['NDEBUG',
'_FORTIFY_SOURCE=2'],
LINKFLAGS=['-Wl,-znoexecstack',
'-Wl,-zrelro',
'-Wl,-znow'], ))
| apache-2.0 | 8,704,135,361,121,457,000 | 42.526316 | 76 | 0.425635 | false |
jmankoff/data | Assignments/jmankoff-explore/main.py | 1 | 3253 | """`main` is the top level module for your Flask application."""
# Data Exploration Byte Version 1
#
# Copyright 1/2016 Jennifer Mankoff
#
# Licensed under GPL v3 (http://www.gnu.org/licenses/gpl.html)
#
# Imports
import os
import jinja2
import webapp2
import logging
import json
import urllib
# This is used for constructing URLs to Google's APIs
from googleapiclient.discovery import build
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
extensions=['jinja2.ext.autoescape'],
autoescape=True)
# This API key is provided by Google as described in the tutorial
API_KEY = 'XXxxXxXXXXxxNXXxXXXxxxNNXXxxxxxxxXXXxXX'
# This uses discovery to create an object that can talk to the
# Fusion Tables API using the developer key
service = build('fusiontables', 'v1', developerKey=API_KEY)
# This is the table id for the fusion table
TABLE_ID = 'NxxxNXxXxxNxXXXXNXxXXXxXxxxNxXxNxXxxXxxX'
# These are the default columns for the query
query_cols = []
query_animals = ['DOG']
# Import the Flask Framework
from flask import Flask, request
app = Flask(__name__)
def get_all_data(query):
response = service.query().sql(sql=query).execute()
return response
# make a query given a set of columns to retrieve
def make_query(cols, animals, limit):
    if not cols:
        cols = ['*']
    string_cols = ", ".join(cols)
    # quote each animal type individually so that several values form a
    # valid IN clause instead of a single comma-joined literal
    string_animals = ", ".join("'%s'" % animal for animal in animals)
    query = "SELECT " + string_cols + " FROM " + TABLE_ID + \
            " WHERE AnimalType IN (" + string_animals + ")"
    query = query + " LIMIT " + str(limit)
    logging.info(query)
    # query = "SELECT * FROM " + TABLE_ID + " WHERE AnimalType = 'DOG' LIMIT 2"
    return query
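# A hedged usage sketch (not part of the original module): with the constants
# above, a call such as make_query(['AnimalName', 'Age'], ['DOG'], 5) would
# build (the column names here are only illustrative):
#   SELECT AnimalName, Age FROM <TABLE_ID> WHERE AnimalType IN ('DOG') LIMIT 5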
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def index():
template = JINJA_ENVIRONMENT.get_template('templates/index.html')
    allheaders = get_all_data(make_query([], query_animals, 1))
    logging.info(allheaders)
    return template.render(allheaders=allheaders['columns'])
@app.route('/_update_table', methods=['POST'])
def update_table():
logging.info(request.get_json())
cols = request.json['cols']
logging.info(cols)
result = get_all_data(make_query(cols, query_animals, 100))
logging.info(result)
return json.dumps({'content' : result['rows'], 'headers' : result['columns']})
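# A hedged sketch of the AJAX contract implied above (not spelled out in the
# original source): the client POSTs JSON such as {"cols": ["AnimalType"]}
# and receives back {"content": <rows>, "headers": <column names>} built from
# the fusion table query result.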
@app.route('/about')
def about():
template = JINJA_ENVIRONMENT.get_template('templates/about.html')
return template.render()
@app.route('/quality')
def quality():
template = JINJA_ENVIRONMENT.get_template('templates/quality.html')
return template.render()
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
@app.errorhandler(500)
def application_error(e):
"""Return a custom 500 error."""
return 'Sorry, unexpected error: {}'.format(e), 500
| gpl-3.0 | 5,943,271,027,229,840,000 | 28.572727 | 106 | 0.687058 | false |
kosgroup/odoo | addons/google_account/models/google_service.py | 12 | 9469 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime
import json
import logging
import urllib2
import werkzeug.urls
from odoo import api, fields, models, registry, _
from odoo.http import request
_logger = logging.getLogger(__name__)
TIMEOUT = 20
GOOGLE_AUTH_ENDPOINT = 'https://accounts.google.com/o/oauth2/auth'
GOOGLE_TOKEN_ENDPOINT = 'https://accounts.google.com/o/oauth2/token'
GOOGLE_API_BASE_URL = 'https://www.googleapis.com'
# FIXME : this needs to become an AbstractModel, to be inherited by google_calendar_service and google_drive_service
class GoogleService(models.TransientModel):
_name = 'google.service'
@api.model
def generate_refresh_token(self, service, authorization_code):
""" Call Google API to refresh the token, with the given authorization code
:param service : the name of the google service to actualize
:param authorization_code : the code to exchange against the new refresh token
:returns the new refresh token
"""
Parameters = self.env['ir.config_parameter'].sudo()
client_id = Parameters.get_param('google_%s_client_id' % service)
client_secret = Parameters.get_param('google_%s_client_secret' % service)
redirect_uri = Parameters.get_param('google_redirect_uri')
        # Ask Google for the refresh token (the caller stores it in ir.config_parameter)
headers = {"Content-type": "application/x-www-form-urlencoded"}
data = werkzeug.url_encode({
'code': authorization_code,
'client_id': client_id,
'client_secret': client_secret,
'redirect_uri': redirect_uri,
'grant_type': "authorization_code"
})
try:
req = urllib2.Request(GOOGLE_TOKEN_ENDPOINT, data, headers)
content = urllib2.urlopen(req, timeout=TIMEOUT).read()
except urllib2.HTTPError:
error_msg = _("Something went wrong during your token generation. Maybe your Authorization Code is invalid or already expired")
raise self.env['res.config.settings'].get_config_warning(error_msg)
content = json.loads(content)
return content.get('refresh_token')
@api.model
def _get_google_token_uri(self, service, scope):
Parameters = self.env['ir.config_parameter'].sudo()
encoded_params = werkzeug.url_encode({
'scope': scope,
'redirect_uri': Parameters.get_param('google_redirect_uri'),
'client_id': Parameters.get_param('google_%s_client_id' % service),
'response_type': 'code',
})
return '%s?%s' % (GOOGLE_AUTH_ENDPOINT, encoded_params)
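    # A hedged illustration (not from the original source): for a service
    # named 'calendar', the method above yields a url of the form
    #   https://accounts.google.com/o/oauth2/auth?scope=...&redirect_uri=...&client_id=...&response_type=code
    # (the parameter order depends on werkzeug.url_encode).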
@api.model
def _get_authorize_uri(self, from_url, service, scope=False):
""" This method return the url needed to allow this instance of Odoo to access to the scope
of gmail specified as parameters
"""
state = {
'd': self.env.cr.dbname,
's': service,
'f': from_url
}
Parameters = self.env['ir.config_parameter']
base_url = Parameters.get_param('web.base.url', default='http://www.odoo.com?NoBaseUrl')
client_id = Parameters.sudo().get_param('google_%s_client_id' % (service,), default=False)
encoded_params = werkzeug.url_encode({
'response_type': 'code',
'client_id': client_id,
'state': json.dumps(state),
'scope': scope or '%s/auth/%s' % (GOOGLE_API_BASE_URL, service), # If no scope is passed, we use service by default to get a default scope
'redirect_uri': base_url + '/google_account/authentication',
'approval_prompt': 'force',
'access_type': 'offline'
})
return "%s?%s" % (GOOGLE_AUTH_ENDPOINT, encoded_params)
@api.model
def _get_google_token_json(self, authorize_code, service):
""" Call Google API to exchange authorization code against token, with POST request, to
not be redirected.
"""
Parameters = self.env['ir.config_parameter']
base_url = Parameters.get_param('web.base.url', default='http://www.odoo.com?NoBaseUrl')
client_id = Parameters.sudo().get_param('google_%s_client_id' % (service,), default=False)
client_secret = Parameters.sudo().get_param('google_%s_client_secret' % (service,), default=False)
headers = {"content-type": "application/x-www-form-urlencoded"}
data = werkzeug.url_encode({
'code': authorize_code,
'client_id': client_id,
'client_secret': client_secret,
'grant_type': 'authorization_code',
'redirect_uri': base_url + '/google_account/authentication'
})
try:
dummy, response, dummy = self._do_request(GOOGLE_TOKEN_ENDPOINT, params=data, headers=headers, type='POST', preuri='')
return response
except urllib2.HTTPError:
error_msg = _("Something went wrong during your token generation. Maybe your Authorization Code is invalid")
raise self.env['res.config.settings'].get_config_warning(error_msg)
    # FIXME : this method updates a field defined in the google_calendar module. Since it is used only in that module, maybe it should be moved.
@api.model
def _refresh_google_token_json(self, refresh_token, service): # exchange_AUTHORIZATION vs Token (service = calendar)
Parameters = self.env['ir.config_parameter'].sudo()
client_id = Parameters.get_param('google_%s_client_id' % (service,), default=False)
client_secret = Parameters.get_param('google_%s_client_secret' % (service,), default=False)
headers = {"content-type": "application/x-www-form-urlencoded"}
data = werkzeug.url_encode({
'refresh_token': refresh_token,
'client_id': client_id,
'client_secret': client_secret,
'grant_type': 'refresh_token',
})
try:
dummy, response, dummy = self._do_request(GOOGLE_TOKEN_ENDPOINT, params=data, headers=headers, type='POST', preuri='')
return response
except urllib2.HTTPError, error:
if error.code == 400: # invalid grant
with registry(request.session.db).cursor() as cur:
self.env(cur)['res.users'].browse(self.env.uid).write({'google_%s_rtoken' % service: False})
error_key = json.loads(error.read()).get("error", "nc")
_logger.exception("Bad google request : %s !", error_key)
error_msg = _("Something went wrong during your token generation. Maybe your Authorization Code is invalid or already expired [%s]") % error_key
raise self.env['res.config.settings'].get_config_warning(error_msg)
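    # A hedged illustration (not from the original source): on success, the
    # token endpoint replies with JSON along the lines of
    #   {"access_token": "ya29....", "expires_in": 3600, "token_type": "Bearer"}
    # which is returned as-is to the caller.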
# TODO JEM : remove preuri param, and rename type into method
@api.model
def _do_request(self, uri, params={}, headers={}, type='POST', preuri="https://www.googleapis.com"):
""" Execute the request to Google API. Return a tuple ('HTTP_CODE', 'HTTP_RESPONSE')
:param uri : the url to contact
:param params : dict or already encoded parameters for the request to make
:param headers : headers of request
:param type : the method to use to make the request
:param preuri : pre url to prepend to param uri.
"""
_logger.debug("Uri: %s - Type : %s - Headers: %s - Params : %s !" % (uri, type, headers, werkzeug.url_encode(params) if type == 'GET' else params))
status = 418
response = ""
ask_time = fields.Datetime.now()
try:
if type.upper() == 'GET' or type.upper() == 'DELETE':
data = werkzeug.url_encode(params)
req = urllib2.Request(preuri + uri + "?" + data)
elif type.upper() == 'POST' or type.upper() == 'PATCH' or type.upper() == 'PUT':
req = urllib2.Request(preuri + uri, params, headers)
else:
raise Exception(_('Method not supported [%s] not in [GET, POST, PUT, PATCH or DELETE]!') % (type))
req.get_method = lambda: type.upper()
resp = urllib2.urlopen(req, timeout=TIMEOUT)
status = resp.getcode()
if int(status) in (204, 404): # Page not found, no response
response = False
else:
content = resp.read()
response = json.loads(content)
try:
ask_time = datetime.strptime(resp.headers.get('date'), "%a, %d %b %Y %H:%M:%S %Z")
except:
pass
except urllib2.HTTPError, error:
if error.code in (204, 404):
status = error.code
response = ""
else:
_logger.exception("Bad google request : %s !", error.read())
if error.code in (400, 401, 410):
raise error
raise self.env['res.config.settings'].get_config_warning(_("Something went wrong with your request to google"))
return (status, response, ask_time)
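    # A hedged usage sketch (not from the original source): a simple GET
    # through the helper above could look like
    #   status, response, ask_time = self._do_request(
    #       '/calendar/v3/users/me/calendarList', type='GET')
    # where the default preuri supplies the https://www.googleapis.com prefix.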
# TODO : remove me, it is only used in google calendar. Make google_calendar use the constants
@api.model
def get_client_id(self, service):
return self.env['ir.config_parameter'].sudo().get_param('google_%s_client_id' % (service,), default=False)
| gpl-3.0 | 3,410,827,654,959,012,400 | 46.582915 | 156 | 0.605977 | false |
loli/sklearn-ensembletrees | sklearn/cluster/_feature_agglomeration.py | 1 | 2656 | """
Feature agglomeration. Base classes and functions for performing feature
agglomeration.
"""
# Author: V. Michel, A. Gramfort
# License: BSD 3 clause
import warnings
import numpy as np
from ..base import TransformerMixin
from ..utils import array2d
###############################################################################
# Mixin class for feature agglomeration.
class AgglomerationTransform(TransformerMixin):
"""
A class for feature agglomeration via the transform interface
"""
pooling_func = np.mean
def transform(self, X, pooling_func=None):
"""
Transform a new matrix using the built clustering
Parameters
----------
        X : array-like, shape = [n_samples, n_features] or [n_features]
            An array of n_samples observations in n_features dimensions, or a
            single observation of length n_features.
        pooling_func : callable, default=np.mean
            This combines the values of agglomerated features into a single
            value, and should accept a 2D array and the keyword argument
            `axis=1`, reducing it to a 1D array.
Returns
-------
Y : array, shape = [n_samples, n_clusters] or [n_clusters]
The pooled values for each feature cluster.
"""
if pooling_func is not None:
warnings.warn("The pooling_func parameter is deprecated since 0.15 and will be "
"removed in 0.18. Pass it to the constructor instead.", DeprecationWarning)
else:
pooling_func = self.pooling_func
X = array2d(X)
nX = []
if len(self.labels_) != X.shape[1]:
raise ValueError("X has a different number of features than "
"during fitting.")
for l in np.unique(self.labels_):
nX.append(pooling_func(X[:, self.labels_ == l], axis=1))
return np.array(nX).T
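    # A hedged illustration (not part of the original code): with
    # self.labels_ = [0, 0, 1] and the default np.mean pooling,
    # transform([[0., 2., 10.]]) returns [[1., 10.]] -- the two features of
    # cluster 0 are averaged while the lone feature of cluster 1 passes
    # through unchanged.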
def inverse_transform(self, Xred):
"""
        Invert the transformation.
        Return an array of size n_features with the values of Xred assigned
        to each group of features.
        Parameters
        ----------
        Xred : array-like, shape=[n_samples, n_clusters] or [n_clusters,]
            The values to be assigned to each cluster of features
        Returns
        -------
        X : array, shape=[n_samples, n_features] or [n_features]
            An array with the values of Xred assigned to each of the clusters
            of features.
"""
unil, inverse = np.unique(self.labels_, return_inverse=True)
return Xred[..., inverse]
| bsd-3-clause | 1,009,311,063,967,769,600 | 33.493506 | 92 | 0.583961 | false |
atalax/wytch | wytch/event.py | 1 | 11509 | # The MIT License (MIT)
#
# Copyright (c) 2016 Josef Gajdusek
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import inspect
import collections
from functools import wraps
_unbound_handlers = {}
def _fullname(o):
if hasattr(o, "__module__") and o.__module__:
return o.__module__ + "." + o.__qualname__
return o.__qualname__
def handler(evname, **kwargs):
"""
Mark a method as a handler for an Event of the specified name.
Usage:
class A(EventSource):
@handler("key")
def onkey(self, event):
pass
@handler("key", invert = True, key = "\r"):
def onkey(self, event):
pass
"""
def decor(fn):
# As there is no way to get the class object at this point, put the
        # event handler into a global map and then ._class_bind it properly when
# the constructor first gets called.
# Also note that we are using string name instead of the actual function
# reference. This is because @wraps does not fix the == operator.
nm = _fullname(fn)
if nm not in _unbound_handlers:
_unbound_handlers[nm] = []
_unbound_handlers[nm].append((evname, kwargs))
return fn
return decor
class EventSource:
"""
Base class for all classes that want to fire events.
"""
class Handler:
def __init__(self, evname, fn, mkws = {}):
self.evname = evname
self.fn = fn
self.mkws = mkws
def __init__(self):
self._bind_handlers()
self._handlers = {}
self._inherit_handlers()
@classmethod
def _class_bind(cls, evname, fn, **kwargs):
"""
Bind a method of this class (or a parent) to an event. These methods
will get translated to bound methods on instantiation.
"""
if evname not in cls._class_handlers:
cls._class_handlers[evname] = []
cls._class_handlers[evname].append(EventSource.Handler(evname, fn, kwargs))
@classmethod
def _bind_handlers(cls):
"""
Bind all class handlers from @handler.
Recursively calls ._bind_handlers() on all base classes.
"""
# Use __dict__ to check that the variable exists in this class and not a superclass
if "_class_handlers" in cls.__dict__:
return
for base in cls.__bases__:
if hasattr(base, "_bind_handlers"):
base._bind_handlers()
cls._class_handlers = {}
for x in cls.__dict__.values():
if not callable(x):
continue
nm = _fullname(x)
if nm in _unbound_handlers:
for evname, mkws in _unbound_handlers[nm]:
cls._class_bind(evname, x, **mkws)
def _inherit_handlers(self):
""" .bind class handlers from all parent classes. """
for parent in [p for p in inspect.getmro(self.__class__) \
if hasattr(p, "_class_handlers")]:
for ev, hdls in parent._class_handlers.items():
for h in hdls:
# Find the appropriate bound method of self
for metname in self.__dir__():
# Notice that we cannot just getattr on self as that could
# attempt to dereference properties depending on uninitialized variables
clfn = getattr(self.__class__, metname, None)
if clfn and clfn == h.fn:
self.bind(ev, getattr(self, metname), **h.mkws)
def bind(self, evname, fn, **kwargs):
"""
Bind an handler for an event with the provided name.
Returns a reference to an instance of EventSource.Handler which can be then passed
to .unbind
"""
if evname not in self._handlers:
self._handlers[evname] = []
h = EventSource.Handler(evname, fn, kwargs)
self._handlers[evname].append(h)
return h
def unbind(self, handler):
""" Unbind a handler registered with the .bind method. """
self._handlers[handler.evname].remove(handler)
def fire(self, event):
"""
Fire an event from this object and return True when at least one
handler was found and executed.
"""
if self._handlers.get(event.name, []):
ret = False
for h in self._handlers[event.name]:
kws = h.mkws.copy()
matcher = event.matches
flip = False
canreject = False
if "matcher" in kws:
kws.pop("matcher")
matcher = lambda **kwargs: h.mkws["matcher"](event, **kwargs)
if "invert" in kws:
flip = kws["invert"]
kws.pop("invert")
if "canreject" in kws:
canreject = kws["canreject"]
kws.pop("canreject")
if flip ^ matcher(**kws):
hrt = h.fn(event)
if canreject:
ret = hrt or ret
else:
ret = True
return ret
return False
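# A hedged sketch (not from the original source) of how the matching keyword
# arguments handled in fire() combine with bind():
#   source.bind("key", on_enter, key="\r")                # fires only for Enter
#   source.bind("key", on_other, key="\r", invert=True)   # fires for every other key
#   source.bind("key", maybe, key="\r", canreject=True)   # handler's falsy return
#                                                         # marks the event unhandled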
class Event:
"""
Base class for events.
"""
def __init__(self, name, source = None):
self.name = name
self.source = source
def matches(self):
"""
Method to be implemented by a subclass that wants to provide more
specific matching than by name.
"""
return True
class KeyEvent(Event):
_CSI_CURSOR = {
"A": "<up>",
"B": "<down>",
"C": "<right>",
"D": "<left>",
"H": "<home>",
"F": "<end>",
"P": "<f1>",
"Q": "<f2>",
"R": "<f3>",
"S": "<f4>",
}
_CSINUM = {
2: "<insert>",
3: "<delete>",
5: "<pageup>",
6: "<pagedown>",
15: "<f5>",
17: "<f6>",
18: "<f7>",
19: "<f8>",
20: "<f9>",
21: "<f10>",
23: "<f11>",
24: "<f12>",
}
def __init__(self, s):
super(KeyEvent, self).__init__("key")
self.raw = s
self.shift = False
self.alt = False
self.ctrl = False
self.isescape = False
if s[0] == "\x1b": # Escape sequence
if s[1] in ["[", "O"]:
csinum = 1
if ";" in s: # Some modifiers were pressed
spl = s[2:-1].split(";")
csinum = int(spl[0])
mod = int(spl[1]) - 1
if mod & 0x1:
self.shift = True
if mod & 0x2:
self.alt = True
if mod & 0x4:
self.ctrl = True
elif s[-1] == "~":
csinum = int(s[2:-1])
if csinum != 1 and csinum in KeyEvent._CSINUM.keys():
self.val = KeyEvent._CSINUM[csinum]
elif s[-1] in KeyEvent._CSI_CURSOR.keys():
self.val = KeyEvent._CSI_CURSOR[s[-1]]
elif s[-1] == "Z":
self.val = "\t"
self.shift = True
else:
raise ValueError("Invalid CSI value")
else:
self.val = s[1]
self.alt = True
else:
self.val = s
if len(self.val) == 1 and ord(self.val) in range(0x01, 0x1a) \
and self.val not in "\r\t\n":
self.val = chr(ord(self.val) + 0x60)
self.ctrl = True
if self.shift:
self.val = self.val.upper()
if self.alt:
self.val = "!" + self.val
if self.ctrl:
self.val = "^" + self.val
def matches(self, key = None, keys = None):
return (key is None or self.val == key) and \
(keys is None or self.val in keys)
def __str__(self):
return "<input.KeyEvent shift = %r alt = %r ctrl = %r val = %r>" % \
(self.shift, self.alt, self.ctrl, self.val)
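# A hedged illustration (not from the original source) of the parsing above:
#   KeyEvent("a").val          == "a"
#   KeyEvent("\x01").val       == "^a"      (Ctrl-A)
#   KeyEvent("\x1ba").val      == "!a"      (Alt-A)
#   KeyEvent("\x1b[1;5A").val  == "^<up>"   (Ctrl-Up via CSI modifier)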
class MouseEvent(Event):
LEFT = 0
MIDDLE = 1
RIGHT = 2
RELEASED = 3
def __init__(self, s = None):
super(MouseEvent, self).__init__("mouse")
if not s:
s = b"\x1b[M\x00!!"
if s[0:3] != b"\x1b[M" or len(s) != 6:
raise ValueError("Invalid escape sequence %r" % s)
self.raw = s
code = s[3]
self.button = code & 0x03
self.drag = bool(code & 0x40)
self.released = self.button == MouseEvent.RELEASED
self.pressed = not self.released and not self.drag
# Start at 0 0
self.x = s[4] - 32 - 1
self.y = s[5] - 32 - 1
if self.x < 0:
self.x += 255
if self.y < 0:
self.y += 255
def shifted(self, x, y):
ret = MouseEvent(self.raw)
ret.x = self.x - x
ret.y = self.y - y
return ret
def matches(self, pressed = None, released = None, drag = None, button = None):
return (button is None or button == self.button) and \
(pressed is None or pressed == self.pressed) and \
(released is None or released == self.released) and \
(drag is None or drag == self.drag)
def __str__(self):
return "<input.MouseEvent x = %d y = %d button = %d pressed = %r drag = %r released = %r>" % \
(self.x, self.y, self.button, self.pressed, self.drag, self.released)
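# A hedged illustration (not from the original source): the default sequence
# b"\x1b[M\x00!!" decodes to a left-button press at (0, 0), since each
# coordinate byte is offset by 32 + 1; b"\x1b[M #$" decodes to a left-button
# press at x=2, y=3.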
class ClickEvent(Event):
""" Event fired on itself by the Widget class when it decides it has been clicked on. """
def __init__(self, source = None):
super(ClickEvent, self).__init__("click", source = source)
class ValueEvent(Event):
""" Event fired by ValueWidget when its value changes. """
def __init__(self, new, old = None, source = None):
super(ValueEvent, self).__init__("value", source = source)
self.new = new
self.old = old
def matches(self, new = None, old = None):
return (new is None or new == self.new) and \
(old is None or old == self.old)
class PressEvent(Event):
""" Event fired on itself by button when pressed. """
def __init__(self, source = None):
super(PressEvent, self).__init__("press", source = source)
| mit | -3,843,865,033,580,623,400 | 31.603399 | 102 | 0.517856 | false |
VenturaDelMonte/staticwebanalyzer | SDK/mechanize-0.2.5/test/test_cookies.py | 22 | 76923 | """Tests for _clientcookie."""
import StringIO
import errno
import inspect
import mimetools
import os
import re
import sys
import tempfile
import time
import unittest
import mechanize
from mechanize._util import hide_experimental_warnings, \
reset_experimental_warnings
from mechanize import Request
class FakeResponse:
def __init__(self, headers=[], url=None):
"""
headers: list of RFC822-style 'Key: value' strings
"""
f = StringIO.StringIO("\n".join(headers))
self._headers = mimetools.Message(f)
self._url = url
def info(self): return self._headers
def interact_2965(cookiejar, url, *set_cookie_hdrs):
return _interact(cookiejar, url, set_cookie_hdrs, "Set-Cookie2")
def interact_netscape(cookiejar, url, *set_cookie_hdrs):
return _interact(cookiejar, url, set_cookie_hdrs, "Set-Cookie")
def _interact(cookiejar, url, set_cookie_hdrs, hdr_name):
"""Perform a single request / response cycle, returning Cookie: header."""
req = Request(url)
cookiejar.add_cookie_header(req)
cookie_hdr = req.get_header("Cookie", "")
headers = []
for hdr in set_cookie_hdrs:
headers.append("%s: %s" % (hdr_name, hdr))
res = FakeResponse(headers, url)
cookiejar.extract_cookies(res, req)
return cookie_hdr
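# A hedged illustration (not part of the original suite) of the helpers
# above: each call simulates one request/response cycle, so
#   interact_netscape(cj, "http://www.acme.com/", 'spam=eggs')
# stores the cookie in cj and returns the Cookie header sent with the
# request (empty on the first call), while a later
#   interact_netscape(cj, "http://www.acme.com/")
# returns "spam=eggs".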
class TempfileTestMixin:
def setUp(self):
self._tempfiles = []
def tearDown(self):
for fn in self._tempfiles:
try:
os.remove(fn)
except IOError, exc:
if exc.errno != errno.ENOENT:
raise
def mktemp(self):
fn = tempfile.mktemp()
self._tempfiles.append(fn)
return fn
def caller():
return sys._getframe().f_back.f_back.f_code.co_name
def attribute_names(obj):
return set([spec[0] for spec in inspect.getmembers(obj)
if not spec[0].startswith("__")])
class CookieJarInterfaceTests(unittest.TestCase):
def test_add_cookie_header(self):
from mechanize import CookieJar
# verify only these methods are used
class MockRequest(object):
def __init__(self):
self.added_headers = []
self.called = set()
def log_called(self):
self.called.add(caller())
def get_full_url(self):
self.log_called()
return "https://example.com:443"
def get_host(self):
self.log_called()
return "example.com:443"
def get_type(self):
self.log_called()
return "https"
def has_header(self, header_name):
self.log_called()
return False
def get_header(self, header_name, default=None):
self.log_called()
pass # currently not called
def header_items(self):
self.log_called()
pass # currently not called
def add_unredirected_header(self, key, val):
self.log_called()
self.added_headers.append((key, val))
def is_unverifiable(self):
self.log_called()
return False
jar = CookieJar()
interact_netscape(jar, "https://example.com:443",
"foo=bar; port=443; secure")
request = MockRequest()
jar.add_cookie_header(request)
expect_called = attribute_names(MockRequest) - set(
["port", "get_header", "header_items", "log_called"])
self.assertEquals(request.called, expect_called)
self.assertEquals(request.added_headers, [("Cookie", "foo=bar")])
def test_extract_cookies(self):
from mechanize import CookieJar
# verify only these methods are used
class StubMessage(object):
def getheaders(self, name):
return ["foo=bar; port=443"]
class StubResponse(object):
def info(self):
return StubMessage()
class StubRequest(object):
def __init__(self):
self.added_headers = []
self.called = set()
def log_called(self):
self.called.add(caller())
def get_full_url(self):
self.log_called()
return "https://example.com:443"
def get_host(self):
self.log_called()
return "example.com:443"
def is_unverifiable(self):
self.log_called()
return False
jar = CookieJar()
response = StubResponse()
request = StubRequest()
jar.extract_cookies(response, request)
expect_called = attribute_names(StubRequest) - set(
["port", "log_called"])
self.assertEquals(request.called, expect_called)
self.assertEquals([(cookie.name, cookie.value) for cookie in jar],
[("foo", "bar")])
def test_unverifiable(self):
from mechanize._clientcookie import request_is_unverifiable
# .unverifiable was added in mechanize, .is_unverifiable() later got
# added in cookielib. XXX deprecate .unverifiable
class StubRequest(object):
def __init__(self, attrs):
self._attrs = attrs
self.accessed = set()
def __getattr__(self, name):
self.accessed.add(name)
try:
return self._attrs[name]
except KeyError:
raise AttributeError(name)
request = StubRequest(dict(is_unverifiable=lambda: False))
self.assertEquals(request_is_unverifiable(request), False)
request = StubRequest(dict(is_unverifiable=lambda: False,
unverifiable=True))
self.assertEquals(request_is_unverifiable(request), False)
request = StubRequest(dict(unverifiable=False))
self.assertEquals(request_is_unverifiable(request), False)
class CookieTests(unittest.TestCase):
# XXX
# Get rid of string comparisons where not actually testing str / repr.
# .clear() etc.
# IP addresses like 50 (single number, no dot) and domain-matching
# functions (and is_HDN)? See draft RFC 2965 errata.
# Strictness switches
# is_third_party()
# unverifiability / third_party blocking
# Netscape cookies work the same as RFC 2965 with regard to port.
# Set-Cookie with negative max age.
    # If RFC 2965 handling is turned off, Set-Cookie2 cookies should not clobber
# Set-Cookie cookies.
# Cookie2 should be sent if *any* cookies are not V1 (ie. V0 OR V2 etc.).
# Cookies (V1 and V0) with no expiry date should be set to be discarded.
# RFC 2965 Quoting:
# Should accept unquoted cookie-attribute values? check errata draft.
# Which are required on the way in and out?
# Should always return quoted cookie-attribute values?
# Proper testing of when RFC 2965 clobbers Netscape (waiting for errata).
# Path-match on return (same for V0 and V1).
# RFC 2965 acceptance and returning rules
# Set-Cookie2 without version attribute is rejected.
# Netscape peculiarities list from Ronald Tschalar.
# The first two still need tests, the rest are covered.
## - Quoting: only quotes around the expires value are recognized as such
## (and yes, some folks quote the expires value); quotes around any other
## value are treated as part of the value.
## - White space: white space around names and values is ignored
## - Default path: if no path parameter is given, the path defaults to the
## path in the request-uri up to, but not including, the last '/'. Note
## that this is entirely different from what the spec says.
## - Commas and other delimiters: Netscape just parses until the next ';'.
## This means it will allow commas etc inside values (and yes, both
##   commas and equals signs commonly appear in cookie values). This also
## means that if you fold multiple Set-Cookie header fields into one,
## comma-separated list, it'll be a headache to parse (at least my head
##   starts hurting every time I think of that code).
## - Expires: You'll get all sorts of date formats in the expires,
##   including empty expires attributes ("expires="). Be as flexible as you
## can, and certainly don't expect the weekday to be there; if you can't
## parse it, just ignore it and pretend it's a session cookie.
## - Domain-matching: Netscape uses the 2-dot rule for _all_ domains, not
##   just the 7 special TLDs listed in their spec. And folks rely on
## that...
def test_policy(self):
import mechanize
policy = mechanize.DefaultCookiePolicy()
jar = mechanize.CookieJar()
jar.set_policy(policy)
self.assertEquals(jar.get_policy(), policy)
def test_make_cookies_doesnt_change_jar_state(self):
from mechanize import CookieJar, Request, Cookie
from mechanize._util import time2netscape
from mechanize._response import test_response
cookie = Cookie(0, "spam", "eggs",
"80", False,
"example.com", False, False,
"/", False,
False,
None,
False,
"",
"",
{})
jar = CookieJar()
jar._policy._now = jar._now = int(time.time())
jar.set_cookie(cookie)
self.assertEquals(len(jar), 1)
set_cookie = "spam=eggs; expires=%s" % time2netscape(time.time()- 1000)
url = "http://example.com/"
response = test_response(url=url, headers=[("Set-Cookie", set_cookie)])
jar.make_cookies(response, Request(url))
self.assertEquals(len(jar), 1)
def test_domain_return_ok(self):
# test optimization: .domain_return_ok() should filter out most
# domains in the CookieJar before we try to access them (because that
# may require disk access -- in particular, with MSIECookieJar)
# This is only a rough check for performance reasons, so it's not too
# critical as long as it's sufficiently liberal.
import mechanize
pol = mechanize.DefaultCookiePolicy()
for url, domain, ok in [
("http://foo.bar.com/", "blah.com", False),
("http://foo.bar.com/", "rhubarb.blah.com", False),
("http://foo.bar.com/", "rhubarb.foo.bar.com", False),
("http://foo.bar.com/", ".foo.bar.com", True),
("http://foo.bar.com/", "foo.bar.com", True),
("http://foo.bar.com/", ".bar.com", True),
("http://foo.bar.com/", "com", True),
("http://foo.com/", "rhubarb.foo.com", False),
("http://foo.com/", ".foo.com", True),
("http://foo.com/", "foo.com", True),
("http://foo.com/", "com", True),
("http://foo/", "rhubarb.foo", False),
("http://foo/", ".foo", True),
("http://foo/", "foo", True),
("http://foo/", "foo.local", True),
("http://foo/", ".local", True),
]:
request = mechanize.Request(url)
r = pol.domain_return_ok(domain, request)
if ok: self.assert_(r)
else: self.assert_(not r)
def test_missing_name(self):
from mechanize import MozillaCookieJar, lwp_cookie_str
# missing = sign in Cookie: header is regarded by Mozilla as a missing
# NAME. WE regard it as a missing VALUE.
filename = tempfile.mktemp()
c = MozillaCookieJar(filename)
interact_netscape(c, "http://www.acme.com/", 'eggs')
interact_netscape(c, "http://www.acme.com/", '"spam"; path=/foo/')
cookie = c._cookies["www.acme.com"]["/"]['eggs']
assert cookie.name == "eggs"
assert cookie.value is None
cookie = c._cookies["www.acme.com"]['/foo/']['"spam"']
assert cookie.name == '"spam"'
assert cookie.value is None
assert lwp_cookie_str(cookie) == (
r'"spam"; path="/foo/"; domain="www.acme.com"; '
'path_spec; discard; version=0')
old_str = repr(c)
c.save(ignore_expires=True, ignore_discard=True)
try:
c = MozillaCookieJar(filename)
c.revert(ignore_expires=True, ignore_discard=True)
finally:
os.unlink(c.filename)
# cookies unchanged apart from lost info re. whether path was specified
assert repr(c) == \
re.sub("path_specified=%s" % True, "path_specified=%s" % False,
old_str)
assert interact_netscape(c, "http://www.acme.com/foo/") == \
'"spam"; eggs'
def test_rfc2109_handling(self):
# 2109 cookies have rfc2109 attr set correctly, and are handled
# as 2965 or Netscape cookies depending on policy settings
from mechanize import CookieJar, DefaultCookiePolicy
for policy, version in [
(DefaultCookiePolicy(), 0),
(DefaultCookiePolicy(rfc2965=True), 1),
(DefaultCookiePolicy(rfc2109_as_netscape=True), 0),
(DefaultCookiePolicy(rfc2965=True, rfc2109_as_netscape=True), 0),
]:
c = CookieJar(policy)
interact_netscape(c, "http://www.example.com/", "ni=ni; Version=1")
cookie = c._cookies["www.example.com"]["/"]["ni"]
self.assert_(cookie.rfc2109)
self.assertEqual(cookie.version, version)
def test_ns_parser(self):
from mechanize import CookieJar
from mechanize._clientcookie import DEFAULT_HTTP_PORT
c = CookieJar()
interact_netscape(c, "http://www.acme.com/",
'spam=eggs; DoMain=.acme.com; port; blArgh="feep"')
interact_netscape(c, "http://www.acme.com/", 'ni=ni; port=80,8080')
interact_netscape(c, "http://www.acme.com:80/", 'nini=ni')
interact_netscape(c, "http://www.acme.com:80/", 'foo=bar; expires=')
interact_netscape(c, "http://www.acme.com:80/", 'spam=eggs; '
'expires="Foo Bar 25 33:22:11 3022"')
cookie = c._cookies[".acme.com"]["/"]["spam"]
assert cookie.domain == ".acme.com"
assert cookie.domain_specified
assert cookie.port == DEFAULT_HTTP_PORT
assert not cookie.port_specified
# case is preserved
assert (cookie.has_nonstandard_attr("blArgh") and
not cookie.has_nonstandard_attr("blargh"))
cookie = c._cookies["www.acme.com"]["/"]["ni"]
assert cookie.domain == "www.acme.com"
assert not cookie.domain_specified
assert cookie.port == "80,8080"
assert cookie.port_specified
cookie = c._cookies["www.acme.com"]["/"]["nini"]
assert cookie.port is None
assert not cookie.port_specified
# invalid expires should not cause cookie to be dropped
foo = c._cookies["www.acme.com"]["/"]["foo"]
        spam = c._cookies["www.acme.com"]["/"]["spam"]
assert foo.expires is None
assert spam.expires is None
def test_ns_parser_special_names(self):
# names such as 'expires' are not special in first name=value pair
# of Set-Cookie: header
from mechanize import CookieJar
c = CookieJar()
interact_netscape(c, "http://www.acme.com/", 'expires=eggs')
interact_netscape(c, "http://www.acme.com/", 'version=eggs; spam=eggs')
cookies = c._cookies["www.acme.com"]["/"]
self.assert_(cookies.has_key('expires'))
self.assert_(cookies.has_key('version'))
def test_expires(self):
from mechanize._util import time2netscape
from mechanize import CookieJar
# if expires is in future, keep cookie...
c = CookieJar()
future = time2netscape(time.time()+3600)
interact_netscape(c, "http://www.acme.com/", 'spam="bar"; expires=%s' %
future)
assert len(c) == 1
now = time2netscape(time.time()-1)
# ... and if in past or present, discard it
interact_netscape(c, "http://www.acme.com/", 'foo="eggs"; expires=%s' %
now)
h = interact_netscape(c, "http://www.acme.com/")
assert len(c) == 1
assert h.find('spam="bar"') != -1 and h.find("foo") == -1
# max-age takes precedence over expires, and zero max-age is request to
# delete both new cookie and any old matching cookie
interact_netscape(c, "http://www.acme.com/", 'eggs="bar"; expires=%s' %
future)
interact_netscape(c, "http://www.acme.com/", 'bar="bar"; expires=%s' %
future)
assert len(c) == 3
interact_netscape(c, "http://www.acme.com/", 'eggs="bar"; '
'expires=%s; max-age=0' % future)
interact_netscape(c, "http://www.acme.com/", 'bar="bar"; '
'max-age=0; expires=%s' % future)
h = interact_netscape(c, "http://www.acme.com/")
assert len(c) == 1
# test expiry at end of session for cookies with no expires attribute
interact_netscape(c, "http://www.rhubarb.net/", 'whum="fizz"')
assert len(c) == 2
c.clear_session_cookies()
assert len(c) == 1
assert h.find('spam="bar"') != -1
# XXX RFC 2965 expiry rules (some apply to V0 too)
def test_default_path(self):
from mechanize import CookieJar, DefaultCookiePolicy
# RFC 2965
pol = DefaultCookiePolicy(rfc2965=True)
c = CookieJar(pol)
interact_2965(c, "http://www.acme.com/", 'spam="bar"; Version="1"')
assert c._cookies["www.acme.com"].has_key("/")
c = CookieJar(pol)
interact_2965(c, "http://www.acme.com/blah", 'eggs="bar"; Version="1"')
assert c._cookies["www.acme.com"].has_key("/")
c = CookieJar(pol)
interact_2965(c, "http://www.acme.com/blah/rhubarb",
'eggs="bar"; Version="1"')
assert c._cookies["www.acme.com"].has_key("/blah/")
c = CookieJar(pol)
interact_2965(c, "http://www.acme.com/blah/rhubarb/",
'eggs="bar"; Version="1"')
assert c._cookies["www.acme.com"].has_key("/blah/rhubarb/")
# Netscape
c = CookieJar()
interact_netscape(c, "http://www.acme.com/", 'spam="bar"')
assert c._cookies["www.acme.com"].has_key("/")
c = CookieJar()
interact_netscape(c, "http://www.acme.com/blah", 'eggs="bar"')
assert c._cookies["www.acme.com"].has_key("/")
c = CookieJar()
interact_netscape(c, "http://www.acme.com/blah/rhubarb", 'eggs="bar"')
assert c._cookies["www.acme.com"].has_key("/blah")
c = CookieJar()
interact_netscape(c, "http://www.acme.com/blah/rhubarb/", 'eggs="bar"')
assert c._cookies["www.acme.com"].has_key("/blah/rhubarb")
def test_default_path_with_query(self):
cj = mechanize.CookieJar()
uri = "http://example.com/?spam/eggs"
value = 'eggs="bar"'
interact_netscape(cj, uri, value)
# default path does not include query, so is "/", not "/?spam"
self.assertIn("/", cj._cookies["example.com"])
# cookie is sent back to the same URI
self.assertEqual(interact_netscape(cj, uri), value)
def test_escape_path(self):
from mechanize._clientcookie import escape_path
cases = [
# quoted safe
("/foo%2f/bar", "/foo%2F/bar"),
("/foo%2F/bar", "/foo%2F/bar"),
# quoted %
("/foo%%/bar", "/foo%%/bar"),
# quoted unsafe
("/fo%19o/bar", "/fo%19o/bar"),
("/fo%7do/bar", "/fo%7Do/bar"),
# unquoted safe
("/foo/bar&", "/foo/bar&"),
("/foo//bar", "/foo//bar"),
("\176/foo/bar", "\176/foo/bar"),
# unquoted unsafe
("/foo\031/bar", "/foo%19/bar"),
("/\175foo/bar", "/%7Dfoo/bar"),
# unicode
(u"/foo/bar\uabcd", "/foo/bar%EA%AF%8D"), # UTF-8 encoded
]
for arg, result in cases:
self.assert_(escape_path(arg) == result)
def test_request_path(self):
from mechanize._clientcookie import request_path
# with parameters
req = Request("http://www.example.com/rheum/rhaponticum;"
"foo=bar;sing=song?apples=pears&spam=eggs#ni")
self.assertEquals(request_path(req),
"/rheum/rhaponticum;foo=bar;sing=song")
# without parameters
req = Request("http://www.example.com/rheum/rhaponticum?"
"apples=pears&spam=eggs#ni")
self.assertEquals(request_path(req), "/rheum/rhaponticum")
# missing final slash
req = Request("http://www.example.com")
self.assert_(request_path(req) == "/")
def test_request_port(self):
from mechanize._clientcookie import request_port, DEFAULT_HTTP_PORT
req = Request("http://www.acme.com:1234/",
headers={"Host": "www.acme.com:4321"})
assert request_port(req) == "1234"
req = Request("http://www.acme.com/",
headers={"Host": "www.acme.com:4321"})
assert request_port(req) == DEFAULT_HTTP_PORT
def test_request_host_lc(self):
from mechanize._clientcookie import request_host_lc
# this request is illegal (RFC2616, 14.2.3)
req = Request("http://1.1.1.1/",
headers={"Host": "www.acme.com:80"})
# libwww-perl wants this response, but that seems wrong (RFC 2616,
# section 5.2, point 1., and RFC 2965 section 1, paragraph 3)
#assert request_host_lc(req) == "www.acme.com"
assert request_host_lc(req) == "1.1.1.1"
req = Request("http://www.acme.com/",
headers={"Host": "irrelevant.com"})
assert request_host_lc(req) == "www.acme.com"
        # not actually sure this one is a valid Request object; maybe we should
        # remove the handling for a missing host in the url from the
        # request_host_lc function?
req = Request("/resource.html",
headers={"Host": "www.acme.com"})
assert request_host_lc(req) == "www.acme.com"
# port shouldn't be in request-host
req = Request("http://www.acme.com:2345/resource.html",
headers={"Host": "www.acme.com:5432"})
assert request_host_lc(req) == "www.acme.com"
# the _lc function lower-cases the result
req = Request("http://EXAMPLE.com")
assert request_host_lc(req) == "example.com"
def test_effective_request_host(self):
from mechanize import effective_request_host
self.assertEquals(
effective_request_host(Request("http://www.EXAMPLE.com/spam")),
"www.EXAMPLE.com")
self.assertEquals(
effective_request_host(Request("http://bob/spam")),
"bob.local")
def test_is_HDN(self):
from mechanize._clientcookie import is_HDN
assert is_HDN("foo.bar.com")
assert is_HDN("1foo2.3bar4.5com")
assert not is_HDN("192.168.1.1")
assert not is_HDN("")
assert not is_HDN(".")
assert not is_HDN(".foo.bar.com")
assert not is_HDN("..foo")
assert not is_HDN("foo.")
def test_reach(self):
from mechanize._clientcookie import reach
assert reach("www.acme.com") == ".acme.com"
assert reach("acme.com") == "acme.com"
assert reach("acme.local") == ".local"
assert reach(".local") == ".local"
assert reach(".com") == ".com"
assert reach(".") == "."
assert reach("") == ""
assert reach("192.168.0.1") == "192.168.0.1"
def test_domain_match(self):
from mechanize._clientcookie import domain_match, user_domain_match
assert domain_match("192.168.1.1", "192.168.1.1")
assert not domain_match("192.168.1.1", ".168.1.1")
assert domain_match("x.y.com", "x.Y.com")
assert domain_match("x.y.com", ".Y.com")
assert not domain_match("x.y.com", "Y.com")
assert domain_match("a.b.c.com", ".c.com")
assert not domain_match(".c.com", "a.b.c.com")
assert domain_match("example.local", ".local")
assert not domain_match("blah.blah", "")
assert not domain_match("", ".rhubarb.rhubarb")
assert domain_match("", "")
assert user_domain_match("acme.com", "acme.com")
assert not user_domain_match("acme.com", ".acme.com")
assert user_domain_match("rhubarb.acme.com", ".acme.com")
assert user_domain_match("www.rhubarb.acme.com", ".acme.com")
assert user_domain_match("x.y.com", "x.Y.com")
assert user_domain_match("x.y.com", ".Y.com")
assert not user_domain_match("x.y.com", "Y.com")
assert user_domain_match("y.com", "Y.com")
assert not user_domain_match(".y.com", "Y.com")
assert user_domain_match(".y.com", ".Y.com")
assert user_domain_match("x.y.com", ".com")
assert not user_domain_match("x.y.com", "com")
assert not user_domain_match("x.y.com", "m")
assert not user_domain_match("x.y.com", ".m")
assert not user_domain_match("x.y.com", "")
assert not user_domain_match("x.y.com", ".")
assert user_domain_match("192.168.1.1", "192.168.1.1")
# not both HDNs, so must string-compare equal to match
assert not user_domain_match("192.168.1.1", ".168.1.1")
assert not user_domain_match("192.168.1.1", ".")
# empty string is a special case
assert not user_domain_match("192.168.1.1", "")
def test_wrong_domain(self):
"""Cookies whose ERH does not domain-match the domain are rejected.
ERH = effective request-host.
"""
# XXX far from complete
from mechanize import CookieJar
c = CookieJar()
interact_2965(c, "http://www.nasty.com/", 'foo=bar; domain=friendly.org; Version="1"')
assert len(c) == 0
def test_strict_domain(self):
# Cookies whose domain is a country-code tld like .co.uk should
# not be set if CookiePolicy.strict_domain is true.
from mechanize import CookieJar, DefaultCookiePolicy
cp = DefaultCookiePolicy(strict_domain=True)
cj = CookieJar(policy=cp)
interact_netscape(cj, "http://example.co.uk/", 'no=problemo')
interact_netscape(cj, "http://example.co.uk/",
'okey=dokey; Domain=.example.co.uk')
self.assertEquals(len(cj), 2)
for pseudo_tld in [".co.uk", ".org.za", ".tx.us", ".name.us"]:
interact_netscape(cj, "http://example.%s/" % pseudo_tld,
'spam=eggs; Domain=.co.uk')
self.assertEquals(len(cj), 2)
# XXXX This should be compared with the Konqueror (kcookiejar.cpp) and
# Mozilla implementations.
def test_two_component_domain_ns(self):
# Netscape: .www.bar.com, www.bar.com, .bar.com, bar.com, no domain should
# all get accepted, as should .acme.com, acme.com and no domain for
# 2-component domains like acme.com.
from mechanize import CookieJar, DefaultCookiePolicy
c = CookieJar()
# two-component V0 domain is OK
interact_netscape(c, "http://foo.net/", 'ns=bar')
assert len(c) == 1
assert c._cookies["foo.net"]["/"]["ns"].value == "bar"
assert interact_netscape(c, "http://foo.net/") == "ns=bar"
# *will* be returned to any other domain (unlike RFC 2965)...
assert interact_netscape(c, "http://www.foo.net/") == "ns=bar"
# ...unless requested otherwise
pol = DefaultCookiePolicy(
strict_ns_domain=DefaultCookiePolicy.DomainStrictNonDomain)
c.set_policy(pol)
assert interact_netscape(c, "http://www.foo.net/") == ""
# unlike RFC 2965, even explicit two-component domain is OK,
# because .foo.net matches foo.net
interact_netscape(c, "http://foo.net/foo/",
'spam1=eggs; domain=foo.net')
# even if starts with a dot -- in NS rules, .foo.net matches foo.net!
interact_netscape(c, "http://foo.net/foo/bar/",
'spam2=eggs; domain=.foo.net')
assert len(c) == 3
assert c._cookies[".foo.net"]["/foo"]["spam1"].value == "eggs"
assert c._cookies[".foo.net"]["/foo/bar"]["spam2"].value == "eggs"
assert interact_netscape(c, "http://foo.net/foo/bar/") == \
"spam2=eggs; spam1=eggs; ns=bar"
# top-level domain is too general
interact_netscape(c, "http://foo.net/", 'nini="ni"; domain=.net')
assert len(c) == 3
## # Netscape protocol doesn't allow non-special top level domains (such
## # as co.uk) in the domain attribute unless there are at least three
## # dots in it.
# Oh yes it does! Real implementations don't check this, and real
# cookies (of course) rely on that behaviour.
interact_netscape(c, "http://foo.co.uk", 'nasty=trick; domain=.co.uk')
## assert len(c) == 2
assert len(c) == 4
def test_two_component_domain_rfc2965(self):
from mechanize import CookieJar, DefaultCookiePolicy
pol = DefaultCookiePolicy(rfc2965=True)
c = CookieJar(pol)
# two-component V1 domain is OK
interact_2965(c, "http://foo.net/", 'foo=bar; Version="1"')
assert len(c) == 1
assert c._cookies["foo.net"]["/"]["foo"].value == "bar"
assert interact_2965(c, "http://foo.net/") == "$Version=1; foo=bar"
# won't be returned to any other domain (because domain was implied)
assert interact_2965(c, "http://www.foo.net/") == ""
# unless domain is given explicitly, because then it must be
# rewritten to start with a dot: foo.net --> .foo.net, which does
# not domain-match foo.net
interact_2965(c, "http://foo.net/foo",
'spam=eggs; domain=foo.net; path=/foo; Version="1"')
assert len(c) == 1
assert interact_2965(c, "http://foo.net/foo") == "$Version=1; foo=bar"
# explicit foo.net from three-component domain www.foo.net *does* get
# set, because .foo.net domain-matches .foo.net
interact_2965(c, "http://www.foo.net/foo/",
'spam=eggs; domain=foo.net; Version="1"')
assert c._cookies[".foo.net"]["/foo/"]["spam"].value == "eggs"
assert len(c) == 2
assert interact_2965(c, "http://foo.net/foo/") == "$Version=1; foo=bar"
assert interact_2965(c, "http://www.foo.net/foo/") == \
'$Version=1; spam=eggs; $Domain="foo.net"'
# top-level domain is too general
interact_2965(c, "http://foo.net/",
'ni="ni"; domain=".net"; Version="1"')
assert len(c) == 2
# RFC 2965 doesn't require blocking this
interact_2965(c, "http://foo.co.uk/",
'nasty=trick; domain=.co.uk; Version="1"')
assert len(c) == 3
def test_domain_allow(self):
from mechanize import CookieJar, DefaultCookiePolicy
c = CookieJar(policy=DefaultCookiePolicy(
blocked_domains=["acme.com"],
allowed_domains=["www.acme.com"]))
req = Request("http://acme.com/")
headers = ["Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/"]
res = FakeResponse(headers, "http://acme.com/")
c.extract_cookies(res, req)
assert len(c) == 0
req = Request("http://www.acme.com/")
res = FakeResponse(headers, "http://www.acme.com/")
c.extract_cookies(res, req)
assert len(c) == 1
req = Request("http://www.coyote.com/")
res = FakeResponse(headers, "http://www.coyote.com/")
c.extract_cookies(res, req)
assert len(c) == 1
# set a cookie with non-allowed domain...
req = Request("http://www.coyote.com/")
res = FakeResponse(headers, "http://www.coyote.com/")
cookies = c.make_cookies(res, req)
c.set_cookie(cookies[0])
assert len(c) == 2
# ... and check is doesn't get returned
c.add_cookie_header(req)
assert not req.has_header("Cookie")
def test_domain_block(self):
from mechanize import CookieJar, DefaultCookiePolicy
#import logging; logging.getLogger("mechanize").setLevel(logging.DEBUG)
pol = DefaultCookiePolicy(
rfc2965=True, blocked_domains=[".acme.com"])
c = CookieJar(policy=pol)
headers = ["Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/"]
req = Request("http://www.acme.com/")
res = FakeResponse(headers, "http://www.acme.com/")
c.extract_cookies(res, req)
assert len(c) == 0
pol.set_blocked_domains(["acme.com"])
c.extract_cookies(res, req)
assert len(c) == 1
c.clear()
req = Request("http://www.roadrunner.net/")
res = FakeResponse(headers, "http://www.roadrunner.net/")
c.extract_cookies(res, req)
assert len(c) == 1
req = Request("http://www.roadrunner.net/")
c.add_cookie_header(req)
assert (req.has_header("Cookie") and
req.has_header("Cookie2"))
c.clear()
pol.set_blocked_domains([".acme.com"])
c.extract_cookies(res, req)
assert len(c) == 1
# set a cookie with blocked domain...
req = Request("http://www.acme.com/")
res = FakeResponse(headers, "http://www.acme.com/")
cookies = c.make_cookies(res, req)
c.set_cookie(cookies[0])
assert len(c) == 2
# ... and check it doesn't get returned
c.add_cookie_header(req)
assert not req.has_header("Cookie")
def test_secure(self):
from mechanize import CookieJar, DefaultCookiePolicy
for ns in True, False:
for whitespace in " ", "":
c = CookieJar()
if ns:
pol = DefaultCookiePolicy(rfc2965=False)
int = interact_netscape
vs = ""
else:
pol = DefaultCookiePolicy(rfc2965=True)
int = interact_2965
vs = "; Version=1"
c.set_policy(pol)
url = "http://www.acme.com/"
int(c, url, "foo1=bar%s%s" % (vs, whitespace))
int(c, url, "foo2=bar%s; secure%s" % (vs, whitespace))
assert not c._cookies["www.acme.com"]["/"]["foo1"].secure, \
"non-secure cookie registered secure"
assert c._cookies["www.acme.com"]["/"]["foo2"].secure, \
"secure cookie registered non-secure"
def test_quote_cookie_value(self):
from mechanize import CookieJar, DefaultCookiePolicy
c = CookieJar(policy=DefaultCookiePolicy(rfc2965=True))
interact_2965(c, "http://www.acme.com/", r'foo=\b"a"r; Version=1')
h = interact_2965(c, "http://www.acme.com/")
assert h == r'$Version=1; foo=\\b\"a\"r'
def test_missing_final_slash(self):
# Missing slash from request URL's abs_path should be assumed present.
from mechanize import CookieJar, Request, DefaultCookiePolicy
url = "http://www.acme.com"
c = CookieJar(DefaultCookiePolicy(rfc2965=True))
interact_2965(c, url, "foo=bar; Version=1")
req = Request(url)
assert len(c) == 1
c.add_cookie_header(req)
assert req.has_header("Cookie")
def test_domain_mirror(self):
from mechanize import CookieJar, DefaultCookiePolicy
pol = DefaultCookiePolicy(rfc2965=True)
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, "spam=eggs; Version=1")
h = interact_2965(c, url)
assert h.find( "Domain") == -1, \
"absent domain returned with domain present"
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, 'spam=eggs; Version=1; Domain=.bar.com')
h = interact_2965(c, url)
assert h.find('$Domain=".bar.com"') != -1, \
"domain not returned"
c = CookieJar(pol)
url = "http://foo.bar.com/"
# note missing initial dot in Domain
interact_2965(c, url, 'spam=eggs; Version=1; Domain=bar.com')
h = interact_2965(c, url)
assert h.find('$Domain="bar.com"') != -1, \
"domain not returned"
def test_path_mirror(self):
from mechanize import CookieJar, DefaultCookiePolicy
pol = DefaultCookiePolicy(rfc2965=True)
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, "spam=eggs; Version=1")
h = interact_2965(c, url)
assert h.find("Path") == -1, \
"absent path returned with path present"
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, 'spam=eggs; Version=1; Path=/')
h = interact_2965(c, url)
assert h.find('$Path="/"') != -1, "path not returned"
def test_port_mirror(self):
from mechanize import CookieJar, DefaultCookiePolicy
pol = DefaultCookiePolicy(rfc2965=True)
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, "spam=eggs; Version=1")
h = interact_2965(c, url)
assert h.find("Port") == -1, \
"absent port returned with port present"
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, "spam=eggs; Version=1; Port")
h = interact_2965(c, url)
assert re.search("\$Port([^=]|$)", h), \
"port with no value not returned with no value"
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, 'spam=eggs; Version=1; Port="80"')
h = interact_2965(c, url)
assert h.find('$Port="80"') != -1, \
"port with single value not returned with single value"
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, 'spam=eggs; Version=1; Port="80,8080"')
h = interact_2965(c, url)
assert h.find('$Port="80,8080"') != -1, \
"port with multiple values not returned with multiple values"
def test_no_return_comment(self):
from mechanize import CookieJar, DefaultCookiePolicy
c = CookieJar(DefaultCookiePolicy(rfc2965=True))
url = "http://foo.bar.com/"
interact_2965(c, url, 'spam=eggs; Version=1; '
'Comment="does anybody read these?"; '
'CommentURL="http://foo.bar.net/comment.html"')
h = interact_2965(c, url)
assert h.find("Comment") == -1, \
"Comment or CommentURL cookie-attributes returned to server"
# just pondering security here -- this isn't really a test (yet)
## def test_hack(self):
## from mechanize import CookieJar
## c = CookieJar()
## interact_netscape(c, "http://victim.mall.com/",
## 'prefs="foo"')
## interact_netscape(c, "http://cracker.mall.com/",
## 'prefs="bar"; Domain=.mall.com')
## interact_netscape(c, "http://cracker.mall.com/",
## '$Version="1"; Domain=.mall.com')
## h = interact_netscape(c, "http://victim.mall.com/")
## print h
def test_Cookie_iterator(self):
from mechanize import CookieJar, Cookie, DefaultCookiePolicy
cs = CookieJar(DefaultCookiePolicy(rfc2965=True))
# add some random cookies
interact_2965(cs, "http://blah.spam.org/", 'foo=eggs; Version=1; '
'Comment="does anybody read these?"; '
'CommentURL="http://foo.bar.net/comment.html"')
interact_netscape(cs, "http://www.acme.com/blah/", "spam=bar; secure")
interact_2965(cs, "http://www.acme.com/blah/", "foo=bar; secure; Version=1")
interact_2965(cs, "http://www.acme.com/blah/", "foo=bar; path=/; Version=1")
interact_2965(cs, "http://www.sol.no",
r'bang=wallop; version=1; domain=".sol.no"; '
r'port="90,100, 80,8080"; '
r'max-age=100; Comment = "Just kidding! (\"|\\\\) "')
versions = [1, 1, 1, 0, 1]
names = ["bang", "foo", "foo", "spam", "foo"]
domains = [".sol.no", "blah.spam.org", "www.acme.com",
"www.acme.com", "www.acme.com"]
paths = ["/", "/", "/", "/blah", "/blah/"]
# sequential iteration
for i in range(4):
i = 0
for c in cs:
assert isinstance(c, Cookie)
assert c.version == versions[i]
assert c.name == names[i]
assert c.domain == domains[i]
assert c.path == paths[i]
i = i + 1
self.assertRaises(IndexError, lambda cs=cs : cs[5])
# can't skip
cs[0]
cs[1]
self.assertRaises(IndexError, lambda cs=cs : cs[3])
# can't go backwards
cs[0]
cs[1]
cs[2]
self.assertRaises(IndexError, lambda cs=cs : cs[1])
def test_parse_ns_headers(self):
from mechanize._headersutil import parse_ns_headers
# missing domain value (invalid cookie)
assert parse_ns_headers(["foo=bar; path=/; domain"]) == [
[("foo", "bar"),
("path", "/"), ("domain", None), ("version", "0")]]
# invalid expires value
assert parse_ns_headers(
["foo=bar; expires=Foo Bar 12 33:22:11 2000"]) == \
[[("foo", "bar"), ("expires", None), ("version", "0")]]
# missing cookie name (valid cookie)
assert parse_ns_headers(["foo"]) == [[("foo", None), ("version", "0")]]
# shouldn't add version if header is empty
assert parse_ns_headers([""]) == []
def test_bad_cookie_header(self):
def cookiejar_from_cookie_headers(headers):
from mechanize import CookieJar, Request
c = CookieJar()
req = Request("http://www.example.com/")
r = FakeResponse(headers, "http://www.example.com/")
c.extract_cookies(r, req)
return c
# none of these bad headers should cause an exception to be raised
for headers in [
["Set-Cookie: "], # actually, nothing wrong with this
["Set-Cookie2: "], # ditto
# missing domain value
["Set-Cookie2: a=foo; path=/; Version=1; domain"],
# bad max-age
["Set-Cookie: b=foo; max-age=oops"],
# bad version
["Set-Cookie: b=foo; version=spam"],
]:
c = cookiejar_from_cookie_headers(headers)
# these bad cookies shouldn't be set
assert len(c) == 0
# cookie with invalid expires is treated as session cookie
headers = ["Set-Cookie: c=foo; expires=Foo Bar 12 33:22:11 2000"]
c = cookiejar_from_cookie_headers(headers)
cookie = c._cookies["www.example.com"]["/"]["c"]
assert cookie.expires is None
def test_cookies_for_request(self):
from mechanize import CookieJar, Request
cj = CookieJar()
interact_netscape(cj, "http://example.com/", "short=path")
interact_netscape(cj, "http://example.com/longer/path", "longer=path")
for_short_path = cj.cookies_for_request(Request("http://example.com/"))
self.assertEquals([cookie.name for cookie in for_short_path],
["short"])
for_long_path = cj.cookies_for_request(Request(
"http://example.com/longer/path"))
self.assertEquals([cookie.name for cookie in for_long_path],
["longer", "short"])
class CookieJarPersistenceTests(TempfileTestMixin, unittest.TestCase):
def _interact(self, cj):
year_plus_one = time.localtime(time.time())[0] + 1
interact_2965(cj, "http://www.acme.com/",
"foo1=bar; max-age=100; Version=1")
interact_2965(cj, "http://www.acme.com/",
'foo2=bar; port="80"; max-age=100; Discard; Version=1')
interact_2965(cj, "http://www.acme.com/", "foo3=bar; secure; Version=1")
expires = "expires=09-Nov-%d 23:12:40 GMT" % (year_plus_one,)
interact_netscape(cj, "http://www.foo.com/",
"fooa=bar; %s" % expires)
interact_netscape(cj, "http://www.foo.com/",
"foob=bar; Domain=.foo.com; %s" % expires)
interact_netscape(cj, "http://www.foo.com/",
"fooc=bar; Domain=www.foo.com; %s" % expires)
def test_firefox3_cookiejar_restore(self):
try:
from mechanize import Firefox3CookieJar
except ImportError:
pass
else:
from mechanize import DefaultCookiePolicy
filename = self.mktemp()
def create_cookiejar():
hide_experimental_warnings()
try:
cj = Firefox3CookieJar(
filename, policy=DefaultCookiePolicy(rfc2965=True))
finally:
reset_experimental_warnings()
cj.connect()
return cj
cj = create_cookiejar()
self._interact(cj)
self.assertEquals(len(cj), 6)
cj.close()
cj = create_cookiejar()
self.assert_("name='foo1', value='bar'" in repr(cj))
self.assertEquals(len(cj), 4)
def test_firefox3_cookiejar_iteration(self):
try:
from mechanize import Firefox3CookieJar
except ImportError:
pass
else:
from mechanize import DefaultCookiePolicy
filename = self.mktemp()
hide_experimental_warnings()
try:
cj = Firefox3CookieJar(
filename, policy=DefaultCookiePolicy(rfc2965=True))
finally:
reset_experimental_warnings()
cj.connect()
self._interact(cj)
summary = "\n".join([str(cookie) for cookie in cj])
self.assertEquals(summary,
"""\
<Cookie foo2=bar for www.acme.com:80/>
<Cookie foo3=bar for www.acme.com/>
<Cookie foo1=bar for www.acme.com/>
<Cookie fooa=bar for www.foo.com/>
<Cookie foob=bar for .foo.com/>
<Cookie fooc=bar for .www.foo.com/>""")
def test_firefox3_cookiejar_clear(self):
try:
from mechanize import Firefox3CookieJar
except ImportError:
pass
else:
from mechanize import DefaultCookiePolicy
filename = self.mktemp()
hide_experimental_warnings()
try:
cj = Firefox3CookieJar(
filename, policy=DefaultCookiePolicy(rfc2965=True))
finally:
reset_experimental_warnings()
cj.connect()
self._interact(cj)
cj.clear("www.acme.com", "/", "foo2")
def summary(): return "\n".join([str(cookie) for cookie in cj])
self.assertEquals(summary(),
"""\
<Cookie foo3=bar for www.acme.com/>
<Cookie foo1=bar for www.acme.com/>
<Cookie fooa=bar for www.foo.com/>
<Cookie foob=bar for .foo.com/>
<Cookie fooc=bar for .www.foo.com/>""")
cj.clear("www.acme.com")
self.assertEquals(summary(),
"""\
<Cookie fooa=bar for www.foo.com/>
<Cookie foob=bar for .foo.com/>
<Cookie fooc=bar for .www.foo.com/>""")
# if name is given, so must path and domain
self.assertRaises(ValueError, cj.clear, domain=".foo.com",
name="foob")
# nonexistent domain
self.assertRaises(KeyError, cj.clear, domain=".spam.com")
def test_firefox3_cookiejar_add_cookie_header(self):
try:
from mechanize import Firefox3CookieJar
except ImportError:
pass
else:
filename = self.mktemp()
hide_experimental_warnings()
try:
cj = Firefox3CookieJar(filename)
finally:
reset_experimental_warnings()
cj.connect()
# Session cookies (true .discard) and persistent cookies (false
# .discard) are stored differently. Check they both get sent.
year_plus_one = time.localtime(time.time())[0] + 1
expires = "expires=09-Nov-%d 23:12:40 GMT" % (year_plus_one,)
interact_netscape(cj, "http://www.foo.com/", "fooa=bar")
interact_netscape(cj, "http://www.foo.com/",
"foob=bar; %s" % expires)
ca, cb = cj
self.assert_(ca.discard)
self.assertFalse(cb.discard)
request = Request("http://www.foo.com/")
cj.add_cookie_header(request)
self.assertEquals(request.get_header("Cookie"),
"fooa=bar; foob=bar")
def test_mozilla_cookiejar(self):
# Save / load Mozilla/Netscape cookie file format.
from mechanize import MozillaCookieJar, DefaultCookiePolicy
filename = tempfile.mktemp()
c = MozillaCookieJar(filename,
policy=DefaultCookiePolicy(rfc2965=True))
self._interact(c)
def save_and_restore(cj, ignore_discard, filename=filename):
from mechanize import MozillaCookieJar, DefaultCookiePolicy
try:
cj.save(ignore_discard=ignore_discard)
new_c = MozillaCookieJar(filename,
DefaultCookiePolicy(rfc2965=True))
new_c.load(ignore_discard=ignore_discard)
finally:
try: os.unlink(filename)
except OSError: pass
return new_c
new_c = save_and_restore(c, True)
assert len(new_c) == 6 # none discarded
assert repr(new_c).find("name='foo1', value='bar'") != -1
new_c = save_and_restore(c, False)
assert len(new_c) == 4 # 2 of them discarded on save
assert repr(new_c).find("name='foo1', value='bar'") != -1
def test_mozilla_cookiejar_embedded_tab(self):
from mechanize import MozillaCookieJar
filename = tempfile.mktemp()
fh = open(filename, "w")
try:
fh.write(
MozillaCookieJar.header + "\n" +
"a.com\tFALSE\t/\tFALSE\t\tname\tval\tstillthevalue\n"
"a.com\tFALSE\t/\tFALSE\t\tname2\tvalue\n")
fh.close()
cj = MozillaCookieJar(filename)
cj.revert(ignore_discard=True)
cookies = cj._cookies["a.com"]["/"]
self.assertEquals(cookies["name"].value, "val\tstillthevalue")
self.assertEquals(cookies["name2"].value, "value")
finally:
try:
os.remove(filename)
except IOError, exc:
if exc.errno != errno.ENOENT:
raise
def test_mozilla_cookiejar_initial_dot_violation(self):
from mechanize import MozillaCookieJar, LoadError
filename = tempfile.mktemp()
fh = open(filename, "w")
try:
fh.write(
MozillaCookieJar.header + "\n" +
".a.com\tFALSE\t/\tFALSE\t\tname\tvalue\n")
fh.close()
cj = MozillaCookieJar(filename)
self.assertRaises(LoadError, cj.revert, ignore_discard=True)
finally:
try:
os.remove(filename)
except IOError, exc:
if exc.errno != errno.ENOENT:
raise
class LWPCookieTests(unittest.TestCase, TempfileTestMixin):
# Tests taken from libwww-perl, with a few modifications.
def test_netscape_example_1(self):
from mechanize import CookieJar, Request, DefaultCookiePolicy
#-------------------------------------------------------------------
# First we check that it works for the original example at
# http://www.netscape.com/newsref/std/cookie_spec.html
# Client requests a document, and receives in the response:
#
# Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/; expires=Wednesday, 09-Nov-99 23:12:40 GMT
#
# When client requests a URL in path "/" on this server, it sends:
#
# Cookie: CUSTOMER=WILE_E_COYOTE
#
# Client requests a document, and receives in the response:
#
# Set-Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001; path=/
#
# When client requests a URL in path "/" on this server, it sends:
#
# Cookie: CUSTOMER=WILE_E_COYOTE; PART_NUMBER=ROCKET_LAUNCHER_0001
#
# Client receives:
#
# Set-Cookie: SHIPPING=FEDEX; path=/fo
#
# When client requests a URL in path "/" on this server, it sends:
#
# Cookie: CUSTOMER=WILE_E_COYOTE; PART_NUMBER=ROCKET_LAUNCHER_0001
#
# When client requests a URL in path "/foo" on this server, it sends:
#
# Cookie: CUSTOMER=WILE_E_COYOTE; PART_NUMBER=ROCKET_LAUNCHER_0001; SHIPPING=FEDEX
#
# The last Cookie is buggy, because both specifications say that the
# most specific cookie must be sent first. SHIPPING=FEDEX is the
# most specific and should thus be first.
year_plus_one = time.localtime(time.time())[0] + 1
headers = []
c = CookieJar(DefaultCookiePolicy(rfc2965 = True))
#req = Request("http://1.1.1.1/",
# headers={"Host": "www.acme.com:80"})
req = Request("http://www.acme.com:80/",
headers={"Host": "www.acme.com:80"})
headers.append(
"Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/ ; "
"expires=Wednesday, 09-Nov-%d 23:12:40 GMT" % year_plus_one)
res = FakeResponse(headers, "http://www.acme.com/")
c.extract_cookies(res, req)
req = Request("http://www.acme.com/")
c.add_cookie_header(req)
assert (req.get_header("Cookie") == "CUSTOMER=WILE_E_COYOTE" and
req.get_header("Cookie2") == '$Version="1"')
headers.append("Set-Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001; path=/")
res = FakeResponse(headers, "http://www.acme.com/")
c.extract_cookies(res, req)
req = Request("http://www.acme.com/foo/bar")
c.add_cookie_header(req)
h = req.get_header("Cookie")
assert (h.find("PART_NUMBER=ROCKET_LAUNCHER_0001") != -1 and
h.find("CUSTOMER=WILE_E_COYOTE") != -1)
headers.append('Set-Cookie: SHIPPING=FEDEX; path=/foo')
res = FakeResponse(headers, "http://www.acme.com")
c.extract_cookies(res, req)
req = Request("http://www.acme.com/")
c.add_cookie_header(req)
h = req.get_header("Cookie")
assert (h.find("PART_NUMBER=ROCKET_LAUNCHER_0001") != -1 and
h.find("CUSTOMER=WILE_E_COYOTE") != -1 and
not h.find("SHIPPING=FEDEX") != -1)
req = Request("http://www.acme.com/foo/")
c.add_cookie_header(req)
h = req.get_header("Cookie")
assert (h.find("PART_NUMBER=ROCKET_LAUNCHER_0001") != -1 and
h.find("CUSTOMER=WILE_E_COYOTE") != -1 and
h.startswith("SHIPPING=FEDEX;"))
def test_netscape_example_2(self):
from mechanize import CookieJar, Request
# Second Example transaction sequence:
#
# Assume all mappings from above have been cleared.
#
# Client receives:
#
# Set-Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001; path=/
#
# When client requests a URL in path "/" on this server, it sends:
#
# Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001
#
# Client receives:
#
# Set-Cookie: PART_NUMBER=RIDING_ROCKET_0023; path=/ammo
#
# When client requests a URL in path "/ammo" on this server, it sends:
#
# Cookie: PART_NUMBER=RIDING_ROCKET_0023; PART_NUMBER=ROCKET_LAUNCHER_0001
#
# NOTE: There are two name/value pairs named "PART_NUMBER" due to
# the inheritance of the "/" mapping in addition to the "/ammo" mapping.
c = CookieJar()
headers = []
req = Request("http://www.acme.com/")
headers.append("Set-Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001; path=/")
res = FakeResponse(headers, "http://www.acme.com/")
c.extract_cookies(res, req)
req = Request("http://www.acme.com/")
c.add_cookie_header(req)
assert (req.get_header("Cookie") == "PART_NUMBER=ROCKET_LAUNCHER_0001")
headers.append(
"Set-Cookie: PART_NUMBER=RIDING_ROCKET_0023; path=/ammo")
res = FakeResponse(headers, "http://www.acme.com/")
c.extract_cookies(res, req)
req = Request("http://www.acme.com/ammo")
c.add_cookie_header(req)
assert re.search(r"PART_NUMBER=RIDING_ROCKET_0023;\s*"
"PART_NUMBER=ROCKET_LAUNCHER_0001",
req.get_header("Cookie"))
def test_ietf_example_1(self):
from mechanize import CookieJar, DefaultCookiePolicy
#-------------------------------------------------------------------
# Then we test with the examples from draft-ietf-http-state-man-mec-03.txt
#
# 5. EXAMPLES
c = CookieJar(DefaultCookiePolicy(rfc2965=True))
#
# 5.1 Example 1
#
# Most detail of request and response headers has been omitted. Assume
# the user agent has no stored cookies.
#
# 1. User Agent -> Server
#
# POST /acme/login HTTP/1.1
# [form data]
#
# User identifies self via a form.
#
# 2. Server -> User Agent
#
# HTTP/1.1 200 OK
# Set-Cookie2: Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"
#
# Cookie reflects user's identity.
cookie = interact_2965(
c, 'http://www.acme.com/acme/login',
'Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"')
assert not cookie
#
# 3. User Agent -> Server
#
# POST /acme/pickitem HTTP/1.1
# Cookie: $Version="1"; Customer="WILE_E_COYOTE"; $Path="/acme"
# [form data]
#
# User selects an item for ``shopping basket.''
#
# 4. Server -> User Agent
#
# HTTP/1.1 200 OK
# Set-Cookie2: Part_Number="Rocket_Launcher_0001"; Version="1";
# Path="/acme"
#
# Shopping basket contains an item.
cookie = interact_2965(c, 'http://www.acme.com/acme/pickitem',
'Part_Number="Rocket_Launcher_0001"; '
'Version="1"; Path="/acme"');
assert re.search(
r'^\$Version="?1"?; Customer="?WILE_E_COYOTE"?; \$Path="/acme"$',
cookie)
#
# 5. User Agent -> Server
#
# POST /acme/shipping HTTP/1.1
# Cookie: $Version="1";
# Customer="WILE_E_COYOTE"; $Path="/acme";
# Part_Number="Rocket_Launcher_0001"; $Path="/acme"
# [form data]
#
# User selects shipping method from form.
#
# 6. Server -> User Agent
#
# HTTP/1.1 200 OK
# Set-Cookie2: Shipping="FedEx"; Version="1"; Path="/acme"
#
# New cookie reflects shipping method.
cookie = interact_2965(c, "http://www.acme.com/acme/shipping",
'Shipping="FedEx"; Version="1"; Path="/acme"')
assert (re.search(r'^\$Version="?1"?;', cookie) and
re.search(r'Part_Number="?Rocket_Launcher_0001"?;'
                          r'\s*\$Path="\/acme"', cookie) and
re.search(r'Customer="?WILE_E_COYOTE"?;\s*\$Path="\/acme"',
cookie))
#
# 7. User Agent -> Server
#
# POST /acme/process HTTP/1.1
# Cookie: $Version="1";
# Customer="WILE_E_COYOTE"; $Path="/acme";
# Part_Number="Rocket_Launcher_0001"; $Path="/acme";
# Shipping="FedEx"; $Path="/acme"
# [form data]
#
# User chooses to process order.
#
# 8. Server -> User Agent
#
# HTTP/1.1 200 OK
#
# Transaction is complete.
cookie = interact_2965(c, "http://www.acme.com/acme/process")
assert (re.search(r'Shipping="?FedEx"?;\s*\$Path="\/acme"', cookie) and
cookie.find("WILE_E_COYOTE") != -1)
#
# The user agent makes a series of requests on the origin server, after
# each of which it receives a new cookie. All the cookies have the same
# Path attribute and (default) domain. Because the request URLs all have
# /acme as a prefix, and that matches the Path attribute, each request
# contains all the cookies received so far.
def test_ietf_example_2(self):
from mechanize import CookieJar, DefaultCookiePolicy
# 5.2 Example 2
#
# This example illustrates the effect of the Path attribute. All detail
# of request and response headers has been omitted. Assume the user agent
# has no stored cookies.
c = CookieJar(DefaultCookiePolicy(rfc2965=True))
# Imagine the user agent has received, in response to earlier requests,
# the response headers
#
# Set-Cookie2: Part_Number="Rocket_Launcher_0001"; Version="1";
# Path="/acme"
#
# and
#
# Set-Cookie2: Part_Number="Riding_Rocket_0023"; Version="1";
# Path="/acme/ammo"
interact_2965(
c, "http://www.acme.com/acme/ammo/specific",
'Part_Number="Rocket_Launcher_0001"; Version="1"; Path="/acme"',
'Part_Number="Riding_Rocket_0023"; Version="1"; Path="/acme/ammo"')
# A subsequent request by the user agent to the (same) server for URLs of
# the form /acme/ammo/... would include the following request header:
#
# Cookie: $Version="1";
# Part_Number="Riding_Rocket_0023"; $Path="/acme/ammo";
# Part_Number="Rocket_Launcher_0001"; $Path="/acme"
#
# Note that the NAME=VALUE pair for the cookie with the more specific Path
# attribute, /acme/ammo, comes before the one with the less specific Path
# attribute, /acme. Further note that the same cookie name appears more
# than once.
cookie = interact_2965(c, "http://www.acme.com/acme/ammo/...")
assert re.search(r"Riding_Rocket_0023.*Rocket_Launcher_0001", cookie)
# A subsequent request by the user agent to the (same) server for a URL of
# the form /acme/parts/ would include the following request header:
#
# Cookie: $Version="1"; Part_Number="Rocket_Launcher_0001"; $Path="/acme"
#
# Here, the second cookie's Path attribute /acme/ammo is not a prefix of
# the request URL, /acme/parts/, so the cookie does not get forwarded to
# the server.
cookie = interact_2965(c, "http://www.acme.com/acme/parts/")
assert (cookie.find("Rocket_Launcher_0001") != -1 and
not cookie.find("Riding_Rocket_0023") != -1)
def test_rejection(self):
# Test rejection of Set-Cookie2 responses based on domain, path, port.
from mechanize import LWPCookieJar, DefaultCookiePolicy
pol = DefaultCookiePolicy(rfc2965=True)
c = LWPCookieJar(policy=pol)
# illegal domain (no embedded dots)
cookie = interact_2965(c, "http://www.acme.com",
'foo=bar; domain=".com"; version=1')
assert not c
# legal domain
cookie = interact_2965(c, "http://www.acme.com",
'ping=pong; domain="acme.com"; version=1')
assert len(c) == 1
# illegal domain (host prefix "www.a" contains a dot)
cookie = interact_2965(c, "http://www.a.acme.com",
'whiz=bang; domain="acme.com"; version=1')
assert len(c) == 1
# legal domain
cookie = interact_2965(c, "http://www.a.acme.com",
'wow=flutter; domain=".a.acme.com"; version=1')
assert len(c) == 2
# can't partially match an IP-address
cookie = interact_2965(c, "http://125.125.125.125",
'zzzz=ping; domain="125.125.125"; version=1')
assert len(c) == 2
# illegal path (must be prefix of request path)
cookie = interact_2965(c, "http://www.sol.no",
'blah=rhubarb; domain=".sol.no"; path="/foo"; '
'version=1')
assert len(c) == 2
# legal path
cookie = interact_2965(c, "http://www.sol.no/foo/bar",
'bing=bong; domain=".sol.no"; path="/foo"; '
'version=1')
assert len(c) == 3
# illegal port (request-port not in list)
cookie = interact_2965(c, "http://www.sol.no",
'whiz=ffft; domain=".sol.no"; port="90,100"; '
'version=1')
assert len(c) == 3
# legal port
cookie = interact_2965(
c, "http://www.sol.no",
r'bang=wallop; version=1; domain=".sol.no"; '
r'port="90,100, 80,8080"; '
r'max-age=100; Comment = "Just kidding! (\"|\\\\) "')
assert len(c) == 4
# port attribute without any value (current port)
cookie = interact_2965(c, "http://www.sol.no",
'foo9=bar; version=1; domain=".sol.no"; port; '
'max-age=100;')
assert len(c) == 5
# encoded path
# LWP has this test, but unescaping allowed path characters seems
# like a bad idea, so I think this should fail:
## cookie = interact_2965(c, "http://www.sol.no/foo/",
## r'foo8=bar; version=1; path="/%66oo"')
# but this is OK, because '<' is not an allowed HTTP URL path
# character:
interact_2965(c, "http://www.sol.no/<oo/",
r'foo8=bar; version=1; path="/%3coo"')
assert len(c) == 6
# save and restore
filename = tempfile.mktemp()
try:
c.save(filename, ignore_discard=True)
old = repr(c)
c = LWPCookieJar(policy=pol)
c.load(filename, ignore_discard=True)
finally:
try: os.unlink(filename)
except OSError: pass
assert old == repr(c)
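    # A simplified sketch (not mechanize code) of the RFC 2965 domain-match
    # rule exercised above; it assumes cookie_domain already carries its
    # leading dot: the request host must end with the cookie domain, and the
    # remaining host prefix must not contain a dot.
    def _sketch_domain_match(self, request_host, cookie_domain):
        if not request_host.endswith(cookie_domain):
            return False
        # e.g. prefix "www.a" for www.a.acme.com vs ".acme.com" -- rejected
        prefix = request_host[:-len(cookie_domain)]
        return "." not in prefix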
def test_url_encoding(self):
# Try some URL encodings of the PATHs.
# (the behaviour here has changed from libwww-perl)
from mechanize import CookieJar, DefaultCookiePolicy
c = CookieJar(DefaultCookiePolicy(rfc2965=True))
interact_2965(c, "http://www.acme.com/foo%2f%25/%3c%3c%0Anew%E5/%E5",
"foo = bar; version = 1")
cookie = interact_2965(
c, "http://www.acme.com/foo%2f%25/<<%0anew\345/\346\370\345",
            'bar=baz; path="/foo/"; version=1')
version_re = re.compile(r'^\$version=\"?1\"?', re.I)
assert (cookie.find("foo=bar") != -1 and
version_re.search(cookie))
cookie = interact_2965(
c, "http://www.acme.com/foo/%25/<<%0anew\345/\346\370\345")
assert not cookie
# unicode URL doesn't raise exception, as it used to!
cookie = interact_2965(c, u"http://www.acme.com/\xfc")
def test_netscape_misc(self):
# Some additional Netscape cookies tests.
from mechanize import CookieJar, Request
c = CookieJar()
headers = []
req = Request("http://foo.bar.acme.com/foo")
# Netscape allows a host part that contains dots
headers.append("Set-Cookie: Customer=WILE_E_COYOTE; domain=.acme.com")
res = FakeResponse(headers, "http://www.acme.com/foo")
c.extract_cookies(res, req)
        # Netscape also allows the domain to equal the host without a leading
        # dot being added to the domain.  The value should not be quoted even
        # if strange characters appear in it.
headers.append("Set-Cookie: PART_NUMBER=3,4; domain=foo.bar.acme.com")
res = FakeResponse(headers, "http://www.acme.com/foo")
c.extract_cookies(res, req)
req = Request("http://foo.bar.acme.com/foo")
c.add_cookie_header(req)
assert (
req.get_header("Cookie").find("PART_NUMBER=3,4") != -1 and
req.get_header("Cookie").find("Customer=WILE_E_COYOTE") != -1)
def test_intranet_domains_2965(self):
# Test handling of local intranet hostnames without a dot.
from mechanize import CookieJar, DefaultCookiePolicy
c = CookieJar(DefaultCookiePolicy(rfc2965=True))
interact_2965(c, "http://example/",
"foo1=bar; PORT; Discard; Version=1;")
cookie = interact_2965(c, "http://example/",
'foo2=bar; domain=".local"; Version=1')
assert cookie.find("foo1=bar") >= 0
interact_2965(c, "http://example/", 'foo3=bar; Version=1')
cookie = interact_2965(c, "http://example/")
assert cookie.find("foo2=bar") >= 0 and len(c) == 3
def test_intranet_domains_ns(self):
from mechanize import CookieJar, DefaultCookiePolicy
c = CookieJar(DefaultCookiePolicy(rfc2965 = False))
interact_netscape(c, "http://example/", "foo1=bar")
cookie = interact_netscape(c, "http://example/",
'foo2=bar; domain=.local')
assert len(c) == 2
assert cookie.find("foo1=bar") >= 0
cookie = interact_netscape(c, "http://example/")
assert cookie.find("foo2=bar") >= 0 and len(c) == 2
def test_empty_path(self):
from mechanize import CookieJar, Request, DefaultCookiePolicy
# Test for empty path
        # The broken web server ORION/1.3.38 returns to the client a response like
#
# Set-Cookie: JSESSIONID=ABCDERANDOM123; Path=
#
        # i.e. with Path set to nothing.
        # In this case, extract_cookies() must set the cookie path to / (root)
c = CookieJar(DefaultCookiePolicy(rfc2965 = True))
headers = []
req = Request("http://www.ants.com/")
headers.append("Set-Cookie: JSESSIONID=ABCDERANDOM123; Path=")
res = FakeResponse(headers, "http://www.ants.com/")
c.extract_cookies(res, req)
req = Request("http://www.ants.com/")
c.add_cookie_header(req)
assert (req.get_header("Cookie") == "JSESSIONID=ABCDERANDOM123" and
req.get_header("Cookie2") == '$Version="1"')
# missing path in the request URI
req = Request("http://www.ants.com:8080")
c.add_cookie_header(req)
assert (req.get_header("Cookie") == "JSESSIONID=ABCDERANDOM123" and
req.get_header("Cookie2") == '$Version="1"')
# The correctness of this test is undefined, in the absence of RFC 2965 errata.
## def test_netscape_rfc2965_interop(self):
## # Test mixing of Set-Cookie and Set-Cookie2 headers.
## from mechanize import CookieJar
## # Example from http://www.trip.com/trs/trip/flighttracker/flight_tracker_home.xsl
## # which gives up these headers:
## #
## # HTTP/1.1 200 OK
## # Connection: close
## # Date: Fri, 20 Jul 2001 19:54:58 GMT
## # Server: Apache/1.3.19 (Unix) ApacheJServ/1.1.2
## # Content-Type: text/html
## # Content-Type: text/html; charset=iso-8859-1
## # Link: </trip/stylesheet.css>; rel="stylesheet"; type="text/css"
## # Servlet-Engine: Tomcat Web Server/3.2.1 (JSP 1.1; Servlet 2.2; Java 1.3.0; SunOS 5.8 sparc; java.vendor=Sun Microsystems Inc.)
## # Set-Cookie: trip.appServer=1111-0000-x-024;Domain=.trip.com;Path=/
## # Set-Cookie: JSESSIONID=fkumjm7nt1.JS24;Path=/trs
## # Set-Cookie2: JSESSIONID=fkumjm7nt1.JS24;Version=1;Discard;Path="/trs"
## # Title: TRIP.com Travel - FlightTRACKER
## # X-Meta-Description: Trip.com privacy policy
## # X-Meta-Keywords: privacy policy
## req = mechanize.Request(
## 'http://www.trip.com/trs/trip/flighttracker/flight_tracker_home.xsl')
## headers = []
## headers.append("Set-Cookie: trip.appServer=1111-0000-x-024;Domain=.trip.com;Path=/")
## headers.append("Set-Cookie: JSESSIONID=fkumjm7nt1.JS24;Path=/trs")
## headers.append('Set-Cookie2: JSESSIONID=fkumjm7nt1.JS24;Version=1;Discard;Path="/trs"')
## res = FakeResponse(
## headers,
## 'http://www.trip.com/trs/trip/flighttracker/flight_tracker_home.xsl')
## #print res
## c = CookieJar()
## c.extract_cookies(res, req)
## #print c
## print str(c)
## print """Set-Cookie3: trip.appServer="1111-0000-x-024"; path="/"; domain=".trip.com"; path_spec; discard; version=0
## Set-Cookie3: JSESSIONID="fkumjm7nt1.JS24"; path="/trs"; domain="www.trip.com"; path_spec; discard; version=1
## """
## assert c.as_lwp_str() == """Set-Cookie3: trip.appServer="1111-0000-x-024"; path="/"; domain=".trip.com"; path_spec; discard; version=0
## Set-Cookie3: JSESSIONID="fkumjm7nt1.JS24"; path="/trs"; domain="www.trip.com"; path_spec; discard; version=1
## """
def test_session_cookies(self):
from mechanize import CookieJar, Request
year_plus_one = time.localtime(time.time())[0] + 1
# Check session cookies are deleted properly by
# CookieJar.clear_session_cookies method
req = Request('http://www.perlmeister.com/scripts')
headers = []
headers.append("Set-Cookie: s1=session;Path=/scripts")
headers.append("Set-Cookie: p1=perm; Domain=.perlmeister.com;"
"Path=/;expires=Fri, 02-Feb-%d 23:24:20 GMT" %
year_plus_one)
headers.append("Set-Cookie: p2=perm;Path=/;expires=Fri, "
"02-Feb-%d 23:24:20 GMT" % year_plus_one)
headers.append("Set-Cookie: s2=session;Path=/scripts;"
"Domain=.perlmeister.com")
headers.append('Set-Cookie2: s3=session;Version=1;Discard;Path="/"')
res = FakeResponse(headers, 'http://www.perlmeister.com/scripts')
c = CookieJar()
c.extract_cookies(res, req)
# How many session/permanent cookies do we have?
counter = {"session_after": 0,
"perm_after": 0,
"session_before": 0,
"perm_before": 0}
for cookie in c:
key = "%s_before" % cookie.value
counter[key] = counter[key] + 1
c.clear_session_cookies()
# How many now?
for cookie in c:
key = "%s_after" % cookie.value
counter[key] = counter[key] + 1
assert not (
            # a permanent cookie got lost accidentally
counter["perm_after"] != counter["perm_before"] or
# a session cookie hasn't been cleared
counter["session_after"] != 0 or
# we didn't have session cookies in the first place
counter["session_before"] == 0)
if __name__ == "__main__":
import unittest
unittest.main()
| mit | 3,186,641,744,499,388,000 | 39.656977 | 145 | 0.554542 | false |
newemailjdm/scipy | scipy/optimize/setup.py | 45 | 3158 | #!/usr/bin/env python
from __future__ import division, print_function, absolute_import
from os.path import join
from scipy._build_utils import numpy_nodepr_api
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info
config = Configuration('optimize',parent_package, top_path)
minpack_src = [join('minpack','*f')]
config.add_library('minpack',sources=minpack_src)
config.add_extension('_minpack',
sources=['_minpackmodule.c'],
libraries=['minpack'],
depends=(["minpack.h","__minpack.h"]
+ minpack_src),
**numpy_nodepr_api)
rootfind_src = [join('Zeros','*.c')]
rootfind_hdr = [join('Zeros','zeros.h')]
config.add_library('rootfind',
sources=rootfind_src,
headers=rootfind_hdr,
**numpy_nodepr_api)
config.add_extension('_zeros',
sources=['zeros.c'],
libraries=['rootfind'],
depends=(rootfind_src + rootfind_hdr),
**numpy_nodepr_api)
lapack = get_info('lapack_opt')
if 'define_macros' in numpy_nodepr_api:
if ('define_macros' in lapack) and (lapack['define_macros'] is not None):
lapack['define_macros'] = (lapack['define_macros'] +
numpy_nodepr_api['define_macros'])
else:
lapack['define_macros'] = numpy_nodepr_api['define_macros']
sources = ['lbfgsb.pyf', 'lbfgsb.f', 'linpack.f', 'timer.f']
config.add_extension('_lbfgsb',
sources=[join('lbfgsb',x) for x in sources],
**lapack)
sources = ['moduleTNC.c','tnc.c']
config.add_extension('moduleTNC',
sources=[join('tnc',x) for x in sources],
depends=[join('tnc','tnc.h')],
**numpy_nodepr_api)
config.add_extension('_cobyla',
sources=[join('cobyla',x) for x in ['cobyla.pyf',
'cobyla2.f',
'trstlp.f']],
**numpy_nodepr_api)
sources = ['minpack2.pyf', 'dcsrch.f', 'dcstep.f']
config.add_extension('minpack2',
sources=[join('minpack2',x) for x in sources],
**numpy_nodepr_api)
sources = ['slsqp.pyf', 'slsqp_optmz.f']
config.add_extension('_slsqp', sources=[join('slsqp', x) for x in sources],
**numpy_nodepr_api)
config.add_extension('_nnls', sources=[join('nnls', x)
for x in ["nnls.f","nnls.pyf"]],
**numpy_nodepr_api)
config.add_data_dir('tests')
return config
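# Illustrative standalone build of just this subpackage (a sketch; assumes a
# scipy source tree with numpy.distutils available):
#
#     python setup.py build_ext --inplace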
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
| bsd-3-clause | -5,902,669,916,625,253,000 | 38.974684 | 81 | 0.4962 | false |
malaterre/ITK | Modules/Filtering/Smoothing/wrapping/test/PythonAutoPipelineTest.py | 2 | 1371 | #==========================================================================
#
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
import itk
from sys import argv
# instantiate an auto pipeline
p = itk.auto_pipeline()
# from now, and until we call p.Stop(), all the new objects will be connected
# to the pipeline p, without having to give a name to any filter
itk.ImageFileReader.IUC2.New(FileName=argv[1])
itk.MedianImageFilter.IUC2IUC2.New(Radius=eval(argv[3]))
itk.CastImageFilter.IUC2IUC2.New()
# stop the auto_pipeline and test that the next (incompatible) filter is not
# automatically connected, and restart the auto pipeline
p.Stop()
itk.CastImageFilter.IF2IF2.New()
p.Start()
itk.ImageFileWriter.IUC2.New(FileName=argv[2])
p.Update()
| apache-2.0 | 6,751,332,355,130,213,000 | 34.153846 | 77 | 0.665208 | false |
lotooo/AlJazeera.bundle | Contents/Code/__init__.py | 1 | 4092 | VIDEO_PREFIX = "/video/aljazeera"
NAME = L('Title')
# make sure to replace artwork with what you want
# these filenames reference the example files in
# the Contents/Resources/ folder in the bundle
ART = 'art-default.jpg'
ICON = 'icon-default.png'
####################################################################################################
def Start():
## make this plugin show up in the 'Video' section
## in Plex. The L() function pulls the string out of the strings
## file in the Contents/Strings/ folder in the bundle
## see also:
## http://dev.plexapp.com/docs/mod_Plugin.html
## http://dev.plexapp.com/docs/Bundle.html#the-strings-directory
Plugin.AddPrefixHandler(VIDEO_PREFIX, VideoMainMenu, NAME, ICON, ART)
Plugin.AddViewGroup("InfoList", viewMode="InfoList", mediaType="items")
Plugin.AddViewGroup("List", viewMode="List", mediaType="items")
## set some defaults so that you don't have to
## pass these parameters to these object types
## every single time
## see also:
## http://dev.plexapp.com/docs/Objects.html
MediaContainer.title1 = NAME
MediaContainer.viewGroup = "List"
MediaContainer.art = R(ART)
DirectoryItem.thumb = R(ICON)
VideoItem.thumb = R(ICON)
HTTP.CacheTime = CACHE_1HOUR
#### the rest of these are user created functions and
#### are not reserved by the plugin framework.
#### see: http://dev.plexapp.com/docs/Functions.html for
#### a list of reserved functions above
#
# Example main menu referenced in the Start() method
# for the 'Video' prefix handler
#
def VideoMainMenu():
oc = ObjectContainer(title1='Menu')
titre = 'Live'
thumb = R(ICON)
    # This RTMP URL does not work, as the players I tested (plex.tv, PHT, Roku) are unable to play the stream
#video_url = 'rtmp://aljazeeraflashlivefs.fplive.net:443/aljazeeraflashlive-live?videoId=883816736001&lineUpId=&pubId=665003303001&playerId=751182905001&affiliateId=/aljazeera_eng_med?videoId=883816736001&lineUpId=&pubId=665003303001&playerId=751182905001&affiliateId=%20&live=true'
    # URL found in the web page source.
    # It looks like the iPad/iPhone version.
video_url_m3u8 = 'http://aljazeera-eng-apple-live.adaptive.level3.net/apple/aljazeera/english/appleman.m3u8'
rating_key = 'live'
art = R(ART)
summary = 'Watch Al Jazeera Live'
tagline = 'Live'
oc.add(
VideoClipObject(
key = Callback(Lookup, title=titre, thumb=thumb, rating_key=rating_key, url=video_url_m3u8, art=art, summary=summary, tagline=tagline),
title=titre,
tagline=tagline,
rating_key = rating_key,
items = [
MediaObject(
parts = [PartObject(key=HTTPLiveStreamURL(Callback(PlayAJE, url=video_url_m3u8)))]
)
],
summary=L(summary),
thumb=thumb,
art=art
)
)
return oc
@route('/video/aljazeera/media')
def Lookup(title, thumb, rating_key, url, art, summary, tagline):
Log.Debug("Entering Lookup")
oc = ObjectContainer()
oc.add(
VideoClipObject(
key = Callback(Lookup, title=title, thumb=thumb, rating_key=rating_key, url=url, art=art, summary=summary, tagline=tagline),
title = title,
thumb = thumb,
tagline = tagline,
rating_key = rating_key,
summary = summary,
art = art,
items = [
MediaObject(
parts = [PartObject(key=HTTPLiveStreamURL(Callback(PlayAJE, url=url)))]
)
]
)
)
return oc
@indirect
def PlayAJE(url):
#return Redirect(url)
return IndirectResponse(VideoClipObject, key=HTTPLiveStreamURL(url=url))
| gpl-3.0 | -106,976,280,067,141,390 | 33.275862 | 288 | 0.5826 | false |
randomtask1155/gpdb | gpMgmt/bin/gppylib/system/ComputeCatalogUpdate.py | 26 | 19491 | #!/usr/bin/env python
# Line too long - pylint: disable=C0301
# Invalid name - pylint: disable=C0103
"""
ComputeCatalogUpdate.py
Copyright (c) EMC/Greenplum Inc 2011. All Rights Reserved.
Used by updateSystemConfig() to compare the db state and
goal state of a gpArray containing the Greenplum segment
configruation details and computes appropriate changes.
"""
import copy
from gppylib.gplog import *
from gppylib.gparray import ROLE_PRIMARY, ROLE_MIRROR, MASTER_CONTENT_ID
logger = get_default_logger()
class ComputeCatalogUpdate:
"""
Helper class for GpConfigurationProvider.updateSystemConfig().
This computes seven lists of GpDb objects (commonly referenced as 'seg')
from a GpArray, reflecting the logical changes that need to be made
to the database catalog to make it match the system as defined.
The names of the lists are reasonably descriptive:
mirror_to_remove - to be removed (e.g. via gp_remove_segment_mirror())
primary_to_remove - to be removed (e.g. via gp_remove_segment())
primary_to_add - to be added (e.g. via gp_add_segment())
mirror_to_add - to be added (e.g. via gp_add_segment_mirror())
mirror_to_remove_and_add - change or force list requires this mirror
to be removed and then added back
segment_to_update - to be updated (e.g. via SQL)
segment_unchanged - needs no update (included for validation)
"""
def __init__(self, gpArray, forceMap, useUtilityMode, allowPrimary):
"""
This class just holds lists of objects in the underlying gpArray.
As such, it has no methods - the constructor does all the computation.
@param gpArray the array containing the goal and db segment states.
@param forceMap a map of dbid->True for mirrors for which we should force updating via remove/add
@param useUtilityMode True if the operations we're doing are expected to run via utility moed
@param allowPrimary True if caller authorizes add/remove primary operations (e.g. gpexpand)
"""
forceMap = forceMap or {}
self.useUtilityMode = useUtilityMode
self.allowPrimary = allowPrimary
# 'dbsegmap' reflects the current state of the catalog
self.dbsegmap = dict([(seg.getSegmentDbId(), seg) for seg in gpArray.getSegmentsAsLoadedFromDb()])
# 'goalsegmap' reflects the desired state of the catalog
self.goalsegmap = dict([(seg.getSegmentDbId(), seg) for seg in gpArray.getDbList(includeExpansionSegs=True)])
# find mirrors and primaries to remove
self.mirror_to_remove = [
seg for seg in self.dbsegmap.values() # segment in database
if seg.isSegmentMirror() # segment is a mirror
and (seg.getSegmentDbId() not in self.goalsegmap) # but not in goal configuration
]
self.debuglog("mirror_to_remove: %s", self.mirror_to_remove)
self.primary_to_remove = [
seg for seg in self.dbsegmap.values() # segment is database
if seg.isSegmentPrimary() # segment is a primary
and (seg.getSegmentDbId() not in self.goalsegmap) # but not in goal configuration
]
self.debuglog("primary_to_remove: %s", self.primary_to_remove)
# find primaries and mirrors to add
self.primary_to_add = [
seg for seg in self.goalsegmap.values() # segment in goal configuration
if seg.isSegmentPrimary() # segment is a primary
and (seg.getSegmentDbId() not in self.dbsegmap) # but not in the database
]
self.debuglog("primary_to_add: %s", self.primary_to_add)
self.mirror_to_add = [
seg for seg in self.goalsegmap.values() # segment in goal configuration
if seg.isSegmentMirror() # segment is a mirror
and (seg.getSegmentDbId() not in self.dbsegmap) # but not in the database
]
self.debuglog("mirror_to_add: %s", self.mirror_to_add)
# find segments to update
initial_segment_to_update = [
seg for seg in self.goalsegmap.values() # segment in goal configuration
if (seg.getSegmentDbId() in self.dbsegmap) # and also in the database
and (seg != self.dbsegmap[ seg.getSegmentDbId() ]) # but some attributes differ
]
self.debuglog("initial_segment_to_update: %s", initial_segment_to_update)
# create a map of the segments which we can't update in the
# ordinary way either because they were on the forceMap or
# they differ in an attribute other than mode, status or replication port
removeandaddmap = {}
for seg in initial_segment_to_update:
dbid = seg.getSegmentDbId()
if dbid in forceMap:
removeandaddmap[dbid] = seg
continue
if not seg.equalIgnoringModeAndStatusAndReplicationPort(self.dbsegmap[dbid]):
removeandaddmap[dbid] = seg
continue
# create list of mirrors to update via remove/add
self.mirror_to_remove_and_add = [seg for seg in removeandaddmap.values()]
self.debuglog("mirror_to_remove_and_add: %s", self.mirror_to_remove_and_add)
# find segments to update in the ordinary way
self.segment_to_update = [
seg for seg in initial_segment_to_update # segments to update
if seg.getSegmentDbId() not in removeandaddmap # that don't require remove/add
]
self.debuglog("segment_to_update: %s", self.segment_to_update)
# find segments that don't need change
self.segment_unchanged = [
seg for seg in self.goalsegmap.values() # segment in goal configuration
if (seg.getSegmentDbId() in self.dbsegmap) # and also in the database
and (seg == self.dbsegmap[ seg.getSegmentDbId() ]) # and attribtutes are all the same
]
self.debuglog("segment_unchanged: %s", self.segment_unchanged)
def final_segments(self):
"""
Generate a series of segments appearing in the final configuration.
"""
for seg in self.primary_to_add:
yield seg
for seg in self.mirror_to_add:
yield seg
for seg in self.mirror_to_remove_and_add:
yield seg
for seg in self.segment_to_update:
yield seg
for seg in self.segment_unchanged:
yield seg
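    # e.g. (illustrative): set(s.getSegmentDbId() for s in self.final_segments())
    # yields the dbids expected in the catalog once all changes are applied.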
def validate(self):
"""
Check that the operation and new configuration is valid.
"""
# Validate that we're not adding or removing primaries unless authorized
#
if not self.allowPrimary:
if len(self.primary_to_add) > 0:
p = self.primary_to_add[0]
raise Exception("Internal error: Operation may not add primary: %s" % repr(p))
if len(self.primary_to_remove) > 0:
p = self.primary_to_remove[0]
raise Exception("Internal error: Operation may not remove primary: %s" % repr(p))
# Validate that operations do not result in a contentid with a pair of segments in same preferred role
#
final = { ROLE_PRIMARY:{}, ROLE_MIRROR:{} }
for seg in self.final_segments():
subset = final[ seg.getSegmentPreferredRole() ]
other = subset.get( seg.getSegmentContentId() )
if other is not None:
raise Exception("Segments sharing a content id may not have same preferred role: %s and %s" % (repr(seg), repr(other)))
subset[ seg.getSegmentContentId() ] = seg
# Validate that if we have any mirrors, that all primaries have mirrors
#
if len(final[ ROLE_MIRROR ]) > 0:
for contentId in final[ ROLE_PRIMARY ]:
if contentId != MASTER_CONTENT_ID and final[ ROLE_MIRROR ].get( contentId ) is None:
seg = final[ ROLE_PRIMARY ][ contentId ]
raise Exception("Primary must have mirror when mirroring enabled: %s" % repr(seg))
# Validate that the remove/add list contains only qualified mirrors.
# In particular, we disallow remove/add of the master, standby or a primary.
#
for seg in self.mirror_to_remove_and_add:
originalSeg = self.dbsegmap.get(seg.getSegmentDbId())
# filespace and other core has changed, or it's a mirror and we are recovering full
# (in which case we want to call removeMirror and addMirror so we mark
# the primary as full-resyncing)
#
if seg.isSegmentMaster(current_role=True) or seg.isSegmentStandby(current_role=True):
#
# Assertion here -- user should not be allowed to change master/standby info.
#
raise Exception("Internal error: Can only change core details of segments, not masters" \
" (on segment %s) (seg %s vs original %s)" %
(seg.getSegmentDbId(), repr(seg), repr(originalSeg)))
if not seg.isSegmentMirror(current_role=True):
#
# Assertion here -- user should not be allowed to change primary info.
#
raise Exception("Internal error: Can only change core details of mirrors, not primaries" \
" (on segment %s) (seg %s vs original %s)" %
(seg.getSegmentDbId(), repr(seg), repr(originalSeg)))
if self.useUtilityMode:
raise Exception("Cannot change core details of mirrors in utility mode")
def debuglog(self, msg, seglist):
"""
Write debugging details about the specified segment list.
"""
logger.debug(msg % ("%s segments" % len(seglist)))
for seg in seglist:
logger.debug(msg % repr(seg))
# minimal test framework when run from command line
#
if __name__ == '__main__':
ROLE_PRIMARY = 'p'
ROLE_MIRROR = 'm'
MODE_NOT_INITIALIZED = ''
MODE_CHANGELOGGING = 'c'
MODE_SYNCHRONIZED = 's'
MODE_RESYNCHRONIZATION = 'r'
class GpDb:
def __init__(self,dbid,content,pref,mode='',curr=None,status='u',rport=0,misc=None):
self.dbid = dbid
self.content = content
self.preferred_role = pref
self.mode = mode
self.role = curr or pref
self.status = status
self.rport = rport
self.misc = misc
def getSegmentDbId(self): return self.dbid
def getSegmentContentId(self): return self.content
def getSegmentPreferredRole(self): return self.preferred_role
def getSegmentMode(self): return self.mode
def getSegmentRole(self): return self.role
def getSegmentStatus(self): return self.status
def getSegmentReplicationPort(self): return self.rport
def setSegmentMode(self,mode): self.mode = mode
def setSegmentStatus(self,status): self.status = status
def setSegmentReplicationPort(self,rport): self.rport = rport
def isSegmentPrimary(self, current_role=False):
role = self.role if current_role else self.preferred_role
return self.content >= 0 and role == ROLE_PRIMARY
def isSegmentMirror(self, current_role=False):
role = self.role if current_role else self.preferred_role
return self.content >= 0 and role == ROLE_MIRROR
def isSegmentMaster(self, current_role=False):
role = self.role if current_role else self.preferred_role
return self.content < 0 and role == ROLE_PRIMARY
def isSegmentStandby(self, current_role=False):
role = self.role if current_role else self.preferred_role
return self.content < 0 and role == ROLE_MIRROR
def __cmp__(self,other): return cmp(repr(self), repr(other))
def __repr__(s):
return '(%s,%s,%s,%s,%s,%s,%s,%s)' % (s.dbid, s.content, s.preferred_role, s.mode, s.role, s.status, s.rport, s.misc)
def equalIgnoringModeAndStatusAndReplicationPort(self, other):
tmp = copy.copy(self)
tmp.setSegmentMode( other.getSegmentMode() )
tmp.setSegmentStatus( other.getSegmentStatus() )
tmp.setSegmentReplicationPort( other.getSegmentReplicationPort() )
return tmp == other
    # leftover debugging helper (never instantiated)
    class xxx:
def xxx():
print dbsegmap
print goalsegmap
print 'db not goal', [seg for seg in dbsegmap.values() if seg.getSegmentDbId() not in goalsegmap]
print 'goal not db', [seg for seg in goalsegmap.values() if seg.getSegmentDbId() not in dbsegmap]
class GpArray:
def __init__(s, forceMap=None, useUtilityMode=False, allowPrimary=True):
s.c = ComputeCatalogUpdate(s,forceMap,useUtilityMode,allowPrimary)
s.dump()
def dump(s):
print s.__class__.__name__, s.__class__.__doc__
s.c.validate()
print " -m", s.c.mirror_to_remove,
print " -p", s.c.primary_to_remove,
print " +p", s.c.primary_to_add,
print " +m", s.c.mirror_to_add,
print " +/-m", s.c.mirror_to_remove_and_add,
print " u", s.c.segment_to_update,
print " n", s.c.segment_unchanged
def __repr__(s):
return '<%s,%s>' % (s.getDbList(), s.getSegmentsAsLoadedFromDb())
class GpArrayBad(GpArray):
def __init__(s, forceMap=None, useUtilityMode=False, allowPrimary=True):
try:
GpArray.__init__(s,forceMap,useUtilityMode,allowPrimary)
print " ERROR: expected exception"
except Exception, e:
print " EXPECTED: ", str(e)
class GpArray1(GpArray):
"expect no change"
def getDbList(self,includeExpansionSegs): return [GpDb(1,1,'p'), GpDb(2,1,'m')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m')]
GpArray1()
class GpArray1a(GpArray):
"expect update of mirror"
def getDbList(self,includeExpansionSegs): return [GpDb(1,1,'p'), GpDb(2,1,'m','a')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m')]
GpArray1a()
class GpArray2(GpArray):
"expect add mirror"
def getDbList(self,includeExpansionSegs): return [GpDb(1,1,'p','a'), GpDb(2,1,'m','a')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p')]
GpArray2()
class GpArray3(GpArray):
"expect remove mirror"
def getDbList(self,includeExpansionSegs): return [GpDb(1,1,'p')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m','a')]
GpArray3()
class GpArray4(GpArray):
"expect add primary and mirror"
def getDbList(self,includeExpansionSegs): return [GpDb(1,1,'p'), GpDb(2,1,'m'), GpDb(3,2,'p'), GpDb(4,2,'m')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m')]
GpArray4()
class GpArray5(GpArray):
"expect remove primary/mirror and add/primary mirror"
def getDbList(self,includeExpansionSegs): return [GpDb(3,2,'p'), GpDb(4,2,'m')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m','a')]
GpArray5()
class GpArray6(GpArray):
"expect update via add/remove"
def getDbList(self,includeExpansionSegs): return [GpDb(1,1,'p'), GpDb(2,1,'m')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m',misc='x')]
GpArray6()
class GpArray7(GpArrayBad):
"can't rely on remove/add for primary"
def getDbList(self,includeExpansionSegs): return [GpDb(1,1,'p',misc='x'), GpDb(2,1,'m'), GpDb(3,2,'p'), GpDb(4,2,'m')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m')]
GpArray7()
class GpArray8(GpArrayBad):
"can't rely on remove/add for master"
def getDbList(self,includeExpansionSegs): return [GpDb(0,-1,'p',misc="x"), GpDb(1,1,'p'), GpDb(2,1,'m')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(0,-1,'p'), GpDb(1,1,'p'), GpDb(2,1,'m')]
GpArray8()
class GpArray9(GpArrayBad):
"can't rely on remove/add for master"
def __init__(s): GpArrayBad.__init__(s,[0])
def getDbList(self,includeExpansionSegs): return [GpDb(0,-1,'p',rport=2), GpDb(1,1,'p'), GpDb(2,1,'m')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(0,-1,'p'), GpDb(1,1,'p'), GpDb(2,1,'m')]
GpArray9()
class GpArray10(GpArray):
"expect update"
def getDbList(self,includeExpansionSegs): return [GpDb(1,1,'p'), GpDb(2,1,'m',rport=2)]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m')]
GpArray10()
class GpArray11(GpArray):
"expect update via add/remove"
def __init__(s): GpArray.__init__(s,[2])
def getDbList(self,includeExpansionSegs): return [GpDb(1,1,'p'), GpDb(2,1,'m',rport=2)]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m')]
GpArray11()
class GpArray12(GpArrayBad):
"can't add primary"
def __init__(s): GpArrayBad.__init__(s,allowPrimary=False)
def getDbList(self,includeExpansionSegs): return [GpDb(1,1,'p'), GpDb(2,1,'m'), GpDb(3,2,'p'), GpDb(4,2,'m')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m')]
GpArray12()
class GpArray13(GpArrayBad):
"can't remove primary"
def __init__(s): GpArrayBad.__init__(s,allowPrimary=False)
def getDbList(self,includeExpansionSegs): return [GpDb(2,1,'m')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m')]
GpArray13()
class GpArray14(GpArrayBad):
"can't have pair in same preferred role"
def __init__(s): GpArrayBad.__init__(s)
def getDbList(self,includeExpansionSegs): return [GpDb(1,1,'p'), GpDb(2,1,'p')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m')]
GpArray14()
class GpArray15(GpArrayBad):
"can't have pair in same preferred role"
def __init__(s): GpArrayBad.__init__(s)
def getDbList(self,includeExpansionSegs): return [GpDb(1,1,'m'), GpDb(2,1,'m')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m')]
GpArray15()
class GpArray16(GpArrayBad):
"all primaries must have mirrors when mirroring"
def __init__(s): GpArrayBad.__init__(s)
def getDbList(self,includeExpansionSegs): return [GpDb(1,1,'p'), GpDb(2,1,'m'), GpDb(3,2,'p')]
def getSegmentsAsLoadedFromDb(self): return [GpDb(1,1,'p'), GpDb(2,1,'m')]
GpArray16()
| apache-2.0 | -2,496,157,834,579,271,700 | 45.407143 | 135 | 0.595146 | false |
quarckster/cfme_tests | scripts/template_upload_rhevm_qcow2.py | 4 | 25584 | #!/usr/bin/env python2
"""This script takes various parameters specified in
cfme_data['template_upload']['template_upload_rhevm'] and/or by command-line arguments.
Parameters specified by command-line have higher priority, and override data in cfme_data.
This script is designed to run either as a standalone RHEVM template uploader,
or together with the template_upload_all script. This is why all the function
calls that would normally live in a main function are located in run(**kwargs).
"""
import subprocess
import argparse
import fauxfactory
import sys
from threading import Lock, Thread
from ovirtsdk.xml import params
from cfme.utils import net, trackerbot
from cfme.utils.conf import cfme_data, credentials
from cfme.utils.log import logger, add_stdout_handler
from cfme.utils.providers import get_mgmt, list_provider_keys
from cfme.utils.ssh import SSHClient
from cfme.utils.wait import wait_for
lock = Lock()
add_stdout_handler(logger)
def parse_cmd_line():
parser = argparse.ArgumentParser(argument_default=None)
parser.add_argument('--stream', dest='stream',
help='stream name: downstream-##z, upstream, upstream_stable, etc',
default=None)
parser.add_argument("--image_url", dest="image_url",
help="URL of qcow2 file to upload", default=None)
parser.add_argument("--template_name", dest="template_name",
help="Name of the new template", default=None)
parser.add_argument("--sdomain", dest="sdomain",
help="Storage domain for vm and disk", default=None)
parser.add_argument("--cluster", dest="cluster",
help="Set cluster to operate in", default=None)
parser.add_argument("--disk_size", dest="disk_size",
help="Size of the second (database) disk, in B",
default=None, type=int)
parser.add_argument("--disk_format", dest="disk_format",
help="Format of the second (database) disk", default=None)
parser.add_argument("--disk_interface", dest="disk_interface",
help="Interface of second (database) disk", default=None)
parser.add_argument("--provider", dest="provider",
help="Rhevm provider (to look for in cfme_data)", default=None)
parser.add_argument("--glance", dest="glance",
help="Glance server to upload images to", default='glance11-server')
args = parser.parse_args()
return args
def make_ssh_client(rhevip, sshname, sshpass):
connect_kwargs = {
'username': sshname,
'password': sshpass,
'hostname': rhevip
}
return SSHClient(**connect_kwargs)
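# Illustrative usage (a sketch; the host and credentials are placeholders):
#
#     with make_ssh_client('10.1.1.1', 'root', 'password') as ssh_client:
#         output = ssh_client.run_command('hostname')[1]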
def is_ovirt_engine_running(rhevm_ip, sshname, sshpass):
try:
with make_ssh_client(rhevm_ip, sshname, sshpass) as ssh_client:
stdout = ssh_client.run_command('systemctl status ovirt-engine')[1]
# fallback to sysV commands if necessary
if 'command not found' in stdout:
stdout = ssh_client.run_command('service ovirt-engine status')[1]
return 'running' in stdout
except Exception:
logger.exception('RHEVM: While checking status of ovirt engine, an exception happened')
return False
def get_qcow_name(qcowurl):
"""Returns ova filename."""
return qcowurl.split("/")[-1]
def download_qcow(qcowurl):
"""Downloads qcow2 file from and url
Args:
ssh_client: :py:class:`utils.ssh.SSHClient` instance
qcowurl: URL of ova file
"""
rc = subprocess.call(
['curl', '-O', qcowurl])
if rc == 0:
print('Successfully downloaded qcow2 file')
else:
print('There was an error while downloading qcow2 file')
sys.exit(127)
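# Illustrative call (a sketch; the URL is a placeholder):
#
#     download_qcow('http://example.com/builds/cfme-nightly.qcow2')
#     # downloads cfme-nightly.qcow2 into the current working directory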
def add_glance(api, provider, glance_server):
    """Attaches the given Glance image provider to RHV, unless already attached.
    Args:
        api: API to chosen RHEVM provider.
        provider: provider key (used for logging).
        glance_server: key of the Glance server in cfme_data['template_upload'].
    """
provider_dict = cfme_data['template_upload'][glance_server]
creds_key = provider_dict['credentials']
    def is_glance_added(api, name):
        return any(domain.get_name() == name
                   for domain in api.openstackimageproviders.list())
# Get the list of OpenStack image providers (a.k.a. Glance providers)
# that match the name that we want to use:
providers = [
domain for domain in api.openstackimageproviders.list()
if domain.get_name() == glance_server
]
try:
# If there is no such provider, then add it:
if len(providers) == 0:
glance_sd = api.openstackimageproviders.add(
params.OpenStackImageProvider(
name=glance_server,
description=glance_server,
url=provider_dict['url'],
requires_authentication=True,
authentication_url=provider_dict['auth_url'],
username=credentials[creds_key]['username'],
password=credentials[creds_key]['password'],
tenant_name=credentials[creds_key]['tenant']
)
)
else:
logger.info("RHEVM:%r Warning: Found a Glance provider with this name (%r).",
provider, glance_server)
logger.info("RHEVM:%r Skipping this step, attempting to continue", provider)
return
wait_for(is_glance_added, [api, glance_server],
fail_condition=False, delay=5, num_sec=240)
if not api.openstackimageproviders.get(name=glance_server):
logger.error("RHV:%s Glance provider %s could not be attached", provider,
glance_server)
sys.exit(127)
logger.info('RHV:%s Attached Glance provider %s', provider, glance_sd.get_name())
except Exception:
logger.exception("RHV:%r add_glance failed:", provider)
def import_template_from_glance(api, sdomain, cluster, temp_template_name,
                                glance_server, provider, template_name):
    """Imports the named Glance image into RHV as a temporary template."""
try:
if api.templates.get(temp_template_name) is not None:
logger.info("RHEVM:%r Warning: found another template with this name.", provider)
logger.info("RHEVM:%r Skipping this step, attempting to continue...", provider)
return
# Find the storage domain:
sd = api.storagedomains.get(name=glance_server)
# Find the image:
image = sd.images.get(name=template_name)
# Import the image:
image.import_image(params.Action(
async=True,
import_as_template=True,
template=params.Template(
name=temp_template_name
),
cluster=params.Cluster(
name=cluster
),
storage_domain=params.StorageDomain(
name=sdomain
)
)
)
def is_image_imported(api, name):
if api.templates.get(name):
return True
else:
return False
wait_for(is_image_imported, [api, temp_template_name],
fail_condition=False, delay=5, num_sec=240)
if not api.templates.get(temp_template_name):
logger.error("RHEVM:%r Failed to import template from Glance", provider)
sys.exit(127)
logger.info("RHEVM:%r Successfully imported template from Glance", provider)
except Exception:
logger.exception("RHEVM:%r import_template_from_glance() failed:", provider)
def make_vm_from_template(api, stream, cfme_data, cluster, temp_template_name,
temp_vm_name, provider, mgmt_network=None):
"""Makes temporary VM from imported template. This template will be later deleted.
It's used to add a new disk and to convert back to template.
Args:
api: API to chosen RHEVM provider.
cluster: Cluster to save the temporary VM on.
        mgmt_network: management network on the RHEVM box; it's 'ovirtmgmt' by
            default on RHV 4.0 and 'rhevm' on older RHEVM versions.
temp_template_name: temporary template name created from ova
temp_vm_name: temporary vm name to be created.
provider: provider_key
"""
cores = cfme_data['template_upload']['hardware'][stream]['cores']
sockets = cfme_data['template_upload']['hardware'][stream]['sockets']
cpu = params.CPU(topology=params.CpuTopology(cores=cores, sockets=sockets))
vm_memory = cfme_data['template_upload']['hardware'][stream]['memory'] * 1024 * 1024 * 1024
try:
if api.vms.get(temp_vm_name) is not None:
logger.info("RHEVM:%r Warning: found another VM with this name (%r).",
provider, temp_vm_name)
logger.info("RHEVM:%r Skipping this step, attempting to continue...", provider)
return
actual_template = api.templates.get(temp_template_name)
actual_cluster = api.clusters.get(cluster)
params_vm = params.VM(name=temp_vm_name, template=actual_template, cluster=actual_cluster,
memory=vm_memory, cpu=cpu)
api.vms.add(params_vm)
# we must wait for the vm do become available
def check_status():
return api.vms.get(temp_vm_name).get_status().state == 'down'
wait_for(check_status, fail_condition=False, delay=5, num_sec=240)
if mgmt_network:
vm = api.vms.get(temp_vm_name)
nic = vm.nics.get('eth0')
nic.network = params.Network(
name=mgmt_network)
nic.interface = 'virtio'
nic.update()
# check, if the vm is really there
if not api.vms.get(temp_vm_name):
logger.error("RHEVM:%r temp VM could not be provisioned", provider)
sys.exit(127)
logger.info("RHEVM:%r successfully provisioned temp vm", provider)
except Exception:
logger.exception("RHEVM:%r Make_temp_vm_from_template failed:", provider)
def check_disks(api, temp_vm_name):
    """Returns True when every disk of the given VM reports an 'ok' state."""
disks = api.vms.get(temp_vm_name).disks.list()
for disk in disks:
if disk.get_status().state != "ok":
return False
return True
def add_disk_to_vm(api, sdomain, disk_size, disk_format, disk_interface, temp_vm_name,
provider):
"""Adds second disk to a temporary VM.
Args:
api: API to chosen RHEVM provider.
sdomain: Storage domain to save new disk onto.
disk_size: Size of the new disk (in B).
disk_format: Format of the new disk.
        disk_interface: Interface of the new disk.
        temp_vm_name: Name of the temporary VM to attach the disk to.
        provider: provider key (used for logging).
"""
try:
if len(api.vms.get(temp_vm_name).disks.list()) > 1:
logger.info("RHEVM:%r Warning: found more than one disk in existing VM (%r).",
provider, temp_vm_name)
logger.info("RHEVM:%r Skipping this step, attempting to continue...", provider)
return
actual_sdomain = api.storagedomains.get(sdomain)
temp_vm = api.vms.get(temp_vm_name)
storage_id = params.StorageDomains(storage_domain=[params.StorageDomain
(id=actual_sdomain.get_id())])
params_disk = params.Disk(storage_domains=storage_id, size=disk_size,
interface=disk_interface, format=disk_format)
temp_vm.disks.add(params_disk)
wait_for(check_disks, [api, temp_vm_name], fail_condition=False, delay=5, num_sec=900)
        # check that there are now two disks
if len(api.vms.get(temp_vm_name).disks.list()) < 2:
logger.error("RHEVM:%r Disk failed to add", provider)
sys.exit(127)
logger.info("RHEVM:%r Successfully added disk", provider)
except Exception:
logger.exception("RHEVM:%r add_disk_to_temp_vm failed:", provider)
def templatize_vm(api, template_name, cluster, temp_vm_name, provider):
"""Templatizes temporary VM. Result is template with two disks.
Args:
api: API to chosen RHEVM provider.
template_name: Name of the final template.
cluster: Cluster to save the final template onto.
"""
try:
if api.templates.get(template_name) is not None:
logger.info("RHEVM:%r Warning: found finished template with this name (%r).",
provider, template_name)
logger.info("RHEVM:%r Skipping this step, attempting to continue", provider)
return
temporary_vm = api.vms.get(temp_vm_name)
actual_cluster = api.clusters.get(cluster)
new_template = params.Template(name=template_name, vm=temporary_vm, cluster=actual_cluster)
api.templates.add(new_template)
wait_for(check_disks, [api, temp_vm_name], fail_condition=False, delay=5, num_sec=900)
        # check that the template is really there
if not api.templates.get(template_name):
logger.error("RHEVM:%r templatizing temporary VM failed", provider)
sys.exit(127)
logger.info("RHEVM:%r successfully templatized the temporary VM", provider)
except Exception:
logger.exception("RHEVM:%r templatizing temporary VM failed", provider)
def cleanup(api, qcowname, provider, temp_template_name, temp_vm_name):
"""Cleans up all the mess that the previous functions left behind.
Args:
api: API to chosen RHEVM provider.
edomain: Export domain of chosen RHEVM provider.
"""
try:
logger.info("RHEVM:%r Deleting the .qcow2 file...", provider)
        rc = subprocess.call(['rm', qcowname])
        if rc != 0:
            logger.error("RHEVM:%r Failed to delete .qcow2 file %r", provider, qcowname)
logger.info("RHEVM:%r Deleting the temp_vm on sdomain...", provider)
temporary_vm = api.vms.get(temp_vm_name)
if temporary_vm:
temporary_vm.delete()
logger.info("RHEVM:%r Deleting the temp_template on sdomain...", provider)
temporary_template = api.templates.get(temp_template_name)
if temporary_template:
temporary_template.delete()
except Exception:
logger.exception("RHEVM:%r Exception occurred in cleanup method:", provider)
return False
def api_params_resolution(item_list, item_name, item_param):
"""Picks and prints info about parameter obtained by api call.
Args:
item_list: List of possible candidates to pick from.
item_name: Name of parameter obtained by api call.
item_param: Name of parameter representing data in the script.
"""
if len(item_list) == 0:
logger.info("RHEVM: Cannot find %r (%r) automatically.", item_name, item_param)
logger.info("Please specify it by cmd-line parameter '--%r' or in cfme_data.", item_param)
return None
elif len(item_list) > 1:
logger.info("RHEVM: Found multiple instances of %r. Picking '%r'.", item_name, item_list[0])
else:
logger.info("RHEVM: Found %r '%r'.", item_name, item_list[0])
return item_list[0]
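# A usage sketch (hypothetical values) of api_params_resolution: with several
# candidates the first one is picked and logged; with an empty list it logs a
# hint about the corresponding cmd-line parameter and returns None.
#
#   chosen = api_params_resolution(['data-dom-1', 'data-dom-2'],
#                                  'storage domain', 'sdomain')
#   # chosen == 'data-dom-1'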
def get_sdomain(api):
"""Discovers suitable storage domain automatically.
Args:
api: API to RHEVM instance.
"""
sdomain_names = []
for domain in api.storagedomains.list(status=None):
if domain.get_type() == 'data':
sdomain_names.append(domain.get_name())
return api_params_resolution(sdomain_names, 'storage domain', 'sdomain')
def get_cluster(api):
"""Discovers suitable cluster automatically.
Args:
api: API to RHEVM instance.
"""
cluster_names = []
for cluster in api.clusters.list():
for host in api.hosts.list():
if host.get_cluster().id == cluster.id:
cluster_names.append(cluster.get_name())
return api_params_resolution(cluster_names, 'cluster', 'cluster')
def check_kwargs(**kwargs):
for key, val in kwargs.iteritems():
if val is None:
logger.error("RHEVM: please supply required parameter '%r'.", key)
sys.exit(127)
def update_params_api(api, **kwargs):
"""Updates parameters with ones determined from api call.
Args:
api: API to RHEVM instance.
kwargs: Kwargs generated from cfme_data['template_upload']['template_upload_rhevm']
"""
if kwargs.get('sdomain') is None:
kwargs['sdomain'] = get_sdomain(api)
if kwargs.get('cluster') is None:
kwargs['cluster'] = get_cluster(api)
return kwargs
def make_kwargs(args, cfme_data, **kwargs):
"""Assembles all the parameters in case of running as a standalone script.
Makes sure, that the parameters given by command-line arguments have higher priority.
Makes sure, that all the needed parameters have proper values.
Args:
args: Arguments given from command-line.
cfme_data: Data in cfme_data.yaml
kwargs: Kwargs generated from cfme_data['template_upload']['template_upload_rhevm']
"""
args_kwargs = dict(args._get_kwargs())
    if not kwargs:
        return args_kwargs
template_name = kwargs.get('template_name')
if template_name is None:
template_name = cfme_data['basic_info']['appliance_template']
kwargs.update({'template_name': template_name})
    # command-line values (non-None) take priority over cfme_data-derived kwargs
    for akey, aval in args_kwargs.iteritems():
        if aval is not None and akey in kwargs and kwargs[akey] != aval:
            kwargs[akey] = aval
    # add any arguments that are not present in kwargs yet
    for akey, aval in args_kwargs.iteritems():
        if akey not in kwargs:
            kwargs[akey] = aval
    return kwargs
return kwargs
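# Merge-priority sketch for make_kwargs (hypothetical values): command-line
# arguments override cfme_data-derived kwargs, and any argument missing from
# kwargs is simply added.
#
#   kwargs = {'cluster': 'from-yaml', 'sdomain': 'dom-1'}
#   args_kwargs = {'cluster': 'from-cli', 'provider': 'rhv-1'}
#   # result: {'cluster': 'from-cli', 'sdomain': 'dom-1', 'provider': 'rhv-1'}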
def make_kwargs_rhevm(cfmeqe_data, provider):
data = cfmeqe_data['management_systems'][provider]
temp_up = cfme_data['template_upload']['template_upload_rhevm']
sdomain = data['template_upload'].get('sdomain')
cluster = data['template_upload'].get('cluster')
mgmt_network = data['template_upload'].get('management_network')
disk_size = temp_up.get('disk_size')
disk_format = temp_up.get('disk_format')
disk_interface = temp_up.get('disk_interface')
kwargs = {'provider': provider}
if sdomain:
kwargs['sdomain'] = sdomain
if cluster:
kwargs['cluster'] = cluster
if disk_size:
kwargs['disk_size'] = disk_size
if disk_format:
kwargs['disk_format'] = disk_format
if disk_interface:
kwargs['disk_interface'] = disk_interface
if mgmt_network:
kwargs['mgmt_network'] = mgmt_network
return kwargs
def upload_template(rhevip, sshname, sshpass, username, password,
provider, image_url, template_name, provider_data, stream, glance):
try:
logger.info("RHEVM:%r Template %r upload started", provider, template_name)
if provider_data:
kwargs = make_kwargs_rhevm(provider_data, provider)
providers = provider_data['management_systems']
api = get_mgmt(kwargs.get('provider'), providers=providers).api
else:
kwargs = make_kwargs_rhevm(cfme_data, provider)
api = get_mgmt(kwargs.get('provider')).api
        # resolve the default template name before it is used in concatenation below
        if template_name is None:
            template_name = cfme_data['basic_info']['appliance_template']
        kwargs['image_url'] = image_url
        kwargs['template_name'] = template_name
        qcowname = get_qcow_name(image_url)
        temp_template_name = 'auto-tmp-{}-{}'.format(
            fauxfactory.gen_alphanumeric(8), template_name)
        temp_vm_name = 'auto-vm-{}-{}'.format(
            fauxfactory.gen_alphanumeric(8), template_name)
kwargs = update_params_api(api, **kwargs)
check_kwargs(**kwargs)
if api.templates.get(template_name) is not None:
logger.info("RHEVM:%r Found finished template with name %r.", provider, template_name)
logger.info("RHEVM:%r The script will now end.", provider)
return True
logger.info("RHEVM:%r Downloading .qcow2 file...", provider)
download_qcow(kwargs.get('image_url'))
try:
logger.info("RHEVM:%r Uploading template to Glance", provider)
glance_args = {'image': qcowname, 'image_name_in_glance': template_name,
'provider': glance, 'disk_format': 'qcow2'}
getattr(__import__('image_upload_glance'), "run")(**glance_args)
logger.info("RHEVM:%r Adding Glance", provider)
add_glance(api, provider, glance)
logger.info("RHEVM:%r Importing new template to data domain", provider)
import_template_from_glance(api, kwargs.get('sdomain'), kwargs.get('cluster'),
temp_template_name, glance, provider, template_name)
logger.info("RHEVM:%r Making a temporary VM from new template", provider)
make_vm_from_template(api, stream, cfme_data, kwargs.get('cluster'), temp_template_name,
temp_vm_name, provider, mgmt_network=kwargs.get('mgmt_network'))
logger.info("RHEVM:%r Adding disk to created VM", provider)
add_disk_to_vm(api, kwargs.get('sdomain'), kwargs.get('disk_size'),
kwargs.get('disk_format'), kwargs.get('disk_interface'),
temp_vm_name, provider)
logger.info("RHEVM:%r Templatizing VM", provider)
templatize_vm(api, template_name, kwargs.get('cluster'), temp_vm_name, provider)
if not provider_data:
logger.info("RHEVM:%r Add template %r to trackerbot", provider, template_name)
trackerbot.trackerbot_add_provider_template(stream, provider, template_name)
finally:
cleanup(api, qcowname, provider, temp_template_name, temp_vm_name)
api.disconnect()
logger.info("RHEVM:%r Template %r upload Ended", provider, template_name)
if provider_data and api.templates.get(template_name):
logger.info("RHEVM:%r Deploying Template %r", provider, template_name)
vm_name = 'test_{}_{}'.format(template_name, fauxfactory.gen_alphanumeric(8))
deploy_args = {'provider': provider, 'vm_name': vm_name,
'template': template_name, 'deploy': True}
getattr(__import__('clone_template'), "main")(**deploy_args)
logger.info("RHEVM:%r Template %r upload Ended", provider, template_name)
except Exception:
logger.exception("RHEVM:%r Template %r upload exception", provider, template_name)
return False
def run(**kwargs):
"""Calls all the functions needed to upload new template to RHEVM.
This is called either by template_upload_all script, or by main function.
Args:
**kwargs: Kwargs generated from cfme_data['template_upload']['template_upload_rhevm'].
"""
thread_queue = []
valid_providers = []
providers = list_provider_keys("rhevm")
if kwargs['provider_data']:
mgmt_sys = providers = kwargs['provider_data']['management_systems']
for provider in providers:
if kwargs['provider_data']:
if mgmt_sys[provider]['type'] != 'rhevm':
continue
sshname = mgmt_sys[provider]['sshname']
sshpass = mgmt_sys[provider]['sshpass']
rhevip = mgmt_sys[provider]['ipaddress']
else:
mgmt_sys = cfme_data['management_systems']
ssh_rhevm_creds = mgmt_sys[provider]['ssh_creds']
sshname = credentials[ssh_rhevm_creds]['username']
sshpass = credentials[ssh_rhevm_creds]['password']
rhevip = mgmt_sys[provider]['ipaddress']
if (mgmt_sys[provider].get('template_upload') and
mgmt_sys[provider]['template_upload'].get('block_upload')):
            # Provider's template_upload section indicates upload should not happen on this provider
continue
logger.info("RHEVM:%r verifying provider's state before template upload", provider)
if not net.is_pingable(rhevip):
continue
elif not is_ovirt_engine_running(rhevip, sshname, sshpass):
            logger.info('RHEVM:%r ovirt-engine service not running.', provider)
continue
valid_providers.append(provider)
for provider in valid_providers:
if kwargs['provider_data']:
sshname = mgmt_sys[provider]['sshname']
sshpass = mgmt_sys[provider]['sshpass']
username = mgmt_sys[provider]['username']
password = mgmt_sys[provider]['password']
else:
ssh_rhevm_creds = mgmt_sys[provider]['ssh_creds']
sshname = credentials[ssh_rhevm_creds]['username']
sshpass = credentials[ssh_rhevm_creds]['password']
rhevm_credentials = mgmt_sys[provider]['credentials']
username = credentials[rhevm_credentials]['username']
password = credentials[rhevm_credentials]['password']
rhevip = mgmt_sys[provider]['ipaddress']
thread = Thread(target=upload_template,
args=(rhevip, sshname, sshpass, username, password, provider,
kwargs.get('image_url'), kwargs.get('template_name'),
kwargs['provider_data'], kwargs['stream'],
kwargs['glance']))
thread.daemon = True
thread_queue.append(thread)
thread.start()
for thread in thread_queue:
thread.join()
if __name__ == "__main__":
args = parse_cmd_line()
kwargs = cfme_data['template_upload']['template_upload_rhevm']
final_kwargs = make_kwargs(args, cfme_data, **kwargs)
run(**final_kwargs)
| gpl-2.0 | 2,697,047,169,316,477,400 | 39.037559 | 100 | 0.616635 | false |
sebastiandres/mat281 | clases/Unidad3-ModelamientoyError/Clase01-TrainingTestError/mat281_code/data_analysis.py | 2 | 1944 | import numpy as np
from matplotlib import pyplot as plt
# Define error function
def error(vector_e):
return abs(vector_e).mean()
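# Example: error(np.array([1.0, -2.0, 3.0])) returns 2.0 -- the mean of the
# absolute residuals, used below as the training/prediction error metric.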
# Load the data
N = 200
data = np.loadtxt("dataN%d.txt"%N)
is_sorted = True
s = "sorted" if is_sorted else ""
nmax = 71
# Some properties
color_t = "b"
color_p = "g"
# Sort or keep it unsorted
if is_sorted:
data = data[np.argsort(data[:,0])]
# Split into training and prediction data
t = int(N*.7)
x_t = data[:t,0]
x_p = data[t:,0]
y_t = data[:t,1]
y_p = data[t:,1]
# Some helper variables for nicer plotting
x = np.linspace(data[:,0].min(), data[:,0].max(), 1000)
# Fit best several models and record training error and prediction error
n_range = range(1, nmax)
error_t = []
error_p = []
for n in n_range:
fit_n = np.polyfit(x_t, y_t, n) # Obtains the best fitted polynomial of degree n
pol_n = np.poly1d(fit_n) # Creates the polynomial with coefficients as in fit n
plt.plot(x_t, y_t, 's'+color_t, alpha=0.5, label="Datos de Entrenamiento de Modelo")
if t<N:
plt.plot(x_p, y_p, 'o'+color_p, alpha=0.5, label="Datos para Testeo de Modelo")
plt.plot(x, 5*np.cos(.25*np.pi*x), 'k-', lw=2.0, label="Relacion determinista")
plt.plot(x, pol_n(x), 'r-', lw=2.0, label="Polinomio de grado %d"%n)
plt.xlim([-2.5,2.5])
plt.ylim([-5,10])
plt.legend(numpoints = 1, loc="lower center")
plt.savefig("images/data%sN%dpol%02d.png"%(s,N,n))
plt.close()
error_t.append( error(y_t - pol_n(x_t)) )
error_p.append( error(y_p - pol_n(x_p)) )
# Plot the errors
plt.loglog(n_range, error_t, "-s"+color_t, lw=2.0, label="Training error")
if t<N:
plt.loglog(n_range, error_p, "-o"+color_p, lw=2.0, label="Prediction error")
plt.legend(numpoints= 1)
plt.xlabel("Grado del polinomio")
plt.ylabel("Error")
plt.savefig("images/data%s_trainpred.png"%s)
plt.close()
# Save the error
data = np.array([ np.array(error_t), np.array(error_p)]).T
np.savetxt("images/data%serror_trainpred.txt"%s, data)
| cc0-1.0 | 6,435,367,202,859,878,000 | 28.454545 | 86 | 0.658951 | false |
yugangw-msft/azure-cli | src/azure-cli/azure/cli/command_modules/ams/operations/account.py | 2 | 4506 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from knack.util import CLIError
from azure.mgmt.media.models import (ApiErrorException, MediaService, MediaServiceIdentity, StorageAccount)
def get_mediaservice(client, account_name, resource_group_name=None):
return client.get(resource_group_name,
account_name) if resource_group_name else client.get_by_subscription(account_name)
def list_mediaservices(client, resource_group_name=None):
return client.list(resource_group_name) if resource_group_name else client.list_by_subscription()
def create_mediaservice(client, resource_group_name, account_name, storage_account, location=None,
assign_identity=False, tags=None):
storage_account_primary = StorageAccount(type='Primary', id=storage_account)
return create_or_update_mediaservice(client, resource_group_name, account_name, [storage_account_primary],
location, assign_identity,
tags)
def add_mediaservice_secondary_storage(client, resource_group_name, account_name, storage_account):
ams = client.get(resource_group_name, account_name)
storage_accounts_filtered = list(filter(lambda s: storage_account in s.id, ams.storage_accounts))
storage_account_secondary = StorageAccount(type='Secondary', id=storage_account)
if not storage_accounts_filtered:
ams.storage_accounts.append(storage_account_secondary)
        return create_or_update_mediaservice(client, resource_group_name, account_name,
                                             ams.storage_accounts,
                                             ams.location,
                                             tags=ams.tags)
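# Hypothetical usage sketch: `client` is the Media Services operations client
# injected by the CLI command table, and the storage account is passed as a
# full ARM resource ID, e.g.
#   add_mediaservice_secondary_storage(
#       client, 'myResourceGroup', 'myAmsAccount',
#       '/subscriptions/000.../resourceGroups/rg/providers/'
#       'Microsoft.Storage/storageAccounts/mysecondarystorage')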
def remove_mediaservice_secondary_storage(client, resource_group_name, account_name, storage_account):
ams = client.get(resource_group_name, account_name)
storage_accounts_filtered = list(filter(lambda s: storage_account not in s.id and 'Secondary' in s.type,
ams.storage_accounts))
primary_storage_account = list(filter(lambda s: 'Primary' in s.type, ams.storage_accounts))[0]
storage_accounts_filtered.append(primary_storage_account)
    return create_or_update_mediaservice(client, resource_group_name, account_name, storage_accounts_filtered,
                                         ams.location,
                                         tags=ams.tags)
def set_mediaservice_trusted_storage(client, resource_group_name, account_name,
storage_auth):
ams = client.get(resource_group_name, account_name)
media_service = MediaService(location=ams.location, storage_accounts=ams.storage_accounts,
storage_authentication=storage_auth)
return client.create_or_update(resource_group_name, account_name, media_service)
def create_or_update_mediaservice(client, resource_group_name, account_name, storage_accounts=None,
location=None, assign_identity=False,
tags=None):
identity = 'SystemAssigned' if assign_identity else 'None'
media_service = MediaService(location=location, storage_accounts=storage_accounts,
identity=MediaServiceIdentity(type=identity), tags=tags)
return client.create_or_update(resource_group_name, account_name, media_service)
def mediaservice_update_getter(client, resource_group_name, account_name):
try:
return client.get(resource_group_name, account_name)
except ApiErrorException as ex:
raise CLIError(ex.message)
def update_mediaservice(instance, tags=None):
if not instance:
raise CLIError('The account resource was not found.')
if tags:
instance.tags = tags
return instance
def check_name_availability(client, location, account_name):
availability = client.check_name_availability(location_name=location, name=account_name,
type='MICROSOFT.MEDIA/MEDIASERVICES')
if availability.name_available:
return 'Name available.'
return availability.message
| mit | 8,153,248,803,653,017,000 | 42.747573 | 110 | 0.633378 | false |
FibercorpLabs/FibercorpDevops | vmware/vcenter/rebootVM.py | 1 | 3239 | #rebootVM.py
from VMWConfigFile import *
from pyVim import connect
from pyVim.connect import SmartConnect, Disconnect
from pyVmomi import vim, vmodl
import atexit
import os
import sys
import ssl
import requests
import argparse
import time
import getpass
# Disabling urllib3 ssl warnings
requests.packages.urllib3.disable_warnings()
# Disabling SSL certificate verification
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_NONE
def get_obj(content, vimtype, name):
# """
# Get the vsphere object associated with a given text name
# """
obj = None
container = content.viewManager.CreateContainerView(content.rootFolder, vimtype, True)
for c in container.view:
if c.name == name:
obj = c
break
return obj
def wait_for_task(task, actionName='job', hideResult=False):
# """
# Waits and provides updates on a vSphere task
# """
while task.info.state == vim.TaskInfo.State.running:
time.sleep(2)
if task.info.state == vim.TaskInfo.State.success:
if task.info.result is not None and not hideResult:
out = '%s completed successfully, result: %s' % (actionName, task.info.result)
print out
else:
out = '%s completed successfully.' % actionName
print out
else:
        out = '%s did not complete successfully: %s' % (actionName, task.info.error)
        print out
        raise task.info.error
return task.info.result
def get_args():
""" Get arguments from CLI """
parser = argparse.ArgumentParser(description='Reboot VM soft or hard way')
parser.add_argument('-u', '--user', help='VC User', required=True)
parser.add_argument('-p', '--passw', help='VC User Pass', required=False)
parser.add_argument('-v', '--vm-name', required=True, help='Name of the VM')
parser.add_argument('--soft', help='Soft reboot.', action='store_true')
parser.add_argument('--hard', help='Hard reboot.', action='store_true')
args = parser.parse_args()
if not args.passw:
args.passw = getpass.getpass(
prompt='Enter password')
return args
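# Example invocations (hypothetical credentials and VM name):
#   python rebootVM.py -u administrator@vsphere.local -v app-vm-01 --soft
#   python rebootVM.py -u administrator@vsphere.local -v app-vm-01 --hard
# --soft asks the guest OS to reboot (requires VMware Tools); --hard resets
# the virtual hardware regardless of guest state.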
def main():
args = get_args()
try:
si = None
try:
#si = Service Instance of vCenter
si = connect.SmartConnect(host=vc_settings["vcenter"],
user=args.user,
pwd=args.passw,
port=443,
sslContext=context)
        except IOError, e:
            pass
        if not si:
            print "ERROR: could not connect to vCenter with the supplied credentials"
            return 1
        atexit.register(Disconnect, si)
content = si.RetrieveContent()
vm = get_obj(content, [vim.VirtualMachine], args.vm_name)
if vm:
if args.soft:
vm.RebootGuest()
return
if args.hard:
vm.ResetVM_Task()
return
else:
print "ERROR: VM not found"
except vmodl.MethodFault, e:
print "Caught vmodl fault: %s" % e.msg
return 1
except Exception, e:
print "Caught exception: %s" % str(e)
return 1
if __name__ == "__main__":
main()
| gpl-3.0 | -5,401,962,250,236,540,000 | 24.304688 | 90 | 0.582587 | false |
jiegec/gnuradio | grc/gui/MainWindow.py | 14 | 13437 | """
Copyright 2008, 2009, 2011 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
from Constants import \
NEW_FLOGRAPH_TITLE, DEFAULT_REPORTS_WINDOW_WIDTH
import Actions
import pygtk
pygtk.require('2.0')
import gtk
import Bars
from BlockTreeWindow import BlockTreeWindow
from Dialogs import TextDisplay, MessageDialogHelper
from NotebookPage import NotebookPage
import Preferences
import Messages
import Utils
import os
MAIN_WINDOW_TITLE_TMPL = """\
#if not $saved
*#slurp
#end if
#if $basename
$basename#slurp
#else
$new_flowgraph_title#slurp
#end if
#if $read_only
(read only)#slurp
#end if
#if $dirname
- $dirname#slurp
#end if
- $platform_name#slurp
"""
PAGE_TITLE_MARKUP_TMPL = """\
#set $foreground = $saved and 'black' or 'red'
<span foreground="$foreground">$encode($title or $new_flowgraph_title)</span>#slurp
#if $read_only
(ro)#slurp
#end if
"""
############################################################
# Main window
############################################################
class MainWindow(gtk.Window):
"""The topmost window with menus, the tool bar, and other major windows."""
def __init__(self, platform):
"""
        MainWindow constructor
Setup the menu, toolbar, flowgraph editor notebook, block selection window...
"""
self._platform = platform
#setup window
gtk.Window.__init__(self, gtk.WINDOW_TOPLEVEL)
vbox = gtk.VBox()
self.hpaned = gtk.HPaned()
self.add(vbox)
#create the menu bar and toolbar
self.add_accel_group(Actions.get_accel_group())
vbox.pack_start(Bars.MenuBar(), False)
vbox.pack_start(Bars.Toolbar(), False)
vbox.pack_start(self.hpaned)
#create the notebook
self.notebook = gtk.Notebook()
self.page_to_be_closed = None
self.current_page = None
self.notebook.set_show_border(False)
self.notebook.set_scrollable(True) #scroll arrows for page tabs
self.notebook.connect('switch-page', self._handle_page_change)
#setup containers
self.flow_graph_vpaned = gtk.VPaned()
self.flow_graph_vpaned.pack1(self.notebook)
self.hpaned.pack1(self.flow_graph_vpaned)
        self.btwin = BlockTreeWindow(platform, self.get_flow_graph)
        self.hpaned.pack2(self.btwin, False) #don't allow resize
#create the reports window
self.text_display = TextDisplay()
#house the reports in a scrolled window
self.reports_scrolled_window = gtk.ScrolledWindow()
self.reports_scrolled_window.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
self.reports_scrolled_window.add(self.text_display)
self.reports_scrolled_window.set_size_request(-1, DEFAULT_REPORTS_WINDOW_WIDTH)
        self.flow_graph_vpaned.pack2(self.reports_scrolled_window, False) #don't allow resize
#load preferences and show the main window
Preferences.load(platform)
self.resize(*Preferences.main_window_size())
self.flow_graph_vpaned.set_position(Preferences.reports_window_position())
self.hpaned.set_position(Preferences.blocks_window_position())
self.show_all()
self.reports_scrolled_window.hide()
self.btwin.hide()
############################################################
# Event Handlers
############################################################
def _quit(self, window, event):
"""
Handle the delete event from the main window.
Generated by pressing X to close, alt+f4, or right click+close.
This method in turns calls the state handler to quit.
Returns:
true
"""
Actions.APPLICATION_QUIT()
return True
def _handle_page_change(self, notebook, page, page_num):
"""
Handle a page change. When the user clicks on a new tab,
reload the flow graph to update the vars window and
call handle states (select nothing) to update the buttons.
Args:
notebook: the notebook
page: new page
page_num: new page number
"""
self.current_page = self.notebook.get_nth_page(page_num)
Messages.send_page_switch(self.current_page.get_file_path())
Actions.PAGE_CHANGE()
############################################################
# Report Window
############################################################
def add_report_line(self, line):
"""
Place line at the end of the text buffer, then scroll its window all the way down.
Args:
line: the new text
"""
self.text_display.insert(line)
############################################################
# Pages: create and close
############################################################
def new_page(self, file_path='', show=False):
"""
Create a new notebook page.
Set the tab to be selected.
Args:
file_path: optional file to load into the flow graph
show: true if the page should be shown after loading
"""
#if the file is already open, show the open page and return
if file_path and file_path in self._get_files(): #already open
page = self.notebook.get_nth_page(self._get_files().index(file_path))
self._set_page(page)
return
try: #try to load from file
if file_path: Messages.send_start_load(file_path)
flow_graph = self._platform.get_new_flow_graph()
            flow_graph.grc_file_path = file_path
page = NotebookPage(
self,
flow_graph=flow_graph,
file_path=file_path,
)
if file_path: Messages.send_end_load()
except Exception, e: #return on failure
Messages.send_fail_load(e)
if isinstance(e, KeyError) and str(e) == "'options'":
# This error is unrecoverable, so crash gracefully
exit(-1)
return
#add this page to the notebook
self.notebook.append_page(page, page.get_tab())
        try: self.notebook.set_tab_reorderable(page, True)
        except AttributeError: pass # gtk too old
self.notebook.set_tab_label_packing(page, False, False, gtk.PACK_START)
#only show if blank or manual
if not file_path or show: self._set_page(page)
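    # Typical calls (hypothetical paths): a blank tab via main_window.new_page(),
    # or loading a file and focusing its tab via
    #   main_window.new_page('/home/user/fm_radio.grc', show=True)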
def close_pages(self):
"""
Close all the pages in this notebook.
Returns:
true if all closed
"""
open_files = filter(lambda file: file, self._get_files()) #filter blank files
open_file = self.get_page().get_file_path()
#close each page
for page in sorted(self.get_pages(), key=lambda p: p.get_saved()):
self.page_to_be_closed = page
closed = self.close_page(False)
if not closed:
break
if self.notebook.get_n_pages(): return False
#save state before closing
Preferences.files_open(open_files)
Preferences.file_open(open_file)
Preferences.main_window_size(self.get_size())
Preferences.reports_window_position(self.flow_graph_vpaned.get_position())
Preferences.blocks_window_position(self.hpaned.get_position())
Preferences.save()
return True
def close_page(self, ensure=True):
"""
Close the current page.
If the notebook becomes empty, and ensure is true,
call new page upon exit to ensure that at least one page exists.
Args:
ensure: boolean
"""
if not self.page_to_be_closed: self.page_to_be_closed = self.get_page()
#show the page if it has an executing flow graph or is unsaved
if self.page_to_be_closed.get_proc() or not self.page_to_be_closed.get_saved():
self._set_page(self.page_to_be_closed)
#unsaved? ask the user
if not self.page_to_be_closed.get_saved():
response = self._save_changes() # return value is either OK, CLOSE, or CANCEL
if response == gtk.RESPONSE_OK:
Actions.FLOW_GRAPH_SAVE() #try to save
if not self.page_to_be_closed.get_saved(): #still unsaved?
self.page_to_be_closed = None #set the page to be closed back to None
return False
elif response == gtk.RESPONSE_CANCEL:
self.page_to_be_closed = None
return False
#stop the flow graph if executing
if self.page_to_be_closed.get_proc(): Actions.FLOW_GRAPH_KILL()
#remove the page
self.notebook.remove_page(self.notebook.page_num(self.page_to_be_closed))
if ensure and self.notebook.get_n_pages() == 0: self.new_page() #no pages, make a new one
self.page_to_be_closed = None #set the page to be closed back to None
return True
############################################################
# Misc
############################################################
def update(self):
"""
Set the title of the main window.
Set the titles on the page tabs.
Show/hide the reports window.
        """
gtk.Window.set_title(self, Utils.parse_template(MAIN_WINDOW_TITLE_TMPL,
basename=os.path.basename(self.get_page().get_file_path()),
dirname=os.path.dirname(self.get_page().get_file_path()),
new_flowgraph_title=NEW_FLOGRAPH_TITLE,
read_only=self.get_page().get_read_only(),
saved=self.get_page().get_saved(),
platform_name=self._platform.get_name(),
)
)
#set tab titles
for page in self.get_pages(): page.set_markup(
Utils.parse_template(PAGE_TITLE_MARKUP_TMPL,
#get filename and strip out file extension
title=os.path.splitext(os.path.basename(page.get_file_path()))[0],
read_only=page.get_read_only(), saved=page.get_saved(),
new_flowgraph_title=NEW_FLOGRAPH_TITLE,
)
)
#show/hide notebook tabs
self.notebook.set_show_tabs(len(self.get_pages()) > 1)
def update_pages(self):
"""
Forces a reload of all the pages in this notebook.
"""
for page in self.get_pages():
success = page.get_flow_graph().reload()
if success: # Only set saved if errors occurred during import
page.set_saved(False)
def get_page(self):
"""
Get the selected page.
Returns:
the selected page
"""
return self.current_page
def get_flow_graph(self):
"""
Get the selected flow graph.
Returns:
the selected flow graph
"""
return self.get_page().get_flow_graph()
def get_focus_flag(self):
"""
Get the focus flag from the current page.
Returns:
the focus flag
"""
return self.get_page().get_drawing_area().get_focus_flag()
############################################################
# Helpers
############################################################
def _set_page(self, page):
"""
Set the current page.
Args:
page: the page widget
"""
self.current_page = page
self.notebook.set_current_page(self.notebook.page_num(self.current_page))
def _save_changes(self):
"""
Save changes to flow graph?
Returns:
the response_id (see buttons variable below)
"""
buttons = (
'Close without saving', gtk.RESPONSE_CLOSE,
gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
gtk.STOCK_SAVE, gtk.RESPONSE_OK
)
return MessageDialogHelper(
gtk.MESSAGE_QUESTION, gtk.BUTTONS_NONE, 'Unsaved Changes!',
'Would you like to save changes before closing?', gtk.RESPONSE_OK, buttons
)
def _get_files(self):
"""
Get the file names for all the pages, in order.
Returns:
list of file paths
"""
return map(lambda page: page.get_file_path(), self.get_pages())
def get_pages(self):
"""
Get a list of all pages in the notebook.
Returns:
list of pages
"""
return [self.notebook.get_nth_page(page_num) for page_num in range(self.notebook.get_n_pages())]
| gpl-3.0 | 5,718,824,262,487,318,000 | 34.736702 | 104 | 0.573342 | false |
ngonzalvez/sentry | tests/sentry/api/endpoints/test_group_notes.py | 24 | 1679 | from django.core.urlresolvers import reverse
from sentry.models import Activity
from sentry.testutils import APITestCase
class GroupNoteTest(APITestCase):
def test_simple(self):
group = self.group
activity = Activity.objects.create(
group=group,
project=group.project,
type=Activity.NOTE,
user=self.user,
data={'text': 'hello world'},
)
self.login_as(user=self.user)
url = reverse('sentry-api-0-group-notes', kwargs={
'group_id': self.group.id,
})
response = self.client.get(url, format='json')
assert response.status_code == 200, response.content
assert len(response.data) == 1
assert response.data[0]['id'] == str(activity.id)
class GroupNoteCreateTest(APITestCase):
def test_simple(self):
group = self.group
self.login_as(user=self.user)
url = reverse('sentry-api-0-group-notes', kwargs={
'group_id': self.group.id,
})
response = self.client.post(url, format='json')
assert response.status_code == 400
response = self.client.post(url, format='json', data={
'text': 'hello world',
})
assert response.status_code == 201, response.content
activity = Activity.objects.get(id=response.data['id'])
assert activity.user == self.user
assert activity.group == group
assert activity.data == {'text': 'hello world'}
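        # A repeated identical note is rejected with a 400 instead of creating
        # a second activity record.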
response = self.client.post(url, format='json', data={
'text': 'hello world',
})
assert response.status_code == 400, response.content
| bsd-3-clause | -7,394,578,444,712,941,000 | 29.527273 | 63 | 0.593806 | false |
mikewiebe-ansible/ansible | lib/ansible/modules/cloud/docker/docker_swarm_info.py | 14 | 13683 | #!/usr/bin/python
#
# (c) 2019 Piotr Wojciechowski <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: docker_swarm_info
short_description: Retrieves facts about Docker Swarm cluster.
description:
- Retrieves facts about a Docker Swarm.
- Returns lists of swarm objects names for the services - nodes, services, tasks.
- The output differs depending on API version available on docker host.
- Must be run on Swarm Manager node; otherwise module fails with error message.
    It does return boolean flags on both error and success, which indicate whether
the docker daemon can be communicated with, whether it is in Swarm mode, and
whether it is a Swarm Manager node.
version_added: "2.8"
author:
- Piotr Wojciechowski (@WojciechowskiPiotr)
options:
nodes:
description:
- Whether to list swarm nodes.
type: bool
default: no
nodes_filters:
description:
- A dictionary of filter values used for selecting nodes to list.
- "For example, C(name: mynode)."
- See L(the docker documentation,https://docs.docker.com/engine/reference/commandline/node_ls/#filtering)
for more information on possible filters.
type: dict
services:
description:
- Whether to list swarm services.
type: bool
default: no
services_filters:
description:
- A dictionary of filter values used for selecting services to list.
- "For example, C(name: myservice)."
- See L(the docker documentation,https://docs.docker.com/engine/reference/commandline/service_ls/#filtering)
for more information on possible filters.
type: dict
tasks:
description:
- Whether to list containers.
type: bool
default: no
tasks_filters:
description:
- A dictionary of filter values used for selecting tasks to list.
- "For example, C(node: mynode-1)."
- See L(the docker documentation,https://docs.docker.com/engine/reference/commandline/service_ps/#filtering)
for more information on possible filters.
type: dict
unlock_key:
description:
- Whether to retrieve the swarm unlock key.
type: bool
default: no
verbose_output:
description:
- When set to C(yes) and I(nodes), I(services) or I(tasks) is set to C(yes), then the module output will
contain verbose information about objects matching the full output of API method.
- For details see the documentation of your version of Docker API at U(https://docs.docker.com/engine/api/).
- The verbose output in this module contains only subset of information returned by I(_info) module
for each type of the objects.
type: bool
default: no
extends_documentation_fragment:
- docker
- docker.docker_py_1_documentation
requirements:
- "L(Docker SDK for Python,https://docker-py.readthedocs.io/en/stable/) >= 1.10.0 (use L(docker-py,https://pypi.org/project/docker-py/) for Python 2.6)"
- "Docker API >= 1.24"
'''
EXAMPLES = '''
- name: Get info on Docker Swarm
docker_swarm_info:
ignore_errors: yes
register: result
- name: Inform about basic flags
debug:
msg: |
Was able to talk to docker daemon: {{ result.can_talk_to_docker }}
Docker in Swarm mode: {{ result.docker_swarm_active }}
This is a Manager node: {{ result.docker_swarm_manager }}
- block:
- name: Get info on Docker Swarm and list of registered nodes
docker_swarm_info:
nodes: yes
register: result
- name: Get info on Docker Swarm and extended list of registered nodes
docker_swarm_info:
nodes: yes
verbose_output: yes
register: result
- name: Get info on Docker Swarm and filtered list of registered nodes
docker_swarm_info:
nodes: yes
      nodes_filters:
name: mynode
register: result
- debug:
var: result.swarm_facts
- name: Get the swarm unlock key
docker_swarm_info:
unlock_key: yes
register: result
- debug:
var: result.swarm_unlock_key
'''
RETURN = '''
can_talk_to_docker:
description:
- Will be C(true) if the module can talk to the docker daemon.
returned: both on success and on error
type: bool
docker_swarm_active:
description:
- Will be C(true) if the module can talk to the docker daemon,
and the docker daemon is in Swarm mode.
returned: both on success and on error
type: bool
docker_swarm_manager:
description:
- Will be C(true) if the module can talk to the docker daemon,
the docker daemon is in Swarm mode, and the current node is
a manager node.
- Only if this one is C(true), the module will not fail.
returned: both on success and on error
type: bool
swarm_facts:
description:
- Facts representing the basic state of the docker Swarm cluster.
- Contains tokens to connect to the Swarm
returned: always
type: dict
swarm_unlock_key:
description:
- Contains the key needed to unlock the swarm.
returned: When I(unlock_key) is C(true).
type: str
nodes:
description:
      - List of dict objects containing the basic information about each node.
        Keys match the C(docker node ls) output unless I(verbose_output=yes).
See description for I(verbose_output).
returned: When I(nodes) is C(yes)
type: list
elements: dict
services:
description:
      - List of dict objects containing the basic information about each service.
        Keys match the C(docker service ls) output unless I(verbose_output=yes).
See description for I(verbose_output).
returned: When I(services) is C(yes)
type: list
elements: dict
tasks:
description:
      - List of dict objects containing the basic information about each task.
        Keys match the C(docker service ps) output unless I(verbose_output=yes).
See description for I(verbose_output).
returned: When I(tasks) is C(yes)
type: list
elements: dict
'''
import traceback
try:
from docker.errors import DockerException, APIError
except ImportError:
# missing Docker SDK for Python handled in ansible.module_utils.docker_common
pass
from ansible.module_utils._text import to_native
from ansible.module_utils.docker.swarm import AnsibleDockerSwarmClient
from ansible.module_utils.docker.common import (
DockerBaseClass,
clean_dict_booleans_for_docker_api,
RequestException,
)
class DockerSwarmManager(DockerBaseClass):
def __init__(self, client, results):
super(DockerSwarmManager, self).__init__()
self.client = client
self.results = results
self.verbose_output = self.client.module.params['verbose_output']
listed_objects = ['tasks', 'services', 'nodes']
self.client.fail_task_if_not_swarm_manager()
self.results['swarm_facts'] = self.get_docker_swarm_facts()
for docker_object in listed_objects:
if self.client.module.params[docker_object]:
returned_name = docker_object
filter_name = docker_object + "_filters"
filters = clean_dict_booleans_for_docker_api(client.module.params.get(filter_name))
self.results[returned_name] = self.get_docker_items_list(docker_object, filters)
if self.client.module.params['unlock_key']:
self.results['swarm_unlock_key'] = self.get_docker_swarm_unlock_key()
def get_docker_swarm_facts(self):
try:
return self.client.inspect_swarm()
except APIError as exc:
self.client.fail("Error inspecting docker swarm: %s" % to_native(exc))
def get_docker_items_list(self, docker_object=None, filters=None):
items = None
items_list = []
try:
if docker_object == 'nodes':
items = self.client.nodes(filters=filters)
elif docker_object == 'tasks':
items = self.client.tasks(filters=filters)
elif docker_object == 'services':
items = self.client.services(filters=filters)
except APIError as exc:
self.client.fail("Error inspecting docker swarm for object '%s': %s" %
(docker_object, to_native(exc)))
if self.verbose_output:
return items
        for item in items:
            item_record = dict()
            if docker_object == 'nodes':
                item_record = self.get_essential_facts_nodes(item)
            elif docker_object == 'tasks':
                item_record = self.get_essential_facts_tasks(item)
            elif docker_object == 'services':
                item_record = self.get_essential_facts_services(item)
                # only service records carry a 'Mode' key; a global service
                # gets its replica count from the number of returned items
                if item_record['Mode'] == 'Global':
                    item_record['Replicas'] = len(items)
            items_list.append(item_record)
return items_list
@staticmethod
def get_essential_facts_nodes(item):
object_essentials = dict()
object_essentials['ID'] = item.get('ID')
object_essentials['Hostname'] = item['Description']['Hostname']
object_essentials['Status'] = item['Status']['State']
object_essentials['Availability'] = item['Spec']['Availability']
if 'ManagerStatus' in item:
object_essentials['ManagerStatus'] = item['ManagerStatus']['Reachability']
if 'Leader' in item['ManagerStatus'] and item['ManagerStatus']['Leader'] is True:
object_essentials['ManagerStatus'] = "Leader"
else:
object_essentials['ManagerStatus'] = None
object_essentials['EngineVersion'] = item['Description']['Engine']['EngineVersion']
return object_essentials
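    # A node record assembled above looks roughly like (hypothetical values):
    #   {'ID': '1a2b3c', 'Hostname': 'swarm-manager-1', 'Status': 'ready',
    #    'Availability': 'active', 'ManagerStatus': 'Leader',
    #    'EngineVersion': '19.03.5'}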
def get_essential_facts_tasks(self, item):
object_essentials = dict()
object_essentials['ID'] = item['ID']
# Returning container ID to not trigger another connection to host
# Container ID is sufficient to get extended info in other tasks
object_essentials['ContainerID'] = item['Status']['ContainerStatus']['ContainerID']
object_essentials['Image'] = item['Spec']['ContainerSpec']['Image']
object_essentials['Node'] = self.client.get_node_name_by_id(item['NodeID'])
object_essentials['DesiredState'] = item['DesiredState']
object_essentials['CurrentState'] = item['Status']['State']
if 'Err' in item['Status']:
object_essentials['Error'] = item['Status']['Err']
else:
object_essentials['Error'] = None
return object_essentials
@staticmethod
def get_essential_facts_services(item):
object_essentials = dict()
object_essentials['ID'] = item['ID']
object_essentials['Name'] = item['Spec']['Name']
if 'Replicated' in item['Spec']['Mode']:
object_essentials['Mode'] = "Replicated"
object_essentials['Replicas'] = item['Spec']['Mode']['Replicated']['Replicas']
elif 'Global' in item['Spec']['Mode']:
object_essentials['Mode'] = "Global"
# Number of replicas have to be updated in calling method or may be left as None
object_essentials['Replicas'] = None
object_essentials['Image'] = item['Spec']['TaskTemplate']['ContainerSpec']['Image']
if 'Ports' in item['Spec']['EndpointSpec']:
object_essentials['Ports'] = item['Spec']['EndpointSpec']['Ports']
else:
object_essentials['Ports'] = []
return object_essentials
def get_docker_swarm_unlock_key(self):
unlock_key = self.client.get_unlock_key() or {}
return unlock_key.get('UnlockKey') or None
def main():
argument_spec = dict(
nodes=dict(type='bool', default=False),
nodes_filters=dict(type='dict'),
tasks=dict(type='bool', default=False),
tasks_filters=dict(type='dict'),
services=dict(type='bool', default=False),
services_filters=dict(type='dict'),
unlock_key=dict(type='bool', default=False),
verbose_output=dict(type='bool', default=False),
)
option_minimal_versions = dict(
unlock_key=dict(docker_py_version='2.7.0', docker_api_version='1.25'),
)
client = AnsibleDockerSwarmClient(
argument_spec=argument_spec,
supports_check_mode=True,
min_docker_version='1.10.0',
min_docker_api_version='1.24',
option_minimal_versions=option_minimal_versions,
fail_results=dict(
can_talk_to_docker=False,
docker_swarm_active=False,
docker_swarm_manager=False,
),
)
client.fail_results['can_talk_to_docker'] = True
client.fail_results['docker_swarm_active'] = client.check_if_swarm_node()
client.fail_results['docker_swarm_manager'] = client.check_if_swarm_manager()
try:
results = dict(
changed=False,
)
DockerSwarmManager(client, results)
results.update(client.fail_results)
client.module.exit_json(**results)
except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(e), exception=traceback.format_exc())
except RequestException as e:
client.fail('An unexpected requests error occurred when docker-py tried to talk to the docker daemon: {0}'.format(e), exception=traceback.format_exc())
if __name__ == '__main__':
main()
| gpl-3.0 | 1,616,440,225,463,591,000 | 34.265464 | 159 | 0.646788 | false |
fibbo/DIRAC | Workflow/Modules/ModuleBase.py | 1 | 21520 | """ ModuleBase - contains the base class for workflow modules. Defines several common utility methods.
The modules defined within this package are developed in a way to be executed by a DIRAC.Core.Worfklow.Worfklow.
In particular, a DIRAC.Core.Workflow.Worfklow object will only call the "execute" function, that is defined here.
These modules, inspired by the LHCb experience, give the possibility to define simple user and production jobs.
Many VOs might want to extend this package. And actually, for some cases, it will be necessary. For example,
defining the LFN output at runtime (within the "UploadOutputs" module is a VO specific operation.
The DIRAC APIs are used to create Jobs that make use of these modules.
"""
import os, copy
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
from DIRAC.TransformationSystem.Client.FileReport import FileReport
from DIRAC.RequestManagementSystem.Client.Request import Request
from DIRAC.RequestManagementSystem.private.RequestValidator import RequestValidator
from DIRAC.DataManagementSystem.Client.DataManager import DataManager
class ModuleBase( object ):
""" Base class for Modules - works only within DIRAC workflows
This module, inheriting by "object", can use cooperative methods, very useful here.
"""
#############################################################################
def __init__( self, loggerIn = None ):
""" Initialization of module base.
loggerIn is a logger object that can be passed so that the logging will be more clear.
"""
if not loggerIn:
self.log = gLogger.getSubLogger( 'ModuleBase' )
else:
self.log = loggerIn
# These 2 are used in many places, so it's good to have them available here.
self.opsH = Operations()
self.dm = DataManager()
# Some job parameters
self.production_id = 0
self.prod_job_id = 0
self.jobID = 0
self.step_number = 0
self.step_id = 0
self.jobType = ''
self.executable = ''
self.command = None
self.workflowStatus = None
self.stepStatus = None
self.workflow_commons = None
self.step_commons = None
# These are useful objects (see the getFileReporter(), getJobReporter() and getRequestContainer() functions)
self.fileReport = None
self.jobReport = None
self.request = None
#############################################################################
def execute( self ):
""" Function called by all super classes. This is the only function that Workflow will call automatically.
The design adopted here is that all the modules are inheriting from this class,
and will NOT override this function. Instead, the inherited modules will override the following functions:
_resolveInputVariables()
_initialize()
_setCommand()
_executeCommand()
_execute()
that are called here exactly in this order.
Each implementation of these functions, in the subclasses, should never return S_OK, S_ERROR.
This choice has been made for convenience of coding, and for the high level of inheritance implemented here.
Instead, they should return:
- None when no issues arise
- a RuntimeError exception when there are issues
- a GracefulTermination exception (defined also here) when the module should be terminated gracefully
The various parameters in input to this method are used almost only for testing purposes.
"""
if not self.production_id:
# self.PRODUCTION_ID is always set by the workflow
self.production_id = int( self.PRODUCTION_ID )
if not self.prod_job_id:
# self.JOB_ID is set by the workflow, but this is not the WMS job id, but the transformation (production) task id
self.prod_job_id = int( self.JOB_ID )
if not self.jobID:
# this is the real wms job ID
if os.environ.has_key( 'JOBID' ):
self.jobID = int( os.environ['JOBID'] )
if not self.step_number:
# self.STEP_NUMBER is always set by the workflow
self.step_number = int( self.STEP_NUMBER )
if not self.step_id:
self.step_id = '%d_%d_%d' % ( self.production_id, self.prod_job_id, self.step_number )
try:
# This is what has to be extended in the modules
self._resolveInputVariables()
self._initialize()
self._setCommand()
self._executeCommand()
self._execute()
self._finalize()
# If everything is OK
except GracefulTermination, status:
self.setApplicationStatus( status )
self.log.info( status )
return S_OK( status )
# This catches everything that is voluntarily thrown within the modules, so an error
except RuntimeError, e:
self.log.error( e )
self.setApplicationStatus( e )
return S_ERROR( e )
# This catches everything that is not voluntarily thrown (here, really writing an exception)
except Exception, e:
self.log.exception( e )
self.setApplicationStatus( e )
return S_ERROR( e )
finally:
self.finalize()
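  # A minimal subclass sketch (hypothetical module name) showing the
  # template-method contract of execute(): only the hooks are overridden,
  # and termination is signalled with exceptions rather than S_OK/S_ERROR.
  #
  #   class HelloWorld( ModuleBase ):
  #     def _execute( self ):
  #       if not self._enableModule():
  #         raise GracefulTermination( 'Module disabled, nothing to do' )
  #       self.log.info( 'Hello from step %s' % self.step_id )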
def _resolveInputVariables( self ):
""" By convention the module input parameters are resolved here.
fileReport, jobReport, and request objects are instantiated/recorded here.
This will also call the resolution of the input workflow.
The resolution of the input step should instead be done on a step basis.
NB: Never forget to call this base method when extending it.
"""
self.log.verbose( "workflow_commons = ", self.workflow_commons )
self.log.verbose( "step_commons = ", self.step_commons )
if not self.fileReport:
self.fileReport = self._getFileReporter()
if not self.jobReport:
self.jobReport = self._getJobReporter()
if not self.request:
self.request = self._getRequestContainer()
self._resolveInputWorkflow()
def _initialize( self ):
""" TBE
For initializing the module, whatever operation this can be
"""
pass
def _setCommand( self ):
""" TBE
For "executors" modules, set the command to be used in the self.command variable.
"""
pass
def _executeCommand( self ):
""" TBE
For "executors" modules, executes self.command as set in the _setCommand() method
"""
pass
def _execute( self ):
""" TBE
Executes, whatever this means for the module implementing it
"""
pass
def _finalize( self, status = '' ):
""" TBE
By default, the module finalizes correctly
"""
if not status:
status = '%s correctly finalized' % str( self.__class__ )
raise GracefulTermination, status
#############################################################################
def finalize( self ):
""" Just finalizing the module execution by flushing the logs. This will be done always.
"""
self.log.flushAllMessages( 0 )
self.log.info( '===== Terminating ' + str( self.__class__ ) + ' ===== ' )
#############################################################################
def _getJobReporter( self ):
""" just return the job reporter (object, always defined by dirac-jobexec)
"""
if self.workflow_commons.has_key( 'JobReport' ):
return self.workflow_commons['JobReport']
else:
jobReport = JobReport( self.jobID )
self.workflow_commons['JobReport'] = jobReport
return jobReport
#############################################################################
def _getFileReporter( self ):
""" just return the file reporter (object)
"""
if self.workflow_commons.has_key( 'FileReport' ):
return self.workflow_commons['FileReport']
else:
fileReport = FileReport()
self.workflow_commons['FileReport'] = fileReport
return fileReport
#############################################################################
def _getRequestContainer( self ):
""" just return the RequestContainer reporter (object)
"""
if self.workflow_commons.has_key( 'Request' ):
return self.workflow_commons['Request']
else:
request = Request()
self.workflow_commons['Request'] = request
return request
#############################################################################
def _resolveInputWorkflow( self ):
""" Resolve the input variables that are in the workflow_commons
"""
if self.workflow_commons.has_key( 'JobType' ):
self.jobType = self.workflow_commons['JobType']
self.InputData = ''
if self.workflow_commons.has_key( 'InputData' ):
if self.workflow_commons['InputData']:
self.InputData = self.workflow_commons['InputData']
if self.workflow_commons.has_key( 'ParametricInputData' ):
pID = copy.deepcopy( self.workflow_commons['ParametricInputData'] )
if pID:
if type( pID ) == type( [] ):
pID = ';'.join( pID )
# self.InputData += ';' + pID
self.InputData = pID
self.InputData = self.InputData.rstrip( ';' )
if self.InputData == ';':
self.InputData = ''
self.inputDataList = [lfn.strip( 'LFN:' ) for lfn in self.InputData.split( ';' ) if lfn]
if self.workflow_commons.has_key( 'appSteps' ):
self.appSteps = self.workflow_commons['appSteps']
if self.workflow_commons.has_key( 'outputDataFileMask' ):
self.outputDataFileMask = self.workflow_commons['outputDataFileMask']
if not type( self.outputDataFileMask ) == type( [] ):
self.outputDataFileMask = [i.lower().strip() for i in self.outputDataFileMask.split( ';' )]
#############################################################################
def _resolveInputStep( self ):
""" Resolve the input variables for an application step
"""
self.stepName = self.step_commons['STEP_INSTANCE_NAME']
if self.step_commons.has_key( 'executable' ) and self.step_commons['executable']:
self.executable = self.step_commons['executable']
else:
self.executable = 'Unknown'
if self.step_commons.has_key( 'applicationName' ) and self.step_commons['applicationName']:
self.applicationName = self.step_commons['applicationName']
else:
self.applicationName = 'Unknown'
if self.step_commons.has_key( 'applicationVersion' ) and self.step_commons['applicationVersion']:
self.applicationVersion = self.step_commons['applicationVersion']
else:
self.applicationVersion = 'Unknown'
if self.step_commons.has_key( 'applicationLog' ):
self.applicationLog = self.step_commons['applicationLog']
else:
self.applicationLog = 'applicationLog.txt'
stepInputData = []
if self.step_commons.has_key( 'inputData' ):
if self.step_commons['inputData']:
stepInputData = self.step_commons['inputData']
elif self.InputData:
stepInputData = copy.deepcopy( self.InputData )
if stepInputData:
stepInputData = self._determineStepInputData( stepInputData, )
self.stepInputData = [sid.strip( 'LFN:' ) for sid in stepInputData]
#############################################################################
def _determineStepInputData( self, inputData ):
""" determine the input data for the step
"""
if inputData == 'previousStep':
stepIndex = self.appSteps.index( self.stepName )
previousStep = self.appSteps[stepIndex - 1]
stepInputData = []
for outputF in self.workflow_commons['outputList']:
try:
if outputF['stepName'] == previousStep and outputF['outputDataType'].lower() == self.inputDataType.lower():
stepInputData.append( outputF['outputDataName'] )
except KeyError:
raise RuntimeError, 'Can\'t find output of step %s' % previousStep
return stepInputData
else:
return [x.strip( 'LFN:' ) for x in inputData.split( ';' )]
#############################################################################
def setApplicationStatus( self, status, sendFlag = True ):
"""Wraps around setJobApplicationStatus of state update client
"""
if not self._WMSJob():
return 0 # e.g. running locally prior to submission
if self._checkWFAndStepStatus( noPrint = True ):
# The application status won't be updated in case the workflow or the step is failed already
if not type( status ) == type( '' ):
status = str( status )
self.log.verbose( 'setJobApplicationStatus(%d, %s)' % ( self.jobID, status ) )
jobStatus = self.jobReport.setApplicationStatus( status, sendFlag )
if not jobStatus['OK']:
self.log.warn( jobStatus['Message'] )
#############################################################################
def _WMSJob( self ):
""" Check if this job is running via WMS
"""
return True if self.jobID else False
#############################################################################
def _enableModule( self ):
""" Enable module if it's running via WMS
"""
if not self._WMSJob():
self.log.info( 'No WMS JobID found, disabling module via control flag' )
return False
else:
self.log.verbose( 'Found WMS JobID = %d' % self.jobID )
return True
#############################################################################
def _checkWFAndStepStatus( self, noPrint = False ):
""" Check the WF and Step status
"""
if not self.workflowStatus['OK'] or not self.stepStatus['OK']:
if not noPrint:
self.log.info( 'Skip this module, failure detected in a previous step :' )
self.log.info( 'Workflow status : %s' % ( self.workflowStatus ) )
self.log.info( 'Step Status : %s' % ( self.stepStatus ) )
return False
else:
return True
#############################################################################
def setJobParameter( self, name, value, sendFlag = True ):
"""Wraps around setJobParameter of state update client
"""
if not self._WMSJob():
return 0 # e.g. running locally prior to submission
self.log.verbose( 'setJobParameter(%d,%s,%s)' % ( self.jobID, name, value ) )
jobParam = self.jobReport.setJobParameter( str( name ), str( value ), sendFlag )
if not jobParam['OK']:
self.log.warn( jobParam['Message'] )
#############################################################################
def getCandidateFiles( self, outputList, outputLFNs, fileMask, stepMask = '' ):
""" Returns list of candidate files to upload, check if some outputs are missing.
outputList has the following structure:
[ {'outputDataType':'','outputDataSE':'','outputDataName':''} , {...} ]
outputLFNs is the list of output LFNs for the job
fileMask is the output file extensions to restrict the outputs to
returns dictionary containing type, SE and LFN for files restricted by mask
"""
fileInfo = {}
for outputFile in outputList:
      if 'outputDataType' in outputFile \
         and 'outputDataSE' in outputFile \
         and 'outputDataName' in outputFile:
fname = outputFile['outputDataName']
fileSE = outputFile['outputDataSE']
fileType = outputFile['outputDataType']
fileInfo[fname] = {'type':fileType, 'workflowSE':fileSE}
else:
self.log.error( 'Ignoring malformed output data specification', str( outputFile ) )
for lfn in outputLFNs:
      if os.path.basename( lfn ) in fileInfo:
fileInfo[os.path.basename( lfn )]['lfn'] = lfn
self.log.verbose( 'Found LFN %s for file %s' % ( lfn, os.path.basename( lfn ) ) )
    # check local existence
self._checkLocalExistance( fileInfo.keys() )
# Select which files have to be uploaded: in principle all
candidateFiles = self._applyMask( fileInfo, fileMask, stepMask )
# Sanity check all final candidate metadata keys are present (return S_ERROR if not)
self._checkSanity( candidateFiles )
return candidateFiles
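  # Illustrative example (added for clarity; the values are hypothetical). With
  #   outputList = [ {'outputDataType': 'DST', 'outputDataSE': 'Tier1-DST', 'outputDataName': '00001234_00000001_1.dst'} ]
  #   outputLFNs = [ '/lhcb/MC/2012/DST/00001234/0000/00001234_00000001_1.dst' ]
  #   fileMask   = [ 'dst' ]
  # and the file present locally, getCandidateFiles returns
  #   { '00001234_00000001_1.dst': { 'type': 'DST', 'workflowSE': 'Tier1-DST',
  #                                  'lfn': '/lhcb/MC/2012/DST/00001234/0000/00001234_00000001_1.dst' } }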
#############################################################################
def _applyMask( self, candidateFilesIn, fileMask, stepMask ):
""" Select which files have to be uploaded: in principle all
"""
candidateFiles = copy.deepcopy( candidateFilesIn )
    if fileMask and not isinstance( fileMask, list ):
      fileMask = [fileMask]
    if isinstance( stepMask, int ):
      stepMask = str( stepMask )
    if stepMask and not isinstance( stepMask, list ):
      stepMask = [stepMask]
if fileMask and fileMask != ['']:
      for fileName, metadata in list( candidateFiles.items() ):
        if metadata['type'].lower() not in fileMask:  # and ( fileName.split( '.' )[-1] not in fileMask ) ):
          del candidateFiles[fileName]
self.log.info( 'Output file %s was produced but will not be treated (fileMask is %s)' % ( fileName,
', '.join( fileMask ) ) )
else:
      self.log.info( 'No outputDataFileMask provided, files with any extension will be considered' )
if stepMask and stepMask != ['']:
# FIXME: This supposes that the LFN contains the step ID
      for fileName, metadata in list( candidateFiles.items() ):
        if fileName.split( '_' )[-1].split( '.' )[0] not in stepMask:
          del candidateFiles[fileName]
self.log.info( 'Output file %s was produced but will not be treated (stepMask is %s)' % ( fileName,
', '.join( stepMask ) ) )
else:
      self.log.info( 'No stepMask provided, the output files of all the steps will be considered' )
return candidateFiles
#############################################################################
def _checkSanity( self, candidateFiles ):
""" Sanity check all final candidate metadata keys are present
"""
notPresentKeys = []
mandatoryKeys = ['type', 'workflowSE', 'lfn'] # filedict is used for requests
for fileName, metadata in candidateFiles.items():
for key in mandatoryKeys:
        if key not in metadata:
notPresentKeys.append( ( fileName, key ) )
if notPresentKeys:
for fileName_keys in notPresentKeys:
self.log.error( 'File %s has missing %s' % ( fileName_keys[0], fileName_keys[1] ) )
      raise ValueError( 'Missing mandatory metadata key(s) for candidate files: %s' % notPresentKeys )
#############################################################################
def _checkLocalExistance( self, fileList ):
""" Check that the list of output files are present locally
"""
notPresentFiles = []
for fileName in fileList:
if not os.path.exists( fileName ):
notPresentFiles.append( fileName )
if notPresentFiles:
self.log.error( 'Output data file list %s does not exist locally' % notPresentFiles )
      raise OSError( 'Output data not present locally: %s' % notPresentFiles )
#############################################################################
def generateFailoverFile( self ):
""" Retrieve the accumulated reporting request, and produce a JSON file that is consumed by the JobWrapper
"""
reportRequest = None
result = self.jobReport.generateForwardDISET()
if not result['OK']:
self.log.warn( "Could not generate Operation for job report with result:\n%s" % ( result ) )
else:
reportRequest = result['Value']
if reportRequest:
self.log.info( "Populating request with job report information" )
self.request.addOperation( reportRequest )
accountingReport = None
    if 'AccountingReport' in self.workflow_commons:
accountingReport = self.workflow_commons['AccountingReport']
if accountingReport:
result = accountingReport.commit()
if not result['OK']:
self.log.error( "!!! Both accounting and RequestDB are down? !!!" )
return result
if len( self.request ):
isValid = RequestValidator().validate( self.request )
if not isValid['OK']:
raise RuntimeError, "Failover request is not valid: %s" % isValid['Message']
else:
requestJSON = self.request.toJSON()
if requestJSON['OK']:
self.log.info( "Creating failover request for deferred operations for job %d" % self.jobID )
request_string = str( requestJSON['Value'] )
self.log.debug( request_string )
# Write out the request string
fname = '%d_%d_request.json' % ( self.production_id, self.prod_job_id )
          with open( fname, 'w' ) as jsonFile:
            jsonFile.write( request_string )
self.log.info( "Created file containing failover request %s" % fname )
result = self.request.getDigest()
if result['OK']:
self.log.info( "Digest of the request: %s" % result['Value'] )
else:
self.log.error( "No digest? That's not sooo important, anyway:", result['Message'] )
else:
          raise RuntimeError( requestJSON['Message'] )
#############################################################################
#############################################################################
class GracefulTermination( Exception ):
pass
#############################################################################
| gpl-3.0 | -2,820,127,240,254,847,500 | 36.29636 | 120 | 0.595864 | false |
PaloAltoNetworks/terraform-templates | pan_guard_duty/lambda_code/pan/wfapi.py | 2 | 28343 | #
# Copyright (c) 2013-2017 Kevin Steves <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
"""Interface to the WildFire API
The pan.wfapi module implements the PanWFapi class. It provides an
interface to the WildFire API on Palo Alto Networks' WildFire Cloud
and WildFire appliance.
"""
# XXX Using the requests module which uses urllib3 and has support
# for multipart form-data would make this much simpler/cleaner (main
# issue is support for Python 2.x and 3.x in one source). However I
# decided to not require non-default modules. That decision may need
# to be revisited as some parts of this are not clean.
from __future__ import print_function
import socket
import sys
import os
from io import BytesIO
import email
import email.errors
import email.utils
import logging
try:
# 3.2
from urllib.request import Request, \
build_opener, HTTPErrorProcessor, HTTPSHandler
from urllib.error import URLError
from urllib.parse import urlencode
from http.client import responses
_legacy_urllib = False
except ImportError:
# 2.7
from urllib2 import Request, URLError, \
build_opener, HTTPErrorProcessor, HTTPSHandler
from urllib import urlencode
from httplib import responses
_legacy_urllib = True
import xml.etree.ElementTree as etree
from . import __version__, DEBUG1, DEBUG2, DEBUG3
import pan.rc
try:
import ssl
except ImportError:
raise ValueError('SSL support not available')
try:
import certifi
_have_certifi = True
except ImportError:
_have_certifi = False
_cloud_server = 'wildfire.paloaltonetworks.com'
_encoding = 'utf-8'
_rfc2231_encode = False
_wildfire_responses = {
418: 'Unsupported File Type',
}
BENIGN = 0
MALWARE = 1
GRAYWARE = 2
PHISHING = 4
PENDING = -100
ERROR = -101
UNKNOWN = -102
INVALID = -103
VERDICTS = {
BENIGN: ('benign', None),
MALWARE: ('malware', None),
GRAYWARE: ('grayware', None),
PHISHING: ('phishing', None),
PENDING: ('pending', 'sample exists and verdict not known'),
ERROR: ('error', 'sample is in error state'),
UNKNOWN: ('unknown', 'sample does not exist'),
INVALID: ('invalid', 'hash is invalid'),
}
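# Illustrative note (added; not part of the original API): VERDICTS maps the
# integer verdict codes above to a (name, description) tuple, e.g.
#   name, description = VERDICTS[MALWARE]   # -> ('malware', None)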
def _isunicode(s):
try:
if isinstance(s, unicode):
return True
return False
except NameError:
if isinstance(s, str):
return True
return False
def _isbytes(s):
try:
if isinstance(s, basestring) and isinstance(s, bytes):
return True
return False
except NameError:
if isinstance(s, bytes):
return True
return False
class PanWFapiError(Exception):
pass
class PanWFapi:
def __init__(self,
tag=None,
hostname=None,
api_key=None,
timeout=None,
http=False,
ssl_context=None):
self._log = logging.getLogger(__name__).log
self.tag = tag
self.hostname = hostname
self.api_key = None
self.timeout = timeout
self.ssl_context = ssl_context
self._log(DEBUG3, 'Python version: %s', sys.version)
self._log(DEBUG3, 'xml.etree.ElementTree version: %s', etree.VERSION)
self._log(DEBUG3, 'ssl: %s', ssl.OPENSSL_VERSION)
self._log(DEBUG3, 'pan-python version: %s', __version__)
if self.timeout is not None:
try:
self.timeout = int(self.timeout)
if not self.timeout > 0:
raise ValueError
except ValueError:
raise PanWFapiError('Invalid timeout: %s' % self.timeout)
if self.ssl_context is not None:
try:
ssl.SSLContext(ssl.PROTOCOL_SSLv23)
except AttributeError:
raise PanWFapiError('SSL module has no SSLContext()')
elif _have_certifi:
self.ssl_context = self._certifi_ssl_context()
# handle Python versions with no ssl.CertificateError
if hasattr(ssl, 'CertificateError'):
self._certificateerror = ssl.CertificateError
else:
self._certificateerror = NotImplementedError # XXX Can't happen
init_panrc = {} # .panrc args from constructor
if hostname is not None:
init_panrc['hostname'] = hostname
if api_key is not None:
init_panrc['api_key'] = api_key
try:
panrc = pan.rc.PanRc(tag=self.tag,
init_panrc=init_panrc)
except pan.rc.PanRcError as msg:
raise PanWFapiError(str(msg))
if 'api_key' in panrc.panrc:
self.api_key = panrc.panrc['api_key']
if 'hostname' in panrc.panrc:
self.hostname = panrc.panrc['hostname']
else:
self.hostname = _cloud_server
if self.api_key is None:
raise PanWFapiError('api_key required')
if http:
self.uri = 'http://%s' % self.hostname
else:
self.uri = 'https://%s' % self.hostname
if _legacy_urllib:
self._log(DEBUG2, 'using legacy urllib')
def __str__(self):
x = self.__dict__.copy()
for k in x:
if k in ['api_key'] and x[k] is not None:
x[k] = '*' * 6
return '\n'.join((': '.join((k, str(x[k]))))
for k in sorted(x))
def __clear_response(self):
# XXX naming
self._msg = None
self.http_code = None
self.http_reason = None
self.response_body = None
self.response_type = None
self.xml_element_root = None
self.attachment = None
def __set_response(self, response):
message_body = response.read()
content_type = self._message.get_content_type()
if not content_type:
if self._msg is None:
self._msg = 'no content-type response header'
return False
if content_type == 'application/octet-stream':
return self.__set_stream_response(response, message_body)
# XXX text/xml RFC 3023
elif (content_type == 'application/xml' or
content_type == 'text/xml'):
return self.__set_xml_response(message_body)
elif content_type == 'text/html':
return self.__set_html_response(message_body)
else:
msg = 'no handler for content-type: %s' % content_type
self._msg = msg
return False
def __set_stream_response(self, response, message_body):
filename = self._message.get_filename()
if not filename:
self._msg = 'no content-disposition response header'
return False
attachment = {}
attachment['filename'] = filename
attachment['content'] = message_body
self.attachment = attachment
return True
def __set_xml_response(self, message_body):
self._log(DEBUG2, '__set_xml_response: %s', repr(message_body))
self.response_type = 'xml'
_message_body = message_body.decode(_encoding)
if len(_message_body) == 0:
return True
self.response_body = _message_body
# ParseError: "XML or text declaration not at start of entity"
# fix: remove leading blank lines if exist
_message_body = message_body
while (_message_body[0:1] == b'\r' or
_message_body[0:1] == b'\n'):
_message_body = _message_body[1:]
if len(_message_body) == 0:
return True
try:
element = etree.fromstring(_message_body)
except etree.ParseError as msg:
self._msg = 'ElementTree.fromstring ParseError: %s' % msg
return False
self.xml_element_root = element
return True
def __set_html_response(self, message_body):
self._log(DEBUG2, '__set_html_response: %s', repr(message_body))
self.response_type = 'html'
_message_body = message_body.decode()
if len(_message_body) == 0:
return True
self.response_body = _message_body
return True
# XXX store tostring() results?
# XXX rework this
def xml_root(self):
if self.xml_element_root is None:
return None
s = etree.tostring(self.xml_element_root, encoding=_encoding)
if not s:
return None
self._log(DEBUG3, 'xml_root: %s', type(s))
self._log(DEBUG3, 'xml_root.decode(): %s', type(s.decode(_encoding)))
return s.decode(_encoding)
# XXX Unicode notes
# 2.7
# decode() str (bytes) -> unicode
# encode() unicode -> str (bytes)
# encode() of str will call decode()
# 3.x
# decode() bytes -> str (unicode)
# encode() str (unicode) -> bytes
# cannot encode() bytes
# cannot decode() str
def __api_request(self, request_uri, body, headers={}):
url = self.uri
url += request_uri
# body must by type 'bytes' for 3.x
if _isunicode(body):
body = body.encode()
request = Request(url, body, headers)
self._log(DEBUG1, 'URL: %s', url)
self._log(DEBUG1, 'method: %s', request.get_method())
self._log(DEBUG1, 'headers: %s', request.header_items())
# XXX leaks apikey
# self._log(DEBUG3, 'body: %s', repr(body))
kwargs = {
'url': request,
}
if self.ssl_context is not None:
kwargs['context'] = self.ssl_context
if self.timeout is not None:
kwargs['timeout'] = self.timeout
try:
response = self._urlopen(**kwargs)
except self._certificateerror as e:
self._msg = 'ssl.CertificateError: %s' % e
return False
except (URLError, IOError) as e:
self._log(DEBUG2, 'urlopen() exception: %s', sys.exc_info())
self._msg = str(e)
return False
self.http_code = response.getcode()
if hasattr(response, 'reason'):
# 3.2
self.http_reason = response.reason
elif hasattr(response, 'msg'):
# 2.7
self.http_reason = response.msg
if self.http_reason == '':
if self.http_code in _wildfire_responses:
self.http_reason = _wildfire_responses[self.http_code]
elif self.http_code in responses:
self.http_reason = responses[self.http_code]
try:
self._message = email.message_from_string(str(response.info()))
except (TypeError, email.errors.MessageError) as e:
raise PanWFapiError('email.message_from_string() %s' % e)
self._log(DEBUG2, 'HTTP response code: %s', self.http_code)
self._log(DEBUG2, 'HTTP response reason: %s', self.http_reason)
self._log(DEBUG2, 'HTTP response headers:')
self._log(DEBUG2, '%s', self._message)
if not (200 <= self.http_code < 300):
self._msg = 'HTTP Error %s: %s' % (self.http_code,
self.http_reason)
self.__set_response(response)
return False
return response
def _read_file(self, path):
try:
f = open(path, 'rb')
except IOError as e:
msg = 'open: %s: %s' % (path, e)
self._msg = msg
return None
buf = f.read()
f.close()
self._log(DEBUG2, 'path: %s %d', type(path), len(path))
self._log(DEBUG2, 'path: %s size: %d', path, len(buf))
if logging.getLogger(__name__).getEffectiveLevel() == DEBUG3:
import hashlib
md5 = hashlib.md5()
md5.update(buf)
sha256 = hashlib.sha256()
sha256.update(buf)
self._log(DEBUG3, 'MD5: %s', md5.hexdigest())
self._log(DEBUG3, 'SHA256: %s', sha256.hexdigest())
return buf
def report(self,
hash=None,
format=None):
self.__clear_response()
request_uri = '/publicapi/get/report'
query = {}
query['apikey'] = self.api_key
if hash is not None:
query['hash'] = hash
if format is not None:
query['format'] = format
response = self.__api_request(request_uri=request_uri,
body=urlencode(query))
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def verdict(self,
hash=None):
self.__clear_response()
request_uri = '/publicapi/get/verdict'
query = {}
query['apikey'] = self.api_key
if hash is not None:
query['hash'] = hash
response = self.__api_request(request_uri=request_uri,
body=urlencode(query))
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def verdicts(self,
hashes=None):
self.__clear_response()
request_uri = '/publicapi/get/verdicts'
form = _MultiPartFormData()
form.add_field('apikey', self.api_key)
if hashes is not None:
form.add_field('file', '\n'.join(hashes))
headers = form.http_headers()
body = form.http_body()
response = self.__api_request(request_uri=request_uri,
body=body, headers=headers)
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def verdicts_changed(self,
date=None):
self.__clear_response()
request_uri = '/publicapi/get/verdicts/changed'
query = {}
query['apikey'] = self.api_key
if date is not None:
query['date'] = date
response = self.__api_request(request_uri=request_uri,
body=urlencode(query))
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def sample(self,
hash=None):
self.__clear_response()
request_uri = '/publicapi/get/sample'
query = {}
query['apikey'] = self.api_key
if hash is not None:
query['hash'] = hash
response = self.__api_request(request_uri=request_uri,
body=urlencode(query))
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def pcap(self,
hash=None,
platform=None):
self.__clear_response()
request_uri = '/publicapi/get/pcap'
query = {}
query['apikey'] = self.api_key
if hash is not None:
query['hash'] = hash
if platform is not None:
query['platform'] = platform
response = self.__api_request(request_uri=request_uri,
body=urlencode(query))
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def testfile(self):
self.__clear_response()
request_uri = '/publicapi/test/pe'
query = {}
response = self.__api_request(request_uri=request_uri,
body=urlencode(query))
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def submit(self,
file=None,
url=None,
links=None):
self.__clear_response()
if (sum(bool(x) for x in [file, url, links]) != 1):
raise PanWFapiError('must submit one of file, url or links')
if file is not None:
request_uri = '/publicapi/submit/file'
elif url is not None:
request_uri = '/publicapi/submit/url'
        elif len(links) == 1:
            request_uri = '/publicapi/submit/link'
        else:
            request_uri = '/publicapi/submit/links'
form = _MultiPartFormData()
form.add_field('apikey', self.api_key)
if file is not None:
buf = self._read_file(file)
if buf is None:
raise PanWFapiError(self._msg)
filename = os.path.basename(file)
form.add_file(filename, buf)
if url is not None:
form.add_field('url', url)
if links is not None:
if len(links) == 1:
form.add_field('link', links[0])
elif len(links) > 1:
magic = 'panlnk' # XXX should be optional in future
# XXX requires filename in Content-Disposition header
if links[0] == magic:
form.add_file(filename='pan',
body='\n'.join(links))
else:
form.add_file(filename='pan',
body=magic + '\n' + '\n'.join(links))
headers = form.http_headers()
body = form.http_body()
response = self.__api_request(request_uri=request_uri,
body=body, headers=headers)
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def change_request(self,
hash=None,
verdict=None,
email=None,
comment=None):
self.__clear_response()
request_uri = '/publicapi/submit/change-request'
form = _MultiPartFormData()
form.add_field('apikey', self.api_key)
if hash is not None:
form.add_field('hash', hash)
if verdict is not None:
form.add_field('verdict', verdict)
if email is not None:
form.add_field('email', email)
if comment is not None:
form.add_field('comment', comment)
headers = form.http_headers()
body = form.http_body()
response = self.__api_request(request_uri=request_uri,
body=body, headers=headers)
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
# allow non-2XX error codes
# see http://bugs.python.org/issue18543 for why we can't just
# install a new HTTPErrorProcessor()
@staticmethod
def _urlopen(url, data=None,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
cafile=None, capath=None, cadefault=False,
context=None):
def http_response(request, response):
return response
http_error_processor = HTTPErrorProcessor()
http_error_processor.https_response = http_response
if context:
https_handler = HTTPSHandler(context=context)
opener = build_opener(https_handler, http_error_processor)
else:
opener = build_opener(http_error_processor)
return opener.open(url, data, timeout)
def _certifi_ssl_context(self):
if (sys.version_info.major == 2 and sys.hexversion >= 0x02070900 or
sys.version_info.major == 3 and sys.hexversion >= 0x03040300):
where = certifi.where()
self._log(DEBUG1, 'certifi %s: %s', certifi.__version__, where)
return ssl.create_default_context(
purpose=ssl.Purpose.SERVER_AUTH,
cafile=where)
else:
return None
#
# XXX USE OF cloud_ssl_context() IS DEPRECATED!
#
# If your operating system certificate store is out of date you can
# install certifi (https://pypi.python.org/pypi/certifi) and its CA
# bundle will be used for SSL server certificate verification when
# ssl_context is None.
#
def cloud_ssl_context():
# WildFire cloud cafile:
# https://certs.godaddy.com/anonymous/repository.pki
# Go Daddy Class 2 Certification Authority Root Certificate
# use:
# $ openssl x509 -in wfapi.py -text
# to view text form.
gd_class2_root_crt = b'''
-----BEGIN CERTIFICATE-----
MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
ReYNnyicsbkqWletNw+vHX/bvZ8=
-----END CERTIFICATE-----
'''
if (sys.version_info.major == 2 and sys.hexversion >= 0x02070900 or
sys.version_info.major == 3 and sys.hexversion >= 0x03040300):
        # XXX python >= 2.7.9 needs cadata as Unicode, or we get:
# 'ssl.SSLError: nested asn1 error'
return ssl.create_default_context(
purpose=ssl.Purpose.SERVER_AUTH,
cadata=gd_class2_root_crt.decode())
else:
return None
# Minimal RFC 2388 implementation
# Content-Type: multipart/form-data; boundary=___XXX
#
# Content-Disposition: form-data; name="apikey"
#
# XXXkey
# --___XXX
# Content-Disposition: form-data; name="file"; filename="XXXname"
# Content-Type: application/octet-stream
#
# XXXfilecontents
# --___XXX--
class _MultiPartFormData:
def __init__(self):
self._log = logging.getLogger(__name__).log
self.parts = []
self.boundary = self._boundary()
def add_field(self, name, value):
part = _FormDataPart(name=name,
body=value)
self.parts.append(part)
def add_file(self, filename=None, body=None):
part = _FormDataPart(name='file')
if filename is not None:
part.append_header('filename', filename)
if body is not None:
part.add_header(b'Content-Type: application/octet-stream')
part.add_body(body)
self.parts.append(part)
def _boundary(self):
rand_bytes = 48
prefix_char = b'_'
prefix_len = 16
import base64
try:
import os
seq = os.urandom(rand_bytes)
self._log(DEBUG1, '_MultiPartFormData._boundary: %s',
'using os.urandom')
except NotImplementedError:
import random
self._log(DEBUG1, '_MultiPartFormData._boundary: %s',
'using random')
seq = bytearray()
[seq.append(random.randrange(256)) for i in range(rand_bytes)]
prefix = prefix_char * prefix_len
boundary = prefix + base64.b64encode(seq)
return boundary
def http_headers(self):
# headers cannot be bytes
boundary = self.boundary.decode('ascii')
headers = {
'Content-Type':
'multipart/form-data; boundary=' + boundary,
}
return headers
def http_body(self):
bio = BytesIO()
boundary = b'--' + self.boundary
for part in self.parts:
bio.write(boundary)
bio.write(b'\r\n')
bio.write(part.serialize())
bio.write(b'\r\n')
bio.write(boundary)
bio.write(b'--')
return bio.getvalue()
class _FormDataPart:
def __init__(self, name=None, body=None):
self._log = logging.getLogger(__name__).log
self.headers = []
self.add_header(b'Content-Disposition: form-data')
self.append_header('name', name)
self.body = None
if body is not None:
self.add_body(body)
def add_header(self, header):
self.headers.append(header)
self._log(DEBUG1, '_FormDataPart.add_header: %s', self.headers[-1])
def append_header(self, name, value):
self.headers[-1] += b'; ' + self._encode_field(name, value)
self._log(DEBUG1, '_FormDataPart.append_header: %s', self.headers[-1])
def _encode_field(self, name, value):
self._log(DEBUG1, '_FormDataPart._encode_field: %s %s',
type(name), type(value))
if not _rfc2231_encode:
s = '%s="%s"' % (name, value)
self._log(DEBUG1, '_FormDataPart._encode_field: %s %s',
type(s), s)
if _isunicode(s):
s = s.encode('utf-8')
self._log(DEBUG1, '_FormDataPart._encode_field: %s %s',
type(s), s)
return s
if not [ch for ch in '\r\n\\' if ch in value]:
try:
return ('%s="%s"' % (name, value)).encode('ascii')
except UnicodeEncodeError:
self._log(DEBUG1, 'UnicodeEncodeError 3.x')
except UnicodeDecodeError: # 2.x
self._log(DEBUG1, 'UnicodeDecodeError 2.x')
# RFC 2231
value = email.utils.encode_rfc2231(value, 'utf-8')
return ('%s*=%s' % (name, value)).encode('ascii')
def add_body(self, body):
if _isunicode(body):
body = body.encode('latin-1')
self.body = body
self._log(DEBUG1, '_FormDataPart.add_body: %s %d',
type(self.body), len(self.body))
def serialize(self):
bio = BytesIO()
bio.write(b'\r\n'.join(self.headers))
bio.write(b'\r\n\r\n')
if self.body is not None:
bio.write(self.body)
return bio.getvalue()
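# Illustrative sketch (added; not part of the original module): how the RFC 2388
# helpers above assemble a multipart/form-data request, as submit() does.
# The field values are made up:
#   form = _MultiPartFormData()
#   form.add_field('apikey', 'my-api-key')
#   form.add_file(filename='sample.bin', body=b'\x00\x01')
#   headers = form.http_headers()  # {'Content-Type': 'multipart/form-data; boundary=...'}
#   body = form.http_body()        # bytes suitable as the urllib Request body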
if __name__ == '__main__':
# python -m pan.wfapi [tag] [sha256]
import pan.wfapi
tag = None
sha256 = '5f31d8658a41aa138ada548b7fb2fc758219d40b557aaeab80681d314f739f92'
if len(sys.argv) > 1 and sys.argv[1]:
tag = sys.argv[1]
    if len(sys.argv) > 2:
        sha256 = sys.argv[2]
try:
wfapi = pan.wfapi.PanWFapi(tag=tag)
except pan.wfapi.PanWFapiError as msg:
print('pan.wfapi.PanWFapi:', msg, file=sys.stderr)
sys.exit(1)
try:
wfapi.report(hash=sha256)
except pan.wfapi.PanWFapiError as msg:
print('report: %s' % msg, file=sys.stderr)
sys.exit(1)
if (wfapi.response_body is not None):
print(wfapi.response_body)
| apache-2.0 | -3,523,264,815,343,682,000 | 30.810325 | 79 | 0.579896 | false |
fusionbox/django_polymorphic | example/pexp/management/commands/pcmd.py | 4 | 1110 | # -*- coding: utf-8 -*-
"""
This module is a scratchpad for general development, testing & debugging.
"""
from django.core.management.base import NoArgsCommand
from django.db import connection
from pprint import pprint
from pexp.models import *
def reset_queries():
connection.queries=[]
def show_queries():
    print
    print 'QUERIES:', len(connection.queries)
    pprint(connection.queries)
    print
    connection.queries = []
class Command(NoArgsCommand):
help = ""
def handle_noargs(self, **options):
Project.objects.all().delete()
a=Project.objects.create(topic="John's gathering")
b=ArtProject.objects.create(topic="Sculpting with Tim", artist="T. Turner")
c=ResearchProject.objects.create(topic="Swallow Aerodynamics", supervisor="Dr. Winter")
print Project.objects.all()
print
ModelA.objects.all().delete()
a=ModelA.objects.create(field1='A1')
b=ModelB.objects.create(field1='B1', field2='B2')
c=ModelC.objects.create(field1='C1', field2='C2', field3='C3')
print ModelA.objects.all()
print
| bsd-3-clause | 3,803,204,990,946,562,000 | 30.714286 | 109 | 0.676577 | false |
halfwit/qutebrowser | tests/end2end/features/test_editor_bdd.py | 8 | 2227 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
import sys
import textwrap
import pytest_bdd as bdd
bdd.scenarios('editor.feature')
@bdd.when(bdd.parsers.parse('I set up a fake editor replacing "{text}" by '
'"{replacement}"'))
def set_up_editor_replacement(quteproc, httpbin, tmpdir, text, replacement):
"""Set up general->editor to a small python script doing a replacement."""
text = text.replace('(port)', str(httpbin.port))
script = tmpdir / 'script.py'
script.write(textwrap.dedent("""
import sys
with open(sys.argv[1], encoding='utf-8') as f:
data = f.read()
data = data.replace("{text}", "{replacement}")
with open(sys.argv[1], 'w', encoding='utf-8') as f:
f.write(data)
""".format(text=text, replacement=replacement)))
editor = '"{}" "{}" {{}}'.format(sys.executable, script)
quteproc.set_setting('general', 'editor', editor)
@bdd.when(bdd.parsers.parse('I set up a fake editor returning "{text}"'))
def set_up_editor(quteproc, httpbin, tmpdir, text):
"""Set up general->editor to a small python script inserting a text."""
script = tmpdir / 'script.py'
script.write(textwrap.dedent("""
import sys
with open(sys.argv[1], 'w', encoding='utf-8') as f:
f.write({text!r})
""".format(text=text)))
editor = '"{}" "{}" {{}}'.format(sys.executable, script)
quteproc.set_setting('general', 'editor', editor)
| gpl-3.0 | 5,305,178,909,674,312,000 | 36.745763 | 78 | 0.663673 | false |
crunchr/silk | silk/views/profiling.py | 4 | 5330 | from django.template.context_processors import csrf
from django.db.models import Count, Sum
from django.shortcuts import render
from django.utils.decorators import method_decorator
from django.views.generic import View
from silk.auth import login_possibly_required, permissions_possibly_required
from silk.models import Profile, Request
from silk.request_filters import BaseFilter, filters_from_request
class ProfilingView(View):
show = [5, 10, 25, 100, 250]
default_show = 25
order_by = ['Recent',
'Name',
'Function Name',
'Num. Queries',
'Time',
'Time on queries']
    default_order_by = 'Recent'
session_key_profile_filters = 'session_key_profile_filters'
def __init__(self, **kwargs):
super(ProfilingView, self).__init__(**kwargs)
def _get_distinct_values(self, field, silk_request):
if silk_request:
query_set = Profile.objects.filter(request=silk_request)
else:
query_set = Profile.objects.all()
function_names = [x[field] for x in query_set.values(field).distinct()]
# Ensure top, default option is ''
try:
function_names.remove('')
except ValueError:
pass
return [''] + function_names
def _get_function_names(self, silk_request=None):
return self._get_distinct_values('func_name', silk_request)
def _get_names(self, silk_request=None):
return self._get_distinct_values('name', silk_request)
def _get_objects(self, show=None, order_by=None, name=None, func_name=None, silk_request=None, filters=None):
if not filters:
filters = []
if not show:
show = self.default_show
manager = Profile.objects
if silk_request:
query_set = manager.filter(request=silk_request)
else:
query_set = manager.all()
if not order_by:
            order_by = self.default_order_by
if order_by == 'Recent':
query_set = query_set.order_by('-start_time')
elif order_by == 'Name':
query_set = query_set.order_by('-name')
elif order_by == 'Function Name':
query_set = query_set.order_by('-func_name')
elif order_by == 'Num. Queries':
query_set = query_set.annotate(num_queries=Count('queries')).order_by('-num_queries')
elif order_by == 'Time':
query_set = query_set.order_by('-time_taken')
elif order_by == 'Time on queries':
query_set = query_set.annotate(db_time=Sum('queries__time_taken')).order_by('-db_time')
elif order_by:
raise RuntimeError('Unknown order_by: "%s"' % order_by)
if func_name:
query_set = query_set.filter(func_name=func_name)
if name:
query_set = query_set.filter(name=name)
for f in filters:
query_set = f.contribute_to_query_set(query_set)
query_set = query_set.filter(f)
return list(query_set[:show])
def _create_context(self, request, *args, **kwargs):
request_id = kwargs.get('request_id')
if request_id:
silk_request = Request.objects.get(pk=request_id)
else:
silk_request = None
show = request.GET.get('show', self.default_show)
        order_by = request.GET.get('order_by', self.default_order_by)
if show:
show = int(show)
func_name = request.GET.get('func_name', None)
name = request.GET.get('name', None)
filters = request.session.get(self.session_key_profile_filters, {})
context = {
'show': show,
'order_by': order_by,
'request': request,
'func_name': func_name,
'options_show': self.show,
'options_order_by': self.order_by,
'options_func_names': self._get_function_names(silk_request),
'options_names': self._get_names(silk_request),
'filters': filters
}
context.update(csrf(request))
if silk_request:
context['silk_request'] = silk_request
if func_name:
context['func_name'] = func_name
if name:
context['name'] = name
objs = self._get_objects(show=show,
order_by=order_by,
func_name=func_name,
silk_request=silk_request,
name=name,
filters=[BaseFilter.from_dict(x) for _, x in filters.items()])
context['results'] = objs
return context
@method_decorator(login_possibly_required)
@method_decorator(permissions_possibly_required)
def get(self, request, *args, **kwargs):
return render(request, 'silk/profiling.html', self._create_context(request, *args, **kwargs))
@method_decorator(login_possibly_required)
@method_decorator(permissions_possibly_required)
def post(self, request):
filters = filters_from_request(request)
request.session[self.session_key_profile_filters] = {ident: f.as_dict() for ident, f in filters.items()}
return render(request, 'silk/profiling.html', self._create_context(request))
| mit | -987,536,538,163,589,600 | 39.687023 | 113 | 0.581051 | false |
csmengwan/autorest | AutoRest/Generators/Python/Python.Tests/Expected/AcceptanceTests/BodyDateTimeRfc1123/autorestrfc1123datetimetestservice/operations/datetimerfc1123.py | 5 | 14823 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class Datetimerfc1123(object):
"""Datetimerfc1123 operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def get_null(
self, custom_headers={}, raw=False, **operation_config):
"""
Get null datetime value
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetimerfc1123/null'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('rfc-1123', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_invalid(
self, custom_headers={}, raw=False, **operation_config):
"""
Get invalid datetime value
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetimerfc1123/invalid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('rfc-1123', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_overflow(
self, custom_headers={}, raw=False, **operation_config):
"""
Get overflow datetime value
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetimerfc1123/overflow'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('rfc-1123', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_underflow(
self, custom_headers={}, raw=False, **operation_config):
"""
Get underflow datetime value
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetimerfc1123/underflow'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('rfc-1123', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_utc_max_date_time(
self, datetime_body, custom_headers={}, raw=False, **operation_config):
"""
Put max datetime value Fri, 31 Dec 9999 23:59:59 GMT
:param datetime_body:
:type datetime_body: datetime
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetimerfc1123/max'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(datetime_body, 'rfc-1123')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_utc_lowercase_max_date_time(
self, custom_headers={}, raw=False, **operation_config):
"""
Get max datetime value fri, 31 dec 9999 23:59:59 gmt
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetimerfc1123/max/lowercase'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('rfc-1123', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_utc_uppercase_max_date_time(
self, custom_headers={}, raw=False, **operation_config):
"""
Get max datetime value FRI, 31 DEC 9999 23:59:59 GMT
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetimerfc1123/max/uppercase'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('rfc-1123', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_utc_min_date_time(
self, datetime_body, custom_headers={}, raw=False, **operation_config):
"""
Put min datetime value Mon, 1 Jan 0001 00:00:00 GMT
:param datetime_body:
:type datetime_body: datetime
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetimerfc1123/min'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(datetime_body, 'rfc-1123')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_utc_min_date_time(
self, custom_headers={}, raw=False, **operation_config):
"""
Get min datetime value Mon, 1 Jan 0001 00:00:00 GMT
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: datetime
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/datetimerfc1123/min'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('rfc-1123', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
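# Illustrative usage sketch (added; not generated code). This operations class is
# instantiated by the generated service client, which exposes it as an attribute.
# Assuming a hypothetical client object for this test service:
#   client = AutoRestRFC1123DateTimeTestService(base_url)   # construction details vary
#   value = client.datetimerfc1123.get_null()               # datetime or None
#   client.datetimerfc1123.put_utc_max_date_time(value)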
| mit | -2,344,175,173,345,570,000 | 33.795775 | 84 | 0.626729 | false |
fsantini/rasPyCNCController | gcode/gcodeconv.py | 1 | 2866 | # rasPyCNCController
# Copyright 2016 Francesco Santini <[email protected]>
#
# This file is part of rasPyCNCController.
#
# rasPyCNCController is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# rasPyCNCController is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with rasPyCNCController. If not, see <http://www.gnu.org/licenses/>.
import fileinput
import re
class GCodeConverter:
lastG = 'G0'
lastX = 'X0'
lastY = 'Y0'
def __init__(self):
pass
def convert(self, line):
line = line.strip().upper()
if line.startswith('G'):
self.lastG = line.split(' ')[0] # get the GCode
try:
gcodeVal = int(self.lastG[1:])
if gcodeVal not in [0, 1, 2, 3]: # if it's not a movement gcode then don't modify
return line
            except ValueError: # gcode is not an int: return unmodified
return line
# is there an X?
xre = re.search('X[0-9.-]+', line)
xfound = False
if xre is not None:
self.lastX = xre.group(0)
xfound = True
yre = re.search('Y[0-9.-]+', line)
yfound = False
if yre is not None:
self.lastY = yre.group(0)
yfound = True
# there can't be an X without Y
if xfound and not yfound:
line += ' ' + self.lastY
elif yfound and not xfound:
line += ' ' + self.lastX
return line
elif line.startswith('X') or line.startswith('Y') or line.startswith('Z'):
# is there an X?
xre = re.search('X[0-9.-]+', line)
xfound = False
if xre is not None:
self.lastX = xre.group(0)
xfound = True
yre = re.search('Y[0-9.-]+', line)
yfound = False
if yre is not None:
self.lastY = yre.group(0)
yfound = True
# there can't be an X without Y
if xfound and not yfound:
line += ' ' + self.lastY
elif yfound and not xfound:
line += ' ' + self.lastX
return self.lastG + ' ' + line
else:
return line
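# Example (added for clarity; hypothetical G-code input). The converter recalls the
# last G word and fills in the missing axis so every move carries both X and Y:
#   conv = GCodeConverter()
#   conv.convert('G1 Y5')   # -> 'G1 Y5 X0'   (X recalled from the last known value)
#   conv.convert('X10')     # -> 'G1 X10 Y5'  (G word and Y recalled from before)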
if __name__ == "__main__":
conv = GCodeConverter()
for line in fileinput.input():
print conv.convert(line)
| gpl-3.0 | 4,997,108,930,499,210,000 | 31.942529 | 98 | 0.544313 | false |
Heaven91/Robotics-AI | Chapter4-Search/path_search.py | 1 | 2671 | # ----------
# User Instructions:
#
# Define a function, search() that returns a list
# in the form of [optimal path length, row, col]. For
# the grid shown below, your function should output
# [11, 4, 5].
#
# If there is no valid path from the start point
# to the goal, your function should return the string
# 'fail'
# ----------
# Grid format:
# 0 = Navigable space
# 1 = Occupied space
grid = [[0, 0, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0],
[0, 0, 1, 1, 1, 0],
[0, 0, 0, 0, 1, 0]]
init = [0, 0]
goal = [len(grid)-1, len(grid[0])-1]
cost = 1
delta = [[-1, 0], # go up
[ 0,-1], # go left
[ 1, 0], # go down
[ 0, 1]] # go right
delta_name = ['^', '<', 'v', '>']
def search(grid,init,goal,cost):
# ----------------------------------------
# insert code here
open = []
    check = [[0 for row in range(len(grid[0]))] for col in range(len(grid))] # closed list: marks whether
    # the corresponding cell has already been expanded; if so, ignore that possibility
    y = init[0]
    x = init[1]
    g = 0
    check[y][x] = 1 # the start cell is marked as expanded
    found = False # indicates that a path to the goal was found
    no_solution = False # indicates that no valid path exists
open.append([g, y, x])
while (not found) and (not no_solution):
if len(open) == 0:
no_solution = True
print "fail, no validate path exist"
else:
open.sort()
open.reverse()
next = open.pop() # retrieve the last element, here the last element is the item with the smallest g value
if next[1] == goal[0] and next[2] == goal[1]:
found = True
                return next # [optimal path length, row, col]
else:
for i in range(len(delta)):
y2 = next[1] + delta[i][0]
x2 = next[2] + delta[i][1]
if 0 <= x2 < len(grid[0]) and 0 <= y2 < len(grid) and grid[y2][x2] == 0 and check[y2][x2] == 0:
                        # the condition above checks: 1) the coordinates are within the grid
                        # 2) the newly expanded cell is obstacle-free
                        # 3) the cell has not been expanded yet
g2 = next[0] + cost
open.append([g2, y2, x2])
check[y2][x2] = 1 # grid checked
# ----------------------------------------
print search(grid,init,goal,cost)
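# For the grid defined above this prints [11, 4, 5], the expected output given
# in the header comment.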
| agpl-3.0 | 8,629,244,642,511,873,000 | 32.688312 | 130 | 0.466866 | false |
webu/django-categories | categories/admin.py | 10 | 3089 | from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from .genericcollection import GenericCollectionTabularInline
from .settings import RELATION_MODELS, JAVASCRIPT_URL, REGISTER_ADMIN
from .models import Category
from .base import CategoryBaseAdminForm, CategoryBaseAdmin
from .settings import MODEL_REGISTRY
class NullTreeNodeChoiceField(forms.ModelChoiceField):
"""A ModelChoiceField for tree nodes."""
def __init__(self, level_indicator=u'---', *args, **kwargs):
self.level_indicator = level_indicator
super(NullTreeNodeChoiceField, self).__init__(*args, **kwargs)
def label_from_instance(self, obj):
"""
Creates labels which represent the tree level of each node when
generating option labels.
"""
return u'%s %s' % (self.level_indicator * getattr(
obj, obj._mptt_meta.level_attr), obj)
if RELATION_MODELS:
from .models import CategoryRelation
class InlineCategoryRelation(GenericCollectionTabularInline):
model = CategoryRelation
class CategoryAdminForm(CategoryBaseAdminForm):
class Meta:
model = Category
fields = '__all__'
def clean_alternate_title(self):
if self.instance is None or not self.cleaned_data['alternate_title']:
return self.cleaned_data['name']
else:
return self.cleaned_data['alternate_title']
class CategoryAdmin(CategoryBaseAdmin):
form = CategoryAdminForm
list_display = ('name', 'alternate_title', 'active')
fieldsets = (
(None, {
'fields': ('parent', 'name', 'thumbnail', 'active')
}),
(_('Meta Data'), {
'fields': ('alternate_title', 'alternate_url', 'description',
'meta_keywords', 'meta_extra'),
'classes': ('collapse',),
}),
(_('Advanced'), {
'fields': ('order', 'slug'),
'classes': ('collapse',),
}),
)
if RELATION_MODELS:
inlines = [InlineCategoryRelation, ]
class Media:
js = (JAVASCRIPT_URL + 'genericcollections.js',)
if REGISTER_ADMIN:
admin.site.register(Category, CategoryAdmin)
for model, modeladmin in admin.site._registry.items():
if model in MODEL_REGISTRY.values() and modeladmin.fieldsets:
fieldsets = getattr(modeladmin, 'fieldsets', ())
fields = [cat.split('.')[2] for cat in MODEL_REGISTRY if MODEL_REGISTRY[cat] == model]
# check each field to see if already defined
        for cat in list(fields):  # iterate over a copy; fields.remove() below would otherwise skip items
for k, v in fieldsets:
if cat in v['fields']:
fields.remove(cat)
# if there are any fields left, add them under the categories fieldset
if len(fields) > 0:
admin.site.unregister(model)
admin.site.register(model, type('newadmin', (modeladmin.__class__,), {
'fieldsets': fieldsets + (('Categories', {
'fields': fields
}),)
}))
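# Example (added for clarity; the key below is hypothetical): a MODEL_REGISTRY
# entry of the form 'app.model.field_name', e.g. 'flatpages.flatpage.category',
# contributes the field name 'category' (cat.split('.')[2]); if no existing
# fieldset declares it, it is appended under the trailing 'Categories' fieldset.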
| apache-2.0 | -3,075,587,696,611,963,400 | 34.505747 | 94 | 0.608287 | false |
capstone-rust/capstone-rs | capstone-sys/capstone/suite/synctools/strinforeduce/instrinfo2.py | 2 | 1105 | #!/usr/bin/python
# convert LLVM GenInstrInfo.inc for Capstone disassembler.
# by Nguyen Anh Quynh, 2019
import sys
if len(sys.argv) == 1:
print("Syntax: %s <GenInstrInfo.inc>" %sys.argv[0])
sys.exit(1)
count = 0
last_line = None
f = open(sys.argv[1])
lines = f.readlines()
f.close()
# 1st enum is register enum
for line in lines:
line = line.rstrip()
# skip all MCPhysReg line
if 'static const MCPhysReg ' in line:
continue
# skip all MCOperandInfo line
if 'static const MCOperandInfo ' in line:
continue
# skip InitX86MCInstrInfo()
if 'static inline void InitX86MCInstrInfo' in line:
continue
if 'II->InitMCInstrInfo' in line:
last_line = line
continue
# skip the next line after II->InitMCInstrInfo
if last_line:
last_line = None
continue
if 'extern const MCInstrDesc ' in line:
count += 1
continue
if count == 1:
if line == '};':
# done with first enum
count += 1
continue
else:
print(line)
| mit | 6,670,256,145,928,131,000 | 19.090909 | 58 | 0.59095 | false |
yarbelk/django-admin2 | djadmin2/tests/test_auth_admin.py | 2 | 2224 | from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import RequestFactory
import floppyforms
import djadmin2
from ..admin2 import UserAdmin2
class UserAdminTest(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.user = User(
username='admin',
is_staff=True,
is_superuser=True)
self.user.set_password('admin')
self.user.save()
def test_create_form_uses_floppyform_widgets(self):
form = UserAdmin2.create_form_class()
self.assertTrue(
isinstance(form.fields['username'].widget,
floppyforms.TextInput))
request = self.factory.get(reverse('admin2:auth_user_create'))
request.user = self.user
model_admin = UserAdmin2(User, djadmin2.default)
view = model_admin.create_view.view.as_view(
**model_admin.get_create_kwargs())
response = view(request)
form = response.context_data['form']
self.assertTrue(
isinstance(form.fields['username'].widget,
floppyforms.TextInput))
def test_update_form_uses_floppyform_widgets(self):
form = UserAdmin2.update_form_class()
self.assertTrue(
isinstance(form.fields['username'].widget,
floppyforms.TextInput))
self.assertTrue(
isinstance(form.fields['date_joined'].widget,
floppyforms.DateTimeInput))
request = self.factory.get(
reverse('admin2:auth_user_update', args=(self.user.pk,)))
request.user = self.user
model_admin = UserAdmin2(User, djadmin2.default)
view = model_admin.update_view.view.as_view(
**model_admin.get_update_kwargs())
response = view(request, pk=self.user.pk)
form = response.context_data['form']
self.assertTrue(
isinstance(form.fields['username'].widget,
floppyforms.TextInput))
self.assertTrue(
isinstance(form.fields['date_joined'].widget,
floppyforms.DateTimeInput))
| bsd-3-clause | 366,867,051,360,102,600 | 35.459016 | 70 | 0.61196 | false |
alexdzul/Pyclue | pyclue/apps/main/ui/ui_sizes.py | 1 | 1073 | # -*- coding: utf-8 -*-
__author__ = 'alex'
from PyQt5 import QtCore
from pyclue.appSettings import OS_RUNING
def get_launch_size(self):
if OS_RUNING == "linux2":
self.resize(300, 316)
self.setMinimumSize(QtCore.QSize(300, 316))
self.setMaximumSize(QtCore.QSize(300, 316))
if OS_RUNING == "darwin":
self.resize(300, 360)
self.setMinimumSize(QtCore.QSize(300, 360))
self.setMaximumSize(QtCore.QSize(300, 360))
if OS_RUNING == "win32":
self.resize(300, 316)
self.setMinimumSize(QtCore.QSize(300, 316))
self.setMaximumSize(QtCore.QSize(300, 316))
def get_main_size(self):
if OS_RUNING == "linux2":
self.resize(400, 472)
self.setMinimumSize(QtCore.QSize(400, 472))
self.setMaximumSize(QtCore.QSize(569, 511))
self.resize(386, 400)
if OS_RUNING == "darwin":
self.resize(600, 550)
self.setMinimumSize(QtCore.QSize(600, 550))
if OS_RUNING == "win32":
self.resize(600, 472)
self.setMinimumSize(QtCore.QSize(600, 472)) | gpl-2.0 | -6,266,093,616,980,740,000 | 31.545455 | 51 | 0.624418 | false |
Havate/havate-openstack | proto-build/gui/horizon/Horizon_GUI/openstack_dashboard/dashboards/project/volumes/urls.py | 11 | 1364 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls.defaults import patterns # noqa
from django.conf.urls.defaults import url # noqa
from openstack_dashboard.dashboards.project.volumes import views
urlpatterns = patterns('openstack_dashboard.dashboards.project.volumes.views',
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^create/$', views.CreateView.as_view(), name='create'),
url(r'^(?P<volume_id>[^/]+)/attach/$',
views.EditAttachmentsView.as_view(),
name='attach'),
url(r'^(?P<volume_id>[^/]+)/create_snapshot/$',
views.CreateSnapshotView.as_view(),
name='create_snapshot'),
url(r'^(?P<volume_id>[^/]+)/$',
views.DetailView.as_view(),
name='detail'),
)
| apache-2.0 | 722,085,455,465,868,700 | 37.971429 | 78 | 0.681085 | false |
ar4k/RAM | OctoPrint/src/octoprint/plugins/cura/__init__.py | 2 | 14719 | # coding=utf-8
from __future__ import absolute_import
__author__ = "Gina Häußge <[email protected]>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import logging
import logging.handlers
import os
import flask
import math
import octoprint.plugin
import octoprint.util
import octoprint.slicing
import octoprint.settings
from .profile import Profile
class CuraPlugin(octoprint.plugin.SlicerPlugin,
octoprint.plugin.SettingsPlugin,
octoprint.plugin.TemplatePlugin,
octoprint.plugin.AssetPlugin,
octoprint.plugin.BlueprintPlugin,
octoprint.plugin.StartupPlugin):
def __init__(self):
self._logger = logging.getLogger("octoprint.plugins.cura")
self._cura_logger = logging.getLogger("octoprint.plugins.cura.engine")
# setup job tracking across threads
import threading
self._slicing_commands = dict()
self._cancelled_jobs = []
self._job_mutex = threading.Lock()
##~~ StartupPlugin API
def on_startup(self, host, port):
# setup our custom logger
cura_logging_handler = logging.handlers.RotatingFileHandler(self._settings.get_plugin_logfile_path(postfix="engine"), maxBytes=2*1024*1024)
cura_logging_handler.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
cura_logging_handler.setLevel(logging.DEBUG)
self._cura_logger.addHandler(cura_logging_handler)
self._cura_logger.setLevel(logging.DEBUG if self._settings.get_boolean(["debug_logging"]) else logging.CRITICAL)
self._cura_logger.propagate = False
##~~ BlueprintPlugin API
@octoprint.plugin.BlueprintPlugin.route("/import", methods=["POST"])
def import_cura_profile(self):
import datetime
import tempfile
from octoprint.server import slicingManager
input_name = "file"
input_upload_name = input_name + "." + self._settings.global_get(["server", "uploads", "nameSuffix"])
input_upload_path = input_name + "." + self._settings.global_get(["server", "uploads", "pathSuffix"])
if input_upload_name in flask.request.values and input_upload_path in flask.request.values:
filename = flask.request.values[input_upload_name]
try:
profile_dict = Profile.from_cura_ini(flask.request.values[input_upload_path])
except Exception as e:
self._logger.exception("Error while converting the imported profile")
return flask.make_response("Something went wrong while converting imported profile: {message}".format(message=str(e)), 500)
else:
self._logger.warn("No profile file included for importing, aborting")
return flask.make_response("No file included", 400)
if profile_dict is None:
self._logger.warn("Could not convert profile, aborting")
return flask.make_response("Could not convert Cura profile", 400)
name, _ = os.path.splitext(filename)
# default values for name, display name and description
profile_name = _sanitize_name(name)
profile_display_name = name
profile_description = "Imported from {filename} on {date}".format(filename=filename, date=octoprint.util.get_formatted_datetime(datetime.datetime.now()))
profile_allow_overwrite = False
# overrides
if "name" in flask.request.values:
profile_name = flask.request.values["name"]
if "displayName" in flask.request.values:
profile_display_name = flask.request.values["displayName"]
if "description" in flask.request.values:
profile_description = flask.request.values["description"]
if "allowOverwrite" in flask.request.values:
from octoprint.server.api import valid_boolean_trues
profile_allow_overwrite = flask.request.values["allowOverwrite"] in valid_boolean_trues
try:
slicingManager.save_profile("cura",
profile_name,
profile_dict,
allow_overwrite=profile_allow_overwrite,
display_name=profile_display_name,
description=profile_description)
except octoprint.slicing.ProfileAlreadyExists:
self._logger.warn("Profile {profile_name} already exists, aborting".format(**locals()))
return flask.make_response("A profile named {profile_name} already exists for slicer cura".format(**locals()), 409)
result = dict(
resource=flask.url_for("api.slicingGetSlicerProfile", slicer="cura", name=profile_name, _external=True),
displayName=profile_display_name,
description=profile_description
)
r = flask.make_response(flask.jsonify(result), 201)
r.headers["Location"] = result["resource"]
return r
##~~ AssetPlugin API
def get_assets(self):
return {
"js": ["js/cura.js"],
"less": ["less/cura.less"],
"css": ["css/cura.css"]
}
##~~ SettingsPlugin API
def on_settings_save(self, data):
old_debug_logging = self._settings.get_boolean(["debug_logging"])
octoprint.plugin.SettingsPlugin.on_settings_save(self, data)
new_debug_logging = self._settings.get_boolean(["debug_logging"])
if old_debug_logging != new_debug_logging:
if new_debug_logging:
self._cura_logger.setLevel(logging.DEBUG)
else:
self._cura_logger.setLevel(logging.CRITICAL)
def get_settings_defaults(self):
return dict(
cura_engine=None,
default_profile=None,
debug_logging=False
)
##~~ SlicerPlugin API
def is_slicer_configured(self):
cura_engine = self._settings.get(["cura_engine"])
if cura_engine is not None and os.path.exists(cura_engine):
return True
else:
self._logger.info("Path to CuraEngine has not been configured yet or does not exist (currently set to %r), Cura will not be selectable for slicing" % cura_engine)
def get_slicer_properties(self):
return dict(
type="cura",
name="CuraEngine",
same_device=True,
progress_report=True
)
def get_slicer_default_profile(self):
path = self._settings.get(["default_profile"])
if not path:
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "profiles", "default.profile.yaml")
return self.get_slicer_profile(path)
def get_slicer_profile(self, path):
profile_dict = self._load_profile(path)
display_name = None
description = None
if "_display_name" in profile_dict:
display_name = profile_dict["_display_name"]
del profile_dict["_display_name"]
if "_description" in profile_dict:
description = profile_dict["_description"]
del profile_dict["_description"]
properties = self.get_slicer_properties()
return octoprint.slicing.SlicingProfile(properties["type"], "unknown", profile_dict, display_name=display_name, description=description)
def save_slicer_profile(self, path, profile, allow_overwrite=True, overrides=None):
if os.path.exists(path) and not allow_overwrite:
raise octoprint.slicing.ProfileAlreadyExists("cura", profile.name)
new_profile = Profile.merge_profile(profile.data, overrides=overrides)
if profile.display_name is not None:
new_profile["_display_name"] = profile.display_name
if profile.description is not None:
new_profile["_description"] = profile.description
self._save_profile(path, new_profile, allow_overwrite=allow_overwrite)
def do_slice(self, model_path, printer_profile, machinecode_path=None, profile_path=None, position=None, on_progress=None, on_progress_args=None, on_progress_kwargs=None):
try:
with self._job_mutex:
if not profile_path:
profile_path = self._settings.get(["default_profile"])
if not machinecode_path:
path, _ = os.path.splitext(model_path)
machinecode_path = path + ".gco"
if position and isinstance(position, dict) and "x" in position and "y" in position:
posX = position["x"]
posY = position["y"]
else:
posX = None
posY = None
if on_progress:
if not on_progress_args:
on_progress_args = ()
if not on_progress_kwargs:
on_progress_kwargs = dict()
self._cura_logger.info("### Slicing %s to %s using profile stored at %s" % (model_path, machinecode_path, profile_path))
engine_settings = self._convert_to_engine(profile_path, printer_profile, posX, posY)
executable = self._settings.get(["cura_engine"])
if not executable:
return False, "Path to CuraEngine is not configured "
working_dir, _ = os.path.split(executable)
args = ['"%s"' % executable, '-v', '-p']
for k, v in engine_settings.items():
args += ["-s", '"%s=%s"' % (k, str(v))]
args += ['-o', '"%s"' % machinecode_path, '"%s"' % model_path]
import sarge
command = " ".join(args)
self._logger.info("Running %r in %s" % (command, working_dir))
p = sarge.run(command, cwd=working_dir, async=True, stdout=sarge.Capture(), stderr=sarge.Capture())
p.wait_events()
self._slicing_commands[machinecode_path] = p.commands[0]
try:
layer_count = None
step_factor = dict(
inset=0,
skin=1,
export=2
)
analysis = None
while p.returncode is None:
line = p.stderr.readline(timeout=0.5)
if not line:
p.commands[0].poll()
continue
self._cura_logger.debug(line.strip())
if on_progress is not None:
# The Cura slicing process has three individual steps, each consisting of <layer_count> substeps:
#
# - inset
# - skin
# - export
#
# So each layer will be processed three times, once for each step, resulting in a total amount of
# substeps of 3 * <layer_count>.
#
# The CuraEngine reports the calculated layer count and the continuous progress on stderr.
# The layer count gets reported right at the beginning in a line of the format:
#
# Layer count: <layer_count>
#
# The individual progress per each of the three steps gets reported on stderr in a line of
# the format:
#
# Progress:<step>:<current_layer>:<layer_count>
#
# Thus, for determining the overall progress the following formula applies:
#
# progress = (<step_factor> * <layer_count> + <current_layer>) / (<layer_count> * 3)
#
# with <step_factor> being 0 for "inset", 1 for "skin" and 2 for "export".
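# Worked example (illustrative): with <layer_count> = 100, step "skin"
# (<step_factor> = 1) and <current_layer> = 50, the reported progress
# is (1 * 100 + 50) / (100 * 3) = 0.5, i.e. halfway through slicing.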
if line.startswith("Layer count:") and layer_count is None:
try:
layer_count = float(line[len("Layer count:"):].strip())
except:
pass
elif line.startswith("Progress:"):
split_line = line[len("Progress:"):].strip().split(":")
if len(split_line) == 3:
step, current_layer, _ = split_line
try:
current_layer = float(current_layer)
except:
pass
else:
if not step in step_factor:
continue
on_progress_kwargs["_progress"] = (step_factor[step] * layer_count + current_layer) / (layer_count * 3)
on_progress(*on_progress_args, **on_progress_kwargs)
elif line.startswith("Print time:"):
try:
print_time = int(line[len("Print time:"):].strip())
if analysis is None:
analysis = dict()
analysis["estimatedPrintTime"] = print_time
except:
pass
elif line.startswith("Filament:") or line.startswith("Filament2:"):
if line.startswith("Filament:"):
filament_str = line[len("Filament:"):].strip()
tool_key = "tool0"
else:
filament_str = line[len("Filament2:"):].strip()
tool_key = "tool1"
try:
filament = int(filament_str)
if analysis is None:
analysis = dict()
if not "filament" in analysis:
analysis["filament"] = dict()
if not tool_key in analysis["filament"]:
analysis["filament"][tool_key] = dict()
analysis["filament"][tool_key]["length"] = filament
if "filamentDiameter" in engine_settings:
radius_in_cm = float(int(engine_settings["filamentDiameter"]) / 10000.0) / 2.0
filament_in_cm = filament / 10.0
analysis["filament"][tool_key]["volume"] = filament_in_cm * math.pi * radius_in_cm * radius_in_cm
except:
pass
finally:
p.close()
with self._job_mutex:
if machinecode_path in self._cancelled_jobs:
self._cura_logger.info("### Cancelled")
raise octoprint.slicing.SlicingCancelled()
self._cura_logger.info("### Finished, returncode %d" % p.returncode)
if p.returncode == 0:
return True, dict(analysis=analysis)
else:
self._logger.warn("Could not slice via Cura, got return code %r" % p.returncode)
return False, "Got returncode %r" % p.returncode
except octoprint.slicing.SlicingCancelled as e:
raise e
except:
self._logger.exception("Could not slice via Cura, got an unknown error")
return False, "Unknown error, please consult the log file"
finally:
with self._job_mutex:
if machinecode_path in self._cancelled_jobs:
self._cancelled_jobs.remove(machinecode_path)
if machinecode_path in self._slicing_commands:
del self._slicing_commands[machinecode_path]
self._cura_logger.info("-" * 40)
def cancel_slicing(self, machinecode_path):
with self._job_mutex:
if machinecode_path in self._slicing_commands:
self._cancelled_jobs.append(machinecode_path)
command = self._slicing_commands[machinecode_path]
if command is not None:
command.terminate()
self._logger.info("Cancelled slicing of %s" % machinecode_path)
def _load_profile(self, path):
import yaml
profile_dict = dict()
with open(path, "r") as f:
try:
profile_dict = yaml.safe_load(f)
except:
raise IOError("Couldn't read profile from {path}".format(path=path))
return profile_dict
def _save_profile(self, path, profile, allow_overwrite=True):
import yaml
with open(path, "wb") as f:
yaml.safe_dump(profile, f, default_flow_style=False, indent=" ", allow_unicode=True)
def _convert_to_engine(self, profile_path, printer_profile, posX, posY):
profile = Profile(self._load_profile(profile_path), printer_profile, posX, posY)
return profile.convert_to_engine()
def _sanitize_name(name):
if name is None:
return None
if "/" in name or "\\" in name:
raise ValueError("name must not contain / or \\")
import string
valid_chars = "-_.() {ascii}{digits}".format(ascii=string.ascii_letters, digits=string.digits)
sanitized_name = ''.join(c for c in name if c in valid_chars)
sanitized_name = sanitized_name.replace(" ", "_")
return sanitized_name.lower()
__plugin_name__ = "CuraEngine"
__plugin_author__ = "Gina Häußge"
__plugin_url__ = "https://github.com/foosel/OctoPrint/wiki/Plugin:-Cura"
__plugin_description__ = "Adds support for slicing via CuraEngine from within OctoPrint"
__plugin_license__ = "AGPLv3"
__plugin_implementation__ = CuraPlugin()
| agpl-3.0 | 8,456,527,211,300,981,000 | 34.62954 | 172 | 0.671696 | false |
home-assistant/home-assistant | homeassistant/components/qwikswitch/sensor.py | 5 | 2121 | """Support for Qwikswitch Sensors."""
import logging
from pyqwikswitch.qwikswitch import SENSORS
from homeassistant.components.sensor import SensorEntity
from homeassistant.core import callback
from . import DOMAIN as QWIKSWITCH, QSEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, _, add_entities, discovery_info=None):
"""Add sensor from the main Qwikswitch component."""
if discovery_info is None:
return
qsusb = hass.data[QWIKSWITCH]
_LOGGER.debug("Setup qwikswitch.sensor %s, %s", qsusb, discovery_info)
devs = [QSSensor(sensor) for sensor in discovery_info[QWIKSWITCH]]
add_entities(devs)
class QSSensor(QSEntity, SensorEntity):
"""Sensor based on a Qwikswitch relay/dimmer module."""
_val = None
def __init__(self, sensor):
"""Initialize the sensor."""
super().__init__(sensor["id"], sensor["name"])
self.channel = sensor["channel"]
sensor_type = sensor["type"]
self._decode, self.unit = SENSORS[sensor_type]
# a bool unit should never reach this platform: boolean sensors are
# routed to binary_sensor during discovery
assert not isinstance(
self.unit, type
), f"boolean sensor id={sensor['id']} name={sensor['name']}"
@callback
def update_packet(self, packet):
"""Receive update packet from QSUSB."""
val = self._decode(packet, channel=self.channel)
_LOGGER.debug(
"Update %s (%s:%s) decoded as %s: %s",
self.entity_id,
self.qsid,
self.channel,
val,
packet,
)
if val is not None:
self._val = val
self.async_write_ha_state()
@property
def state(self):
"""Return the value of the sensor."""
return str(self._val)
@property
def unique_id(self):
"""Return a unique identifier for this sensor."""
return f"qs{self.qsid}:{self.channel}"
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self.unit
| apache-2.0 | 5,524,854,079,138,561,000 | 28.458333 | 107 | 0.61669 | false |
gemmellr/qpid-proton-j | tests/python/proton_tests/sasl.py | 2 | 7495 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import absolute_import
import sys, os
from . import common
from . import engine
from proton import *
from .common import pump, Skipped
from proton._compat import str2bin
def _sslCertpath(file):
""" Return the full path to the certificate,keyfile, etc.
"""
if os.name=="nt":
if file.find("private-key")!=-1:
# The private key is not in a separate store
return None
# Substitute pkcs#12 equivalent for the CA/key store
if file.endswith(".pem"):
file = file[:-4] + ".p12"
return os.path.join(os.path.dirname(__file__),
"ssl_db/%s" % file)
def _testSaslMech(self, mech, clientUser='user@proton', authUser='user@proton', encrypted=False, authenticated=True):
self.s1.allowed_mechs(mech)
self.c1.open()
self.c2.open()
pump(self.t1, self.t2, 1024)
if encrypted is not None:
assert self.t2.encrypted == encrypted, encrypted
assert self.t1.encrypted == encrypted, encrypted
assert self.t2.authenticated == authenticated, authenticated
assert self.t1.authenticated == authenticated, authenticated
if authenticated:
# Server
assert self.t2.user == authUser
assert self.s2.user == authUser
assert self.s2.mech == mech.strip()
assert self.s2.outcome == SASL.OK, self.s2.outcome
assert self.c2.state & Endpoint.LOCAL_ACTIVE and self.c2.state & Endpoint.REMOTE_ACTIVE,\
"local_active=%s, remote_active=%s" % (self.c1.state & Endpoint.LOCAL_ACTIVE, self.c1.state & Endpoint.REMOTE_ACTIVE)
# Client
assert self.t1.user == clientUser
assert self.s1.user == clientUser
assert self.s1.mech == mech.strip()
assert self.s1.outcome == SASL.OK, self.s1.outcome
assert self.c1.state & Endpoint.LOCAL_ACTIVE and self.c1.state & Endpoint.REMOTE_ACTIVE,\
"local_active=%s, remote_active=%s" % (self.c1.state & Endpoint.LOCAL_ACTIVE, self.c1.state & Endpoint.REMOTE_ACTIVE)
else:
# Server
assert self.t2.user == None
assert self.s2.user == None
assert self.s2.outcome != SASL.OK, self.s2.outcome
# Client
assert self.t1.user == clientUser
assert self.s1.user == clientUser
assert self.s1.outcome != SASL.OK, self.s1.outcome
class Test(common.Test):
pass
def consumeAllOutput(t):
stops = 0
while stops<1:
out = t.peek(1024)
l = len(out) if out else 0
t.pop(l)
if l <= 0:
stops += 1
class SaslTest(Test):
def setUp(self):
self.t1 = Transport()
self.s1 = SASL(self.t1)
self.t2 = Transport(Transport.SERVER)
self.t2.max_frame_size = 65536
self.s2 = SASL(self.t2)
def pump(self):
pump(self.t1, self.t2, 1024)
def testPipelinedClient(self):
# TODO: When PROTON-1136 is fixed then remove this test
if "java" in sys.platform:
raise Skipped("Proton-J does not support pipelined client input")
# Server
self.s2.allowed_mechs('ANONYMOUS')
c2 = Connection()
self.t2.bind(c2)
assert self.s2.outcome is None
# Push client bytes into server
self.t2.push(str2bin(
# SASL
'AMQP\x03\x01\x00\x00'
# @sasl-init(65) [mechanism=:ANONYMOUS, initial-response=b"anonymous@fuschia"]
'\x00\x00\x002\x02\x01\x00\x00\x00SA\xd0\x00\x00\x00"\x00\x00\x00\x02\xa3\x09ANONYMOUS\xa0\x11anonymous@fuschia'
# AMQP
'AMQP\x00\x01\x00\x00'
# @open(16) [container-id="", channel-max=1234]
'\x00\x00\x00!\x02\x00\x00\x00\x00S\x10\xd0\x00\x00\x00\x11\x00\x00\x00\x0a\xa1\x00@@`\x04\xd2@@@@@@'
))
consumeAllOutput(self.t2)
assert not self.t2.condition
assert self.s2.outcome == SASL.OK
assert c2.state & Endpoint.REMOTE_ACTIVE
def testPipelinedServer(self):
# Client
self.s1.allowed_mechs('ANONYMOUS')
c1 = Connection()
self.t1.bind(c1)
assert self.s1.outcome is None
# Push server bytes into client
# Commented out lines in this test are where the client input processing doesn't
# run after output processing even though there is input waiting
self.t1.push(str2bin(
# SASL
'AMQP\x03\x01\x00\x00'
# @sasl-mechanisms(64) [sasl-server-mechanisms=@PN_SYMBOL[:ANONYMOUS]]
'\x00\x00\x00\x1c\x02\x01\x00\x00\x00S@\xc0\x0f\x01\xe0\x0c\x01\xa3\tANONYMOUS'
# @sasl-outcome(68) [code=0]
'\x00\x00\x00\x10\x02\x01\x00\x00\x00SD\xc0\x03\x01P\x00'
# AMQP
'AMQP\x00\x01\x00\x00'
# @open(16) [container-id="", channel-max=1234]
'\x00\x00\x00!\x02\x00\x00\x00\x00S\x10\xd0\x00\x00\x00\x11\x00\x00\x00\x0a\xa1\x00@@`\x04\xd2@@@@@@'
))
consumeAllOutput(self.t1)
assert self.s1.outcome == SASL.OK
assert c1.state & Endpoint.REMOTE_ACTIVE
def testPipelined2(self):
if "java" in sys.platform:
raise Skipped("Proton-J does not support client pipelining")
out1 = self.t1.peek(1024)
self.t1.pop(len(out1))
self.t2.push(out1)
self.s2.allowed_mechs('ANONYMOUS')
c2 = Connection()
c2.open()
self.t2.bind(c2)
out2 = self.t2.peek(1024)
self.t2.pop(len(out2))
self.t1.push(out2)
out1 = self.t1.peek(1024)
assert len(out1) > 0
def testFracturedSASL(self):
""" PROTON-235
"""
assert self.s1.outcome is None
# self.t1.trace(Transport.TRACE_FRM)
out = self.t1.peek(1024)
self.t1.pop(len(out))
self.t1.push(str2bin("AMQP\x03\x01\x00\x00"))
out = self.t1.peek(1024)
self.t1.pop(len(out))
self.t1.push(str2bin("\x00\x00\x00"))
out = self.t1.peek(1024)
self.t1.pop(len(out))
self.t1.push(str2bin("6\x02\x01\x00\x00\x00S@\xc04\x01\xe01\x04\xa3\x05PLAIN\x0aDIGEST-MD5\x09ANONYMOUS\x08CRAM-MD5"))
out = self.t1.peek(1024)
self.t1.pop(len(out))
self.t1.push(str2bin("\x00\x00\x00\x10\x02\x01\x00\x00\x00SD\xc0\x03\x01P\x00"))
out = self.t1.peek(1024)
self.t1.pop(len(out))
while out:
out = self.t1.peek(1024)
self.t1.pop(len(out))
assert self.s1.outcome == SASL.OK, self.s1.outcome
def test_singleton(self):
"""Verify that only a single instance of SASL can exist per Transport"""
transport = Transport()
attr = object()
sasl1 = SASL(transport)
sasl1.my_attribute = attr
sasl2 = transport.sasl()
sasl3 = SASL(transport)
assert sasl1 == sasl2
assert sasl1 == sasl3
assert sasl1.my_attribute == attr
assert sasl2.my_attribute == attr
assert sasl3.my_attribute == attr
transport = Transport()
sasl1 = transport.sasl()
sasl1.my_attribute = attr
sasl2 = SASL(transport)
assert sasl1 == sasl2
assert sasl1.my_attribute == attr
assert sasl2.my_attribute == attr
| apache-2.0 | 555,442,843,219,407,360 | 31.306034 | 123 | 0.656037 | false |
nicozhang/startUp | d3status/mail.py | 4 | 4253 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2012 feilong.me. All rights reserved.
#
# @author: Felinx Lee <[email protected]>
# Created on Jun 30, 2012
#
import re
import logging
import smtplib
import time
from datetime import datetime, timedelta
from email import encoders
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email.utils import COMMASPACE
from email.utils import formatdate
from tornado.escape import utf8
from tornado.options import options
__all__ = ("send_email", "EmailAddress")
# borrow email re pattern from django
_email_re = re.compile(
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"' # quoted-string
r')@(?:[A-Z0-9]+(?:-*[A-Z0-9]+)*\.)+[A-Z]{2,6}$', re.IGNORECASE) # domain
def send_email(fr, to, subject, body, html=None, attachments=[]):
"""Send an email.
If an HTML string is given, a multipart message will be generated with
plain text and HTML parts. Attachments can be added by providing a
list of (filename, data) tuples.
"""
# convert EmailAddress to pure string
if isinstance(fr, EmailAddress):
fr = str(fr)
else:
fr = utf8(fr)
to = [utf8(t) for t in to]
if html:
# Multipart HTML and plain text
message = MIMEMultipart("alternative")
message.attach(MIMEText(body, "plain"))
message.attach(MIMEText(html, "html"))
else:
# Plain text
message = MIMEText(body)
if attachments:
part = message
message = MIMEMultipart("mixed")
message.attach(part)
for filename, data in attachments:
part = MIMEBase("application", "octet-stream")
part.set_payload(data)
encoders.encode_base64(part)
part.add_header("Content-Disposition", "attachment",
filename=filename)
message.attach(part)
message["Date"] = formatdate(time.time())
message["From"] = fr
message["To"] = COMMASPACE.join(to)
message["Subject"] = utf8(subject)
_get_session().send_mail(fr, to, utf8(message.as_string()))
class EmailAddress(object):
def __init__(self, addr, name=""):
assert _email_re.match(addr), "Email address(%s) is invalid." % addr
self.addr = addr
if name:
self.name = name
else:
self.name = addr.split("@")[0]
def __str__(self):
return '%s <%s>' % (utf8(self.name), utf8(self.addr))
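# Illustrative usage (assumption, not from the original file):
#   EmailAddress("[email protected]", "Felinx") renders as 'Felinx <[email protected]>';
#   EmailAddress("[email protected]") falls back to the local part: 'felinx <[email protected]>'.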
class _SMTPSession(object):
def __init__(self, host, user='', password='', duration=30, tls=False):
self.host = host
self.user = user
self.password = password
self.duration = duration
self.tls = tls
self.session = None
self.deadline = datetime.now()
self.renew()
def send_mail(self, fr, to, message):
if self.timeout:
self.renew()
try:
self.session.sendmail(fr, to, message)
except Exception, e:
err = "Send email from %s to %s failed!\n Exception: %s!" \
% (fr, to, e)
logging.error(err)
self.renew()
@property
def timeout(self):
if datetime.now() < self.deadline:
return False
else:
return True
def renew(self):
try:
if self.session:
self.session.quit()
except Exception:
pass
self.session = smtplib.SMTP(self.host)
if self.user and self.password:
if self.tls:
self.session.starttls()
self.session.login(self.user, self.password)
self.deadline = datetime.now() + timedelta(seconds=self.duration * 60)
def _get_session():
global _session
if _session is None:
_session = _SMTPSession(options.smtp['host'],
options.smtp['user'],
options.smtp['password'],
options.smtp['duration'],
options.smtp['tls'])
return _session
_session = None
| apache-2.0 | -2,992,475,760,051,103,000 | 27.931973 | 101 | 0.567364 | false |
rajul/tvb-framework | tvb/adapters/uploaders/abcuploader.py | 1 | 5652 | # -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need do download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Mihai Andrei <[email protected]>
"""
import os
import numpy
from abc import abstractmethod
from scipy import io as scipy_io
from tvb.basic.logger.builder import get_logger
from tvb.core.entities import model
from tvb.core.adapters.abcadapter import ABCSynchronous
from tvb.core.entities.transient.structure_entities import DataTypeMetaData
from tvb.core.entities.storage import dao
class ABCUploader(ABCSynchronous):
"""
Base class of the uploaders
"""
LOGGER = get_logger(__name__)
def get_input_tree(self):
"""
:return: the result of get_upload_input_tree concatenated with "subject" input field.
"""
subject_node = [{'name': DataTypeMetaData.KEY_SUBJECT, 'type': 'str', 'required': True,
'label': 'Subject', 'default': DataTypeMetaData.DEFAULT_SUBJECT}]
return subject_node + self.get_upload_input_tree()
@abstractmethod
def get_upload_input_tree(self):
"""
Build the list of dictionaries describing the input required for this uploader.
:return: The input tree specific for this uploader
"""
return []
def _prelaunch(self, operation, uid=None, available_disk_space=0, **kwargs):
"""
Before the usual prelaunch, read the 'subject' from the input parameters.
"""
if DataTypeMetaData.KEY_SUBJECT in kwargs:
subject = kwargs.pop(DataTypeMetaData.KEY_SUBJECT)
else:
subject = DataTypeMetaData.DEFAULT_SUBJECT
self.meta_data.update({DataTypeMetaData.KEY_SUBJECT: subject})
return ABCSynchronous._prelaunch(self, operation, uid, available_disk_space, **kwargs)
def get_required_memory_size(self, **kwargs):
"""
Return the required memory to run this algorithm.
As it is an upload algorithm and we do not have information about data, we can not approximate this.
"""
return -1
def get_required_disk_size(self, **kwargs):
"""
As it is an upload algorithm and we do not have information about data, we can not approximate this.
"""
return 0
def ensure_db(self):
"""
Ensure algorithm exists in DB and add it if not
"""
cat = dao.get_uploader_categories()[0]
cls = self.__class__
cmd, cnm = cls.__module__, cls.__name__
gp = dao.find_group(cmd, cnm)
if gp is None:
gp = model.AlgorithmGroup(cmd, cnm, cat.id)
gp = dao.store_entity(gp)
dao.store_entity(model.Algorithm(gp.id, cnm, cnm))
self.algorithm_group = gp
@staticmethod
def read_list_data(full_path, dimensions=None, dtype=numpy.float64, skiprows=0, usecols=None):
"""
Read numpy.array from a text file or a npy/npz file.
"""
try:
if full_path.endswith(".npy") or full_path.endswith(".npz"):
array_result = numpy.load(full_path)
else:
array_result = numpy.loadtxt(full_path, dtype=dtype, skiprows=skiprows, usecols=usecols)
if dimensions:
return array_result.reshape(dimensions)
return array_result
except ValueError, exc:
file_ending = os.path.split(full_path)[1]
exc.args = (exc.args[0] + " In file: " + file_ending,)
raise
@staticmethod
def read_matlab_data(path, matlab_data_name=None):
"""
Read array from matlab file.
"""
try:
matlab_data = scipy_io.matlab.loadmat(path)
except NotImplementedError:
ABCUploader.LOGGER.error("Could not read Matlab content from: " + path)
ABCUploader.LOGGER.error("Matlab files must be saved in a format <= -V7...")
raise
try:
return matlab_data[matlab_data_name]
except KeyError:
def double__(n):
n = str(n)
return n.startswith('__') and n.endswith('__')
available = [s for s in matlab_data if not double__(s)]
raise KeyError("Could not find dataset named %s. Available datasets: %s" % (matlab_data_name, available))
| gpl-2.0 | 1,824,912,816,090,967,300 | 35.701299 | 117 | 0.643135 | false |
wasit7/tutorials | randomforest/dataset.py | 1 | 7748 | """
GNU GENERAL PUBLIC LICENSE Version 2
Created on Tue Oct 14 18:52:01 2014
@author: Wasit
"""
import numpy as np
import os
try:
import json
except ImportError:
import simplejson as json
class dataset:
def __init__(self,index=0):
'''
To create and initialise
self.dimtheta--(m)dimension of theta. theta is a column vector
self.size------(n)number of samples in the root bag
self.I---------prepocessed data
self.samples---the matrix which has size of [(p+1)xn],
where p is the size of the vector that identifies the location
of a sample in self.I.
Note that the first row of self.samples is the label
'''
#1 self.cmax: maximum number of classes
#2 self.spi: number of samples per image [removed]
#3 self.theta_dim: the number of elements in a theta (a number of parameter in theta)
#4 self.size: number of all samples in the root bag
#5 self.I: the data
#6 self.samples: samples[x]=[class]
#7 self.theta_range: range of theta for generating value in getParam()
'''
Example: In order to extract LBP feature, the possible setup is theta_dim=5
when 4 dimensions is used to indicate the 2 corners of rectangular window.
The last dimension represent the bin of the LBP histogram.
Then we can set theta=[r1, c1, r2, c2, bin]^T
In this particular case (|theta| = 5 ). The theta dimension is called "theta_dim"
In getParam() the random proposals are generated by a random function within a certain range, which is called "theta_range".
#3 self.theta_dim:
# r1,r2 {margin~rmax-margin},
# c1,c2 {margin~cmax-margin},
# bin {0~3}
# L1(r1c1)----L2(r1c2)
# | |
# L3(r2c1)----L4(r2c2)
'''
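# Illustrative note (assumption, not part of the original class): the
# rectangular feature sketched above is the classic integral-image sum,
#   f = L4 + L1 - L2 - L3
# i.e. for an integral image I and corners (r1, c1), (r2, c2):
#   f = I[r2, c2] + I[r1, c1] - I[r1, c2] - I[r2, c1]
# which yields the sum of raw values inside the window in O(1) time.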
import pickle
self.index=index
self.path='dataset/dataset%02d.pic'%(self.index)
pickleFile = open(self.path, 'rb')
self.clmax,self.theta_dim,self.theta_range,self.size,self.samples,self.I = pickle.load(pickleFile)
if self.samples is None:
self.samples=np.zeros(self.I.shape[0],dtype=np.uint32)
pickleFile.close()
def __str__(self):
return 'dataset_pickle: path=./"%s" clmax=%d, theta_dim=%d, theta_range=%d \n\
size=%d, label.shape=%s, I.shape=%s'\
%(self.path,self.clmax,self.theta_dim,self.theta_range,self.size,self.samples.shape,self.I.shape)
def __del__(self):
del self.clmax
del self.theta_dim
del self.theta_range
del self.size
del self.samples#samples contains only label
del self.I
def getX(self):
'''
input:
void
output:
[1D ndarray dtype=np.uint32]
'''
return np.random.permutation(self.size)
def getL(self,x):
'''
input:
[1D ndarray dtype=np.uint32]
output:
[1D ndarray dtype=np.uint32]
'''
return self.samples[x]
def setL(self,x,L):
'''
input:
x: [1D ndarray dtype=np.uint32]
L: [1D ndarray dtype=np.uint32]
'''
self.samples[x]=L
###here
def getIs(self,thetas,x):
'''
input:
x: [1D ndarray dtype=np.uint32]\n
thetas: [2D ndarray float]
output:
[1D ndarray dtype=float]
Description:
In spiral case, it uses only first row of the thetas
'''
#dataset.getParam() calls this
#theta and x have same number of column
#3 self.theta_dim: [0_r1, 1_c1, 2_r2, 3_c2, 4_bin]^T
# r1,r2 {margin~rmax-margin},
# c1,c2 {margin~cmax-margin},
# bin {0~3}
# L1(r1c1)----L2(r1c2)
# | |
# L3(r2c1)----L4(r2c2)
##########
#6 self.samples: samples[x]=[0_class, 1_img, 2_row, 3_column]^T
# r1=self.samples[2,x]+thetas[0,:]
# c1=self.samples[3,x]+thetas[1,:]
# r2=self.samples[2,x]+thetas[2,:]
# c2=self.samples[3,x]+thetas[3,:]
# bins=thetas[self.theta_dim-1,:]
# f=np.zeros(len(x))
# for i,ix in enumerate(x):
# img=self.samples[1,ix]
# L1=self.I[img][r1[i],c1[i],bins[i]]
# L2=self.I[img][r1[i],c2[i],bins[i]]
# L3=self.I[img][r2[i],c1[i],bins[i]]
# L4=self.I[img][r2[i],c2[i],bins[i]]
# f[i]=float(L4+L1-L2-L3)
##need to check
f=np.zeros(len(x))
for i in xrange(len(x)):
f[i]=self.I[x[i],thetas[0,i]]
return f
def getI(self,theta,x):
'''
input:
x: [1D ndarray dtype=np.uint32]\n
theta: [1D ndarray float]
output:
[1D ndarray dtype=float]
Description:
In spiral case, it uses only first row of the thetas
'''
#engine.getQH() call this
##original
# r1=self.samples[2,x]+theta[0]
# c1=self.samples[3,x]+theta[1]
# r2=self.samples[2,x]+theta[2]
# c2=self.samples[3,x]+theta[3]
# bins=theta[self.theta_dim-1]
# f=np.zeros(len(x))
# for i,ix in enumerate(x):
# img=self.samples[1,ix]
# L1=self.I[img][r1[i],c1[i],bins]
# L2=self.I[img][r1[i],c2[i],bins]
# L3=self.I[img][r2[i],c1[i],bins]
# L4=self.I[img][r2[i],c2[i],bins]
# f[i]=float(L4+L1-L2-L3)
# return f
f=np.zeros(len(x))
f=self.I[x[:],theta[0]]
return f
def getParam(self,x):
'''
input:
x: [1D ndarray dtype=np.uint32]
output:
thetas: [2D ndarray float] rmax=theta_dim, cmax=len(x)
taus: [1D ndarray dtype=np.uint32]
Description:
In spiral case, it uses only first row of the thetas
'''
#3 self.theta_dim: [0_r1, 1_c1, 2_r2, 3_c2, 4_bin]^T
#6 self.samples: samples[x]=[0_class, 1_img, 2_row, 3_column]^T
n_proposal=100
if len(x)>n_proposal:
x=np.random.permutation(x)[:n_proposal]
#ux=np.random.randint(-mtran,mtran,size=len(x))
#uy=np.random.randint(-mtran,mtran,size=len(x))
#hx=np.random.randint(8,mrec,size=len(x))
#hy=np.random.randint(8,mrec,size=len(x))
#bins=np.random.randint(0,self.dim_bin,size=len(x))
thetas=np.zeros((self.theta_dim,len(x)))
thetas[0,:]=np.random.randint(0,self.theta_range,size=len(x))
thetas = thetas.astype(int)  # astype returns a copy; assign the result back
taus = self.getIs(thetas, x)
return thetas,taus
def show(self):
#show dataset
print self.samples
if __name__ == '__main__':
# import matplotlib.pyplot as plt
dset=dataset()
print dset
x=dset.getX()
print x
# print("number of images: {}".format(len(dset.I)))
# markers=['ko','ro','go','bo','po']
# for i in xrange(len(dset.jsonfiles)):
# f=open(dset.jsonfiles[i],"r")
# js=json.loads(f.read())
# f.close()
# img_path= rootdir + js['path'][1:]
# print(img_path)
# im=np.array(Image.open(img_path).convert('L'))
# plt.hold(False)
# plt.imshow(im)
# plt.hold(True)
# for j in range(dset.size):
# #samples[x]=[0_class,1_img, 2_row, 3_column]^T
# if dset.samples[1,j]==i:
# plt.plot(dset.samples[3,j],dset.samples[2,j],markers[dset.samples[0,j]])
# plt.set_cmap('gray')
# plt.show()
# plt.ginput()
# plt.close('all')
#-- | mit | -3,365,483,726,966,565,400 | 33.287611 | 133 | 0.527749 | false |
KaranToor/MA450 | google-cloud-sdk/lib/surface/compute/url_maps/remove_host_rule.py | 6 | 4341 | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for removing a host rule from a URL map."""
import copy
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.compute.url_maps import flags
class RemoveHostRule(base_classes.ReadWriteCommand):
"""Remove a host rule from a URL map."""
URL_MAP_ARG = None
@classmethod
def Args(cls, parser):
cls.URL_MAP_ARG = flags.UrlMapArgument()
cls.URL_MAP_ARG.AddArgument(parser)
parser.add_argument(
'--host',
required=True,
help='One of the hosts in the host rule to remove.')
parser.add_argument(
'--delete-orphaned-path-matcher',
action='store_true',
default=False,
help=('If provided and a path matcher is orphaned as a result of this '
'command, the command removes the orphaned path matcher instead '
'of failing.'))
@property
def service(self):
return self.compute.urlMaps
@property
def resource_type(self):
return 'urlMaps'
def CreateReference(self, args):
return self.URL_MAP_ARG.ResolveAsResource(args, self.resources)
def GetGetRequest(self, args):
"""Returns the request for the existing URL map resource."""
return (self.service,
'Get',
self.messages.ComputeUrlMapsGetRequest(
urlMap=self.ref.Name(),
project=self.project))
def GetSetRequest(self, args, replacement, existing):
return (self.service,
'Update',
self.messages.ComputeUrlMapsUpdateRequest(
urlMap=self.ref.Name(),
urlMapResource=replacement,
project=self.project))
def Modify(self, args, existing):
"""Returns a modified URL map message."""
replacement = copy.deepcopy(existing)
path_matcher_to_remove = None
new_host_rules = []
for host_rule in existing.hostRules:
if args.host in host_rule.hosts:
path_matcher_to_remove = host_rule.pathMatcher
else:
new_host_rules.append(host_rule)
if not path_matcher_to_remove:
raise exceptions.ToolException(
'No host rule contains the host [{0}].'.format(args.host))
replacement.hostRules = new_host_rules
path_matcher_is_used_by_other_rules = False
for host_rule in replacement.hostRules:
if host_rule.pathMatcher == path_matcher_to_remove:
path_matcher_is_used_by_other_rules = True
break
if not path_matcher_is_used_by_other_rules:
if args.delete_orphaned_path_matcher:
replacement.pathMatchers = [
path_matcher for path_matcher in existing.pathMatchers
if path_matcher.name != path_matcher_to_remove]
else:
raise exceptions.ToolException(
'This operation will orphan the path matcher [{0}]. To '
'delete the orphan path matcher, rerun this command with '
'[--delete-orphaned-path-matcher] or use [gcloud compute '
'url-maps edit] to modify the URL map by hand.'.format(
host_rule.pathMatcher))
return replacement
RemoveHostRule.detailed_help = {
'brief': 'Remove a host rule from a URL map',
'DESCRIPTION': """\
*{command}* is used to remove a host rule from a URL map. When
a host rule is removed, its path matcher is only removed if
it is not referenced by any other host rules and
``--delete-orphaned-path-matcher'' is provided.
""",
'EXAMPLES': """\
To remove a host rule that contains the host ``google.com''
from the URL map named ``MY-URL-MAP'', you can use this
command:
$ {command} MY-URL-MAP --host google.com
""",
}
| apache-2.0 | -7,240,817,619,046,039,000 | 32.651163 | 79 | 0.655379 | false |
tammoippen/nest-simulator | pynest/nest/tests/test_events.py | 11 | 2374 | # -*- coding: utf-8 -*-
#
# test_events.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Test of events
"""
import unittest
import nest
@nest.check_stack
class EventsTestCase(unittest.TestCase):
"""Tests of the Connect API"""
def test_Events_1(self):
"""Recorder Events"""
nest.ResetKernel()
sd = nest.Create('spike_detector', 1, {'withtime': True})
d = nest.GetStatus(sd, 'events')[0]
senders = d['senders']
times = d['times']
vm = nest.Create('voltmeter', 1, {'withtime': True})
d = nest.GetStatus(vm, 'events')[0]
senders = d['V_m']
times = d['times']
def test_EventsVoltage(self):
"""Voltage Events"""
nest.ResetKernel()
nest.sr('20 setverbosity')
n = nest.Create('iaf_neuron')
vm = nest.Create('voltmeter', 1, {'withtime': True, 'interval': 1.})
nest.Connect(vm, n)
nest.SetKernelStatus({'print_time': False})
nest.Simulate(10)
d = nest.GetStatus(vm, 'events')[0]
self.assertEqual(len(d['V_m']), 9)
def test_EventsSpikes(self):
"""Spike Events"""
nest.ResetKernel()
nest.sr('20 setverbosity')
n = nest.Create('iaf_neuron', 1, {'I_e': 1000.})
sd = nest.Create('spike_detector', 1, {'withtime': True})
nest.Connect(n, sd)
nest.SetKernelStatus({'print_time': False})
nest.Simulate(1000)
d = nest.GetStatus(sd, 'events')[0]
self.assert_(len(d['times']) > 0)
def suite():
suite = unittest.makeSuite(EventsTestCase, 'test')
return suite
if __name__ == "__main__":
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite())
| gpl-2.0 | 4,608,285,133,973,120,500 | 23.729167 | 76 | 0.613732 | false |
davecranwell/wagtail | wagtail/wagtailcore/migrations/0014_add_verbose_name.py | 2 | 4662 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0013_update_golive_expire_help_text'),
]
operations = [
migrations.AlterField(
model_name='grouppagepermission',
name='group',
field=models.ForeignKey(on_delete=models.CASCADE, verbose_name='Group', related_name='page_permissions', to='auth.Group'),
preserve_default=True,
),
migrations.AlterField(
model_name='grouppagepermission',
name='page',
field=models.ForeignKey(on_delete=models.CASCADE, verbose_name='Page', related_name='group_permissions', to='wagtailcore.Page'),
preserve_default=True,
),
migrations.AlterField(
model_name='grouppagepermission',
name='permission_type',
field=models.CharField(
choices=[
('add', 'Add/edit pages you own'),
('edit', 'Add/edit any page'),
('publish', 'Publish any page'),
('lock', 'Lock/unlock any page')
],
max_length=20,
verbose_name='Permission type'
),
preserve_default=True,
),
migrations.AlterField(
model_name='page',
name='search_description',
field=models.TextField(blank=True, verbose_name='Search description'),
preserve_default=True,
),
migrations.AlterField(
model_name='page',
name='show_in_menus',
field=models.BooleanField(
default=False,
help_text='Whether a link to this page will appear in automatically generated menus',
verbose_name='Show in menus'
),
preserve_default=True,
),
migrations.AlterField(
model_name='page',
name='slug',
field=models.SlugField(
help_text='The name of the page as it will appear in URLs e.g http://domain.com/blog/[my-slug]/',
max_length=255,
verbose_name='Slug'
),
preserve_default=True,
),
migrations.AlterField(
model_name='page',
name='title',
field=models.CharField(
help_text="The page title as you'd like it to be seen by the public",
max_length=255,
verbose_name='Title'
),
preserve_default=True,
),
migrations.AlterField(
model_name='pageviewrestriction',
name='page',
field=models.ForeignKey(on_delete=models.CASCADE, verbose_name='Page', related_name='view_restrictions', to='wagtailcore.Page'),
preserve_default=True,
),
migrations.AlterField(
model_name='pageviewrestriction',
name='password',
field=models.CharField(max_length=255, verbose_name='Password'),
preserve_default=True,
),
migrations.AlterField(
model_name='site',
name='hostname',
field=models.CharField(db_index=True, max_length=255, verbose_name='Hostname'),
preserve_default=True,
),
migrations.AlterField(
model_name='site',
name='is_default_site',
field=models.BooleanField(
default=False,
help_text='If true, this site will handle requests for all other hostnames'
' that do not have a site entry of their own',
verbose_name='Is default site'
),
preserve_default=True,
),
migrations.AlterField(
model_name='site',
name='port',
field=models.IntegerField(
default=80,
help_text='Set this to something other than 80 if you need a specific port number'
' to appear in URLs (e.g. development on port 8000). Does not affect request handling'
' (so port forwarding still works).',
verbose_name='Port'
),
preserve_default=True,
),
migrations.AlterField(
model_name='site',
name='root_page',
field=models.ForeignKey(on_delete=models.CASCADE, verbose_name='Root page', related_name='sites_rooted_here', to='wagtailcore.Page'),
preserve_default=True,
),
]
| bsd-3-clause | -8,437,711,458,458,802,000 | 36.596774 | 145 | 0.534106 | false |
nevir/plexability | extern/depot_tools/presubmit_canned_checks.py | 1 | 38666 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generic presubmit checks that can be reused by other presubmit checks."""
import os as _os
_HERE = _os.path.dirname(_os.path.abspath(__file__))
### Description checks
def CheckChangeHasTestField(input_api, output_api):
"""Requires that the changelist have a TEST= field."""
if input_api.change.TEST:
return []
else:
return [output_api.PresubmitNotifyResult(
'If this change requires manual test instructions to QA team, add '
'TEST=[instructions].')]
def CheckChangeHasBugField(input_api, output_api):
"""Requires that the changelist have a BUG= field."""
if input_api.change.BUG:
return []
else:
return [output_api.PresubmitNotifyResult(
'If this change has an associated bug, add BUG=[bug number].')]
def CheckChangeHasTestedField(input_api, output_api):
"""Requires that the changelist have a TESTED= field."""
if input_api.change.TESTED:
return []
else:
return [output_api.PresubmitError('Changelist must have a TESTED= field.')]
def CheckChangeHasQaField(input_api, output_api):
"""Requires that the changelist have a QA= field."""
if input_api.change.QA:
return []
else:
return [output_api.PresubmitError('Changelist must have a QA= field.')]
def CheckDoNotSubmitInDescription(input_api, output_api):
"""Checks that the user didn't add 'DO NOT ''SUBMIT' to the CL description.
"""
keyword = 'DO NOT ''SUBMIT'
if keyword in input_api.change.DescriptionText():
return [output_api.PresubmitError(
keyword + ' is present in the changelist description.')]
else:
return []
def CheckChangeHasDescription(input_api, output_api):
"""Checks the CL description is not empty."""
text = input_api.change.DescriptionText()
if text.strip() == '':
if input_api.is_committing:
return [output_api.PresubmitError('Add a description.')]
else:
return [output_api.PresubmitNotifyResult('Add a description.')]
return []
def CheckChangeWasUploaded(input_api, output_api):
"""Checks that the issue was uploaded before committing."""
if input_api.is_committing and not input_api.change.issue:
return [output_api.PresubmitError(
'Issue wasn\'t uploaded. Please upload first.')]
return []
### Content checks
def CheckDoNotSubmitInFiles(input_api, output_api):
"""Checks that the user didn't add 'DO NOT ''SUBMIT' to any files."""
# We want to check every text file, not just source files.
file_filter = lambda x : x
keyword = 'DO NOT ''SUBMIT'
errors = _FindNewViolationsOfRule(lambda _, line : keyword not in line,
input_api, file_filter)
text = '\n'.join('Found %s in %s' % (keyword, loc) for loc in errors)
if text:
return [output_api.PresubmitError(text)]
return []
def CheckChangeLintsClean(input_api, output_api, source_file_filter=None):
"""Checks that all '.cc' and '.h' files pass cpplint.py."""
_RE_IS_TEST = input_api.re.compile(r'.*tests?.(cc|h)$')
result = []
# Initialize cpplint.
import cpplint
# Access to a protected member _XX of a client class
# pylint: disable=W0212
cpplint._cpplint_state.ResetErrorCounts()
# Justifications for each filter:
#
# - build/include : Too many; fix in the future.
# - build/include_order : Not happening; #ifdefed includes.
# - build/namespace : I'm surprised by how often we violate this rule.
# - readability/casting : Mistakes a whole bunch of function pointer.
# - runtime/int : Can be fixed long term; volume of errors too high
# - runtime/virtual : Broken now, but can be fixed in the future?
# - whitespace/braces : We have a lot of explicit scoping in chrome code.
cpplint._SetFilters('-build/include,-build/include_order,-build/namespace,'
'-readability/casting,-runtime/int,-runtime/virtual,'
'-whitespace/braces')
# Replace <hash_map> and <hash_set> as headers that need to be included
# with "base/hash_tables.h" instead.
cpplint._re_pattern_templates = [
(a, b, 'base/hash_tables.h')
if header in ('<hash_map>', '<hash_set>') else (a, b, header)
for (a, b, header) in cpplint._re_pattern_templates
]
# We currently are more strict with normal code than unit tests; 4 and 5 are
# the verbosity level that would normally be passed to cpplint.py through
# --verbose=#. Hopefully, in the future, we can be more verbose.
files = [f.AbsoluteLocalPath() for f in
input_api.AffectedSourceFiles(source_file_filter)]
for file_name in files:
if _RE_IS_TEST.match(file_name):
level = 5
else:
level = 4
cpplint.ProcessFile(file_name, level)
if cpplint._cpplint_state.error_count > 0:
if input_api.is_committing:
res_type = output_api.PresubmitError
else:
res_type = output_api.PresubmitPromptWarning
result = [res_type('Changelist failed cpplint.py check.')]
return result
def CheckChangeHasNoCR(input_api, output_api, source_file_filter=None):
"""Checks no '\r' (CR) character is in any source files."""
cr_files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
if '\r' in input_api.ReadFile(f, 'rb'):
cr_files.append(f.LocalPath())
if cr_files:
return [output_api.PresubmitPromptWarning(
'Found a CR character in these files:', items=cr_files)]
return []
def CheckSvnModifiedDirectories(input_api, output_api, source_file_filter=None):
"""Checks for files in svn modified directories.
They will get submitted on accident because svn commits recursively by
default, and that's very dangerous.
"""
if input_api.change.scm != 'svn':
return []
errors = []
current_cl_files = input_api.change.GetModifiedFiles()
all_modified_files = input_api.change.GetAllModifiedFiles()
# Filter out files in the current CL.
modified_files = [f for f in all_modified_files if f not in current_cl_files]
modified_abspaths = [input_api.os_path.abspath(f) for f in modified_files]
for f in input_api.AffectedFiles(file_filter=source_file_filter):
if f.Action() == 'M' and f.IsDirectory():
curpath = f.AbsoluteLocalPath()
bad_files = []
# Check if any of the modified files in other CLs are under curpath.
for i in xrange(len(modified_files)):
abspath = modified_abspaths[i]
if input_api.os_path.commonprefix([curpath, abspath]) == curpath:
bad_files.append(modified_files[i])
if bad_files:
if input_api.is_committing:
error_type = output_api.PresubmitPromptWarning
else:
error_type = output_api.PresubmitNotifyResult
errors.append(error_type(
'Potential accidental commits in changelist %s:' % f.LocalPath(),
items=bad_files))
return errors
def CheckChangeHasOnlyOneEol(input_api, output_api, source_file_filter=None):
"""Checks the files ends with one and only one \n (LF)."""
eof_files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
contents = input_api.ReadFile(f, 'rb')
# Check that the file ends in one and only one newline character.
if len(contents) > 1 and (contents[-1:] != '\n' or contents[-2:-1] == '\n'):
eof_files.append(f.LocalPath())
if eof_files:
return [output_api.PresubmitPromptWarning(
'These files should end in one (and only one) newline character:',
items=eof_files)]
return []
def CheckChangeHasNoCrAndHasOnlyOneEol(input_api, output_api,
source_file_filter=None):
"""Runs both CheckChangeHasNoCR and CheckChangeHasOnlyOneEOL in one pass.
  It is faster because it reads each file only once.
"""
cr_files = []
eof_files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
contents = input_api.ReadFile(f, 'rb')
if '\r' in contents:
cr_files.append(f.LocalPath())
# Check that the file ends in one and only one newline character.
if len(contents) > 1 and (contents[-1:] != '\n' or contents[-2:-1] == '\n'):
eof_files.append(f.LocalPath())
outputs = []
if cr_files:
outputs.append(output_api.PresubmitPromptWarning(
'Found a CR character in these files:', items=cr_files))
if eof_files:
outputs.append(output_api.PresubmitPromptWarning(
'These files should end in one (and only one) newline character:',
items=eof_files))
return outputs
def _ReportErrorFileAndLine(filename, line_num, dummy_line):
"""Default error formatter for _FindNewViolationsOfRule."""
return '%s, line %s' % (filename, line_num)
def _FindNewViolationsOfRule(callable_rule, input_api, source_file_filter=None,
error_formatter=_ReportErrorFileAndLine):
"""Find all newly introduced violations of a per-line rule (a callable).
Arguments:
callable_rule: a callable taking a file extension and line of input and
returning True if the rule is satisfied and False if there was a problem.
input_api: object to enumerate the affected files.
source_file_filter: a filter to be passed to the input api.
error_formatter: a callable taking (filename, line_number, line) and
returning a formatted error string.
Returns:
A list of the newly-introduced violations reported by the rule.
"""
errors = []
for f in input_api.AffectedFiles(include_deletes=False,
file_filter=source_file_filter):
# For speed, we do two passes, checking first the full file. Shelling out
# to the SCM to determine the changed region can be quite expensive on
# Win32. Assuming that most files will be kept problem-free, we can
# skip the SCM operations most of the time.
extension = str(f.LocalPath()).rsplit('.', 1)[-1]
if all(callable_rule(extension, line) for line in f.NewContents()):
continue # No violation found in full text: can skip considering diff.
for line_num, line in f.ChangedContents():
if not callable_rule(extension, line):
errors.append(error_formatter(f.LocalPath(), line_num, line))
return errors
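# Illustrative sketch (not part of the original API surface): a custom canned
# check built on _FindNewViolationsOfRule. The check name and the flagged
# marker are hypothetical; the rule callable receives (file_extension, line)
# and must return True when the line is acceptable.
def _ExampleCheckChangeHasNoFixmes(input_api, output_api,
                                   source_file_filter=None):
  """Hypothetical check: warn on newly introduced FIX''ME markers."""
  marker = 'FIX' 'ME'  # Split so this file does not flag itself.
  errors = _FindNewViolationsOfRule(lambda _, line: marker not in line,
                                    input_api, source_file_filter)
  if errors:
    return [output_api.PresubmitPromptWarning(
        'Found %s in:' % marker, long_text='\n'.join(errors))]
  return []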
def CheckChangeHasNoTabs(input_api, output_api, source_file_filter=None):
"""Checks that there are no tab characters in any of the text files to be
submitted.
"""
# In addition to the filter, make sure that makefiles are blacklisted.
if not source_file_filter:
# It's the default filter.
source_file_filter = input_api.FilterSourceFile
def filter_more(affected_file):
basename = input_api.os_path.basename(affected_file.LocalPath())
return (not (basename in ('Makefile', 'makefile') or
basename.endswith('.mk')) and
source_file_filter(affected_file))
tabs = _FindNewViolationsOfRule(lambda _, line : '\t' not in line,
input_api, filter_more)
if tabs:
return [output_api.PresubmitPromptWarning('Found a tab character in:',
long_text='\n'.join(tabs))]
return []
def CheckChangeTodoHasOwner(input_api, output_api, source_file_filter=None):
"""Checks that the user didn't add TODO(name) without an owner."""
unowned_todo = input_api.re.compile('TO''DO[^(]')
errors = _FindNewViolationsOfRule(lambda _, x : not unowned_todo.search(x),
input_api, source_file_filter)
errors = ['Found TO''DO with no owner in ' + x for x in errors]
if errors:
return [output_api.PresubmitPromptWarning('\n'.join(errors))]
return []
def CheckChangeHasNoStrayWhitespace(input_api, output_api,
source_file_filter=None):
"""Checks that there is no stray whitespace at source lines end."""
errors = _FindNewViolationsOfRule(lambda _, line : line.rstrip() == line,
input_api, source_file_filter)
if errors:
return [output_api.PresubmitPromptWarning(
'Found line ending with white spaces in:',
long_text='\n'.join(errors))]
return []
def CheckLongLines(input_api, output_api, maxlen, source_file_filter=None):
"""Checks that there aren't any lines longer than maxlen characters in any of
the text files to be submitted.
"""
maxlens = {
'java': 100,
# This is specifically for Android's handwritten makefiles (Android.mk).
'mk': 200,
'': maxlen,
}
# Note: these are C++ specific but processed on all languages. :(
MACROS = ('#define', '#include', '#import', '#pragma', '#if', '#endif')
# Special java statements.
SPECIAL_JAVA_STARTS = ('package ', 'import ')
def no_long_lines(file_extension, line):
    # Allow special java statements to be as long as necessary.
if file_extension == 'java' and line.startswith(SPECIAL_JAVA_STARTS):
return True
file_maxlen = maxlens.get(file_extension, maxlens[''])
    # Stupidly long symbols need a workaround when they take 66% of the line.
long_symbol = file_maxlen * 2 / 3
# Hard line length limit at 50% more.
extra_maxlen = file_maxlen * 3 / 2
line_len = len(line)
if line_len <= file_maxlen:
return True
if line_len > extra_maxlen:
return False
return (
line.startswith(MACROS) or
any((url in line) for url in ('http://', 'https://')) or
input_api.re.match(
r'.*[A-Za-z][A-Za-z_0-9]{%d,}.*' % long_symbol, line))
def format_error(filename, line_num, line):
return '%s, line %s, %s chars' % (filename, line_num, len(line))
errors = _FindNewViolationsOfRule(no_long_lines, input_api,
source_file_filter,
error_formatter=format_error)
if errors:
msg = 'Found lines longer than %s characters (first 5 shown).' % maxlen
return [output_api.PresubmitPromptWarning(msg, items=errors[:5])]
else:
return []
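# Worked numbers for the thresholds above, assuming the common maxlen of 80
# (these follow directly from the arithmetic in no_long_lines):
#   file_maxlen  = 80              -> lines up to 80 chars always pass
#   long_symbol  = 80 * 2 / 3 = 53 -> a 53+ char identifier excuses the line
#   extra_maxlen = 80 * 3 / 2 = 120 -> lines over 120 chars always fail
# Between 81 and 120 chars a line passes only if it starts with a macro,
# contains an URL, or contains such a long symbol.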
def CheckLicense(input_api, output_api, license_re, source_file_filter=None,
accept_empty_files=True):
"""Verifies the license header.
"""
license_re = input_api.re.compile(license_re, input_api.re.MULTILINE)
bad_files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
contents = input_api.ReadFile(f, 'rb')
if accept_empty_files and not contents:
continue
if not license_re.search(contents):
bad_files.append(f.LocalPath())
if bad_files:
if input_api.is_committing:
res_type = output_api.PresubmitPromptWarning
else:
res_type = output_api.PresubmitNotifyResult
return [res_type(
'License must match:\n%s\n' % license_re.pattern +
'Found a bad license header in these files:', items=bad_files)]
return []
def CheckChangeSvnEolStyle(input_api, output_api, source_file_filter=None):
"""Checks that the source files have svn:eol-style=LF."""
return CheckSvnProperty(input_api, output_api,
'svn:eol-style', 'LF',
input_api.AffectedSourceFiles(source_file_filter))
def CheckSvnForCommonMimeTypes(input_api, output_api):
"""Checks that common binary file types have the correct svn:mime-type."""
output = []
files = input_api.AffectedFiles(include_deletes=False)
def IsExts(x, exts):
path = x.LocalPath()
for extension in exts:
if path.endswith(extension):
return True
return False
def FilterFiles(extension):
return filter(lambda x: IsExts(x, extension), files)
def RunCheck(mime_type, files):
output.extend(CheckSvnProperty(input_api, output_api, 'svn:mime-type',
mime_type, files))
RunCheck('application/pdf', FilterFiles(['.pdf']))
RunCheck('image/bmp', FilterFiles(['.bmp']))
RunCheck('image/gif', FilterFiles(['.gif']))
RunCheck('image/png', FilterFiles(['.png']))
RunCheck('image/jpeg', FilterFiles(['.jpg', '.jpeg', '.jpe']))
RunCheck('image/vnd.microsoft.icon', FilterFiles(['.ico']))
return output
def CheckSvnProperty(input_api, output_api, prop, expected, affected_files):
"""Checks that affected_files files have prop=expected."""
if input_api.change.scm != 'svn':
return []
bad = filter(lambda f: f.Property(prop) != expected, affected_files)
if bad:
if input_api.is_committing:
res_type = output_api.PresubmitError
else:
res_type = output_api.PresubmitNotifyResult
message = 'Run the command: svn pset %s %s \\' % (prop, expected)
return [res_type(message, items=bad)]
return []
### Other checks
def CheckDoNotSubmit(input_api, output_api):
return (
CheckDoNotSubmitInDescription(input_api, output_api) +
CheckDoNotSubmitInFiles(input_api, output_api)
)
def CheckTreeIsOpen(input_api, output_api,
url=None, closed=None, json_url=None):
"""Check whether to allow commit without prompt.
Supports two styles:
  1. Checks that a url's content doesn't match a regexp that would mean that
  the tree is closed. (old)
  2. Checks the json_url to decide whether to allow commit without prompt.
Args:
input_api: input related apis.
output_api: output related apis.
url: url to use for regex based tree status.
closed: regex to match for closed status.
json_url: url to download json style status.
"""
if not input_api.is_committing:
return []
try:
if json_url:
connection = input_api.urllib2.urlopen(json_url)
status = input_api.json.loads(connection.read())
connection.close()
if not status['can_commit_freely']:
short_text = 'Tree state is: ' + status['general_state']
long_text = status['message'] + '\n' + json_url
return [output_api.PresubmitError(short_text, long_text=long_text)]
else:
# TODO(bradnelson): drop this once all users are gone.
connection = input_api.urllib2.urlopen(url)
status = connection.read()
connection.close()
if input_api.re.match(closed, status):
long_text = status + '\n' + url
return [output_api.PresubmitError('The tree is closed.',
long_text=long_text)]
except IOError as e:
return [output_api.PresubmitError('Error fetching tree status.',
long_text=str(e))]
return []
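# Illustrative json_url payload for the check above. Only the keys actually
# read by the code are meaningful; the values are made-up examples:
#   {
#     "can_commit_freely": false,
#     "general_state": "closed",
#     "message": "Tree closed for the weekly branch cut"
#   }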
def RunUnitTestsInDirectory(
input_api, output_api, directory, whitelist=None, blacklist=None):
"""Lists all files in a directory and runs them. Doesn't recurse.
  It's mainly a wrapper for RunUnitTests. Use whitelist and blacklist to filter
tests accordingly.
"""
unit_tests = []
test_path = input_api.os_path.abspath(
input_api.os_path.join(input_api.PresubmitLocalPath(), directory))
def check(filename, filters):
return any(True for i in filters if input_api.re.match(i, filename))
to_run = found = 0
for filename in input_api.os_listdir(test_path):
found += 1
fullpath = input_api.os_path.join(test_path, filename)
if not input_api.os_path.isfile(fullpath):
continue
if whitelist and not check(filename, whitelist):
continue
if blacklist and check(filename, blacklist):
continue
unit_tests.append(input_api.os_path.join(directory, filename))
to_run += 1
input_api.logging.debug('Found %d files, running %d' % (found, to_run))
if not to_run:
return [
output_api.PresubmitPromptWarning(
'Out of %d files, found none that matched w=%r, b=%r in directory %s'
% (found, whitelist, blacklist, directory))
]
return RunUnitTests(input_api, output_api, unit_tests)
def RunUnitTests(input_api, output_api, unit_tests):
"""Runs all unit tests in a directory.
On Windows, sys.executable is used for unit tests ending with ".py".
"""
# We don't want to hinder users from uploading incomplete patches.
if input_api.is_committing:
message_type = output_api.PresubmitError
else:
message_type = output_api.PresubmitPromptWarning
results = []
for unit_test in unit_tests:
cmd = []
if input_api.platform == 'win32' and unit_test.endswith('.py'):
# Windows needs some help.
cmd = [input_api.python_executable]
cmd.append(unit_test)
if input_api.verbose:
print('Running %s' % unit_test)
cmd.append('--verbose')
try:
if input_api.verbose:
input_api.subprocess.check_call(cmd, cwd=input_api.PresubmitLocalPath())
else:
input_api.subprocess.check_output(
cmd,
stderr=input_api.subprocess.STDOUT,
cwd=input_api.PresubmitLocalPath())
except (OSError, input_api.subprocess.CalledProcessError), e:
results.append(message_type('%s failed!\n%s' % (unit_test, e)))
return results
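# Illustrative sketch (hypothetical PRESUBMIT.py usage; the directory name
# and regexes are made-up assumptions): run every test_*.py under tests/,
# skipping a slow suite. RunUnitTests itself picks warning vs. error
# severity based on input_api.is_committing.
def _ExampleCheckChangeRunTests(input_api, output_api):
  return RunUnitTestsInDirectory(
      input_api, output_api, 'tests',
      whitelist=[r'^test_.+\.py$'],
      blacklist=[r'^test_slow_.+\.py$'])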
def RunPythonUnitTests(input_api, output_api, unit_tests):
"""Run the unit tests out of process, capture the output and use the result
code to determine success.
DEPRECATED.
"""
# We don't want to hinder users from uploading incomplete patches.
if input_api.is_committing:
message_type = output_api.PresubmitError
else:
message_type = output_api.PresubmitNotifyResult
results = []
for unit_test in unit_tests:
# Run the unit tests out of process. This is because some unit tests
# stub out base libraries and don't clean up their mess. It's too easy to
# get subtle bugs.
cwd = None
env = None
unit_test_name = unit_test
# 'python -m test.unit_test' doesn't work. We need to change to the right
# directory instead.
if '.' in unit_test:
# Tests imported in submodules (subdirectories) assume that the current
# directory is in the PYTHONPATH. Manually fix that.
unit_test = unit_test.replace('.', '/')
cwd = input_api.os_path.dirname(unit_test)
unit_test = input_api.os_path.basename(unit_test)
env = input_api.environ.copy()
# At least on Windows, it seems '.' must explicitly be in PYTHONPATH
backpath = [
'.', input_api.os_path.pathsep.join(['..'] * (cwd.count('/') + 1))
]
if env.get('PYTHONPATH'):
backpath.append(env.get('PYTHONPATH'))
      env['PYTHONPATH'] = input_api.os_path.pathsep.join(backpath)
cmd = [input_api.python_executable, '-m', '%s' % unit_test]
try:
input_api.subprocess.check_output(
cmd, stderr=input_api.subprocess.STDOUT, cwd=cwd, env=env)
except (OSError, input_api.subprocess.CalledProcessError), e:
results.append(message_type('%s failed!\n%s' % (unit_test_name, e)))
return results
def _FetchAllFiles(input_api, white_list, black_list):
"""Hack to fetch all files."""
  # We cannot use AffectedFiles here because we want to test every python
  # file on each python change, since a change in one python file can break
  # another, unmodified file.
# Use code similar to InputApi.FilterSourceFile()
def Find(filepath, filters):
for item in filters:
if input_api.re.match(item, filepath):
return True
return False
files = []
path_len = len(input_api.PresubmitLocalPath())
for dirpath, dirnames, filenames in input_api.os_walk(
input_api.PresubmitLocalPath()):
    # Prune black-listed dirnames in place so os_walk skips those subtrees.
for item in dirnames[:]:
filepath = input_api.os_path.join(dirpath, item)[path_len + 1:]
if Find(filepath, black_list):
dirnames.remove(item)
for item in filenames:
filepath = input_api.os_path.join(dirpath, item)[path_len + 1:]
if Find(filepath, white_list) and not Find(filepath, black_list):
files.append(filepath)
return files
def RunPylint(input_api, output_api, white_list=None, black_list=None,
disabled_warnings=None, extra_paths_list=None):
"""Run pylint on python files.
The default white_list enforces looking only at *.py files.
"""
white_list = tuple(white_list or ('.*\.py$',))
black_list = tuple(black_list or input_api.DEFAULT_BLACK_LIST)
extra_paths_list = extra_paths_list or []
if input_api.is_committing:
error_type = output_api.PresubmitError
else:
error_type = output_api.PresubmitPromptWarning
# Only trigger if there is at least one python file affected.
def rel_path(regex):
"""Modifies a regex for a subject to accept paths relative to root."""
def samefile(a, b):
# Default implementation for platforms lacking os.path.samefile
# (like Windows).
return input_api.os_path.abspath(a) == input_api.os_path.abspath(b)
samefile = getattr(input_api.os_path, 'samefile', samefile)
if samefile(input_api.PresubmitLocalPath(),
input_api.change.RepositoryRoot()):
return regex
prefix = input_api.os_path.join(input_api.os_path.relpath(
input_api.PresubmitLocalPath(), input_api.change.RepositoryRoot()), '')
return input_api.re.escape(prefix) + regex
src_filter = lambda x: input_api.FilterSourceFile(
x, map(rel_path, white_list), map(rel_path, black_list))
if not input_api.AffectedSourceFiles(src_filter):
input_api.logging.info('Skipping pylint: no matching changes.')
return []
extra_args = ['--rcfile=%s' % input_api.os_path.join(_HERE, 'pylintrc')]
if disabled_warnings:
extra_args.extend(['-d', ','.join(disabled_warnings)])
files = _FetchAllFiles(input_api, white_list, black_list)
if not files:
return []
input_api.logging.info('Running pylint on %d files', len(files))
input_api.logging.debug('Running pylint on: %s', files)
# Copy the system path to the environment so pylint can find the right
# imports.
env = input_api.environ.copy()
import sys
env['PYTHONPATH'] = input_api.os_path.pathsep.join(
extra_paths_list + sys.path).encode('utf8')
def run_lint(files):
# We can't import pylint directly due to licensing issues, so we run
# it in another process. Windows needs help running python files so we
# explicitly specify the interpreter to use. It also has limitations on
# the size of the command-line, so we pass arguments via a pipe.
command = [input_api.python_executable,
input_api.os_path.join(_HERE, 'third_party', 'pylint.py'),
'--args-on-stdin']
try:
child = input_api.subprocess.Popen(command, env=env,
stdin=input_api.subprocess.PIPE)
# Dump the arguments to the child process via a pipe.
for filename in files:
child.stdin.write(filename + '\n')
for arg in extra_args:
child.stdin.write(arg + '\n')
child.stdin.close()
child.communicate()
return child.returncode
except OSError:
return 'Pylint failed!'
result = None
# Always run pylint and pass it all the py files at once.
# Passing py files one at time is slower and can produce
# different results. input_api.verbose used to be used
# to enable this behaviour but differing behaviour in
# verbose mode is not desirable.
# Leave this unreachable code in here so users can make
# a quick local edit to diagnose pylint issues more
# easily.
if True:
print('Running pylint on %d files.' % len(files))
result = run_lint(sorted(files))
else:
for filename in sorted(files):
print('Running pylint on %s' % filename)
result = run_lint([filename]) or result
if isinstance(result, basestring):
return [error_type(result)]
elif result:
return [error_type('Fix pylint errors first.')]
return []
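# Illustrative sketch (hypothetical usage; the black list pattern and the
# disabled warning codes are made-up examples, not project policy):
def _ExampleRunPylintChecks(input_api, output_api):
  return RunPylint(input_api, output_api,
                   black_list=[r'^third_party[\\\/].*'],
                   disabled_warnings=['W0403', 'R0201'])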
# TODO(dpranke): Get the host_url from the input_api instead
def CheckRietveldTryJobExecution(dummy_input_api, dummy_output_api,
dummy_host_url, dummy_platforms,
dummy_owner):
# Temporarily 'fix' the check while the Rietveld API is being upgraded to
# something sensible.
return []
def CheckBuildbotPendingBuilds(input_api, output_api, url, max_pendings,
ignored):
try:
connection = input_api.urllib2.urlopen(url)
raw_data = connection.read()
connection.close()
except IOError:
return [output_api.PresubmitNotifyResult('%s is not accessible' % url)]
try:
data = input_api.json.loads(raw_data)
except ValueError:
return [output_api.PresubmitNotifyResult('Received malformed json while '
'looking up buildbot status')]
out = []
for (builder_name, builder) in data.iteritems():
if builder_name in ignored:
continue
if builder.get('state', '') == 'offline':
continue
pending_builds_len = len(builder.get('pending_builds', []))
if pending_builds_len > max_pendings:
out.append('%s has %d build(s) pending' %
(builder_name, pending_builds_len))
if out:
return [output_api.PresubmitPromptWarning(
        'Build(s) pending. It is suggested to wait until no more than %d '
        'builds are pending.' % max_pendings,
long_text='\n'.join(out))]
return []
def CheckOwners(input_api, output_api, source_file_filter=None,
author_counts_as_owner=True):
if input_api.is_committing:
if input_api.tbr:
return [output_api.PresubmitNotifyResult(
'--tbr was specified, skipping OWNERS check')]
if not input_api.change.issue:
return [output_api.PresubmitError("OWNERS check failed: this change has "
"no Rietveld issue number, so we can't check it for approvals.")]
needed = 'LGTM from an OWNER'
output = output_api.PresubmitError
else:
needed = 'OWNER reviewers'
output = output_api.PresubmitNotifyResult
affected_files = set([f.LocalPath() for f in
input_api.change.AffectedFiles(file_filter=source_file_filter)])
owners_db = input_api.owners_db
owner_email, reviewers = _RietveldOwnerAndReviewers(
input_api,
owners_db.email_regexp,
approval_needed=input_api.is_committing)
owner_email = owner_email or input_api.change.author_email
if author_counts_as_owner and owner_email:
reviewers_plus_owner = set([owner_email]).union(reviewers)
missing_files = owners_db.files_not_covered_by(affected_files,
reviewers_plus_owner)
else:
missing_files = owners_db.files_not_covered_by(affected_files, reviewers)
if missing_files:
output_list = [
output('Missing %s for these files:\n %s' %
(needed, '\n '.join(missing_files)))]
if not input_api.is_committing:
suggested_owners = owners_db.reviewers_for(affected_files, owner_email)
output_list.append(output('Suggested OWNERS:\n %s' %
('\n '.join(suggested_owners or []))))
return output_list
if input_api.is_committing and not reviewers:
return [output('Missing LGTM from someone other than %s' % owner_email)]
return []
def _GetRietveldIssueProps(input_api, messages):
"""Gets the issue properties from rietveld."""
issue = input_api.change.issue
if issue and input_api.rietveld:
return input_api.rietveld.get_issue_properties(
issue=int(issue), messages=messages)
def _RietveldOwnerAndReviewers(input_api, email_regexp, approval_needed=False):
"""Return the owner and reviewers of a change, if any.
If approval_needed is True, only reviewers who have approved the change
will be returned.
"""
issue_props = _GetRietveldIssueProps(input_api, True)
if not issue_props:
return None, set()
if not approval_needed:
return issue_props['owner_email'], set(issue_props['reviewers'])
owner_email = issue_props['owner_email']
def match_reviewer(r):
return email_regexp.match(r) and r != owner_email
messages = issue_props.get('messages', [])
approvers = set(
m['sender'] for m in messages
if m.get('approval') and match_reviewer(m['sender']))
return owner_email, approvers
def _CheckConstNSObject(input_api, output_api, source_file_filter):
"""Checks to make sure no objective-c files have |const NSSomeClass*|."""
pattern = input_api.re.compile(
r'const\s+NS(?!(Point|Range|Rect|Size)\s*\*)\w*\s*\*')
def objective_c_filter(f):
return (source_file_filter(f) and
input_api.os_path.splitext(f.LocalPath())[1] in ('.h', '.m', '.mm'))
files = []
for f in input_api.AffectedSourceFiles(objective_c_filter):
contents = input_api.ReadFile(f)
if pattern.search(contents):
files.append(f)
if files:
if input_api.is_committing:
res_type = output_api.PresubmitPromptWarning
else:
res_type = output_api.PresubmitNotifyResult
return [ res_type('|const NSClass*| is wrong, see ' +
'http://dev.chromium.org/developers/clang-mac',
files) ]
return []
def CheckSingletonInHeaders(input_api, output_api, source_file_filter=None):
"""Checks to make sure no header files have |Singleton<|."""
pattern = input_api.re.compile(r'(?<!class\s)Singleton\s*<')
files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
if (f.LocalPath().endswith('.h') or f.LocalPath().endswith('.hxx') or
f.LocalPath().endswith('.hpp') or f.LocalPath().endswith('.inl')):
contents = input_api.ReadFile(f)
for line in contents.splitlines(False):
if (not input_api.re.match(r'//', line) and # Strip C++ comment.
pattern.search(line)):
files.append(f)
break
if files:
return [ output_api.PresubmitError(
'Found Singleton<T> in the following header files.\n' +
'Please move them to an appropriate source file so that the ' +
'template gets instantiated in a single compilation unit.',
files) ]
return []
def PanProjectChecks(input_api, output_api,
excluded_paths=None, text_files=None,
license_header=None, project_name=None,
owners_check=True, maxlen=80):
"""Checks that ALL chromium orbit projects should use.
  These are checks to be run on all Chromium orbit projects, including:
Chromium
Native Client
V8
When you update this function, please take this broad scope into account.
Args:
input_api: Bag of input related interfaces.
output_api: Bag of output related interfaces.
excluded_paths: Don't include these paths in common checks.
text_files: Which file are to be treated as documentation text files.
license_header: What license header should be on files.
project_name: What is the name of the project as it appears in the license.
Returns:
A list of warning or error objects.
"""
excluded_paths = tuple(excluded_paths or [])
text_files = tuple(text_files or (
r'.+\.txt$',
r'.+\.json$',
))
project_name = project_name or 'Chromium'
# Accept any year number from 2006 to the current year, or the special
# 2006-2008 string used on the oldest files. 2006-2008 is deprecated, but
# tolerate it until it's removed from all files.
current_year = int(input_api.time.strftime('%Y'))
allowed_years = (str(s) for s in reversed(xrange(2006, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + '|2006-2008)'
# The (c) is deprecated, but tolerate it until it's removed from all files.
license_header = license_header or (
r'.*? Copyright (\(c\) )?%(year)s The %(project)s Authors\. '
r'All rights reserved\.\n'
r'.*? Use of this source code is governed by a BSD-style license that '
r'can be\n'
r'.*? found in the LICENSE file\.(?: \*/)?\n'
) % {
'year': years_re,
'project': project_name,
}
results = []
  # This code loads the default black list (e.g. third_party, experimental,
  # etc) and adds our black list (breakpad, skia and v8 are still not
  # following google style and are not really living in this repository).
# See presubmit_support.py InputApi.FilterSourceFile for the (simple) usage.
black_list = input_api.DEFAULT_BLACK_LIST + excluded_paths
white_list = input_api.DEFAULT_WHITE_LIST + text_files
sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
text_files = lambda x: input_api.FilterSourceFile(
x, black_list=black_list, white_list=white_list)
snapshot_memory = []
def snapshot(msg):
"""Measures & prints performance warning if a rule is running slow."""
dt2 = input_api.time.clock()
if snapshot_memory:
delta_ms = int(1000*(dt2 - snapshot_memory[0]))
if delta_ms > 500:
print " %s took a long time: %dms" % (snapshot_memory[1], delta_ms)
snapshot_memory[:] = (dt2, msg)
if owners_check:
snapshot("checking owners")
results.extend(input_api.canned_checks.CheckOwners(
input_api, output_api, source_file_filter=None))
snapshot("checking long lines")
results.extend(input_api.canned_checks.CheckLongLines(
input_api, output_api, maxlen, source_file_filter=sources))
snapshot( "checking tabs")
results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
input_api, output_api, source_file_filter=sources))
snapshot( "checking stray whitespace")
results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
input_api, output_api, source_file_filter=sources))
snapshot("checking nsobjects")
results.extend(_CheckConstNSObject(
input_api, output_api, source_file_filter=sources))
snapshot("checking singletons")
results.extend(CheckSingletonInHeaders(
input_api, output_api, source_file_filter=sources))
# The following checks are only done on commit, since the commit bot will
# auto-fix most of these.
if input_api.is_committing:
snapshot("checking eol style")
results.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
input_api, output_api, source_file_filter=text_files))
snapshot("checking svn mime types")
results.extend(input_api.canned_checks.CheckSvnForCommonMimeTypes(
input_api, output_api))
snapshot("checking license")
results.extend(input_api.canned_checks.CheckLicense(
input_api, output_api, license_header, source_file_filter=sources))
snapshot("checking was uploaded")
results.extend(input_api.canned_checks.CheckChangeWasUploaded(
input_api, output_api))
snapshot("checking description")
results.extend(input_api.canned_checks.CheckChangeHasDescription(
input_api, output_api))
results.extend(input_api.canned_checks.CheckDoNotSubmitInDescription(
input_api, output_api))
snapshot("checking do not submit in files")
results.extend(input_api.canned_checks.CheckDoNotSubmitInFiles(
input_api, output_api))
snapshot("done")
return results
| gpl-2.0 | 659,827,465,840,699,800 | 36.759766 | 80 | 0.66583 | false |
mscuthbert/abjad | abjad/tools/labeltools/test/test_labeltools_color_contents_of_container.py | 2 | 1445 | # -*- encoding: utf-8 -*-
from abjad import *
def test_labeltools_color_contents_of_container_01():
staff = Staff()
staff.append(Measure((2, 8), "c'8 d'8"))
staff.append(Measure((2, 8), "e'8 f'8"))
staff.append(Measure((2, 8), "g'8 a'8"))
labeltools.color_contents_of_container(staff[1], 'blue')
assert systemtools.TestManager.compare(
staff,
r'''
\new Staff {
{
\time 2/8
c'8
d'8
}
{
\override Accidental #'color = #blue
\override Beam #'color = #blue
\override Dots #'color = #blue
\override NoteHead #'color = #blue
\override Rest #'color = #blue
\override Stem #'color = #blue
\override TupletBracket #'color = #blue
\override TupletNumber #'color = #blue
e'8
f'8
\revert Accidental #'color
\revert Beam #'color
\revert Dots #'color
\revert NoteHead #'color
\revert Rest #'color
\revert Stem #'color
\revert TupletBracket #'color
\revert TupletNumber #'color
}
{
g'8
a'8
}
}
'''
)
assert inspect_(staff).is_well_formed() | gpl-3.0 | -2,890,309,610,952,873,500 | 27.92 | 60 | 0.439446 | false |
Esser420/EvilTwinFramework | core/AuxiliaryModules/dnsmasqhandler.py | 1 | 3694 | """
This class is responsible for configuring all pages to spoof,
adding them to the hosts file and setting them up in apache
with the help of the httpserver class, as well as configuring dnsmasq.
"""
import os
from utils.utils import FileHandler
from textwrap import dedent
class DNSMasqHandler(object):
def __init__(self, dnsmasq_config_path):
self.dnsmasq_config_path = dnsmasq_config_path
self.captive_portal_mode = False
self.dnsmasq_running = False
self.file_handler = None
def set_captive_portal_mode(self, captive_portal_mode):
self.captive_portal_mode = captive_portal_mode
def write_dnsmasq_configurations(self, interface, ip_gw, dhcp_range=[], nameservers=[], virtInterfaces = 0):
# Argument cleanup
if type(nameservers) is not list:
nameservers = [nameservers]
dhcp_range_string = "\ndhcp-range={interface}, {dhcp_start}, {dhcp_end}, 12h".format(interface = interface,
dhcp_start = dhcp_range[0],
dhcp_end = dhcp_range[1])
for i in range(virtInterfaces):
dhcp_start = ".".join(dhcp_range[0].split(".")[0:2] + [str(int(dhcp_range[0].split(".")[2]) + i + 1)] + [dhcp_range[0].split(".")[3]])
dhcp_end = ".".join(dhcp_range[1].split(".")[0:2] + [str(int(dhcp_range[1].split(".")[2]) + i + 1)] + [dhcp_range[1].split(".")[3]])
dhcp_range_string += "\ndhcp-range={interface}_{index}, {dhcp_start}, {dhcp_end}, 12h".format(
interface = interface,
index = i,
dhcp_start = dhcp_start,
dhcp_end = dhcp_end)
configurations = dedent("""
{dhcp_range}
dhcp-option=3,{ip_gw}
dhcp-option=6,{ip_gw}
""".format( dhcp_range = dhcp_range_string,
interface = interface,
ip_gw = ip_gw))
configurations += "bind-interfaces\n"
if self.captive_portal_mode:
configurations += "no-resolv\n"
configurations += "address=/#/{ip_gw}\n".format(ip_gw = ip_gw)
else:
for server in nameservers:
configurations += "server={server}\n".format(server = server)
return self._safe_write_config(configurations, self.dnsmasq_config_path)
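    # Roughly the configuration written by write_dnsmasq_configurations for
    # interface='wlan0', ip_gw='10.0.0.1',
    # dhcp_range=['10.0.0.10', '10.0.0.100'], nameservers=['8.8.8.8'],
    # virtInterfaces=0, captive_portal_mode=False (illustrative values):
    #
    #   dhcp-range=wlan0, 10.0.0.10, 10.0.0.100, 12h
    #   dhcp-option=3,10.0.0.1
    #   dhcp-option=6,10.0.0.1
    #   bind-interfaces
    #   server=8.8.8.8
    #
    # With captive_portal_mode=True the server= line is replaced by
    # no-resolv and address=/#/10.0.0.1 so every DNS name resolves to the
    # gateway.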
def _safe_write_config(self, configurations, config_path):
if self.file_handler:
self.file_handler.write(configurations)
else:
self.file_handler = FileHandler(config_path)
self.file_handler.write(configurations)
def start_dnsmasq(self):
print "[+] Starting dnsmasq service"
        if os.system('service dnsmasq restart') != 0:
return False
self.dnsmasq_running = True
return True
def stop_dnsmasq(self):
os.system('service dnsmasq stop')
os.system('pkill dnsmasq') # Cleanup
self.dnsmasq_running = False
def cleanup(self):
if self.file_handler is not None:
self.file_handler.restore_file()
| gpl-2.0 | 4,458,079,139,631,978,000 | 45.175 | 146 | 0.490796 | false |
IlyaSukhanov/moto | moto/swf/responses.py | 7 | 20587 | import json
import six
from moto.core.responses import BaseResponse
from .exceptions import SWFSerializationException, SWFValidationException
from .models import swf_backends
class SWFResponse(BaseResponse):
@property
def swf_backend(self):
return swf_backends[self.region]
# SWF parameters are passed through a JSON body, so let's ease retrieval
@property
def _params(self):
return json.loads(self.body.decode("utf-8"))
def _check_int(self, parameter):
if not isinstance(parameter, int):
raise SWFSerializationException(parameter)
def _check_float_or_int(self, parameter):
if not isinstance(parameter, float):
if not isinstance(parameter, int):
raise SWFSerializationException(parameter)
def _check_none_or_string(self, parameter):
if parameter is not None:
self._check_string(parameter)
def _check_string(self, parameter):
if not isinstance(parameter, six.string_types):
raise SWFSerializationException(parameter)
def _check_none_or_list_of_strings(self, parameter):
if parameter is not None:
self._check_list_of_strings(parameter)
def _check_list_of_strings(self, parameter):
if not isinstance(parameter, list):
raise SWFSerializationException(parameter)
for i in parameter:
if not isinstance(i, six.string_types):
raise SWFSerializationException(parameter)
def _check_exclusivity(self, **kwargs):
if list(kwargs.values()).count(None) >= len(kwargs) - 1:
return
        keys = list(kwargs.keys())
if len(keys) == 2:
message = 'Cannot specify both a {0} and a {1}'.format(keys[0],
keys[1])
else:
message = 'Cannot specify more than one exclusive filters in the' \
' same query: {0}'.format(keys)
raise SWFValidationException(message)
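    # For instance (illustrative), list_closed_workflow_executions() calls
    # _check_exclusivity(executionFilter=..., typeFilter=..., tagFilter=...,
    # closeStatusFilter=...): it passes when at most one value is not None
    # and raises SWFValidationException when two or more are set.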
def _list_types(self, kind):
domain_name = self._params["domain"]
status = self._params["registrationStatus"]
reverse_order = self._params.get("reverseOrder", None)
self._check_string(domain_name)
self._check_string(status)
types = self.swf_backend.list_types(kind, domain_name, status, reverse_order=reverse_order)
return json.dumps({
"typeInfos": [_type.to_medium_dict() for _type in types]
})
def _describe_type(self, kind):
domain = self._params["domain"]
_type_args = self._params["{0}Type".format(kind)]
name = _type_args["name"]
version = _type_args["version"]
self._check_string(domain)
self._check_string(name)
self._check_string(version)
_type = self.swf_backend.describe_type(kind, domain, name, version)
return json.dumps(_type.to_full_dict())
def _deprecate_type(self, kind):
domain = self._params["domain"]
_type_args = self._params["{0}Type".format(kind)]
name = _type_args["name"]
version = _type_args["version"]
self._check_string(domain)
self._check_string(name)
self._check_string(version)
self.swf_backend.deprecate_type(kind, domain, name, version)
return ""
# TODO: implement pagination
def list_domains(self):
status = self._params["registrationStatus"]
self._check_string(status)
reverse_order = self._params.get("reverseOrder", None)
domains = self.swf_backend.list_domains(status, reverse_order=reverse_order)
return json.dumps({
"domainInfos": [domain.to_short_dict() for domain in domains]
})
def list_closed_workflow_executions(self):
domain = self._params['domain']
start_time_filter = self._params.get('startTimeFilter', None)
close_time_filter = self._params.get('closeTimeFilter', None)
execution_filter = self._params.get('executionFilter', None)
workflow_id = execution_filter['workflowId'] if execution_filter else None
maximum_page_size = self._params.get('maximumPageSize', 1000)
reverse_order = self._params.get('reverseOrder', None)
tag_filter = self._params.get('tagFilter', None)
type_filter = self._params.get('typeFilter', None)
close_status_filter = self._params.get('closeStatusFilter', None)
self._check_string(domain)
self._check_none_or_string(workflow_id)
self._check_exclusivity(executionFilter=execution_filter,
typeFilter=type_filter,
tagFilter=tag_filter,
closeStatusFilter=close_status_filter)
self._check_exclusivity(startTimeFilter=start_time_filter,
closeTimeFilter=close_time_filter)
if start_time_filter is None and close_time_filter is None:
raise SWFValidationException('Must specify time filter')
if start_time_filter:
self._check_float_or_int(start_time_filter['oldestDate'])
if 'latestDate' in start_time_filter:
self._check_float_or_int(start_time_filter['latestDate'])
if close_time_filter:
self._check_float_or_int(close_time_filter['oldestDate'])
if 'latestDate' in close_time_filter:
self._check_float_or_int(close_time_filter['latestDate'])
if tag_filter:
self._check_string(tag_filter['tag'])
if type_filter:
self._check_string(type_filter['name'])
self._check_string(type_filter['version'])
if close_status_filter:
self._check_string(close_status_filter['status'])
self._check_int(maximum_page_size)
workflow_executions = self.swf_backend.list_closed_workflow_executions(
domain_name=domain,
start_time_filter=start_time_filter,
close_time_filter=close_time_filter,
execution_filter=execution_filter,
tag_filter=tag_filter,
type_filter=type_filter,
maximum_page_size=maximum_page_size,
reverse_order=reverse_order,
workflow_id=workflow_id,
close_status_filter=close_status_filter
)
return json.dumps({
'executionInfos': [wfe.to_list_dict() for wfe in workflow_executions]
})
def list_open_workflow_executions(self):
domain = self._params['domain']
start_time_filter = self._params['startTimeFilter']
execution_filter = self._params.get('executionFilter', None)
workflow_id = execution_filter['workflowId'] if execution_filter else None
maximum_page_size = self._params.get('maximumPageSize', 1000)
reverse_order = self._params.get('reverseOrder', None)
tag_filter = self._params.get('tagFilter', None)
type_filter = self._params.get('typeFilter', None)
self._check_string(domain)
self._check_none_or_string(workflow_id)
self._check_exclusivity(executionFilter=execution_filter,
typeFilter=type_filter,
tagFilter=tag_filter)
self._check_float_or_int(start_time_filter['oldestDate'])
if 'latestDate' in start_time_filter:
self._check_float_or_int(start_time_filter['latestDate'])
if tag_filter:
self._check_string(tag_filter['tag'])
if type_filter:
self._check_string(type_filter['name'])
self._check_string(type_filter['version'])
self._check_int(maximum_page_size)
workflow_executions = self.swf_backend.list_open_workflow_executions(
domain_name=domain,
start_time_filter=start_time_filter,
execution_filter=execution_filter,
tag_filter=tag_filter,
type_filter=type_filter,
maximum_page_size=maximum_page_size,
reverse_order=reverse_order,
workflow_id=workflow_id
)
return json.dumps({
'executionInfos': [wfe.to_list_dict() for wfe in workflow_executions]
})
def register_domain(self):
name = self._params["name"]
retention = self._params["workflowExecutionRetentionPeriodInDays"]
description = self._params.get("description")
self._check_string(retention)
self._check_string(name)
self._check_none_or_string(description)
self.swf_backend.register_domain(name, retention,
description=description)
return ""
def deprecate_domain(self):
name = self._params["name"]
self._check_string(name)
self.swf_backend.deprecate_domain(name)
return ""
def describe_domain(self):
name = self._params["name"]
self._check_string(name)
domain = self.swf_backend.describe_domain(name)
return json.dumps(domain.to_full_dict())
# TODO: implement pagination
def list_activity_types(self):
return self._list_types("activity")
def register_activity_type(self):
domain = self._params["domain"]
name = self._params["name"]
version = self._params["version"]
default_task_list = self._params.get("defaultTaskList")
if default_task_list:
task_list = default_task_list.get("name")
else:
task_list = None
default_task_heartbeat_timeout = self._params.get("defaultTaskHeartbeatTimeout")
default_task_schedule_to_close_timeout = self._params.get("defaultTaskScheduleToCloseTimeout")
default_task_schedule_to_start_timeout = self._params.get("defaultTaskScheduleToStartTimeout")
default_task_start_to_close_timeout = self._params.get("defaultTaskStartToCloseTimeout")
description = self._params.get("description")
self._check_string(domain)
self._check_string(name)
self._check_string(version)
self._check_none_or_string(task_list)
self._check_none_or_string(default_task_heartbeat_timeout)
self._check_none_or_string(default_task_schedule_to_close_timeout)
self._check_none_or_string(default_task_schedule_to_start_timeout)
self._check_none_or_string(default_task_start_to_close_timeout)
self._check_none_or_string(description)
# TODO: add defaultTaskPriority when boto gets to support it
self.swf_backend.register_type(
"activity", domain, name, version, task_list=task_list,
default_task_heartbeat_timeout=default_task_heartbeat_timeout,
default_task_schedule_to_close_timeout=default_task_schedule_to_close_timeout,
default_task_schedule_to_start_timeout=default_task_schedule_to_start_timeout,
default_task_start_to_close_timeout=default_task_start_to_close_timeout,
description=description,
)
return ""
def deprecate_activity_type(self):
return self._deprecate_type("activity")
def describe_activity_type(self):
return self._describe_type("activity")
def list_workflow_types(self):
return self._list_types("workflow")
def register_workflow_type(self):
domain = self._params["domain"]
name = self._params["name"]
version = self._params["version"]
default_task_list = self._params.get("defaultTaskList")
if default_task_list:
task_list = default_task_list.get("name")
else:
task_list = None
default_child_policy = self._params.get("defaultChildPolicy")
default_task_start_to_close_timeout = self._params.get("defaultTaskStartToCloseTimeout")
default_execution_start_to_close_timeout = self._params.get("defaultExecutionStartToCloseTimeout")
description = self._params.get("description")
self._check_string(domain)
self._check_string(name)
self._check_string(version)
self._check_none_or_string(task_list)
self._check_none_or_string(default_child_policy)
self._check_none_or_string(default_task_start_to_close_timeout)
self._check_none_or_string(default_execution_start_to_close_timeout)
self._check_none_or_string(description)
# TODO: add defaultTaskPriority when boto gets to support it
# TODO: add defaultLambdaRole when boto gets to support it
self.swf_backend.register_type(
"workflow", domain, name, version, task_list=task_list,
default_child_policy=default_child_policy,
default_task_start_to_close_timeout=default_task_start_to_close_timeout,
default_execution_start_to_close_timeout=default_execution_start_to_close_timeout,
description=description,
)
return ""
def deprecate_workflow_type(self):
return self._deprecate_type("workflow")
def describe_workflow_type(self):
return self._describe_type("workflow")
def start_workflow_execution(self):
domain = self._params["domain"]
workflow_id = self._params["workflowId"]
_workflow_type = self._params["workflowType"]
workflow_name = _workflow_type["name"]
workflow_version = _workflow_type["version"]
_default_task_list = self._params.get("defaultTaskList")
if _default_task_list:
task_list = _default_task_list.get("name")
else:
task_list = None
child_policy = self._params.get("childPolicy")
execution_start_to_close_timeout = self._params.get("executionStartToCloseTimeout")
input_ = self._params.get("input")
tag_list = self._params.get("tagList")
task_start_to_close_timeout = self._params.get("taskStartToCloseTimeout")
self._check_string(domain)
self._check_string(workflow_id)
self._check_string(workflow_name)
self._check_string(workflow_version)
self._check_none_or_string(task_list)
self._check_none_or_string(child_policy)
self._check_none_or_string(execution_start_to_close_timeout)
self._check_none_or_string(input_)
self._check_none_or_list_of_strings(tag_list)
self._check_none_or_string(task_start_to_close_timeout)
wfe = self.swf_backend.start_workflow_execution(
domain, workflow_id, workflow_name, workflow_version,
task_list=task_list, child_policy=child_policy,
execution_start_to_close_timeout=execution_start_to_close_timeout,
input=input_, tag_list=tag_list,
task_start_to_close_timeout=task_start_to_close_timeout
)
return json.dumps({
"runId": wfe.run_id
})
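    # Illustrative request body as this handler reads it from self._params
    # (identifiers and values are made-up examples):
    #   {
    #     "domain": "test-domain",
    #     "workflowId": "uid-abc-123",
    #     "workflowType": {"name": "test-workflow", "version": "v1.0"},
    #     "defaultTaskList": {"name": "queue"},
    #     "input": "arbitrary-string-payload"
    #   }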
def describe_workflow_execution(self):
domain_name = self._params["domain"]
_workflow_execution = self._params["execution"]
run_id = _workflow_execution["runId"]
workflow_id = _workflow_execution["workflowId"]
self._check_string(domain_name)
self._check_string(run_id)
self._check_string(workflow_id)
wfe = self.swf_backend.describe_workflow_execution(domain_name, run_id, workflow_id)
return json.dumps(wfe.to_full_dict())
def get_workflow_execution_history(self):
domain_name = self._params["domain"]
_workflow_execution = self._params["execution"]
run_id = _workflow_execution["runId"]
workflow_id = _workflow_execution["workflowId"]
reverse_order = self._params.get("reverseOrder", None)
wfe = self.swf_backend.describe_workflow_execution(domain_name, run_id, workflow_id)
events = wfe.events(reverse_order=reverse_order)
return json.dumps({
"events": [evt.to_dict() for evt in events]
})
def poll_for_decision_task(self):
domain_name = self._params["domain"]
task_list = self._params["taskList"]["name"]
identity = self._params.get("identity")
reverse_order = self._params.get("reverseOrder", None)
self._check_string(domain_name)
self._check_string(task_list)
decision = self.swf_backend.poll_for_decision_task(
domain_name, task_list, identity=identity
)
if decision:
return json.dumps(
decision.to_full_dict(reverse_order=reverse_order)
)
else:
return json.dumps({"previousStartedEventId": 0, "startedEventId": 0})
def count_pending_decision_tasks(self):
domain_name = self._params["domain"]
task_list = self._params["taskList"]["name"]
self._check_string(domain_name)
self._check_string(task_list)
count = self.swf_backend.count_pending_decision_tasks(domain_name, task_list)
return json.dumps({"count": count, "truncated": False})
def respond_decision_task_completed(self):
task_token = self._params["taskToken"]
execution_context = self._params.get("executionContext")
decisions = self._params.get("decisions")
self._check_string(task_token)
self._check_none_or_string(execution_context)
self.swf_backend.respond_decision_task_completed(
task_token, decisions=decisions, execution_context=execution_context
)
return ""
def poll_for_activity_task(self):
domain_name = self._params["domain"]
task_list = self._params["taskList"]["name"]
identity = self._params.get("identity")
self._check_string(domain_name)
self._check_string(task_list)
self._check_none_or_string(identity)
activity_task = self.swf_backend.poll_for_activity_task(
domain_name, task_list, identity=identity
)
if activity_task:
return json.dumps(
activity_task.to_full_dict()
)
else:
return json.dumps({"startedEventId": 0})
def count_pending_activity_tasks(self):
domain_name = self._params["domain"]
task_list = self._params["taskList"]["name"]
self._check_string(domain_name)
self._check_string(task_list)
count = self.swf_backend.count_pending_activity_tasks(domain_name, task_list)
return json.dumps({"count": count, "truncated": False})
def respond_activity_task_completed(self):
task_token = self._params["taskToken"]
result = self._params.get("result")
self._check_string(task_token)
self._check_none_or_string(result)
self.swf_backend.respond_activity_task_completed(
task_token, result=result
)
return ""
def respond_activity_task_failed(self):
task_token = self._params["taskToken"]
reason = self._params.get("reason")
details = self._params.get("details")
self._check_string(task_token)
        # TODO: implement length limits on reason and details (a common
        # problem with client libs)
self._check_none_or_string(reason)
self._check_none_or_string(details)
self.swf_backend.respond_activity_task_failed(
task_token, reason=reason, details=details
)
return ""
def terminate_workflow_execution(self):
domain_name = self._params["domain"]
workflow_id = self._params["workflowId"]
child_policy = self._params.get("childPolicy")
details = self._params.get("details")
reason = self._params.get("reason")
run_id = self._params.get("runId")
self._check_string(domain_name)
self._check_string(workflow_id)
self._check_none_or_string(child_policy)
self._check_none_or_string(details)
self._check_none_or_string(reason)
self._check_none_or_string(run_id)
self.swf_backend.terminate_workflow_execution(
domain_name, workflow_id, child_policy=child_policy,
details=details, reason=reason, run_id=run_id
)
return ""
def record_activity_task_heartbeat(self):
task_token = self._params["taskToken"]
details = self._params.get("details")
self._check_string(task_token)
self._check_none_or_string(details)
self.swf_backend.record_activity_task_heartbeat(
task_token, details=details
)
# TODO: make it dynamic when we implement activity tasks cancellation
return json.dumps({"cancelRequested": False})
| apache-2.0 | 4,522,801,220,733,088,000 | 40.843496 | 106 | 0.6235 | false |
sebgoa/client-python | kubernetes/client/models/v1_label_selector.py | 2 | 4636 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1LabelSelector(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, match_expressions=None, match_labels=None):
"""
V1LabelSelector - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'match_expressions': 'list[V1LabelSelectorRequirement]',
'match_labels': 'dict(str, str)'
}
self.attribute_map = {
'match_expressions': 'matchExpressions',
'match_labels': 'matchLabels'
}
self._match_expressions = match_expressions
self._match_labels = match_labels
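    # Illustrative usage (made-up values), assuming the sibling generated
    # model V1LabelSelectorRequirement; equivalent to the manifest snippet
    # matchLabels: {app: nginx} plus one matchExpressions entry:
    #   selector = V1LabelSelector(
    #       match_labels={'app': 'nginx'},
    #       match_expressions=[V1LabelSelectorRequirement(
    #           key='tier', operator='In', values=['frontend'])])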
@property
def match_expressions(self):
"""
Gets the match_expressions of this V1LabelSelector.
matchExpressions is a list of label selector requirements. The requirements are ANDed.
:return: The match_expressions of this V1LabelSelector.
:rtype: list[V1LabelSelectorRequirement]
"""
return self._match_expressions
@match_expressions.setter
def match_expressions(self, match_expressions):
"""
Sets the match_expressions of this V1LabelSelector.
matchExpressions is a list of label selector requirements. The requirements are ANDed.
:param match_expressions: The match_expressions of this V1LabelSelector.
:type: list[V1LabelSelectorRequirement]
"""
self._match_expressions = match_expressions
@property
def match_labels(self):
"""
Gets the match_labels of this V1LabelSelector.
matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is \"key\", the operator is \"In\", and the values array contains only \"value\". The requirements are ANDed.
:return: The match_labels of this V1LabelSelector.
:rtype: dict(str, str)
"""
return self._match_labels
@match_labels.setter
def match_labels(self, match_labels):
"""
Sets the match_labels of this V1LabelSelector.
matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is \"key\", the operator is \"In\", and the values array contains only \"value\". The requirements are ANDed.
:param match_labels: The match_labels of this V1LabelSelector.
:type: dict(str, str)
"""
self._match_labels = match_labels
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1LabelSelector):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| apache-2.0 | 8,237,589,894,733,720,000 | 31.41958 | 269 | 0.586497 | false |
sprax/python | kwargs.py | 1 | 3390 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''Example classes with args and kwargs in __init__'''
import sys
from pdb import set_trace
class CopyCtor:
''' init can be used as a simple copy-constructor.
When args[0] is an instance, it is copied and kwargs are ignored.
Otherwise, the kwargs are used and args is ignored.
'''
def __init__(self, *args, **kwargs):
        if len(args) == 1 and isinstance(args[0], type(self)):
            # Copy Constructor
            other = args[0]
            # copy all the other's attributes:
            self.__dict__ = dict(other.__dict__)
            if kwargs:
                # Per the docstring, kwargs are ignored when copying.
                print("WARNING: %s.%s: ignoring kwargs: "
                      % (type(self).__name__, self.__init__.__name__), kwargs)
else:
if args:
# import pdb; pdb.set_trace()
# print("WARNING: %s.%s: ignoring args: "
# % (type(self).__name__, sys._getframe().f_code.co_name), *args)
print("WARNING: %s.%s: ignoring args: "
% (type(self).__name__, self.__init__.__name__), *args)
self.__dict__ = kwargs
class BothCopyCtor:
''' init can be used as a copy-constructor with updates (differences from original).
If args[0] is an instance, it is copied and the kwargs are used to update the new object.
Otherwise, the kwargs are used and args is ignored.
'''
def __init__(self, *args, **kwargs):
if len(args) > 0 and isinstance(args[0], type(self)):
# Copy Constructor
other = args[0]
# copy all the other's attributes:
self.__dict__ = dict(other.__dict__)
if kwargs:
self.__dict__.update(kwargs)
else:
if args:
# import pdb; pdb.set_trace()
# print("WARNING: %s.%s: ignoring args: "
# % (type(self).__name__, sys._getframe().f_code.co_name), *args)
print("WARNING: %s.%s: ignoring args: "
% (type(self).__name__, self.__init__.__name__), *args)
self.__dict__ = kwargs
class KwargsOnly:
'''init takes kwargs only, and uses only the kwargs that are listed as valid.'''
def __init__(self, **kwargs):
valid_kwargs = ['name', 'kind', 'text']
for key, val in kwargs.items():
if key not in valid_kwargs:
raise TypeError("Invalid keyword argument %s" % key)
setattr(self, key, val)
def test_kwargs(*args):
'''Test the class constructors'''
orig = CopyCtor(*args, foo="FOO", bar="BAR")
print("orig:", orig.__dict__)
copy = CopyCtor(orig)
print("copy:", copy.__dict__)
print()
both = BothCopyCtor(*args, foo="Foosball", bar="Barbell")
print("both:", both.__dict__)
diff = BothCopyCtor(both, bar="Beer", baz="Bazaar")
print("diff:", diff.__dict__)
print()
try:
bust = KwargsOnly(name='myKwargsOnly', kind='checked', test='Four square')
print("bust:", bust.__dict__)
except TypeError as ex:
print("Caught expected TypeError from KwargsOnly(...test=...):", ex)
only = KwargsOnly(name='myKwargsOnly', kind='checked', text='Four score')
print("only:", only.__dict__)
if __name__ == '__main__':
test_kwargs(sys.argv)
| lgpl-3.0 | -2,787,854,587,606,249,500 | 34.684211 | 97 | 0.533038 | false |
aaron-fz/neutron_full_sync | neutron/db/migration/alembic_migrations/versions/3d6fae8b70b0_nvp_lbaas_plugin.py | 8 | 2863 | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""nvp lbaas plugin
Revision ID: 3d6fae8b70b0
Revises: 3ed8f075e38a
Create Date: 2013-09-13 19:34:41.522665
"""
# revision identifiers, used by Alembic.
revision = '3d6fae8b70b0'
down_revision = '3ed8f075e38a'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.plugins.nicira.NeutronServicePlugin.NvpAdvancedPlugin'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.create_table(
'vcns_edge_pool_bindings',
sa.Column('pool_id', sa.String(length=36), nullable=False),
sa.Column('edge_id', sa.String(length=36), nullable=False),
sa.Column('pool_vseid', sa.String(length=36), nullable=True),
sa.ForeignKeyConstraint(['pool_id'], ['pools.id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('pool_id', 'edge_id')
)
op.create_table(
'vcns_edge_monitor_bindings',
sa.Column('monitor_id', sa.String(length=36), nullable=False),
sa.Column('edge_id', sa.String(length=36), nullable=False),
sa.Column('monitor_vseid', sa.String(length=36), nullable=True),
sa.ForeignKeyConstraint(['monitor_id'], ['healthmonitors.id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('monitor_id', 'edge_id')
)
op.create_table(
'vcns_edge_vip_bindings',
sa.Column('vip_id', sa.String(length=36), nullable=False),
sa.Column('edge_id', sa.String(length=36), nullable=True),
sa.Column('vip_vseid', sa.String(length=36), nullable=True),
sa.Column('app_profileid', sa.String(length=36), nullable=True),
sa.ForeignKeyConstraint(['vip_id'], ['vips.id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('vip_id')
)
def downgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.drop_table('vcns_edge_vip_bindings')
op.drop_table('vcns_edge_monitor_bindings')
op.drop_table('vcns_edge_pool_bindings')
| apache-2.0 | 1,234,670,035,684,456,700 | 34.7875 | 78 | 0.660845 | false |
lowitty/server | libsLinux/twisted/web/_flatten.py | 5 | 16371 | # -*- test-case-name: twisted.web.test.test_flatten -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Context-free flattener/serializer for rendering Python objects, possibly
complex or arbitrarily nested, as strings.
"""
from __future__ import division, absolute_import
from io import BytesIO
from sys import exc_info
from types import GeneratorType
from traceback import extract_tb
from twisted.internet.defer import Deferred
from twisted.python.compat import unicode, nativeString, iteritems
from twisted.web._stan import Tag, slot, voidElements, Comment, CDATA, CharRef
from twisted.web.error import UnfilledSlot, UnsupportedType, FlattenerError
from twisted.web.iweb import IRenderable
def escapeForContent(data):
"""
Escape some character or UTF-8 byte data for inclusion in an HTML or XML
    document, by replacing metacharacters (C{&<>}) with their entity
    equivalents (C{&amp;&lt;&gt;}).
This is used as an input to L{_flattenElement}'s C{dataEscaper} parameter.
@type data: C{bytes} or C{unicode}
@param data: The string to escape.
@rtype: C{bytes}
@return: The quoted form of C{data}. If C{data} is unicode, return a utf-8
encoded string.
"""
if isinstance(data, unicode):
data = data.encode('utf-8')
    data = data.replace(b'&', b'&amp;'
        ).replace(b'<', b'&lt;'
        ).replace(b'>', b'&gt;')
return data
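# Illustrative sketch (not part of the original module): typical results.
#   escapeForContent(b'x < y & z') == b'x &lt; y &amp; z'
#   escapeForContent(u'<b>') == b'&lt;b&gt;'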
def attributeEscapingDoneOutside(data):
"""
Escape some character or UTF-8 byte data for inclusion in the top level of
an attribute. L{attributeEscapingDoneOutside} actually passes the data
through unchanged, because L{flattenWithAttributeEscaping} handles the
quoting of the text within attributes outside the generator returned by
L{_flattenElement}; this is used as the C{dataEscaper} argument to that
L{_flattenElement} call so that that generator does not redundantly escape
its text output.
@type data: C{bytes} or C{unicode}
@param data: The string to escape.
@return: The string, unchanged, except for encoding.
@rtype: C{bytes}
"""
if isinstance(data, unicode):
return data.encode("utf-8")
return data
def flattenWithAttributeEscaping(root):
"""
Decorate the generator returned by L{_flattenElement} so that its output is
properly quoted for inclusion within an XML attribute value.
If a L{Tag <twisted.web.template.Tag>} C{x} is flattened within the context
of the contents of another L{Tag <twisted.web.template.Tag>} C{y}, the
metacharacters (C{<>&"}) delimiting C{x} should be passed through
unchanged, but the textual content of C{x} should still be quoted, as
    usual. For example: C{<y><x>&amp;</x></y>}. That is the default behavior
of L{_flattenElement} when L{escapeForContent} is passed as the
C{dataEscaper}.
However, when a L{Tag <twisted.web.template.Tag>} C{x} is flattened within
the context of an I{attribute} of another L{Tag <twisted.web.template.Tag>}
C{y}, then the metacharacters delimiting C{x} should be quoted so that it
can be parsed from the attribute's value. In the DOM itself, this is not a
valid thing to do, but given that renderers and slots may be freely moved
around in a L{twisted.web.template} template, it is a condition which may
arise in a document and must be handled in a way which produces valid
    output. So, for example, you should be able to get C{<y attr="&lt;x /&gt;"
    />}. This should also be true for other XML/HTML meta-constructs such as
comments and CDATA, so if you were to serialize a L{comment
<twisted.web.template.Comment>} in an attribute you should get C{<y
    attr="&lt;-- comment --&gt;" />}. Therefore in order to capture these
meta-characters, the attribute generator from L{_flattenElement} context is
wrapped with an L{flattenWithAttributeEscaping}.
Because I{all} characters serialized in the context of an attribute are
quoted before they are yielded by the generator returned by
L{flattenWithAttributeEscaping}, on the "outside" of the L{_flattenElement}
call, the L{_flattenElement} generator therefore no longer needs to quote
text that appears directly within the attribute itself.
The final case, and hopefully the much more common one as compared to
serializing L{Tag <twisted.web.template.Tag>} and arbitrary L{IRenderable}
objects within an attribute, is to serialize a simple string, and those
should be passed through for L{flattenWithAttributeEscaping} to quote
without applying a second, redundant level of quoting.
    @param root: A value that may be yielded by L{_flattenElement}: L{bytes},
        a L{Deferred}, or an iterable yielding more such values.
    @type root: L{bytes}, L{Deferred}, or C{iterable}
@return: The same type as L{_flattenElement} returns, with all the bytes
encoded for representation within an attribute.
    @rtype: the same type as the result of L{_flattenElement}
"""
if isinstance(root, bytes):
root = escapeForContent(root)
        root = root.replace(b'"', b'&quot;')
yield root
elif isinstance(root, Deferred):
yield root.addCallback(flattenWithAttributeEscaping)
else:
for subroot in root:
yield flattenWithAttributeEscaping(subroot)
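# Illustrative sketch (not part of the original module): bytes passed through
# this wrapper are content-escaped and then quote-escaped, so a serialized tag
# can survive inside an attribute value.
#   b''.join(flattenWithAttributeEscaping(b'<x />')) == b'&lt;x /&gt;'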
def escapedCDATA(data):
"""
Escape CDATA for inclusion in a document.
    @type data: C{bytes} or C{unicode}
    @param data: The string to escape.
    @rtype: C{bytes}
@return: The quoted form of C{data}. If C{data} is unicode, return a utf-8
encoded string.
"""
if isinstance(data, unicode):
data = data.encode('utf-8')
return data.replace(b']]>', b']]]]><![CDATA[>')
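# Illustrative sketch (not part of the original module): the only sequence that
# must be broken up inside CDATA is the terminator ']]>'.
#   escapedCDATA(b'a]]>b') == b'a]]]]><![CDATA[>b'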
def escapedComment(data):
"""
Escape a comment for inclusion in a document.
    @type data: C{bytes} or C{unicode}
    @param data: The string to escape.
    @rtype: C{bytes}
@return: The quoted form of C{data}. If C{data} is unicode, return a utf-8
encoded string.
"""
if isinstance(data, unicode):
data = data.encode('utf-8')
    data = data.replace(b'--', b'- - ').replace(b'>', b'&gt;')
if data and data[-1:] == b'-':
data += b' '
return data
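# Illustrative sketch (not part of the original module): '--' and '>' are
# defanged, and a trailing '-' gets a space so it cannot combine with the
# closing '-->' terminator.
#   escapedComment(b'a--b>') == b'a- - b&gt;'
#   escapedComment(b'end-') == b'end- '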
def _getSlotValue(name, slotData, default=None):
"""
Find the value of the named slot in the given stack of slot data.
"""
for slotFrame in slotData[::-1]:
if slotFrame is not None and name in slotFrame:
return slotFrame[name]
else:
if default is not None:
return default
raise UnfilledSlot(name)
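# Illustrative sketch (not part of the original module): lookup walks the slot
# stack from the innermost frame outward.
#   _getSlotValue('x', [{'x': 1}, {'x': 2}]) == 2
#   _getSlotValue('y', [{'x': 1}], default='d') == 'd'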
def _flattenElement(request, root, slotData, renderFactory, dataEscaper):
"""
Make C{root} slightly more flat by yielding all its immediate contents as
strings, deferreds or generators that are recursive calls to itself.
@param request: A request object which will be passed to
L{IRenderable.render}.
@param root: An object to be made flatter. This may be of type C{unicode},
C{str}, L{slot}, L{Tag <twisted.web.template.Tag>}, L{URL}, L{tuple},
L{list}, L{GeneratorType}, L{Deferred}, or an object that implements
L{IRenderable}.
@param slotData: A C{list} of C{dict} mapping C{str} slot names to data
with which those slots will be replaced.
@param renderFactory: If not C{None}, an object that provides
L{IRenderable}.
@param dataEscaper: A 1-argument callable which takes L{bytes} or
L{unicode} and returns L{bytes}, quoted as appropriate for the
rendering context. This is really only one of two values:
L{attributeEscapingDoneOutside} or L{escapeForContent}, depending on
whether the rendering context is within an attribute or not. See the
explanation in L{flattenWithAttributeEscaping}.
@return: An iterator that eventually yields L{bytes} that should be written
to the output. However it may also yield other iterators or
L{Deferred}s; if it yields another iterator, the caller will iterate
it; if it yields a L{Deferred}, the result of that L{Deferred} will
either be L{bytes}, in which case it's written, or another generator,
in which case it is iterated. See L{_flattenTree} for the trampoline
that consumes said values.
@rtype: An iterator which yields L{bytes}, L{Deferred}, and more iterators
of the same type.
"""
def keepGoing(newRoot, dataEscaper=dataEscaper,
renderFactory=renderFactory):
return _flattenElement(request, newRoot, slotData, renderFactory,
dataEscaper)
if isinstance(root, (bytes, unicode)):
yield dataEscaper(root)
elif isinstance(root, slot):
slotValue = _getSlotValue(root.name, slotData, root.default)
yield keepGoing(slotValue)
elif isinstance(root, CDATA):
yield b'<![CDATA['
yield escapedCDATA(root.data)
yield b']]>'
elif isinstance(root, Comment):
yield b'<!--'
yield escapedComment(root.data)
yield b'-->'
elif isinstance(root, Tag):
slotData.append(root.slotData)
if root.render is not None:
rendererName = root.render
rootClone = root.clone(False)
rootClone.render = None
renderMethod = renderFactory.lookupRenderMethod(rendererName)
result = renderMethod(request, rootClone)
yield keepGoing(result)
slotData.pop()
return
if not root.tagName:
yield keepGoing(root.children)
return
yield b'<'
if isinstance(root.tagName, unicode):
tagName = root.tagName.encode('ascii')
else:
tagName = root.tagName
yield tagName
for k, v in iteritems(root.attributes):
if isinstance(k, unicode):
k = k.encode('ascii')
yield b' ' + k + b'="'
# Serialize the contents of the attribute, wrapping the results of
# that serialization so that _everything_ is quoted.
attribute = keepGoing(v, attributeEscapingDoneOutside)
yield flattenWithAttributeEscaping(attribute)
yield b'"'
if root.children or nativeString(tagName) not in voidElements:
yield b'>'
# Regardless of whether we're in an attribute or not, switch back
# to the escapeForContent dataEscaper. The contents of a tag must
# be quoted no matter what; in the top-level document, just so
# they're valid, and if they're within an attribute, they have to
# be quoted so that after applying the *un*-quoting required to re-
# parse the tag within the attribute, all the quoting is still
# correct.
yield keepGoing(root.children, escapeForContent)
yield b'</' + tagName + b'>'
else:
yield b' />'
elif isinstance(root, (tuple, list, GeneratorType)):
for element in root:
yield keepGoing(element)
elif isinstance(root, CharRef):
escaped = '&#%d;' % (root.ordinal,)
yield escaped.encode('ascii')
elif isinstance(root, Deferred):
yield root.addCallback(lambda result: (result, keepGoing(result)))
elif IRenderable.providedBy(root):
result = root.render(request)
yield keepGoing(result, renderFactory=root)
else:
raise UnsupportedType(root)
def _flattenTree(request, root):
"""
Make C{root} into an iterable of L{bytes} and L{Deferred} by doing a depth
first traversal of the tree.
@param request: A request object which will be passed to
L{IRenderable.render}.
@param root: An object to be made flatter. This may be of type C{unicode},
L{bytes}, L{slot}, L{Tag <twisted.web.template.Tag>}, L{tuple},
L{list}, L{GeneratorType}, L{Deferred}, or something providing
L{IRenderable}.
@return: An iterator which yields objects of type L{bytes} and L{Deferred}.
A L{Deferred} is only yielded when one is encountered in the process of
flattening C{root}. The returned iterator must not be iterated again
until the L{Deferred} is called back.
"""
stack = [_flattenElement(request, root, [], None, escapeForContent)]
while stack:
try:
frame = stack[-1].gi_frame
element = next(stack[-1])
except StopIteration:
stack.pop()
except Exception as e:
stack.pop()
roots = []
for generator in stack:
roots.append(generator.gi_frame.f_locals['root'])
roots.append(frame.f_locals['root'])
raise FlattenerError(e, roots, extract_tb(exc_info()[2]))
else:
if type(element) is bytes:
yield element
elif isinstance(element, Deferred):
def cbx(originalAndToFlatten):
original, toFlatten = originalAndToFlatten
stack.append(toFlatten)
return original
yield element.addCallback(cbx)
else:
stack.append(element)
def _writeFlattenedData(state, write, result):
"""
Take strings from an iterator and pass them to a writer function.
@param state: An iterator of C{str} and L{Deferred}. C{str} instances will
be passed to C{write}. L{Deferred} instances will be waited on before
resuming iteration of C{state}.
@param write: A callable which will be invoked with each C{str}
produced by iterating C{state}.
@param result: A L{Deferred} which will be called back when C{state} has
been completely flattened into C{write} or which will be errbacked if
an exception in a generator passed to C{state} or an errback from a
L{Deferred} from state occurs.
@return: C{None}
"""
while True:
try:
element = next(state)
except StopIteration:
result.callback(None)
except:
result.errback()
else:
if type(element) is bytes:
write(element)
continue
else:
def cby(original):
_writeFlattenedData(state, write, result)
return original
element.addCallbacks(cby, result.errback)
break
def flatten(request, root, write):
"""
Incrementally write out a string representation of C{root} using C{write}.
In order to create a string representation, C{root} will be decomposed into
simpler objects which will themselves be decomposed and so on until strings
or objects which can easily be converted to strings are encountered.
@param request: A request object which will be passed to the C{render}
method of any L{IRenderable} provider which is encountered.
@param root: An object to be made flatter. This may be of type L{unicode},
L{bytes}, L{slot}, L{Tag <twisted.web.template.Tag>}, L{tuple},
L{list}, L{GeneratorType}, L{Deferred}, or something that provides
L{IRenderable}.
@param write: A callable which will be invoked with each L{bytes} produced
by flattening C{root}.
@return: A L{Deferred} which will be called back when C{root} has been
completely flattened into C{write} or which will be errbacked if an
unexpected exception occurs.
"""
result = Deferred()
state = _flattenTree(request, root)
_writeFlattenedData(state, write, result)
return result
def flattenString(request, root):
"""
Collate a string representation of C{root} into a single string.
    This is basically gluing L{flatten} to a L{BytesIO} and returning
the results. See L{flatten} for the exact meanings of C{request} and
C{root}.
@return: A L{Deferred} which will be called back with a single string as
its result when C{root} has been completely flattened into C{write} or
which will be errbacked if an unexpected exception occurs.
"""
io = BytesIO()
d = flatten(request, root, io.write)
d.addCallback(lambda _: io.getvalue())
return d
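# Illustrative usage sketch (not part of the original module; assumes
# twisted.web.template is importable):
#   from twisted.web.template import tags
#   d = flattenString(None, tags.p('Hello, ', tags.b('world')))
#   d.addCallback(print)  # b'<p>Hello, <b>world</b></p>'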
| mit | 1,089,659,628,398,338,000 | 37.52 | 79 | 0.652556 | false |
syhpoon/xyzcmd | libxyz/core/plugins/virtual.py | 1 | 1038 | # -*- coding: utf-8 -*-
#
# Max E. Kuznecov ~syhpoon <[email protected]> 2008
#
# This file is part of XYZCommander.
# XYZCommander is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# XYZCommander is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
# You should have received a copy of the GNU Lesser Public License
# along with XYZCommander. If not, see <http://www.gnu.org/licenses/>.
from libxyz.core.plugins import BasePlugin
class VirtualPlugin(BasePlugin):
"""
Virtual (sys) plugin
"""
NAMESPACE = u"sys"
def __init__(self, xyz, name):
self.NAME = name
super(VirtualPlugin, self).__init__(xyz)
| gpl-3.0 | -8,681,466,543,642,965,000 | 33.6 | 70 | 0.72736 | false |
lpramuk/robottelo | robottelo/rhsso_utils.py | 2 | 4102 | """Utility module to handle the rhsso-satellite configure UI/CLI/API testing"""
import json
import random
from fauxfactory import gen_string
from robottelo import ssh
from robottelo.cli.base import CLIReturnCodeError
from robottelo.config import settings
from robottelo.constants import KEY_CLOAK_CLI
from robottelo.constants import RHSSO_NEW_USER
from robottelo.constants import RHSSO_RESET_PASSWORD
from robottelo.datafactory import valid_emails_list
satellite = settings.server.hostname
rhsso_host = settings.rhsso.host_name
realm = settings.rhsso.realm
rhsso_user = settings.rhsso.rhsso_user
rhsso_password = settings.rhsso.password
def run_command(cmd, hostname=satellite, timeout=None):
"""helper function for ssh command and avoiding the return code check in called function"""
if timeout:
result = ssh.command(cmd=cmd, hostname=hostname, timeout=timeout)
else:
result = ssh.command(cmd=cmd, hostname=hostname)
if result.return_code != 0:
raise CLIReturnCodeError(
            result.return_code, result.stderr, f"Failed to run the command: {cmd}",
)
else:
return result.stdout
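# Illustrative usage sketch (not part of the original module; the commands are
# hypothetical):
#   stdout_lines = run_command('rpm -q foreman', timeout=60)
#   stdout_lines = run_command('ls /opt/rh', hostname=rhsso_host)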
def get_rhsso_client_id():
"""Getter method for fetching the client id and can be used other functions"""
client_name = f"{satellite}-foreman-openidc"
run_command(
cmd="{0} config credentials "
"--server {1}/auth "
"--realm {2} "
"--user {3} "
"--password {4}".format(
KEY_CLOAK_CLI,
settings.rhsso.host_url.replace("https://", "http://"),
realm,
rhsso_user,
rhsso_password,
),
hostname=rhsso_host,
)
result = run_command(
cmd=f"{KEY_CLOAK_CLI} get clients --fields id,clientId", hostname=rhsso_host,
)
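    # Re-add the leading '[{' to the joined stdout so it parses as a JSON
    # array (the ssh helper's output is assumed to arrive without it).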
result_json = json.loads("[{{{0}".format("".join(result)))
client_id = None
for client in result_json:
if client_name in client['clientId']:
client_id = client['id']
break
return client_id
def get_rhsso_user_details(username):
"""Getter method to receive the user id"""
result = run_command(
cmd=f"{KEY_CLOAK_CLI} get users -r {realm} -q username={username}", hostname=rhsso_host,
)
result_json = json.loads("[{{{0}".format("".join(result)))
return result_json[0]
def upload_rhsso_entity(json_content, entity_name):
"""Helper method upload the entity json request as file on RHSSO Server"""
with open(entity_name, "w") as file:
json.dump(json_content, file)
ssh.upload_file(entity_name, entity_name, hostname=rhsso_host)
def create_mapper(json_content, client_id):
"""Helper method to create the RH-SSO Client Mapper"""
upload_rhsso_entity(json_content, "mapper_file")
run_command(
cmd="{} create clients/{}/protocol-mappers/models -r {} -f {}".format(
KEY_CLOAK_CLI, client_id, realm, "mapper_file"
),
hostname=rhsso_host,
)
def create_new_rhsso_user(client_id, username=None):
"""create new user in RHSSO instance and set the password"""
if not username:
username = gen_string('alphanumeric')
RHSSO_NEW_USER['username'] = username
RHSSO_NEW_USER['email'] = random.choice(valid_emails_list())
RHSSO_RESET_PASSWORD['value'] = rhsso_password
upload_rhsso_entity(RHSSO_NEW_USER, "create_user")
upload_rhsso_entity(RHSSO_RESET_PASSWORD, "reset_password")
run_command(
cmd=f"{KEY_CLOAK_CLI} create users -r {realm} -f create_user", hostname=rhsso_host,
)
user_details = get_rhsso_user_details(RHSSO_NEW_USER['username'])
run_command(
cmd="{} update -r {} users/{}/reset-password -f {}".format(
KEY_CLOAK_CLI, realm, user_details['id'], "reset_password"
),
hostname=rhsso_host,
)
return RHSSO_NEW_USER
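# Illustrative usage sketch (not part of the original module): create a
# throwaway RH-SSO user for a test, then clean it up afterwards.
#   user = create_new_rhsso_user(get_rhsso_client_id())
#   ...
#   delete_rhsso_user(user['username'])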
def delete_rhsso_user(username):
"""Delete the RHSSO user"""
user_details = get_rhsso_user_details(username)
run_command(
cmd=f"{KEY_CLOAK_CLI} delete -r {realm} users/{user_details['id']}", hostname=rhsso_host,
)
| gpl-3.0 | -5,934,376,025,130,462,000 | 33.183333 | 97 | 0.651146 | false |