code | repo_name | path | language | license | size
---|---|---|---|---|---
import subprocess
import sys
import setup_util
import os


def start(args, logfile, errfile):
    try:
        subprocess.check_call("mvn clean compile assembly:single", shell=True,
                              cwd="netty", stderr=errfile, stdout=logfile)
        subprocess.Popen(
            "java -jar netty-example-0.1-jar-with-dependencies.jar".rsplit(" "),
            cwd="netty/target", stderr=errfile, stdout=logfile)
        return 0
    except subprocess.CalledProcessError:
        return 1


def stop(logfile, errfile):
    if os.name == 'nt':
        subprocess.check_call(
            "wmic process where \"CommandLine LIKE '%netty-example%'\" call terminate",
            stderr=errfile, stdout=logfile)
    else:
        p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
        out, err = p.communicate()
        for line in out.splitlines():
            if 'netty-example' in line:
                pid = int(line.split(None, 2)[1])
                os.kill(pid, 9)
    return 0
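# Usage sketch (hypothetical; the benchmark harness normally drives these
# hooks, and the log file names below are illustrative only):
#
#   with open('netty.log', 'w') as log, open('netty.err', 'w') as err:
#       if start(None, log, err) == 0:
#           pass  # run the benchmark, then...
#           stop(log, err)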
| morrisonlevi/FrameworkBenchmarks | netty/setup.py | Python | bsd-3-clause | 860 |
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Signing Model Objects

This module contains classes that encapsulate data about the signing process.
"""

import os.path


class CodeSignedProduct(object):
    """Represents a build product that will be signed with `codesign(1)`."""

    def __init__(self,
                 path,
                 identifier,
                 options=None,
                 requirements=None,
                 identifier_requirement=True,
                 sign_with_identifier=False,
                 entitlements=None,
                 verify_options=None):
        """A build product to be codesigned.

        Args:
            path: The path to the product to be signed. This is relative to a
                work directory containing the build products.
            identifier: The unique identifier set when code signing. This is
                only explicitly passed with the `--identifier` flag if
                |sign_with_identifier| is True.
            options: Option flags to pass to `codesign --options`, from
                |CodeSignOptions|.
            requirements: String of additional `--requirements` to pass to
                the `codesign` command. These are joined with a space to the
                |config.CodeSignConfig.codesign_requirements_basic| string.
                See |CodeSignedProduct.requirements_string()| for details.
            identifier_requirement: If True, a designated identifier
                requirement based on |identifier| will be inserted into the
                requirements string. If False, then no designated requirement
                will be generated based on the identifier.
            sign_with_identifier: If True, then the identifier will be
                specified when running the `codesign` command. If False,
                `codesign` will infer the identifier itself.
            entitlements: File name of the entitlements file to sign the
                product with. The file should reside in the
                |Paths.packaging_dir|.
            verify_options: Flags to pass to `codesign --verify`, from
                |VerifyOptions|.
        """
        self.path = path
        self.identifier = identifier
        if not CodeSignOptions.valid(options):
            raise ValueError('Invalid CodeSignOptions: {}'.format(options))
        self.options = options
        self.requirements = requirements
        self.identifier_requirement = identifier_requirement
        self.sign_with_identifier = sign_with_identifier
        self.entitlements = entitlements
        if not VerifyOptions.valid(verify_options):
            raise ValueError(
                'Invalid VerifyOptions: {}'.format(verify_options))
        self.verify_options = verify_options

    def requirements_string(self, config):
        """Produces a full requirements string for the product.

        Args:
            config: A |config.CodeSignConfig| object.

        Returns:
            A string for designated requirements of the product, which can be
            passed to `codesign --requirements`.
        """
        # If the signing identity indicates ad-hoc (i.e. no real signing
        # identity), do not enforce any requirements. Ad hoc signing will
        # append a hash to the identifier, which would violate the
        # identifier_requirement and most other requirements that would be
        # specified.
        if config.identity == '-':
            return ''

        reqs = []
        if self.identifier_requirement:
            reqs.append('designated => identifier "{identifier}"'.format(
                identifier=self.identifier))
        if self.requirements:
            reqs.append(self.requirements)
        if config.codesign_requirements_basic:
            reqs.append(config.codesign_requirements_basic)
        return ' '.join(reqs)
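    # Example of the resulting string (illustrative values only): with
    # identifier 'com.example.app', no extra |requirements|, and a
    # hypothetical config.codesign_requirements_basic of
    # 'and anchor apple generic', this returns:
    #   designated => identifier "com.example.app" and anchor apple generic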
    def __repr__(self):
        return 'CodeSignedProduct(identifier={0.identifier}, ' \
               'options={0.options}, path={0.path})'.format(self)


def make_enum(class_name, options):
    """Makes a new class type for an enum.

    Args:
        class_name: Name of the new type to make.
        options: A dictionary of enum options to use. The keys will become
            attributes on the class, and the values will be wrapped in a
            tuple so that the options can be joined together.

    Returns:
        A new class for the enum.
    """
    attrs = {}

    @classmethod
    def valid(cls, opts_to_check):
        """Tests if the specified |opts_to_check| are valid.

        Args:
            opts_to_check: Iterable of option strings.

        Returns:
            True if all the options are valid, False otherwise.
        """
        if opts_to_check is None:
            return True
        valid_values = options.values()
        return all([option in valid_values for option in opts_to_check])

    attrs['valid'] = valid

    for name, value in options.items():
        assert type(name) is str
        assert type(value) is str
        attrs[name] = (value,)
    return type(class_name, (object,), attrs)
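# Usage sketch for make_enum (hypothetical enum; it mirrors the real enums
# defined below):
#
#   Color = make_enum('Color', {'RED': 'red', 'BLUE': 'blue'})
#   Color.RED                 # ('red',) - a one-element tuple, so options
#   Color.RED + Color.BLUE    # can be joined: ('red', 'blue')
#   Color.valid(Color.RED)    # True
#   Color.valid(('green',))   # False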
"""Enum for the options that can be specified when validating the results of
code signing.
These options are passed to `codesign --verify` after the
|CodeSignedProduct| has been signed.
"""
VerifyOptions = make_enum(
'signing.model.VerifyOptions', {
'DEEP': '--deep',
'STRICT': '--strict',
'NO_STRICT': '--no-strict',
'IGNORE_RESOURCES': '--ignore-resources',
})
CodeSignOptions = make_enum(
'signing.model.CodeSignOptions', {
'RESTRICT': 'restrict',
'LIBRARY_VALIDATION': 'library',
'HARDENED_RUNTIME': 'runtime',
'KILL': 'kill',
})
# Specify the components of HARDENED_RUNTIME that are also available on
# older macOS versions.
CodeSignOptions.FULL_HARDENED_RUNTIME_OPTIONS = (
CodeSignOptions.HARDENED_RUNTIME + CodeSignOptions.RESTRICT +
CodeSignOptions.LIBRARY_VALIDATION + CodeSignOptions.KILL)
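# Because each enum value is a one-element tuple, the concatenation above is
# equivalent to ('runtime', 'restrict', 'library', 'kill'). Presumably the
# signing code joins such a tuple with commas when invoking codesign, e.g.
# `codesign --options runtime,restrict,library,kill` (illustrative command).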
class Distribution(object):
    """A Distribution represents a final, signed, and potentially channel-
    customized Chrome product.

    Channel customization refers to modifying parts of the app bundle
    structure to have different file names, internal identifiers, and assets.
    """

    def __init__(self,
                 channel=None,
                 branding_code=None,
                 app_name_fragment=None,
                 packaging_name_fragment=None,
                 product_dirname=None,
                 creator_code=None,
                 channel_customize=False,
                 package_as_dmg=True,
                 package_as_pkg=False):
        """Creates a new Distribution object. All arguments are optional.

        Args:
            channel: The release channel for the product.
            branding_code: A branding code that helps track how users
                acquired the product from various marketing channels.
            app_name_fragment: If present, this string fragment is appended
                to the |config.CodeSignConfig.app_product|. This renames the
                binary and outer app bundle.
            packaging_name_fragment: If present, this is appended to the
                |config.CodeSignConfig.packaging_basename| to help
                differentiate different |branding_code|s.
            product_dirname: If present, this string value is set in the
                app's Info.plist with the key "CrProductDirName". This key
                influences the browser's default user-data-dir location.
            creator_code: If present, this will set a new macOS creator code
                in the Info.plist "CFBundleSignature" key and in the PkgInfo
                file. If this is not specified, the original values from the
                build products will be kept.
            channel_customize: If True, then the product will be modified in
                several ways:
                - The |channel| will be appended to the
                  |config.CodeSignConfig.base_bundle_id|.
                - The product will be renamed with |app_name_fragment|.
                - Different assets will be used for icons in the app.
            package_as_dmg: If True, then a .dmg file will be created
                containing the product.
            package_as_pkg: If True, then a .pkg file will be created
                containing the product.
        """
        self.channel = channel
        self.branding_code = branding_code
        self.app_name_fragment = app_name_fragment
        self.packaging_name_fragment = packaging_name_fragment
        self.product_dirname = product_dirname
        self.creator_code = creator_code
        self.channel_customize = channel_customize
        self.package_as_dmg = package_as_dmg
        self.package_as_pkg = package_as_pkg

    def to_config(self, base_config):
        """Produces a derived |config.CodeSignConfig| for the Distribution.

        Args:
            base_config: The base CodeSignConfig to derive.

        Returns:
            A new CodeSignConfig instance that uses information in the
            Distribution to alter various properties of the |base_config|.
        """
        this = self

        class DistributionCodeSignConfig(base_config.__class__):

            @property
            def base_config(self):
                return base_config

            @property
            def distribution(self):
                return this

            @property
            def app_product(self):
                if this.channel_customize:
                    return '{} {}'.format(base_config.app_product,
                                          this.app_name_fragment)
                return base_config.app_product

            @property
            def base_bundle_id(self):
                base_bundle_id = base_config.base_bundle_id
                if this.channel_customize:
                    return base_bundle_id + '.' + this.channel
                return base_bundle_id

            @property
            def provisioning_profile_basename(self):
                profile = base_config.provisioning_profile_basename
                if profile and this.channel_customize:
                    return '{}_{}'.format(profile, this.app_name_fragment)
                return profile

            @property
            def packaging_basename(self):
                if this.packaging_name_fragment:
                    return '{}-{}-{}'.format(
                        self.app_product.replace(' ', ''), self.version,
                        this.packaging_name_fragment)
                return super(DistributionCodeSignConfig,
                             self).packaging_basename

        return DistributionCodeSignConfig(
            base_config.identity, base_config.installer_identity,
            base_config.notary_user, base_config.notary_password,
            base_config.notary_asc_provider)
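# Usage sketch (hypothetical base values; the derived properties follow the
# overrides defined in to_config above):
#
#   dist = Distribution(channel='beta', app_name_fragment='Beta',
#                       channel_customize=True)
#   config = dist.to_config(base_config)
#   config.app_product     # e.g. 'Chromium Beta' if the base was 'Chromium'
#   config.base_bundle_id  # e.g. 'org.chromium.Chromium.beta'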
class Paths(object):
    """Paths holds the three file path contexts for signing operations.

    The input directory always remains un-modified.
    The output directory is where final, signed products are stored.
    The work directory is set by internal operations.
    """

    def __init__(self, input, output, work):
        self._input = input
        self._output = output
        self._work = work

    @property
    def input(self):
        return self._input

    @property
    def output(self):
        return self._output

    @property
    def work(self):
        return self._work

    def packaging_dir(self, config):
        """Returns the path to the product packaging directory, which
        contains scripts and assets used in signing.

        Args:
            config: The |config.CodeSignConfig| object.

        Returns:
            Path to the packaging directory.
        """
        return os.path.join(self.input,
                            '{} Packaging'.format(config.product))

    def replace_work(self, new_work):
        """Creates a new Paths with the same input and output directories,
        but with |work| set to |new_work|."""
        return Paths(self.input, self.output, new_work)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return (self._input == other._input and
                self._output == other._output and
                self._work == other._work)

    def __repr__(self):
        return 'Paths(input={0.input}, output={0.output}, ' \
               'work={0.work})'.format(self)
| endlessm/chromium-browser | chrome/installer/mac/signing/model.py | Python | bsd-3-clause | 12,611 |
import hashlib
import os
import re
import time
import uuid
import subprocess

from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.contrib.staticfiles.finders import find as find_static_path

from olympia.lib.jingo_minify_helpers import ensure_path_exists


def run_command(command):
    """Run a command, polling its output and echoing it to stdout."""
    process = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
    while True:
        output = process.stdout.readline()
        if output == '' and process.poll() is not None:
            break
        if output:
            print(output.strip())
    return process.poll()
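# Usage sketch (hypothetical command; output is echoed line by line while the
# process runs, and the process exit status is returned):
#
#   status = run_command('ls -l /tmp')
#   if status:
#       print('command failed with exit status %s' % status)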
class Command(BaseCommand):
    help = 'Compresses css and js assets defined in settings.MINIFY_BUNDLES'

    # This command must not do any system checks because Django runs db-field
    # related checks since 1.10 which require a working MySQL connection.
    # We don't have that during our docker builds and since `compress_assets`
    # is being used while building our docker images we have to disable them.
    requires_system_checks = False

    checked_hash = {}
    bundle_hashes = {}

    missing_files = 0
    minify_skipped = 0

    def add_arguments(self, parser):
        """Handle command arguments."""
        # Declared as an optional flag; a positional argument cannot use
        # action='store_true'.
        parser.add_argument(
            '--force', action='store_true',
            help='Ignores modified/created dates and forces compression.')

    def generate_build_id(self):
        return uuid.uuid4().hex[:8]

    def update_hashes(self):
        # Adds a time based hash on to the build id.
        self.build_id = '%s-%s' % (
            self.generate_build_id(), hex(int(time.time()))[2:])

        build_id_file = os.path.realpath(
            os.path.join(settings.ROOT, 'build.py'))
        with open(build_id_file, 'w') as f:
            f.write('BUILD_ID_CSS = "%s"\n' % self.build_id)
            f.write('BUILD_ID_JS = "%s"\n' % self.build_id)
            f.write('BUILD_ID_IMG = "%s"\n' % self.build_id)
            f.write('BUNDLE_HASHES = %s\n' % self.bundle_hashes)

    def handle(self, **options):
        self.force_compress = options.get('force', False)

        # This will loop through every bundle, and do the following:
        # - Concat all files into one
        # - Cache bust all images in CSS files
        # - Minify the concatted files
        for ftype, bundle in settings.MINIFY_BUNDLES.iteritems():
            for name, files in bundle.iteritems():
                # Set the paths to the files.
                concatted_file = os.path.join(
                    settings.ROOT, 'static',
                    ftype, '%s-all.%s' % (name, ftype,))
                compressed_file = os.path.join(
                    settings.ROOT, 'static',
                    ftype, '%s-min.%s' % (name, ftype,))

                ensure_path_exists(concatted_file)
                ensure_path_exists(compressed_file)

                files_all = []
                for fn in files:
                    processed = self._preprocess_file(fn)
                    # If the file can't be processed, we skip it.
                    if processed is not None:
                        files_all.append(processed)

                # Concat all the files.
                tmp_concatted = '%s.tmp' % concatted_file
                if len(files_all) == 0:
                    raise CommandError(
                        'No input files specified in '
                        'MINIFY_BUNDLES["%s"]["%s"] in settings.py!' %
                        (ftype, name)
                    )
                run_command('cat {files} > {tmp}'.format(
                    files=' '.join(files_all),
                    tmp=tmp_concatted
                ))

                # Cache bust individual images in the CSS.
                if ftype == 'css':
                    bundle_hash = self._cachebust(tmp_concatted, name)
                    self.bundle_hashes['%s:%s' % (ftype, name)] = bundle_hash

                # Compresses the concatenations.
                is_changed = self._is_changed(concatted_file)
                self._clean_tmp(concatted_file)
                if is_changed or not os.path.isfile(compressed_file):
                    self._minify(ftype, concatted_file, compressed_file)
                else:
                    print(
                        'File unchanged, skipping minification of %s' % (
                            concatted_file))
                    self.minify_skipped += 1

        # Write out the hashes
        self.update_hashes()
        if self.minify_skipped:
            print(
                'Unchanged files skipped for minification: %s' % (
                    self.minify_skipped))

    def _preprocess_file(self, filename):
        """Preprocess files and return new filenames."""
        css_bin = filename.endswith('.less') and settings.LESS_BIN
        source = find_static_path(filename)
        target = source
        if css_bin:
            target = '%s.css' % source
            run_command('{lessc} {source} {target}'.format(
                lessc=css_bin,
                source=str(source),
                target=str(target)))
        return target

    def _is_changed(self, concatted_file):
        """Check if the file has been changed."""
        if self.force_compress:
            return True
        tmp_concatted = '%s.tmp' % concatted_file
        file_exists = (
            os.path.exists(concatted_file) and
            os.path.getsize(concatted_file) == os.path.getsize(tmp_concatted))
        if file_exists:
            orig_hash = self._file_hash(concatted_file)
            temp_hash = self._file_hash(tmp_concatted)
            return orig_hash != temp_hash
        return True  # Different filesize, so it was definitely changed

    def _clean_tmp(self, concatted_file):
        """Replace the old file with the temp file."""
        tmp_concatted = '%s.tmp' % concatted_file
        if os.path.exists(concatted_file):
            os.remove(concatted_file)
        os.rename(tmp_concatted, concatted_file)

    def _cachebust(self, css_file, bundle_name):
        """Cache bust images. Return a new bundle hash."""
        self.stdout.write(
            'Cache busting images in %s\n' % re.sub('.tmp$', '', css_file))
        if not os.path.exists(css_file):
            return

        css_content = ''
        with open(css_file, 'r') as css_in:
            css_content = css_in.read()

        def _parse(url):
            return self._cachebust_regex(url, css_file)

        css_parsed = re.sub(r'url\(([^)]*?)\)', _parse, css_content)

        with open(css_file, 'w') as css_out:
            css_out.write(css_parsed)

        # Return bundle hash for cachebusting JS/CSS files.
        file_hash = hashlib.md5(css_parsed).hexdigest()[0:7]
        self.checked_hash[css_file] = file_hash

        if self.missing_files:
            self.stdout.write(
                ' - Error finding %s images\n' % (self.missing_files,))
            self.missing_files = 0

        return file_hash

    def _minify(self, ftype, file_in, file_out):
        """Run the proper minifier on the file."""
        if ftype == 'js' and hasattr(settings, 'UGLIFY_BIN'):
            opts = {'method': 'UglifyJS', 'bin': settings.UGLIFY_BIN}
            run_command('{uglify} -v -o {target} {source} -m'.format(
                uglify=opts['bin'],
                target=file_out,
                source=file_in))
        elif ftype == 'css' and hasattr(settings, 'CLEANCSS_BIN'):
            opts = {'method': 'clean-css', 'bin': settings.CLEANCSS_BIN}
            run_command('{cleancss} -o {target} {source}'.format(
                cleancss=opts['bin'],
                target=file_out,
                source=file_in))

        self.stdout.write(
            'Minifying %s (using %s)\n' % (file_in, opts['method']))

    def _file_hash(self, url):
        """Open the file and get a hash of it."""
        if url in self.checked_hash:
            return self.checked_hash[url]

        file_hash = ''
        try:
            with open(url) as f:
                file_hash = hashlib.md5(f.read()).hexdigest()[0:7]
        except IOError:
            self.missing_files += 1
            self.stdout.write(' - Could not find file %s\n' % url)

        self.checked_hash[url] = file_hash
        return file_hash

    def _cachebust_regex(self, img, parent):
        """Run over the regex; img is the structural regex object."""
        url = img.group(1).strip('"\'')
        if url.startswith('data:') or url.startswith('http'):
            return 'url(%s)' % url

        url = url.split('?')[0]
        full_url = os.path.join(
            settings.ROOT, os.path.dirname(parent), url)

        return 'url(%s?%s)' % (url, self._file_hash(full_url))
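# Illustration of the cache-busting rewrite performed by _cachebust_regex
# (hypothetical path and hash): a stylesheet rule such as
#   url(../img/logo.png)
# becomes
#   url(../img/logo.png?3a5f2c1)
# where the query string is the first 7 hex chars of the file's MD5 digest,
# so browsers refetch the image only when its contents actually change.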
| atiqueahmedziad/addons-server | src/olympia/amo/management/commands/compress_assets.py | Python | bsd-3-clause | 8,858 |
from __future__ import unicode_literals

from django.conf.urls import patterns, include, url
from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin

from mezzanine.core.views import direct_to_template
from mezzanine.conf import settings

from hs_core.api import v1_api
from theme import views as theme

import autocomplete_light
autocomplete_light.autodiscover()

admin.autodiscover()

# Add the urlpatterns for any custom Django applications here.
# You can also change the ``home`` view to add your own functionality
# to the project's homepage.

urlpatterns = i18n_patterns("",
    # Change the admin prefix here to use an alternate URL for the
    # admin interface, which would be marginally more secure.
    url("^admin/", include(admin.site.urls)),
    url('^ga_resources/', include('ga_resources.urls')),
    url('^ga_interactive/', include('ga_interactive.urls')),
    url('^r/(?P<shortkey>[A-z0-9\-_]+)', 'hs_core.views.short_url'),
    # url('^party/', include('hs_scholar_profile.urls')),
    url(r'^user/$', theme.UserProfileView.as_view()),
    url(r'^user/(?P<user>.*)/', theme.UserProfileView.as_view()),
    url(r'^verify/(?P<pk>[0-9]*)/', 'hs_core.views.verify'),
    url(r'^django_irods/', include('django_irods.urls')),
    url(r'^autocomplete/', include('autocomplete_light.urls')),
    url(r'^hs_metrics/', include('hs_metrics.urls')),
)

# Filebrowser admin media library.
if getattr(settings, "PACKAGE_NAME_FILEBROWSER") in settings.INSTALLED_APPS:
    urlpatterns += i18n_patterns("",
        ("^admin/media-library/", include("%s.urls" %
                                          settings.PACKAGE_NAME_FILEBROWSER)),
    )

# Put API URLs before Mezzanine so that Mezzanine doesn't consume them.
urlpatterns += patterns('',
    (r'^api/', include(v1_api.urls)),
    url("^api/%s/doc/" % (v1_api.api_name,),
        include('tastypie_swagger.urls',
                namespace='tastypie_swagger'),
        kwargs={'tastypie_api_module': 'hs_core.api.v1_api',
                'namespace': 'tastypie_swagger'}),
    url('^hsapi/', include('hs_core.urls')),
    url('^party/', include('hs_party.urls')),
)

urlpatterns += patterns('',
    # We don't want to presume how your homepage works, so here are a
    # few patterns you can use to set it up.

    # HOMEPAGE AS STATIC TEMPLATE
    # ---------------------------
    # This pattern simply loads the index.html template. It isn't
    # commented out like the others, so it's the default. You only need
    # one homepage pattern, so if you use a different one, comment this
    # one out.
    # url("^$", direct_to_template, {"template": "index.html"}, name="home"),

    # HOMEPAGE AS AN EDITABLE PAGE IN THE PAGE TREE
    # ---------------------------------------------
    # This pattern gives us a normal ``Page`` object, so that your
    # homepage can be managed via the page tree in the admin. If you
    # use this pattern, you'll need to create a page in the page tree,
    # and specify its URL (in the Meta Data section) as "/", which
    # is the value used below in the ``{"slug": "/"}`` part.
    # Also note that the normal rule of adding a custom
    # template per page with the template name using the page's slug
    # doesn't apply here, since we can't have a template called
    # "/.html" - so for this case, the template "pages/index.html"
    # should be used if you want to customize the homepage's template.
    url("^$", "mezzanine.pages.views.page", {"slug": "/"}, name="home"),

    # HOMEPAGE FOR A BLOG-ONLY SITE
    # -----------------------------
    # This pattern points the homepage to the blog post listing page,
    # and is useful for sites that are primarily blogs. If you use this
    # pattern, you'll also need to set BLOG_SLUG = "" in your
    # ``settings.py`` module, and delete the blog page object from the
    # page tree in the admin if it was installed.
    # url("^$", "mezzanine.blog.views.blog_post_list", name="home"),

    # MEZZANINE'S URLS
    # ----------------
    # ADD YOUR OWN URLPATTERNS *ABOVE* THE LINE BELOW.
    # ``mezzanine.urls`` INCLUDES A *CATCH ALL* PATTERN
    # FOR PAGES, SO URLPATTERNS ADDED BELOW ``mezzanine.urls``
    # WILL NEVER BE MATCHED!
    # If you'd like more granular control over the patterns in
    # ``mezzanine.urls``, go right ahead and take the parts you want
    # from it, and use them directly below instead of using
    # ``mezzanine.urls``.
    ("^", include("mezzanine.urls")),

    # MOUNTING MEZZANINE UNDER A PREFIX
    # ---------------------------------
    # You can also mount all of Mezzanine's urlpatterns under a
    # URL prefix if desired. When doing this, you need to define the
    # ``SITE_PREFIX`` setting, which will contain the prefix. Eg:
    # SITE_PREFIX = "my/site/prefix"
    # For convenience, and to avoid repeating the prefix, use the
    # commented out pattern below (commenting out the one above of course)
    # which will make use of the ``SITE_PREFIX`` setting. Make sure to
    # add the import ``from django.conf import settings`` to the top
    # of this file as well.
    # Note that for any of the various homepage patterns above, you'll
    # need to use the ``SITE_PREFIX`` setting as well.
    # ("^%s/" % settings.SITE_PREFIX, include("mezzanine.urls"))
)

# Adds ``STATIC_URL`` to the context of error pages, so that error
# pages can use JS, CSS and images.
handler404 = "mezzanine.core.views.page_not_found"
handler500 = "mezzanine.core.views.server_error"
| hydroshare/hydroshare_temp | urls.py | Python | bsd-3-clause | 5,696 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import webapp2
import hinet
import seednet
import StringIO
import PyRSS2Gen
import urllib
import datetime
import hashlib
# from google.appengine.ext import ndb
from google.appengine.api import memcache

HTTP_DATE_FMT = '%a, %d %b %Y %H:%M:%S %Z'


def check_date_fmt(date):
    """Pick the strptime format matching the supplied HTTP date header."""
    parts = date.strip().split(' ')
    if len(parts) == 5:
        fmt = '%a, %d %b %Y %H:%M:%S'
    elif len(parts) == 6:
        fmt = '%a, %d %b %Y %H:%M:%S %Z'
    else:
        # Fall back to the module default for unexpected inputs.
        fmt = HTTP_DATE_FMT
    return fmt
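# Example (illustrative): an If-Modified-Since header such as
# 'Mon, 01 Jan 2024 00:00:00 GMT' splits into six tokens, so the format with
# the trailing %Z is chosen; without the 'GMT' suffix there are five tokens
# and the timezone-less format is used instead.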
# not used yet
class MainPage(webapp2.RequestHandler):
    def get(self):
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.headers['Cache-Control'] = 'max-age=3600, must-revalidate'
        self.response.write('')


# generate Hinet RSS
class Hinet(webapp2.RequestHandler):

    def output_content(self, content, serve=True):
        if serve:
            self.response.out.write(content)
        else:
            self.response.set_status(304)

    def set_headers(self):
        self.response.headers['Content-Type'] = 'application/xhtml+xml'
        self.response.headers['Cache-Control'] = 'public, max-age=3600, must-revalidate'

    def get_cache_data(self, rss):
        output = memcache.get(rss)
        mtime = memcache.get('h_mtime')
        etag = memcache.get('h_etag')
        if mtime is None:
            mtime = datetime.datetime.utcnow().strftime(HTTP_DATE_FMT) + 'GMT'
        self.response.headers['Last-Modified'] = mtime
        return output, mtime, etag

    def get(self):
        serve = True
        output, mtime, etag = self.get_cache_data('hinet_rss')
        if 'If-Modified-Since' in self.request.headers:
            IFMOD_DATE_FMT = check_date_fmt(
                self.request.headers['If-Modified-Since'])
            last_seen = datetime.datetime.strptime(
                self.request.headers['If-Modified-Since'], IFMOD_DATE_FMT)
            last_modified = datetime.datetime.strptime(mtime, HTTP_DATE_FMT)
            if last_seen >= last_modified:
                serve = False
        if 'If-None-Match' in self.request.headers:
            etags = [x.strip('" ')
                     for x in self.request.headers['If-None-Match'].split(',')]
            if etag in etags:
                serve = False
        if output is not None:
            self.set_headers()
            self.response.headers['ETag'] = '"%s"' % etag
            self.output_content(output, serve)
            return

        items = []
        parser = hinet.MyHTMLParser()
        parser.feed(urllib.urlopen(
            'http://search.hinet.net/getNotify?callback=jsonpCallback&type=0&sort=0&mobile=1').read())
        for i in parser.struc_data:
            items.append(PyRSS2Gen.RSSItem(
                title=i[1] + ' ' + i[3], link=i[2], pubDate=i[0]))
        rss = PyRSS2Gen.RSS2(
            title=u"Hinet系統公告",
            link="http://www.hinet.net/pu/notify.htm",
            description=u"此RSS內容取自Hinet網頁,依照著作權法之合理使用原則節錄部份內容。\
本RSS僅供參考,Hinet或任何人都不對內容負責",
            lastBuildDate=mtime,
            items=items)
        output = StringIO.StringIO()
        rss.write_xml(output, encoding='utf-8')
        etag = hashlib.sha1(output.getvalue()).hexdigest()
        memcache.set('hinet_rss', output.getvalue(), time=3600)
        memcache.set('h_mtime', mtime, time=3600)
        memcache.set('h_etag', etag, time=3600)
        self.set_headers()
        self.response.headers['ETag'] = '"%s"' % (etag,)
        self.output_content(output.getvalue(), serve)


# generate Seednet RSS
class Seednet(webapp2.RequestHandler):

    def output_content(self, content, serve=True):
        if serve:
            self.response.out.write(content)
        else:
            self.response.set_status(304)

    def set_headers(self):
        self.response.headers['Content-Type'] = 'application/xhtml+xml'
        self.response.headers['Cache-Control'] = 'public, max-age=3600, must-revalidate'

    def get_cache_data(self, rss):
        output = memcache.get('seednet_rss')
        mtime = memcache.get('s_mtime')
        etag = memcache.get('s_etag')
        if mtime is None:
            mtime = datetime.datetime.utcnow().strftime(HTTP_DATE_FMT) + 'GMT'
        self.response.headers['Last-Modified'] = mtime
        return output, mtime, etag

    def get(self):
        serve = True
        output, mtime, etag = self.get_cache_data('seednet_rss')
        if 'If-Modified-Since' in self.request.headers:
            IFMOD_DATE_FMT = check_date_fmt(
                self.request.headers['If-Modified-Since'])
            last_seen = datetime.datetime.strptime(
                self.request.headers['If-Modified-Since'], IFMOD_DATE_FMT)
            last_modified = datetime.datetime.strptime(mtime, HTTP_DATE_FMT)
            if last_seen >= last_modified:
                serve = False
        if 'If-None-Match' in self.request.headers:
            etags = [x.strip('" ')
                     for x in self.request.headers['If-None-Match'].split(',')]
            if etag in etags:
                serve = False
        if output is not None:
            self.set_headers()
            self.response.headers['ETag'] = '"%s"' % etag
            self.output_content(output, serve)
            return

        items = []
        parser = seednet.MyHTMLParser()
        parser.feed(urllib.urlopen(
            'https://service.seed.net.tw/register-cgi/service_notice?FUNC=notice_qry_more&Category=02&Start=1').read())
        for i in parser.struc_data:
            items.append(PyRSS2Gen.RSSItem(title=i[3], link=i[2], pubDate=i[0]))
        rss = PyRSS2Gen.RSS2(
            title=u"Seednet系統公告",
            link="https://service.seed.net.tw/register-cgi/service_notice?FUNC=notice_qry_more&Category=02&Start=1",
            description=u"此RSS內容取自Seednet網頁,依照著作權法之合理使用原則節錄部份內容。\
本RSS僅供參考,Seednet或任何人都不對內容負責",
            lastBuildDate=mtime,
            items=items)
        output = StringIO.StringIO()
        rss.write_xml(output, encoding='utf-8')
        etag = hashlib.sha1(output.getvalue()).hexdigest()
        memcache.set('seednet_rss', output.getvalue(), time=3600)
        memcache.set('s_mtime', mtime, time=3600)
        memcache.set('s_etag', etag, time=3600)
        self.set_headers()
        self.response.headers['ETag'] = '"%s"' % (etag,)
        self.output_content(output.getvalue(), serve)


application = webapp2.WSGIApplication([
    ('/', MainPage),
    ('/hinet', Hinet),
    ('/seednet', Seednet),
], debug=False)
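# Conditional-GET sketch (hypothetical client invocation): a revalidation
# request such as
#
#   curl -H 'If-None-Match: "<etag from a previous response>"' http://host/hinet
#
# is answered with 304 and an empty body, because the handler compares the
# cached ETag before writing the feed.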
| ryanho/ISParser | main.py | Python | bsd-3-clause | 6,603 |
# Create your views here.
import socket

from pyasn1.error import PyAsn1Error
import requests

from .heartbleed import test_heartbleed
from .models import Check

try:
    from OpenSSL.SSL import Error as SSLError
except ImportError:
    # In development, we might not have OpenSSL - it's only needed for SNI
    class SSLError(Exception):
        pass


class SecurityChecker(object):

    def run_check(self, url):
        self.session = requests.session()
        self.session.headers = [
            ('User-agent', "Sasha's pony checkup - http://ponycheckup.com/")]
        try:
            homepage = self.session.get(url, timeout=7)

            check_record = Check(url=url)
            check_record.hsts_header_found = self.check_supports_hsts(url)
            check_record.xframe_header_found = (
                True if 'X-Frame-Options' in homepage.headers else False)
            check_record.supports_https = self.check_supports_https(url)
            check_record.heartbleed_vuln = self.check_heartbleed_vuln(url)
            (check_record.admin_found,
             check_record.admin_forces_https) = self.check_admin(url)
            (check_record.login_found,
             check_record.login_forces_https) = self.check_login(url)
            check_record.allows_trace = self.check_trace(url)
            check_record.runs_debug = self.check_runs_debug(url)
            check_record.csrf_cookie_found = (
                True if self.find_csrf_cookie() else False)

            session_cookie = self.find_session_cookie()
            if session_cookie:
                check_record.session_cookie_found = True
                check_record.session_cookie_secure = session_cookie.secure
                check_record.session_cookie_httponly = (
                    session_cookie.has_nonstandard_attr('httponly'))
            else:
                check_record.session_cookie_found = False

            check_record.update_recommendation_count()
            check_record.save()
            return check_record
        except (requests.RequestException, SSLError, PyAsn1Error) as error:
            return error

    def check_supports_https(self, url):
        try:
            self.session.get(url.replace("http", "https"), timeout=7)
        except:
            return False
        return True

    def check_heartbleed_vuln(self, url):
        try:
            url = url.replace("http://", "").replace("/", "")
            return bool(test_heartbleed(url))
        except socket.error:
            return False

    def check_supports_hsts(self, url):
        try:
            ssltest = self.session.get(url.replace("http", "https"),
                                       timeout=7)
        except:
            return False
        return 'Strict-Transport-Security' in ssltest.headers

    def check_runs_debug(self, url):
        data = self.session.get(
            url + "/[][][][][]-this-tries-to-trigger-404....", timeout=7)
        return ("You're seeing this error because you have "
                "<code>DEBUG = True</code>") in data.content

    def check_trace(self, url):
        response = self.session.request('TRACE', url, timeout=7)
        return ('Content-Type' in response.headers and
                response.headers['Content-Type'] == "message/http")

    def check_admin(self, url):
        response = self.session.get(url + "/admin", timeout=7)
        if response.status_code == 404:
            return (False, None)
        data = response.content.lower()
        admin_found = '"id_username"' in data and (
            "csrfmiddlewaretoken" in data or "Django" in data or
            "__admin_media_prefix__" in data)
        return (admin_found, self._response_used_https(response))

    def check_login(self, url):
        response = self.session.get(url + "/accounts/login", timeout=7)
        if response.status_code == 404:
            response = self.session.get(url + "/login", timeout=7)
            if response.status_code == 404:
                return (False, None)
        return (True, self._response_used_https(response))

    def _response_used_https(self, response):
        return response.url[:5] == "https"

    def find_session_cookie(self):
        for cookie in self.session.cookies:
            if cookie.name == 'sessionid':
                return cookie
        return False

    def find_csrf_cookie(self):
        for cookie in self.session.cookies:
            if cookie.name == 'csrftoken':
                return cookie
        return False
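# Usage sketch (hypothetical URL; run_check returns either the saved Check
# record or the exception it caught):
#
#   checker = SecurityChecker()
#   result = checker.run_check('http://example.com')
#   if isinstance(result, Exception):
#       print('check failed: %s' % result)
#   else:
#       print('supports https: %s' % result.supports_https)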
| erikr/ponycheckup | ponycheckup/check/checker.py | Python | bsd-3-clause | 4,346 |
# -*- coding: utf-8 -*-
import csv
import json
from cStringIO import StringIO
from datetime import datetime

from django.conf import settings
from django.core import mail, management
from django.core.cache import cache

import mock
from nose.plugins.attrib import attr
from nose.tools import eq_
from piston.models import Consumer
from pyquery import PyQuery as pq

import amo
import amo.tests
from amo.tests import (assert_no_validation_errors, assert_required, formset,
                       initial)
from access.models import Group, GroupUser
from addons.models import Addon, CompatOverride, CompatOverrideRange
from amo.urlresolvers import reverse
from amo.utils import urlparams
from applications.models import AppVersion
from bandwagon.models import FeaturedCollection, MonthlyPick
from compat.cron import compatibility_report
from compat.models import CompatReport
from devhub.models import ActivityLog
from files.models import Approval, File
from stats.models import UpdateCount
from users.models import UserProfile
from users.utils import get_task_user
from versions.models import ApplicationsVersions, Version
from zadmin import forms, tasks
from zadmin.forms import DevMailerForm
from zadmin.models import EmailPreviewTopic, ValidationJob, ValidationResult
from zadmin.views import completed_versions_dirty, find_files


no_op_validation = dict(errors=0, warnings=0, notices=0, messages=[],
                        compatibility_summary=dict(errors=0, warnings=0,
                                                   notices=0))
class TestSiteEvents(amo.tests.TestCase):
    fixtures = ['base/users', 'zadmin/tests/siteevents']

    def setUp(self):
        self.client.login(username='[email protected]', password='password')

    def test_get(self):
        url = reverse('zadmin.site_events')
        response = self.client.get(url)
        eq_(response.status_code, 200)
        events = response.context['events']
        eq_(len(events), 1)

    def test_add(self):
        url = reverse('zadmin.site_events')
        new_event = {
            'event_type': 2,
            'start': '2012-01-01',
            'description': 'foo',
        }
        response = self.client.post(url, new_event, follow=True)
        eq_(response.status_code, 200)
        events = response.context['events']
        eq_(len(events), 2)

    def test_edit(self):
        url = reverse('zadmin.site_events', args=[1])
        modified_event = {
            'event_type': 2,
            'start': '2012-01-01',
            'description': 'bar',
        }
        response = self.client.post(url, modified_event, follow=True)
        eq_(response.status_code, 200)
        events = response.context['events']
        eq_(events[0].description, 'bar')

    def test_delete(self):
        url = reverse('zadmin.site_events.delete', args=[1])
        response = self.client.get(url, follow=True)
        eq_(response.status_code, 200)
        events = response.context['events']
        eq_(len(events), 0)


class TestFlagged(amo.tests.TestCase):
    fixtures = ['base/users', 'zadmin/tests/flagged']

    def setUp(self):
        super(TestFlagged, self).setUp()
        self.client.login(username='[email protected]', password='password')
        self.url = reverse('zadmin.flagged')

    @mock.patch.object(settings, 'MARKETPLACE', False)
    def test_get(self):
        response = self.client.get(self.url, follow=True)

        addons = dict((a.id, a) for a in response.context['addons'])
        eq_(len(addons), 3)

        # 1. an addon should have latest version and approval attached
        addon = Addon.objects.get(id=1)
        eq_(addons[1], addon)
        eq_(addons[1].version.id,
            Version.objects.filter(addon=addon).latest().id)
        eq_(addons[1].approval.id,
            Approval.objects.filter(addon=addon).latest().id)

        # 2. missing approval is ok
        addon = Addon.objects.get(id=2)
        eq_(addons[2], addon)
        eq_(addons[2].version.id,
            Version.objects.filter(addon=addon).latest().id)
        eq_(addons[2].approval, None)

        # 3. missing version is ok
        addon = Addon.objects.get(id=3)
        eq_(addons[3], addon)
        eq_(addons[3].approval.id,
            Approval.objects.filter(addon=addon).latest().id)
        eq_(addons[3].version, None)

    @mock.patch.object(settings, 'MARKETPLACE', False)
    def test_post(self):
        response = self.client.post(self.url, {'addon_id': ['1', '2']},
                                    follow=True)
        self.assertRedirects(response, self.url)
        assert not Addon.objects.no_cache().get(id=1).admin_review
        assert not Addon.objects.no_cache().get(id=2).admin_review
        addons = response.context['addons']
        eq_(len(addons), 1)
        eq_(addons[0], Addon.objects.get(id=3))

    @mock.patch.object(settings, 'MARKETPLACE', False)
    def test_empty(self):
        Addon.objects.update(admin_review=False)
        res = self.client.get(self.url)
        eq_(set(res.context['addons']), set([]))

    @mock.patch.object(settings, 'MARKETPLACE', False)
    def test_addons_only(self):
        Addon.objects.get(id=2).update(type=amo.ADDON_WEBAPP)
        res = self.client.get(self.url)
        eq_(set([r.pk for r in res.context['addons']]),
            set([1, 3]))
class BulkValidationTest(amo.tests.TestCase):
    fixtures = ['base/apps', 'base/platforms', 'base/addon_3615',
                'base/appversion', 'base/users']

    def setUp(self):
        assert self.client.login(username='[email protected]',
                                 password='password')
        self.addon = Addon.objects.get(pk=3615)
        self.creator = UserProfile.objects.get(username='editor')
        self.version = self.addon.get_version()
        ApplicationsVersions.objects.filter(
            application=1, version=self.version).update(
            max=AppVersion.objects.get(application=1, version='3.7a1pre'))
        self.application_version = self.version.apps.all()[0]
        self.application = self.application_version.application
        self.min = self.application_version.min
        self.max = self.application_version.max
        self.curr_max = self.appversion('3.7a1pre')
        self.counter = 0

        self.old_task_user = settings.TASK_USER_ID
        settings.TASK_USER_ID = self.creator.id

    def tearDown(self):
        settings.TASK_USER_ID = self.old_task_user

    def appversion(self, version, application=amo.FIREFOX.id):
        return AppVersion.objects.get(application=application,
                                      version=version)

    def create_job(self, **kwargs):
        kw = dict(application_id=amo.FIREFOX.id,
                  curr_max_version=kwargs.pop('current', self.curr_max),
                  target_version=kwargs.pop('target',
                                            self.appversion('3.7a3')),
                  creator=self.creator)
        kw.update(kwargs)
        return ValidationJob.objects.create(**kw)

    def create_file(self, version=None, platform_id=amo.PLATFORM_ALL.id):
        if not version:
            version = self.version
        return File.objects.create(version=version,
                                   filename='file-%s' % self.counter,
                                   platform_id=platform_id,
                                   status=amo.STATUS_PUBLIC)

    def create_result(self, job, f, **kwargs):
        self.counter += 1
        kw = dict(file=f,
                  validation='{}',
                  errors=0,
                  warnings=0,
                  notices=0,
                  validation_job=job,
                  task_error=None,
                  valid=0,
                  completed=datetime.now())
        kw.update(kwargs)
        return ValidationResult.objects.create(**kw)

    def start_validation(self, new_max='3.7a3'):
        self.new_max = self.appversion(new_max)
        r = self.client.post(reverse('zadmin.start_validation'),
                             {'application': amo.FIREFOX.id,
                              'curr_max_version': self.curr_max.id,
                              'target_version': self.new_max.id,
                              'finish_email': '[email protected]'},
                             follow=True)
        eq_(r.status_code, 200)
class TestBulkValidation(BulkValidationTest):

    @mock.patch('zadmin.tasks.bulk_validate_file')
    def test_start(self, bulk_validate_file):
        new_max = self.appversion('3.7a3')
        r = self.client.post(reverse('zadmin.start_validation'),
                             {'application': amo.FIREFOX.id,
                              'curr_max_version': self.curr_max.id,
                              'target_version': new_max.id,
                              'finish_email': '[email protected]'},
                             follow=True)
        self.assertNoFormErrors(r)
        self.assertRedirects(r, reverse('zadmin.validation'))
        job = ValidationJob.objects.get()
        eq_(job.application_id, amo.FIREFOX.id)
        eq_(job.curr_max_version.version, self.curr_max.version)
        eq_(job.target_version.version, new_max.version)
        eq_(job.finish_email, '[email protected]')
        eq_(job.completed, None)
        eq_(job.result_set.all().count(),
            len(self.version.all_files))
        assert bulk_validate_file.delay.called

    @mock.patch('zadmin.tasks.bulk_validate_file')
    def test_ignore_user_disabled_addons(self, bulk_validate_file):
        self.addon.update(disabled_by_user=True)
        r = self.client.post(reverse('zadmin.start_validation'),
                             {'application': amo.FIREFOX.id,
                              'curr_max_version': self.curr_max.id,
                              'target_version': self.appversion('3.7a3').id,
                              'finish_email': '[email protected]'},
                             follow=True)
        self.assertNoFormErrors(r)
        self.assertRedirects(r, reverse('zadmin.validation'))
        assert not bulk_validate_file.delay.called

    @mock.patch('zadmin.tasks.bulk_validate_file')
    def test_ignore_non_public_addons(self, bulk_validate_file):
        target_ver = self.appversion('3.7a3').id
        for status in (amo.STATUS_DISABLED, amo.STATUS_NULL,
                       amo.STATUS_DELETED):
            self.addon.update(status=status)
            r = self.client.post(reverse('zadmin.start_validation'),
                                 {'application': amo.FIREFOX.id,
                                  'curr_max_version': self.curr_max.id,
                                  'target_version': target_ver,
                                  'finish_email': '[email protected]'},
                                 follow=True)
            self.assertNoFormErrors(r)
            self.assertRedirects(r, reverse('zadmin.validation'))
            assert not bulk_validate_file.delay.called, (
                'Addon with status %s should be ignored' % status)

    @mock.patch('zadmin.tasks.bulk_validate_file')
    def test_ignore_lang_packs(self, bulk_validate_file):
        target_ver = self.appversion('3.7a3').id
        self.addon.update(type=amo.ADDON_LPAPP)
        r = self.client.post(reverse('zadmin.start_validation'),
                             {'application': amo.FIREFOX.id,
                              'curr_max_version': self.curr_max.id,
                              'target_version': target_ver,
                              'finish_email': '[email protected]'},
                             follow=True)
        self.assertNoFormErrors(r)
        self.assertRedirects(r, reverse('zadmin.validation'))
        assert not bulk_validate_file.delay.called, (
            'Lang pack addons should be ignored')

    @mock.patch('zadmin.tasks.bulk_validate_file')
    def test_ignore_themes(self, bulk_validate_file):
        target_ver = self.appversion('3.7a3').id
        self.addon.update(type=amo.ADDON_THEME)
        self.client.post(reverse('zadmin.start_validation'),
                         {'application': amo.FIREFOX.id,
                          'curr_max_version': self.curr_max.id,
                          'target_version': target_ver,
                          'finish_email': '[email protected]'})
        assert not bulk_validate_file.delay.called, (
            'Theme addons should be ignored')

    @mock.patch('zadmin.tasks.bulk_validate_file')
    def test_validate_all_non_disabled_addons(self, bulk_validate_file):
        target_ver = self.appversion('3.7a3').id
        bulk_validate_file.delay.called = False
        self.addon.update(status=amo.STATUS_PUBLIC)
        r = self.client.post(reverse('zadmin.start_validation'),
                             {'application': amo.FIREFOX.id,
                              'curr_max_version': self.curr_max.id,
                              'target_version': target_ver,
                              'finish_email': '[email protected]'},
                             follow=True)
        self.assertNoFormErrors(r)
        self.assertRedirects(r, reverse('zadmin.validation'))
        assert bulk_validate_file.delay.called, (
            'Addon with status %s should be validated' % self.addon.status)

    def test_grid(self):
        job = self.create_job()
        for res in (dict(errors=0), dict(errors=1)):
            self.create_result(job, self.create_file(), **res)

        r = self.client.get(reverse('zadmin.validation'))
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('table tr td').eq(0).text(), str(job.pk))  # ID
        eq_(doc('table tr td').eq(3).text(), 'Firefox')  # Application
        eq_(doc('table tr td').eq(4).text(), self.curr_max.version)
        eq_(doc('table tr td').eq(5).text(), '3.7a3')
        eq_(doc('table tr td').eq(6).text(), '2')  # tested
        eq_(doc('table tr td').eq(7).text(), '1')  # failing
        eq_(doc('table tr td').eq(8).text()[0], '1')  # passing
        eq_(doc('table tr td').eq(9).text(), '0')  # exceptions

    def test_application_versions_json(self):
        r = self.client.post(reverse('zadmin.application_versions_json'),
                             {'application_id': amo.FIREFOX.id})
        eq_(r.status_code, 200)
        data = json.loads(r.content)
        empty = True
        for id, ver in data['choices']:
            empty = False
            eq_(AppVersion.objects.get(pk=id).version, ver)
        assert not empty, "Unexpected: %r" % data

    def test_job_status(self):
        job = self.create_job()

        def get_data():
            self.create_result(job, self.create_file(), **{})
            r = self.client.post(reverse('zadmin.job_status'),
                                 {'job_ids': json.dumps([job.pk])})
            eq_(r.status_code, 200)
            data = json.loads(r.content)[str(job.pk)]
            return data

        data = get_data()
        eq_(data['completed'], 1)
        eq_(data['total'], 1)
        eq_(data['percent_complete'], '100')
        eq_(data['job_id'], job.pk)
        eq_(data['completed_timestamp'], '')
        job.update(completed=datetime.now())
        data = get_data()
        assert data['completed_timestamp'] != '', (
            'Unexpected: %s' % data['completed_timestamp'])
class TestBulkUpdate(BulkValidationTest):

    def setUp(self):
        super(TestBulkUpdate, self).setUp()

        self.job = self.create_job(completed=datetime.now())
        self.update_url = reverse('zadmin.notify.success', args=[self.job.pk])
        self.list_url = reverse('zadmin.validation')
        self.data = {'text': '{{ APPLICATION }} {{ VERSION }}',
                     'subject': '..'}

        self.version_one = Version.objects.create(addon=self.addon)
        self.version_two = Version.objects.create(addon=self.addon)

    def test_no_update_link(self):
        self.create_result(self.job, self.create_file(), **{})
        r = self.client.get(self.list_url)
        doc = pq(r.content)
        eq_(doc('table tr td a.set-max-version').text(), 'Set max version')

    def test_update_link(self):
        self.create_result(self.job, self.create_file(), **{'valid': 1})
        r = self.client.get(self.list_url)
        doc = pq(r.content)
        eq_(doc('table tr td a.set-max-version').text(), 'Set max version')

    def test_update_url(self):
        self.create_result(self.job, self.create_file(), **{'valid': 1})
        r = self.client.get(self.list_url)
        doc = pq(r.content)
        eq_(doc('table tr td a.set-max-version').attr('data-job-url'),
            self.update_url)

    def test_update_anonymous(self):
        self.client.logout()
        r = self.client.post(self.update_url)
        eq_(r.status_code, 302)

    def test_version_pks(self):
        for version in [self.version_one, self.version_two]:
            for x in range(0, 3):
                self.create_result(self.job, self.create_file(version))

        eq_(sorted(completed_versions_dirty(self.job)),
            [self.version_one.pk, self.version_two.pk])

    def test_update_passing_only(self):
        self.create_result(self.job, self.create_file(self.version_one))
        self.create_result(self.job, self.create_file(self.version_two),
                           errors=1)
        eq_(sorted(completed_versions_dirty(self.job)),
            [self.version_one.pk])

    def test_update_pks(self):
        self.create_result(self.job, self.create_file(self.version))
        r = self.client.post(self.update_url, self.data)
        eq_(r.status_code, 302)
        eq_(self.version.apps.all()[0].max, self.job.target_version)

    def test_update_unclean_pks(self):
        self.create_result(self.job, self.create_file(self.version))
        self.create_result(self.job, self.create_file(self.version),
                           errors=1)
        r = self.client.post(self.update_url, self.data)
        eq_(r.status_code, 302)
        eq_(self.version.apps.all()[0].max, self.job.curr_max_version)

    def test_update_pks_logs(self):
        self.create_result(self.job, self.create_file(self.version))
        eq_(ActivityLog.objects.for_addons(self.addon).count(), 0)
        self.client.post(self.update_url, self.data)
        upd = amo.LOG.MAX_APPVERSION_UPDATED.id
        logs = ActivityLog.objects.for_addons(self.addon).filter(action=upd)
        eq_(logs.count(), 1)
        eq_(logs[0].user, get_task_user())

    def test_update_wrong_version(self):
        self.create_result(self.job, self.create_file(self.version))
        av = self.version.apps.all()[0]
        av.max = self.appversion('3.6')
        av.save()
        self.client.post(self.update_url, self.data)
        eq_(self.version.apps.all()[0].max, self.appversion('3.6'))

    def test_update_all_within_range(self):
        self.create_result(self.job, self.create_file(self.version))
        # Create an appversion in between current and target.
        av = self.version.apps.all()[0]
        av.max = self.appversion('3.7a2')
        av.save()
        self.client.post(self.update_url, self.data)
        eq_(self.version.apps.all()[0].max, self.appversion('3.7a3'))

    def test_version_comparison(self):
        # regression test for bug 691984
        job = self.create_job(completed=datetime.now(),
                              current=self.appversion('3.0.9'),
                              target=self.appversion('3.5'))
        # .* was not sorting right
        self.version.apps.all().update(max=self.appversion('3.0.*'))
        self.create_result(job, self.create_file(self.version))
        self.client.post(reverse('zadmin.notify.success', args=[job.pk]),
                         self.data)
        eq_(self.version.apps.all()[0].max, self.appversion('3.5'))

    def test_update_different_app(self):
        self.create_result(self.job, self.create_file(self.version))
        target = self.version.apps.all()[0]
        target.application_id = amo.FIREFOX.id
        target.save()
        eq_(self.version.apps.all()[0].max, self.curr_max)

    def test_update_twice(self):
        self.create_result(self.job, self.create_file(self.version))
        self.client.post(self.update_url, self.data)
        eq_(self.version.apps.all()[0].max, self.job.target_version)
        now = self.version.modified
        self.client.post(self.update_url, self.data)
        eq_(self.version.modified, now)

    def test_update_notify(self):
        self.create_result(self.job, self.create_file(self.version))
        self.client.post(self.update_url, self.data)
        eq_(len(mail.outbox), 1)

    def test_update_subject(self):
        data = self.data.copy()
        data['subject'] = '{{ ADDON_NAME }}{{ ADDON_VERSION }}'
        f = self.create_file(self.version)
        self.create_result(self.job, f)
        self.client.post(self.update_url, data)
        eq_(mail.outbox[0].subject,
            '%s%s' % (self.addon.name, f.version.version))

    @mock.patch('zadmin.tasks.log')
    def test_bulk_update_logs_stats(self, log):
        log.info = mock.Mock()
        self.create_result(self.job, self.create_file(self.version))
        self.client.post(self.update_url, self.data)
        eq_(log.info.call_args_list[-1][0][0],
            '[1@None] bulk update stats for job %s: '
            '{author_emailed: 1, bumped: 1, is_dry_run: 0, processed: 1}'
            % self.job.pk)

    def test_application_version(self):
        self.create_result(self.job, self.create_file(self.version))
        self.client.post(self.update_url, self.data)
        eq_(mail.outbox[0].body, 'Firefox 3.7a3')

    def test_multiple_result_links(self):
        # Creates validation results for two files of the same addon:
        results = [
            self.create_result(self.job, self.create_file(self.version)),
            self.create_result(self.job, self.create_file(self.version))]
        self.client.post(self.update_url, {'text': '{{ RESULT_LINKS }}',
                                           'subject': '..'})
        links = mail.outbox[0].body.split(' ')
        for result in results:
            assert any(ln.endswith(reverse('devhub.bulk_compat_result',
                                           args=(self.addon.slug, result.pk)))
                       for ln in links), ('Unexpected links: %s' % links)

    def test_notify_mail_preview(self):
        self.create_result(self.job, self.create_file(self.version))
        self.client.post(self.update_url,
                         {'text': 'the message', 'subject': 'the subject',
                          'preview_only': 'on'})
        eq_(len(mail.outbox), 0)
        rs = self.job.get_success_preview_emails()
        eq_([e.subject for e in rs], ['the subject'])
        # version should not be bumped since it's in preview mode:
        eq_(self.version.apps.all()[0].max, self.max)
        upd = amo.LOG.MAX_APPVERSION_UPDATED.id
        logs = ActivityLog.objects.for_addons(self.addon).filter(action=upd)
        eq_(logs.count(), 0)
class TestBulkNotify(BulkValidationTest):

    def setUp(self):
        super(TestBulkNotify, self).setUp()

        self.job = self.create_job(completed=datetime.now())
        self.update_url = reverse('zadmin.notify.failure', args=[self.job.pk])
        self.syntax_url = reverse('zadmin.notify.syntax')
        self.list_url = reverse('zadmin.validation')

        self.version_one = Version.objects.create(addon=self.addon)
        self.version_two = Version.objects.create(addon=self.addon)

    def test_no_notify_link(self):
        self.create_result(self.job, self.create_file(), **{})
        r = self.client.get(self.list_url)
        doc = pq(r.content)
        eq_(len(doc('table tr td a.notify')), 0)

    def test_notify_link(self):
        self.create_result(self.job, self.create_file(), **{'errors': 1})
        r = self.client.get(self.list_url)
        doc = pq(r.content)
        eq_(doc('table tr td a.notify').text(), 'Notify')

    def test_notify_url(self):
        self.create_result(self.job, self.create_file(), **{'errors': 1})
        r = self.client.get(self.list_url)
        doc = pq(r.content)
        eq_(doc('table tr td a.notify').attr('data-job-url'),
            self.update_url)

    def test_notify_anonymous(self):
        self.client.logout()
        r = self.client.post(self.update_url)
        eq_(r.status_code, 302)

    def test_notify_log(self):
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        eq_(ActivityLog.objects.for_addons(self.addon).count(), 0)
        self.client.post(self.update_url, {'text': '..', 'subject': '..'})
        upd = amo.LOG.BULK_VALIDATION_EMAILED.id
        logs = ActivityLog.objects.for_addons(self.addon).filter(action=upd)
        eq_(logs.count(), 1)
        eq_(logs[0].user, self.creator)

    def test_notify_mail(self):
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        r = self.client.post(self.update_url,
                             {'text': '..',
                              'subject': '{{ ADDON_NAME }}'})
        eq_(r.status_code, 302)
        eq_(len(mail.outbox), 1)
        eq_(mail.outbox[0].body, '..')
        eq_(mail.outbox[0].subject, self.addon.name)
        eq_(mail.outbox[0].to, [u'[email protected]'])

    def test_result_links(self):
        result = self.create_result(self.job, self.create_file(self.version),
                                    **{'errors': 1})
        r = self.client.post(self.update_url, {'text': '{{ RESULT_LINKS }}',
                                               'subject': '...'})
        eq_(r.status_code, 302)
        eq_(len(mail.outbox), 1)
        res = reverse('devhub.bulk_compat_result',
                      args=(self.addon.slug, result.pk))
        email = mail.outbox[0].body
        assert res in email, ('Unexpected message: %s' % email)

    def test_notify_mail_partial(self):
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        self.create_result(self.job, self.create_file(self.version))
        r = self.client.post(self.update_url, {'text': '..', 'subject': '..'})
        eq_(r.status_code, 302)
        eq_(len(mail.outbox), 1)

    def test_notify_mail_multiple(self):
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        r = self.client.post(self.update_url, {'text': '..', 'subject': '..'})
        eq_(r.status_code, 302)
        eq_(len(mail.outbox), 2)

    def test_notify_mail_preview(self):
        for i in range(2):
            self.create_result(self.job, self.create_file(self.version),
                               **{'errors': 1})
        r = self.client.post(self.update_url,
                             {'text': 'the message', 'subject': 'the subject',
                              'preview_only': 'on'})
        eq_(r.status_code, 302)
        eq_(len(mail.outbox), 0)
        rs = self.job.get_failure_preview_emails()
        eq_([e.subject for e in rs], ['the subject', 'the subject'])

    def test_notify_rendering(self):
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        r = self.client.post(self.update_url,
                             {'text': '{{ ADDON_NAME }}{{ COMPAT_LINK }}',
                              'subject': '{{ ADDON_NAME }} blah'})
        eq_(r.status_code, 302)
        eq_(len(mail.outbox), 1)
        url = reverse('devhub.versions.edit', args=[self.addon.pk,
                                                    self.version.pk])
        assert str(self.addon.name) in mail.outbox[0].body
        assert url in mail.outbox[0].body
        assert str(self.addon.name) in mail.outbox[0].subject

    def test_notify_unicode(self):
        self.addon.name = u'འབྲུག་ཡུལ།'
        self.addon.save()
        self.create_result(self.job, self.create_file(self.version),
                           **{'errors': 1})
        r = self.client.post(self.update_url,
                             {'text': '{{ ADDON_NAME }}',
                              'subject': '{{ ADDON_NAME }} blah'})
        eq_(r.status_code, 302)
        eq_(len(mail.outbox), 1)
        eq_(mail.outbox[0].body, self.addon.name)

    def test_notify_template(self):
        for text, res in (['some sample text', True],
                          ['{{ ADDON_NAME }}{% if %}', False]):
            eq_(forms.NotifyForm({'text': text, 'subject': '...'}).is_valid(),
                res)

    def test_notify_syntax(self):
        for text, res in (['some sample text', True],
                          ['{{ ADDON_NAME }}{% if %}', False]):
            r = self.client.post(self.syntax_url, {'text': text,
                                                   'subject': '..'})
            eq_(r.status_code, 200)
            eq_(json.loads(r.content)['valid'], res)

    def test_undeclared_variables(self):
        for text, res in (['{{NOT_DECLARED}}', False],
                          ['{{ NOT_DECLARED }}', False],
                          ["""
                           {{ADDON_NAME}}
                           {{NOT_DECLARED}}
                           """, False],
                          ['{{ADDON_NAME}} {{NOT_DECLARED}}', False],
                          ['{{ADDON_NAME}}', True]):
            r = self.client.post(self.syntax_url, {'text': text,
                                                   'subject': '..'})
            eq_(r.status_code, 200)
            assert json.loads(r.content)['valid'] == res, (
                'Text %r unexpectedly resulted in %r' % (text, res))

    def test_undeclared_variable_form_submit(self):
        f = forms.NotifyForm({'text': '{{ UNDECLARED }}', 'subject': '...'})
        eq_(f.is_valid(), False)

    def test_addon_name_contains_platform(self):
        for pl in (amo.PLATFORM_MAC.id, amo.PLATFORM_LINUX.id):
            f = self.create_file(self.version, platform_id=pl)
            self.create_result(self.job, f, errors=1)
        self.client.post(self.update_url, {'text': '...',
                                           'subject': '{{ ADDON_NAME }}'})
        subjects = sorted(m.subject for m in mail.outbox)
        eq_(subjects,
            ['Delicious Bookmarks (Linux)',
             'Delicious Bookmarks (Mac OS X)'])

    def test_addon_name_for_platform_all(self):
        f = self.create_file(self.version, platform_id=amo.PLATFORM_ALL.id)
        self.create_result(self.job, f, errors=1)
        self.client.post(self.update_url, {'text': '...',
                                           'subject': '{{ ADDON_NAME }}'})
        eq_(mail.outbox[0].subject, unicode(self.addon.name))
class TestBulkValidationTask(BulkValidationTest):
@attr('validator')
def test_validate(self):
self.start_validation()
res = ValidationResult.objects.get()
self.assertCloseToNow(res.completed)
assert_no_validation_errors(res)
eq_(res.errors, 1) # package could not be found
eq_(res.valid, False)
eq_(res.warnings, 0)
eq_(res.notices, 0)
v = json.loads(res.validation)
eq_(v['errors'], 1)
self.assertCloseToNow(res.validation_job.completed)
eq_(res.validation_job.stats['total'], 1)
eq_(res.validation_job.stats['completed'], 1)
eq_(res.validation_job.stats['passing'], 0)
eq_(res.validation_job.stats['failing'], 1)
eq_(res.validation_job.stats['errors'], 0)
eq_(len(mail.outbox), 1)
eq_(mail.outbox[0].subject,
'Behold! Validation results for Firefox %s->%s'
% (self.curr_max.version, self.new_max.version))
eq_(mail.outbox[0].to, ['[email protected]'])
@mock.patch('validator.validate.validate')
def test_validator_bulk_compat_flag(self, validate):
try:
self.start_validation()
except Exception:
# We only care about the call to `validate()`, not the result.
pass
assert validate.call_args[1].get('compat_test')
@mock.patch('zadmin.tasks.run_validator')
def test_task_error(self, run_validator):
run_validator.side_effect = RuntimeError('validation error')
try:
self.start_validation()
        except Exception:
# the real test is how it's handled, below...
pass
res = ValidationResult.objects.get()
err = res.task_error.strip()
assert err.endswith('RuntimeError: validation error'), (
'Unexpected: %s' % err)
self.assertCloseToNow(res.completed)
eq_(res.validation_job.stats['total'], 1)
eq_(res.validation_job.stats['errors'], 1)
eq_(res.validation_job.stats['passing'], 0)
eq_(res.validation_job.stats['failing'], 0)
@mock.patch('zadmin.tasks.run_validator')
def test_validate_for_appversions(self, run_validator):
data = {
"errors": 1,
"warnings": 50,
"notices": 1,
"messages": [],
"compatibility_summary": {
"errors": 0,
"warnings": 0,
"notices": 0
},
"metadata": {}
}
run_validator.return_value = json.dumps(data)
self.start_validation()
assert run_validator.called
eq_(run_validator.call_args[1]['for_appversions'],
{amo.FIREFOX.guid: [self.new_max.version]})
@mock.patch('zadmin.tasks.run_validator')
def test_validate_all_tiers(self, run_validator):
run_validator.return_value = json.dumps(no_op_validation)
        res = self.create_result(self.create_job(), self.create_file())
tasks.bulk_validate_file(res.id)
assert run_validator.called
eq_(run_validator.call_args[1]['test_all_tiers'], True)
@mock.patch('zadmin.tasks.run_validator')
def test_merge_with_compat_summary(self, run_validator):
data = {
"errors": 1,
"detected_type": "extension",
"success": False,
"warnings": 50,
"notices": 1,
"ending_tier": 5,
"messages": [
{
"description": "A global function was called ...",
"tier": 3,
"message": "Global called in dangerous manner",
"uid": "de93a48831454e0b9d965642f6d6bf8f",
"id": [],
"compatibility_type": None,
"for_appversions": None,
"type": "warning",
},
{
"description": ("...no longer indicate the language "
"of Firefox's UI..."),
"tier": 5,
"message": "navigator.language may not behave as expected",
"uid": "f44c1930887c4d9e8bd2403d4fe0253a",
"id": [],
"compatibility_type": "error",
"for_appversions": {
"{ec8030f7-c20a-464f-9b0e-13a3a9e97384}": ["4.2a1pre",
"5.0a2",
"6.0a1"]
},
"type": "warning"
}],
"compatibility_summary": {
"notices": 1,
"errors": 6,
"warnings": 0
},
"metadata": {
"version": "1.0",
"name": "FastestFox",
"id": "<id>"
}
}
run_validator.return_value = json.dumps(data)
        res = self.create_result(self.create_job(), self.create_file())
tasks.bulk_validate_file(res.id)
assert run_validator.called
res = ValidationResult.objects.get(pk=res.pk)
eq_(res.errors,
data['errors'] + data['compatibility_summary']['errors'])
eq_(res.warnings,
data['warnings'] + data['compatibility_summary']['warnings'])
eq_(res.notices,
data['notices'] + data['compatibility_summary']['notices'])
@mock.patch('validator.validate.validate')
def test_app_version_overrides(self, validate):
validate.return_value = json.dumps(no_op_validation)
self.start_validation(new_max='3.7a4')
assert validate.called
overrides = validate.call_args[1]['overrides']
eq_(overrides['targetapp_minVersion'], {amo.FIREFOX.guid: '3.7a4'})
eq_(overrides['targetapp_maxVersion'], {amo.FIREFOX.guid: '3.7a4'})
def create_version(self, addon, statuses, version_str=None):
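        """Create a version with an app compat range and one file per status."""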
max = self.max
if version_str:
max = AppVersion.objects.filter(version=version_str)[0]
version = Version.objects.create(addon=addon)
ApplicationsVersions.objects.create(application=self.application,
min=self.min, max=max,
version=version)
for status in statuses:
File.objects.create(status=status, version=version)
return version
def find_files(self, job_kwargs=None):
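        """Create a job, run find_files() on it, and return matched file ids."""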
if not job_kwargs:
job_kwargs = {}
job = self.create_job(**job_kwargs)
find_files(job)
return list(job.result_set.values_list('file_id', flat=True))
def test_getting_disabled(self):
self.addon.update(status=amo.STATUS_DISABLED)
eq_(len(self.find_files()), 0)
def test_getting_deleted(self):
self.addon.update(status=amo.STATUS_DELETED)
eq_(len(self.find_files()), 0)
def test_getting_status(self):
self.create_version(self.addon, [amo.STATUS_PUBLIC,
amo.STATUS_NOMINATED])
ids = self.find_files()
eq_(len(ids), 2)
def test_getting_latest_public(self):
old_version = self.create_version(self.addon, [amo.STATUS_PUBLIC])
self.create_version(self.addon, [amo.STATUS_NULL])
ids = self.find_files()
eq_(len(ids), 1)
eq_(old_version.files.all()[0].pk, ids[0])
def test_getting_latest_public_order(self):
self.create_version(self.addon, [amo.STATUS_PURGATORY])
new_version = self.create_version(self.addon, [amo.STATUS_PUBLIC])
ids = self.find_files()
eq_(len(ids), 1)
eq_(new_version.files.all()[0].pk, ids[0])
    def delete_orig_version(self):
# Because deleting versions resets the status...
self.version.delete()
# Don't really care what status this is, as long
# as it gets past the first SQL query.
self.addon.update(status=amo.STATUS_PUBLIC)
def test_no_versions(self):
self.delete_orig_version()
eq_(len(self.find_files()), 0)
def test_no_files(self):
self.version.files.all().delete()
self.addon.update(status=amo.STATUS_PUBLIC)
eq_(len(self.find_files()), 0)
def test_not_public(self):
version = self.create_version(self.addon, [amo.STATUS_LITE])
self.delete_orig_version()
ids = self.find_files()
eq_(len(ids), 1)
eq_(version.files.all()[0].pk, ids[0])
def test_not_public_and_newer(self):
self.create_version(self.addon, [amo.STATUS_LITE])
new_version = self.create_version(self.addon, [amo.STATUS_LITE])
self.delete_orig_version()
ids = self.find_files()
eq_(len(ids), 1)
eq_(new_version.files.all()[0].pk, ids[0])
def test_not_public_w_beta(self):
self.create_version(self.addon, [amo.STATUS_LITE])
self.create_version(self.addon, [amo.STATUS_BETA])
self.delete_orig_version()
ids = self.find_files()
eq_(len(ids), 2)
def test_not_public_w_multiple_files(self):
self.create_version(self.addon, [amo.STATUS_BETA])
new_version = self.create_version(self.addon, [amo.STATUS_LITE,
amo.STATUS_BETA])
self.delete_orig_version()
ids = self.find_files()
eq_(len(ids), 2)
eq_(sorted([v.id for v in new_version.files.all()]), sorted(ids))
def test_not_prelim_w_multiple_files(self):
self.create_version(self.addon, [amo.STATUS_BETA])
self.create_version(self.addon, [amo.STATUS_BETA,
amo.STATUS_NOMINATED])
self.delete_orig_version()
ids = self.find_files()
eq_(len(ids), 3)
def test_public_partial(self):
self.create_version(self.addon, [amo.STATUS_PUBLIC])
new_version = self.create_version(self.addon, [amo.STATUS_BETA,
amo.STATUS_DISABLED])
ids = self.find_files()
eq_(len(ids), 2)
assert new_version.files.all()[1].pk not in ids
def test_getting_w_unreviewed(self):
old_version = self.create_version(self.addon, [amo.STATUS_PUBLIC])
new_version = self.create_version(self.addon, [amo.STATUS_UNREVIEWED])
ids = self.find_files()
eq_(len(ids), 2)
eq_(sorted([old_version.files.all()[0].pk,
new_version.files.all()[0].pk]),
sorted(ids))
def test_multiple_files(self):
self.create_version(self.addon, [amo.STATUS_PUBLIC, amo.STATUS_PUBLIC,
amo.STATUS_PUBLIC])
ids = self.find_files()
eq_(len(ids), 3)
def test_multiple_public(self):
self.create_version(self.addon, [amo.STATUS_PUBLIC])
new_version = self.create_version(self.addon, [amo.STATUS_PUBLIC])
ids = self.find_files()
eq_(len(ids), 1)
eq_(new_version.files.all()[0].pk, ids[0])
def test_multiple_addons(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
self.create_version(addon, [amo.STATUS_PURGATORY])
ids = self.find_files()
eq_(len(ids), 1)
eq_(self.version.files.all()[0].pk, ids[0])
def test_no_app(self):
version = self.create_version(self.addon, [amo.STATUS_LITE])
self.delete_orig_version()
version.apps.all().delete()
ids = self.find_files()
eq_(len(ids), 0)
def test_wrong_version(self):
self.create_version(self.addon, [amo.STATUS_LITE],
version_str='4.0b2pre')
self.delete_orig_version()
ids = self.find_files()
eq_(len(ids), 0)
def test_version_slightly_newer_than_current(self):
# addon matching current app/version but with a newer public version
# that is within range of the target app/version.
# See bug 658739.
self.create_version(self.addon, [amo.STATUS_PUBLIC],
version_str='3.7a2')
newer = self.create_version(self.addon, [amo.STATUS_PUBLIC],
version_str='3.7a3')
kw = dict(curr_max_version=self.appversion('3.7a2'),
target_version=self.appversion('3.7a4'))
ids = self.find_files(job_kwargs=kw)
eq_(newer.files.all()[0].pk, ids[0])
def test_version_compatible_with_newer_app(self):
# addon with a newer public version that is already compatible with
# an app/version higher than the target.
# See bug 658739.
self.create_version(self.addon, [amo.STATUS_PUBLIC],
version_str='3.7a2')
# A version that supports a newer Firefox than what we're targeting
self.create_version(self.addon, [amo.STATUS_PUBLIC],
version_str='3.7a4')
kw = dict(curr_max_version=self.appversion('3.7a2'),
target_version=self.appversion('3.7a3'))
ids = self.find_files(job_kwargs=kw)
eq_(len(ids), 0)
def test_version_compatible_with_target_app(self):
self.create_version(self.addon, [amo.STATUS_PUBLIC],
version_str='3.7a2')
# Already has a version that supports target:
self.create_version(self.addon, [amo.STATUS_PUBLIC],
version_str='3.7a3')
kw = dict(curr_max_version=self.appversion('3.7a2'),
target_version=self.appversion('3.7a3'))
ids = self.find_files(job_kwargs=kw)
eq_(len(ids), 0)
class TestTallyValidationErrors(BulkValidationTest):
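    """Tests for tallying validation messages into a per-job CSV export."""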
def setUp(self):
super(TestTallyValidationErrors, self).setUp()
self.data = {
"errors": 1,
"warnings": 1,
"notices": 0,
"messages": [
{
"message": "message one",
"description": ["message one long"],
"id": ["path", "to", "test_one"],
"uid": "de93a48831454e0b9d965642f6d6bf8f",
"type": "error",
},
{
"message": "message two",
"description": "message two long",
"id": ["path", "to", "test_two"],
"uid": "f44c1930887c4d9e8bd2403d4fe0253a",
"compatibility_type": "error",
"type": "warning"
}],
"metadata": {},
"compatibility_summary": {
"errors": 1,
"warnings": 1,
"notices": 0
}
}
def csv(self, job_id):
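        """Download the tally CSV for a job; return (header, rows by id)."""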
r = self.client.get(reverse('zadmin.validation_tally_csv',
args=[job_id]))
eq_(r.status_code, 200)
rdr = csv.reader(StringIO(r.content))
header = rdr.next()
        rows = sorted(rdr, key=lambda r: r[0])
return header, rows
@mock.patch('zadmin.tasks.run_validator')
def test_csv(self, run_validator):
run_validator.return_value = json.dumps(self.data)
self.start_validation()
res = ValidationResult.objects.get()
eq_(res.task_error, None)
header, rows = self.csv(res.validation_job.pk)
eq_(header, ['message_id', 'message', 'long_message',
'type', 'addons_affected'])
eq_(rows.pop(0), ['path.to.test_one',
'message one', 'message one long', 'error', '1'])
eq_(rows.pop(0), ['path.to.test_two',
'message two', 'message two long', 'error', '1'])
def test_count_per_addon(self):
job = self.create_job()
data_str = json.dumps(self.data)
for i in range(3):
tasks.tally_validation_results(job.pk, data_str)
header, rows = self.csv(job.pk)
eq_(rows.pop(0), ['path.to.test_one',
'message one', 'message one long', 'error', '3'])
eq_(rows.pop(0), ['path.to.test_two',
'message two', 'message two long', 'error', '3'])
def test_nested_list_messages(self):
job = self.create_job()
self.data['messages'] = [{
"message": "message one",
"description": ["message one long", ["something nested"]],
"id": ["path", "to", "test_one"],
"uid": "de93a48831454e0b9d965642f6d6bf8f",
"type": "error",
}]
data_str = json.dumps(self.data)
# This was raising an exception. bug 733845
tasks.tally_validation_results(job.pk, data_str)
class TestEmailPreview(amo.tests.TestCase):
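    """Tests the CSV export of emails captured by EmailPreviewTopic."""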
fixtures = ['base/addon_3615', 'base/users']
def setUp(self):
assert self.client.login(username='[email protected]',
password='password')
addon = Addon.objects.get(pk=3615)
self.topic = EmailPreviewTopic(addon)
def test_csv(self):
self.topic.send_mail('the subject', u'Hello Ivan Krsti\u0107',
from_email='[email protected]',
recipient_list=['[email protected]'])
r = self.client.get(reverse('zadmin.email_preview_csv',
args=[self.topic.topic]))
eq_(r.status_code, 200)
rdr = csv.reader(StringIO(r.content))
eq_(rdr.next(), ['from_email', 'recipient_list', 'subject', 'body'])
eq_(rdr.next(), ['[email protected]', '[email protected]',
'the subject', 'Hello Ivan Krsti\xc4\x87'])
class TestMonthlyPick(amo.tests.TestCase):
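    """Tests for the monthly pick admin formset (insert, update, delete)."""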
fixtures = ['base/addon_3615', 'base/apps', 'base/users']
def setUp(self):
assert self.client.login(username='[email protected]',
password='password')
self.url = reverse('zadmin.monthly_pick')
addon = Addon.objects.get(pk=3615)
MonthlyPick.objects.create(addon=addon,
locale='zh-CN',
blurb="test data",
image="http://www.google.com")
self.f = self.client.get(self.url).context['form'].initial_forms[0]
self.initial = self.f.initial
def test_form_initial(self):
eq_(self.initial['addon'], 3615)
eq_(self.initial['locale'], 'zh-CN')
eq_(self.initial['blurb'], 'test data')
eq_(self.initial['image'], 'http://www.google.com')
def test_success_insert(self):
dupe = initial(self.f)
del dupe['id']
dupe.update(locale='fr')
data = formset(initial(self.f), dupe, initial_count=1)
self.client.post(self.url, data)
eq_(MonthlyPick.objects.count(), 2)
eq_(MonthlyPick.objects.all()[1].locale, 'fr')
def test_insert_no_image(self):
dupe = initial(self.f)
dupe.update(id='', image='', locale='en-US')
data = formset(initial(self.f), dupe, initial_count=1)
self.client.post(self.url, data)
eq_(MonthlyPick.objects.count(), 2)
eq_(MonthlyPick.objects.all()[1].image, '')
def test_success_insert_no_locale(self):
dupe = initial(self.f)
del dupe['id']
del dupe['locale']
data = formset(initial(self.f), dupe, initial_count=1)
self.client.post(self.url, data)
eq_(MonthlyPick.objects.count(), 2)
eq_(MonthlyPick.objects.all()[1].locale, '')
def test_insert_long_blurb(self):
dupe = initial(self.f)
dupe.update(id='', blurb='x' * 201, locale='en-US')
data = formset(initial(self.f), dupe, initial_count=1)
r = self.client.post(self.url, data)
eq_(r.context['form'].errors[1]['blurb'][0],
'Ensure this value has at most 200 characters (it has 201).')
def test_success_update(self):
d = initial(self.f)
d.update(locale='fr')
r = self.client.post(self.url, formset(d, initial_count=1))
eq_(r.status_code, 302)
eq_(MonthlyPick.objects.all()[0].locale, 'fr')
def test_success_delete(self):
d = initial(self.f)
d.update(DELETE=True)
self.client.post(self.url, formset(d, initial_count=1))
eq_(MonthlyPick.objects.count(), 0)
def test_require_login(self):
self.client.logout()
r = self.client.get(self.url)
eq_(r.status_code, 302)
class TestFeatures(amo.tests.TestCase):
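    """Tests for the featured collections admin form and its validation."""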
fixtures = ['base/apps', 'base/users', 'base/collections',
'base/addon_3615.json']
def setUp(self):
assert self.client.login(username='[email protected]',
password='password')
self.url = reverse('zadmin.features')
FeaturedCollection.objects.create(application_id=amo.FIREFOX.id,
locale='zh-CN', collection_id=80)
self.f = self.client.get(self.url).context['form'].initial_forms[0]
self.initial = self.f.initial
def test_form_initial(self):
eq_(self.initial['application'], amo.FIREFOX.id)
eq_(self.initial['locale'], 'zh-CN')
eq_(self.initial['collection'], 80)
def test_form_attrs(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
eq_(doc('#features tr').attr('data-app'), str(amo.FIREFOX.id))
assert doc('#features td.app').hasClass(amo.FIREFOX.short)
eq_(doc('#features td.collection.loading').attr('data-collection'),
'80')
assert doc('#features .collection-ac.js-hidden')
assert not doc('#features .collection-ac[disabled]')
def test_disabled_autocomplete_errors(self):
"""If any collection errors, autocomplete field should be enabled."""
d = dict(application=amo.FIREFOX.id, collection=999)
data = formset(self.initial, d, initial_count=1)
r = self.client.post(self.url, data)
doc = pq(r.content)
assert not doc('#features .collection-ac[disabled]')
def test_required_app(self):
d = dict(locale='zh-CN', collection=80)
data = formset(self.initial, d, initial_count=1)
r = self.client.post(self.url, data)
eq_(r.status_code, 200)
eq_(r.context['form'].errors[0]['application'],
['This field is required.'])
eq_(r.context['form'].errors[0]['collection'],
['Invalid collection for this application.'])
def test_bad_app(self):
d = dict(application=999, collection=80)
data = formset(self.initial, d, initial_count=1)
r = self.client.post(self.url, data)
eq_(r.context['form'].errors[0]['application'],
['Select a valid choice. That choice is not one of the available '
'choices.'])
def test_bad_collection_for_app(self):
d = dict(application=amo.THUNDERBIRD.id, collection=80)
data = formset(self.initial, d, initial_count=1)
r = self.client.post(self.url, data)
eq_(r.context['form'].errors[0]['collection'],
['Invalid collection for this application.'])
def test_optional_locale(self):
d = dict(application=amo.FIREFOX.id, collection=80)
data = formset(self.initial, d, initial_count=1)
r = self.client.post(self.url, data)
eq_(r.context['form'].errors, [{}])
def test_bad_locale(self):
d = dict(application=amo.FIREFOX.id, locale='klingon', collection=80)
data = formset(self.initial, d, initial_count=1)
r = self.client.post(self.url, data)
eq_(r.context['form'].errors[0]['locale'],
['Select a valid choice. klingon is not one of the available '
'choices.'])
def test_required_collection(self):
d = dict(application=amo.FIREFOX.id)
data = formset(self.initial, d, initial_count=1)
r = self.client.post(self.url, data)
eq_(r.context['form'].errors[0]['collection'],
['This field is required.'])
def test_bad_collection(self):
d = dict(application=amo.FIREFOX.id, collection=999)
data = formset(self.initial, d, initial_count=1)
r = self.client.post(self.url, data)
eq_(r.context['form'].errors[0]['collection'],
['Invalid collection for this application.'])
def test_success_insert(self):
dupe = initial(self.f)
del dupe['id']
dupe.update(locale='fr')
data = formset(initial(self.f), dupe, initial_count=1)
self.client.post(self.url, data)
eq_(FeaturedCollection.objects.count(), 2)
eq_(FeaturedCollection.objects.all()[1].locale, 'fr')
def test_success_update(self):
d = initial(self.f)
d.update(locale='fr')
r = self.client.post(self.url, formset(d, initial_count=1))
eq_(r.status_code, 302)
eq_(FeaturedCollection.objects.all()[0].locale, 'fr')
def test_success_delete(self):
d = initial(self.f)
d.update(DELETE=True)
self.client.post(self.url, formset(d, initial_count=1))
eq_(FeaturedCollection.objects.count(), 0)
class TestOAuth(amo.tests.TestCase):
fixtures = ['base/users']
def setUp(self):
assert self.client.login(username='[email protected]',
password='password')
def test_create_consumer(self):
self.client.post(reverse('zadmin.oauth-consumer-create'),
data={'name': 'Test',
'description': 'Test description',
'status': 'accepted'})
eq_(Consumer.objects.count(), 1)
class TestLookup(amo.tests.TestCase):
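    """Tests for the admin search endpoint that looks up user profiles."""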
fixtures = ['base/users']
def setUp(self):
assert self.client.login(username='[email protected]',
password='password')
self.user = UserProfile.objects.get(pk=999)
self.url = reverse('zadmin.search', args=['users', 'userprofile'])
def test_logged_out(self):
self.client.logout()
eq_(self.client.get('%s?q=admin' % self.url).status_code, 403)
def check_results(self, q, expected):
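        """Query the lookup endpoint and check each expected id and label."""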
res = self.client.get(urlparams(self.url, q=q))
eq_(res.status_code, 200)
content = json.loads(res.content)
eq_(len(content), len(expected))
ids = [int(c['value']) for c in content]
emails = [u'%s' % c['label'] for c in content]
for d in expected:
id = d['value']
email = u'%s' % d['label']
assert id in ids, (
'Expected user ID "%s" not found' % id)
assert email in emails, (
'Expected username "%s" not found' % email)
def test_lookup_wrong_model(self):
self.url = reverse('zadmin.search', args=['doesnt', 'exist'])
res = self.client.get(urlparams(self.url, q=''))
eq_(res.status_code, 404)
def test_lookup_empty(self):
users = UserProfile.objects.values('id', 'email')
self.check_results('', [dict(
value=u['id'], label=u['email']) for u in users])
def test_lookup_by_id(self):
self.check_results(self.user.id, [dict(value=self.user.id,
label=self.user.email)])
def test_lookup_by_email(self):
self.check_results(self.user.email, [dict(value=self.user.id,
label=self.user.email)])
def test_lookup_by_username(self):
self.check_results(self.user.username, [dict(value=self.user.id,
label=self.user.email)])
class TestAddonSearch(amo.tests.ESTestCase):
fixtures = ['base/users', 'base/addon_3615']
def setUp(self):
self.reindex(Addon)
assert self.client.login(username='[email protected]',
password='password')
self.url = reverse('zadmin.addon-search')
@mock.patch('mkt.webapps.tasks.index_webapps')
def test_lookup_app(self, index_webapps_mock):
# Load the Webapp fixture here, as loading it in the
# TestAddonSearch.fixtures would trigger the reindex, and fail, as
# this is an AMO test.
management.call_command('loaddata', 'base/337141-steamcube')
        assert index_webapps_mock.called
res = self.client.get(urlparams(self.url, q='steamcube'))
eq_(res.status_code, 200)
links = pq(res.content)('form + h3 + ul li a')
eq_(len(links), 0)
        if any('Steamcube' in pq(li).text() for li in links):
            raise AssertionError('Did not expect webapp in results.')
def test_lookup_addon(self):
res = self.client.get(urlparams(self.url, q='delicious'))
# There's only one result, so it should just forward us to that page.
eq_(res.status_code, 302)
class TestAddonAdmin(amo.tests.TestCase):
fixtures = ['base/users', 'base/337141-steamcube', 'base/addon_3615']
def setUp(self):
assert self.client.login(username='[email protected]',
password='password')
self.url = reverse('admin:addons_addon_changelist')
def test_no_webapps(self):
res = self.client.get(self.url)
doc = pq(res.content)
rows = doc('#result_list tbody tr')
eq_(rows.length, 1)
        eq_(rows.find('a').attr('href'), '3615/')
class TestAddonManagement(amo.tests.TestCase):
fixtures = ['base/addon_3615', 'base/users']
def setUp(self):
self.addon = Addon.objects.get(pk=3615)
self.url = reverse('zadmin.addon_manage', args=[self.addon.slug])
self.client.login(username='[email protected]', password='password')
def _form_data(self, data=None):
initial_data = {
'status': '4',
'highest_status': '4',
'outstanding': '0',
'form-0-status': '4',
'form-0-id': '67442',
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '1',
}
if data:
initial_data.update(data)
return initial_data
def test_addon_status_change(self):
data = self._form_data({'status': '2'})
r = self.client.post(self.url, data, follow=True)
eq_(r.status_code, 200)
addon = Addon.objects.get(pk=3615)
eq_(addon.status, 2)
def test_outstanding_change(self):
data = self._form_data({'outstanding': '1'})
r = self.client.post(self.url, data, follow=True)
eq_(r.status_code, 200)
addon = Addon.objects.get(pk=3615)
eq_(addon.outstanding, 1)
def test_addon_file_status_change(self):
data = self._form_data({'form-0-status': '2'})
r = self.client.post(self.url, data, follow=True)
eq_(r.status_code, 200)
file = File.objects.get(pk=67442)
eq_(file.status, 2)
@mock.patch.object(File, 'file_path',
amo.tests.AMOPaths().file_fixture_path(
'delicious_bookmarks-2.1.106-fx.xpi'))
def test_regenerate_hash(self):
version = Version.objects.create(addon_id=3615)
file = File.objects.create(
filename='delicious_bookmarks-2.1.106-fx.xpi', version=version)
r = self.client.post(reverse('zadmin.recalc_hash', args=[file.id]))
eq_(json.loads(r.content)[u'success'], 1)
file = File.objects.get(pk=file.id)
assert file.size, 'File size should not be zero'
assert file.hash, 'File hash should not be empty'
@mock.patch.object(File, 'file_path',
amo.tests.AMOPaths().file_fixture_path(
'delicious_bookmarks-2.1.106-fx.xpi'))
def test_regenerate_hash_get(self):
""" Don't allow GET """
version = Version.objects.create(addon_id=3615)
file = File.objects.create(
filename='delicious_bookmarks-2.1.106-fx.xpi', version=version)
r = self.client.get(reverse('zadmin.recalc_hash', args=[file.id]))
eq_(r.status_code, 405) # GET out of here
class TestJetpack(amo.tests.TestCase):
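    """Tests for the Jetpack upgrade admin page (version ranges, upgrades)."""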
fixtures = ['base/users']
def setUp(self):
self.url = reverse('zadmin.jetpack')
self.client.login(username='[email protected]', password='password')
self.versions = '["1.0", "1.1", "1.2", "1.2.1"]'
self.patcher = mock.patch('devhub.tasks.urllib2.urlopen')
self.urlopen_mock = self.patcher.start()
self.urlopen_mock.return_value = self.urlopener(self.versions)
self.addCleanup(self.patcher.stop)
def urlopener(self, content):
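        """Build a minimal mock standing in for a urllib2 response."""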
m = mock.Mock()
m.read.return_value = content
return m
def test_no_builder_versions(self):
self.urlopen_mock.return_value = self.urlopener('xxx')
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
for field in ('minver', 'maxver'):
eq_(doc('input[name=%s]' % field).length, 1)
def test_get_builder_versions(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
for field in ('minver', 'maxver'):
eq_(doc('select[name=%s]' % field).length, 1)
options = doc('select[name=%s] option' % field)
versions = [''] + json.loads(self.versions)
for option, version in zip(options, versions):
eq_(pq(option).attr('value'), version)
def test_change_range_optional(self):
r = self.client.post(self.url)
self.assertRedirects(r, self.url)
def test_change_range_max_required(self):
r = self.client.post(self.url, {'minver': '1.0'})
eq_(r.status_code, 200)
assert_required(r.context['form'].errors['maxver'][0])
def test_change_range_min_required(self):
r = self.client.post(self.url, {'maxver': '1.1'})
eq_(r.status_code, 200)
assert_required(r.context['form'].errors['minver'][0])
def test_change_range_bad(self):
r = self.client.post(self.url, {'minver': '1.1', 'maxver': '1.0'})
eq_(r.status_code, 200)
eq_(r.context['form'].non_field_errors(), ['Invalid version range.'])
def test_change_range_unknown(self):
r = self.client.post(self.url, {'minver': '9.0', 'maxver': '99.0'})
eq_(r.status_code, 200)
self.assertFormError(r, 'form', 'minver',
'Select a valid choice. 9.0 is not one of the available choices.')
self.assertFormError(r, 'form', 'maxver',
'Select a valid choice. 99.0 is not one of the available choices.')
def set_range(self, min_, max_):
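        """Post a builder version range and verify it round-trips on the page."""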
r = self.client.post(self.url, {'minver': min_, 'maxver': max_})
self.assertRedirects(r, self.url)
r = self.client.get(self.url)
eq_(r.status_code, 200)
minver, maxver = r.context['upgrader'].jetpack_versions()
eq_(minver, min_)
eq_(maxver, max_)
eq_(r.context['upgrader'].version(), None)
eq_(pq(r.content)('input[name=upgrade]').length, 1)
def test_change_range_success(self):
self.set_range('1.0', '1.1')
def submit_upgrade(self):
r = self.client.post(self.url, {'upgrade': True})
self.assertRedirects(r, self.url)
def test_upgrade(self):
self.set_range('1.2', '1.2.1')
self.submit_upgrade()
r = self.client.get(self.url)
eq_(r.status_code, 200)
eq_(r.context['upgrader'].version(), '1.2.1')
eq_(pq(r.content)('input[name=cancel]').length, 1)
def test_cancel(self):
self.set_range('1.2', '1.2.1')
self.submit_upgrade()
r = self.client.post(self.url, {'cancel': True})
self.assertRedirects(r, self.url)
r = self.client.get(self.url)
eq_(r.status_code, 200)
eq_(r.context['upgrader'].version(), None)
@mock.patch('zadmin.views.start_upgrade_task.delay')
def test_resend(self, start_upgrade):
self.set_range('1.2', '1.2.1')
self.submit_upgrade()
file_id = str(5)
self.client.get(reverse('zadmin.jetpack.resend', args=[file_id]))
start_upgrade.assert_called_with([file_id], sdk_version='1.2.1')
class TestCompat(amo.tests.ESTestCase):
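    """Tests for the add-on compatibility report admin view."""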
fixtures = ['base/users']
def setUp(self):
self.url = reverse('zadmin.compat')
self.client.login(username='[email protected]', password='password')
self.app = amo.FIREFOX
self.app_version = amo.COMPAT[0]['main']
self.addon = self.populate(guid='xxx')
self.generate_reports(self.addon, good=0, bad=0, app=self.app,
app_version=self.app_version)
def update(self):
compatibility_report()
self.refresh()
def populate(self, **kw):
now = datetime.now()
name = 'Addon %s' % now
kw.update(guid=name)
addon = amo.tests.addon_factory(name=name, **kw)
UpdateCount.objects.create(addon=addon, count=10, date=now)
return addon
def generate_reports(self, addon, good, bad, app, app_version):
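        """Create passing/failing compat reports, then refresh the index."""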
defaults = dict(guid=addon.guid, app_guid=app.guid,
app_version=app_version)
for x in xrange(good):
CompatReport.objects.create(works_properly=True, **defaults)
for x in xrange(bad):
CompatReport.objects.create(works_properly=False, **defaults)
self.update()
def get_pq(self, **kw):
r = self.client.get(self.url, kw)
eq_(r.status_code, 200)
return pq(r.content)('#compat-results')
def test_defaults(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
eq_(r.context['app'], self.app)
eq_(r.context['version'], self.app_version)
table = pq(r.content)('#compat-results')
eq_(table.length, 1)
eq_(table.find('.no-results').length, 1)
def check_row(self, tr, addon, good, bad, percentage, app, app_version):
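        """Check one result row: name, stats, links, and the override form."""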
eq_(tr.length, 1)
version = addon.current_version.version
name = tr.find('.name')
eq_(name.find('.version').text(), 'v' + version)
eq_(name.remove('.version').text(), unicode(addon.name))
eq_(name.find('a').attr('href'), addon.get_url_path())
eq_(tr.find('.maxver').text(), addon.compatible_apps[app].max.version)
incompat = tr.find('.incompat')
eq_(incompat.find('.bad').text(), str(bad))
eq_(incompat.find('.total').text(), str(good + bad))
percentage += '%'
assert percentage in incompat.text(), (
'Expected incompatibility to be %r' % percentage)
eq_(tr.find('.version a').attr('href'),
reverse('devhub.versions.edit',
args=[addon.slug, addon.current_version.id]))
eq_(tr.find('.reports a').attr('href'),
reverse('compat.reporter_detail', args=[addon.guid]))
form = tr.find('.overrides form')
eq_(form.attr('action'), reverse('admin:addons_compatoverride_add'))
self.check_field(form, '_compat_ranges-TOTAL_FORMS', '1')
self.check_field(form, '_compat_ranges-INITIAL_FORMS', '0')
self.check_field(form, '_continue', '1')
self.check_field(form, '_confirm', '1')
self.check_field(form, 'addon', str(addon.id))
self.check_field(form, 'guid', addon.guid)
compat_field = '_compat_ranges-0-%s'
self.check_field(form, compat_field % 'min_version', '0')
self.check_field(form, compat_field % 'max_version', version)
self.check_field(form, compat_field % 'app', str(app.id))
self.check_field(form, compat_field % 'min_app_version',
app_version + 'a1')
self.check_field(form, compat_field % 'max_app_version',
app_version + '*')
def check_field(self, form, name, val):
eq_(form.find('input[name="%s"]' % name).val(), val)
def test_firefox_hosted(self):
addon = self.populate()
self.generate_reports(addon, good=0, bad=11, app=self.app,
app_version=self.app_version)
tr = self.get_pq().find('tr[data-guid="%s"]' % addon.guid)
self.check_row(tr, addon, good=0, bad=11, percentage='100.0',
app=self.app, app_version=self.app_version)
# Add an override for this current app version.
compat = CompatOverride.objects.create(addon=addon, guid=addon.guid)
CompatOverrideRange.objects.create(compat=compat,
app_id=amo.FIREFOX.id, min_app_version=self.app_version + 'a1',
max_app_version=self.app_version + '*')
# Check that there is an override for this current app version.
tr = self.get_pq().find('tr[data-guid="%s"]' % addon.guid)
eq_(tr.find('.overrides a').attr('href'),
reverse('admin:addons_compatoverride_change', args=[compat.id]))
def test_non_default_version(self):
app_version = amo.COMPAT[2]['main']
addon = self.populate()
self.generate_reports(addon, good=0, bad=11, app=self.app,
app_version=app_version)
eq_(self.get_pq().find('tr[data-guid="%s"]' % addon.guid).length, 0)
appver = '%s-%s' % (self.app.id, app_version)
tr = self.get_pq(appver=appver)('tr[data-guid="%s"]' % addon.guid)
self.check_row(tr, addon, good=0, bad=11, percentage='100.0',
app=self.app, app_version=app_version)
def test_minor_versions(self):
addon = self.populate()
self.generate_reports(addon, good=0, bad=1, app=self.app,
app_version=self.app_version)
self.generate_reports(addon, good=1, bad=2, app=self.app,
app_version=self.app_version + 'a2')
tr = self.get_pq(ratio=0.0, minimum=0).find('tr[data-guid="%s"]' %
addon.guid)
self.check_row(tr, addon, good=1, bad=3, percentage='75.0',
app=self.app, app_version=self.app_version)
def test_ratio(self):
addon = self.populate()
self.generate_reports(addon, good=11, bad=11, app=self.app,
app_version=self.app_version)
# Should not show up for > 80%.
eq_(self.get_pq().find('tr[data-guid="%s"]' % addon.guid).length, 0)
# Should not show up for > 50%.
tr = self.get_pq(ratio=.5).find('tr[data-guid="%s"]' % addon.guid)
eq_(tr.length, 0)
# Should show up for > 40%.
tr = self.get_pq(ratio=.4).find('tr[data-guid="%s"]' % addon.guid)
eq_(tr.length, 1)
def test_min_incompatible(self):
addon = self.populate()
self.generate_reports(addon, good=0, bad=11, app=self.app,
app_version=self.app_version)
# Should show up for >= 10.
eq_(self.get_pq().find('tr[data-guid="%s"]' % addon.guid).length, 1)
# Should show up for >= 0.
tr = self.get_pq(minimum=0).find('tr[data-guid="%s"]' % addon.guid)
eq_(tr.length, 1)
# Should not show up for >= 20.
tr = self.get_pq(minimum=20).find('tr[data-guid="%s"]' % addon.guid)
eq_(tr.length, 0)
class TestMemcache(amo.tests.TestCase):
fixtures = ['base/addon_3615', 'base/users']
def setUp(self):
self.url = reverse('zadmin.memcache')
cache.set('foo', 'bar')
self.client.login(username='[email protected]', password='password')
def test_login(self):
self.client.logout()
eq_(self.client.get(self.url).status_code, 302)
def test_can_clear(self):
self.client.post(self.url, {'yes': 'True'})
eq_(cache.get('foo'), None)
def test_cant_clear(self):
self.client.post(self.url, {'yes': 'False'})
eq_(cache.get('foo'), 'bar')
class TestElastic(amo.tests.ESTestCase):
fixtures = ['base/addon_3615', 'base/users']
def setUp(self):
self.url = reverse('zadmin.elastic')
self.client.login(username='[email protected]', password='password')
def test_login(self):
self.client.logout()
self.assertRedirects(self.client.get(self.url),
reverse('users.login') + '?to=/en-US/admin/elastic')
class TestEmailDevs(amo.tests.TestCase):
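    """Tests for the mass email-to-developers form and recipient filters."""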
fixtures = ['base/addon_3615', 'base/users']
def setUp(self):
self.login('admin')
self.addon = Addon.objects.get(pk=3615)
def post(self, recipients='eula', subject='subject', message='msg',
preview_only=False):
return self.client.post(reverse('zadmin.email_devs'),
dict(recipients=recipients, subject=subject,
message=message,
preview_only=preview_only))
def test_preview(self):
res = self.post(preview_only=True)
self.assertNoFormErrors(res)
preview = EmailPreviewTopic(topic='email-devs')
eq_([e.recipient_list for e in preview.filter()], ['[email protected]'])
eq_(len(mail.outbox), 0)
def test_actual(self):
subject = 'about eulas'
message = 'message about eulas'
res = self.post(subject=subject, message=message)
self.assertNoFormErrors(res)
self.assertRedirects(res, reverse('zadmin.email_devs'))
eq_(len(mail.outbox), 1)
eq_(mail.outbox[0].subject, subject)
eq_(mail.outbox[0].body, message)
eq_(mail.outbox[0].to, ['[email protected]'])
eq_(mail.outbox[0].from_email, settings.DEFAULT_FROM_EMAIL)
def test_only_eulas(self):
self.addon.update(eula=None)
res = self.post()
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 0)
def test_sdk_devs(self):
(File.objects.filter(version__addon=self.addon)
.update(jetpack_version='1.5'))
res = self.post(recipients='sdk')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 1)
eq_(mail.outbox[0].to, ['[email protected]'])
def test_only_sdk_devs(self):
res = self.post(recipients='sdk')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 0)
def test_only_apps_with_payments(self):
self.addon.update(type=amo.ADDON_WEBAPP,
premium_type=amo.ADDON_PREMIUM)
res = self.post(recipients='payments')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 1)
mail.outbox = []
self.addon.update(status=amo.STATUS_PENDING)
res = self.post(recipients='payments')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 1)
mail.outbox = []
self.addon.update(status=amo.STATUS_DELETED)
res = self.post(recipients='payments')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 0)
def test_only_free_apps_with_new_regions(self):
self.addon.update(type=amo.ADDON_WEBAPP)
res = self.post(recipients='free_apps_region_enabled')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 0)
mail.outbox = []
res = self.post(recipients='free_apps_region_disabled')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 1)
mail.outbox = []
self.addon.update(enable_new_regions=True)
res = self.post(recipients='free_apps_region_enabled')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 1)
mail.outbox = []
res = self.post(recipients='free_apps_region_disabled')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 0)
def test_only_apps_with_payments_and_new_regions(self):
self.addon.update(type=amo.ADDON_WEBAPP,
premium_type=amo.ADDON_PREMIUM)
res = self.post(recipients='payments_region_enabled')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 0)
mail.outbox = []
res = self.post(recipients='payments_region_disabled')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 1)
mail.outbox = []
self.addon.update(enable_new_regions=True)
res = self.post(recipients='payments_region_enabled')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 1)
mail.outbox = []
res = self.post(recipients='payments_region_disabled')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 0)
def test_only_desktop_apps(self):
from addons.models import AddonDeviceType
self.addon.update(type=amo.ADDON_WEBAPP)
AddonDeviceType.objects.create(addon=self.addon,
device_type=amo.DEVICE_MOBILE.id)
res = self.post(recipients='desktop_apps')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 0)
mail.outbox = []
AddonDeviceType.objects.create(addon=self.addon,
device_type=amo.DEVICE_DESKTOP.id)
res = self.post(recipients='desktop_apps')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 1)
mail.outbox = []
self.addon.update(status=amo.STATUS_PENDING)
res = self.post(recipients='desktop_apps')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 1)
mail.outbox = []
self.addon.update(status=amo.STATUS_DELETED)
res = self.post(recipients='desktop_apps')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 0)
def test_only_apps(self):
self.addon.update(type=amo.ADDON_WEBAPP)
res = self.post(recipients='apps')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 1)
def test_only_extensions(self):
self.addon.update(type=amo.ADDON_EXTENSION)
res = self.post(recipients='all_extensions')
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 1)
def test_ignore_deleted_always(self):
self.addon.update(status=amo.STATUS_DELETED)
for name, label in DevMailerForm._choices:
res = self.post(recipients=name)
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 0)
def test_exclude_pending_for_addons(self):
self.addon.update(status=amo.STATUS_PENDING)
for name, label in DevMailerForm._choices:
if name in ('payments', 'desktop_apps'):
continue
res = self.post(recipients=name)
self.assertNoFormErrors(res)
eq_(len(mail.outbox), 0)
class TestPerms(amo.tests.TestCase):
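    """Checks which admin views each permission group may access."""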
fixtures = ['base/users', 'base/apps', 'zadmin/tests/flagged']
def test_admin_user(self):
# Admin should see views with Django's perm decorator and our own.
assert self.client.login(username='[email protected]',
password='password')
eq_(self.client.get(reverse('zadmin.index')).status_code, 200)
eq_(self.client.get(reverse('zadmin.settings')).status_code, 200)
eq_(self.client.get(reverse('zadmin.flagged')).status_code, 200)
eq_(self.client.get(reverse('zadmin.langpacks')).status_code, 200)
eq_(self.client.get(reverse('zadmin.addon-search')).status_code, 200)
eq_(self.client.get(reverse('zadmin.monthly_pick')).status_code, 200)
eq_(self.client.get(reverse('zadmin.features')).status_code, 200)
eq_(self.client.get(
reverse('discovery.module_admin')).status_code, 200)
eq_(self.client.get(
reverse('zadmin.oauth-consumer-create')).status_code, 200)
def test_staff_user(self):
# Staff users have some privileges.
user = UserProfile.objects.get(email='[email protected]')
group = Group.objects.create(name='Staff', rules='AdminTools:View')
GroupUser.objects.create(group=group, user=user)
assert self.client.login(username='[email protected]',
password='password')
eq_(self.client.get(reverse('zadmin.index')).status_code, 200)
eq_(self.client.get(reverse('zadmin.settings')).status_code, 200)
eq_(self.client.get(reverse('zadmin.flagged')).status_code, 200)
eq_(self.client.get(reverse('zadmin.langpacks')).status_code, 200)
eq_(self.client.get(reverse('zadmin.addon-search')).status_code, 200)
eq_(self.client.get(reverse('zadmin.monthly_pick')).status_code, 200)
eq_(self.client.get(reverse('zadmin.features')).status_code, 200)
eq_(self.client.get(
reverse('discovery.module_admin')).status_code, 200)
eq_(self.client.get(
reverse('zadmin.oauth-consumer-create')).status_code, 403)
def test_sr_reviewers_user(self):
# Sr Reviewers users have only a few privileges.
user = UserProfile.objects.get(email='[email protected]')
group = Group.objects.create(name='Sr Reviewer',
rules='ReviewerAdminTools:View')
GroupUser.objects.create(group=group, user=user)
assert self.client.login(username='[email protected]',
password='password')
eq_(self.client.get(reverse('zadmin.index')).status_code, 200)
eq_(self.client.get(reverse('zadmin.flagged')).status_code, 200)
eq_(self.client.get(reverse('zadmin.langpacks')).status_code, 200)
eq_(self.client.get(reverse('zadmin.addon-search')).status_code, 200)
eq_(self.client.get(reverse('zadmin.settings')).status_code, 403)
eq_(self.client.get(
reverse('zadmin.oauth-consumer-create')).status_code, 403)
def test_bulk_compat_user(self):
# Bulk Compatibility Updaters only have access to /admin/validation/*.
user = UserProfile.objects.get(email='[email protected]')
group = Group.objects.create(name='Bulk Compatibility Updaters',
rules='BulkValidationAdminTools:View')
GroupUser.objects.create(group=group, user=user)
assert self.client.login(username='[email protected]',
password='password')
eq_(self.client.get(reverse('zadmin.index')).status_code, 200)
eq_(self.client.get(reverse('zadmin.validation')).status_code, 200)
eq_(self.client.get(reverse('zadmin.flagged')).status_code, 403)
eq_(self.client.get(reverse('zadmin.langpacks')).status_code, 403)
eq_(self.client.get(reverse('zadmin.addon-search')).status_code, 403)
eq_(self.client.get(reverse('zadmin.settings')).status_code, 403)
eq_(self.client.get(
reverse('zadmin.oauth-consumer-create')).status_code, 403)
def test_unprivileged_user(self):
# Unprivileged user.
assert self.client.login(username='[email protected]',
password='password')
eq_(self.client.get(reverse('zadmin.index')).status_code, 403)
eq_(self.client.get(reverse('zadmin.settings')).status_code, 403)
eq_(self.client.get(reverse('zadmin.flagged')).status_code, 403)
eq_(self.client.get(reverse('zadmin.langpacks')).status_code, 403)
eq_(self.client.get(reverse('zadmin.addon-search')).status_code, 403)
eq_(self.client.get(reverse('zadmin.monthly_pick')).status_code, 403)
eq_(self.client.get(reverse('zadmin.features')).status_code, 403)
eq_(self.client.get(
reverse('discovery.module_admin')).status_code, 403)
eq_(self.client.get(
reverse('zadmin.oauth-consumer-create')).status_code, 403)
# Anonymous users should also get a 403.
self.client.logout()
self.assertRedirects(self.client.get(reverse('zadmin.index')),
reverse('users.login') + '?to=/en-US/admin/')
| robhudson/zamboni | apps/zadmin/tests/test_views.py | Python | bsd-3-clause | 87,057 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-07-10 12:00
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('django_project', '0013_auto_20160710_1124'),
]
operations = [
migrations.AlterField(
model_name='annotation',
name='comment',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='django_project.Comment'),
),
]
| kunalsharma05/django-project | django_project/migrations/0014_auto_20160710_1200.py | Python | bsd-3-clause | 558 |
from django.http import HttpResponse
from django.template import Template
def admin_required_view(request):
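    """Render a trivial template whose body depends on staff status."""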
if request.user.is_staff:
return HttpResponse(Template('You are an admin').render({}))
return HttpResponse(Template('Access denied').render({}))
| bfirsh/pytest_django | tests/views.py | Python | bsd-3-clause | 270 |
from django.db import models
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
from press_links.enums import STATUS_CHOICES, LIVE_STATUS, DRAFT_STATUS
from django.utils import timezone
from parler.models import TranslatableModel, TranslatedFields
from parler.managers import TranslatableQuerySet
class EntryManager(models.Manager):
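    """Manager for entries, with a ``live()`` shortcut for published ones."""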
def get_queryset(self):
return EntryQuerySet(self.model)
def live(self):
"""
Returns a list of all published entries.
:rtype: django.db.models.QuerySet
"""
return self.filter(pub_date__lte=timezone.now(), status=LIVE_STATUS) \
.filter(site=Site.objects.get_current())
class EntryQuerySet(TranslatableQuerySet):
pass
class Entry(TranslatableModel):
author = models.ForeignKey(User, verbose_name=_('author'),
related_name='%(app_label)s_%(class)s_related')
slug = models.SlugField(max_length=255, unique_for_date='pub_date',
verbose_name='slug')
pub_date = models.DateTimeField(default=timezone.now,
verbose_name=_('publication date'))
status = models.IntegerField(choices=STATUS_CHOICES, default=DRAFT_STATUS,
verbose_name=_('status'))
site = models.ManyToManyField(Site,
verbose_name=_('Sites where the entry is published'),
related_name='%(app_label)s_%(class)s_related')
objects = EntryManager()
translations = TranslatedFields(
title=models.CharField(max_length=255, verbose_name=_('title')),
source=models.CharField(max_length=255, blank=True,
verbose_name=_('the source for the entry')),
excerpt=models.TextField(blank=True, verbose_name=_(u'Excerpt'))
)
@models.permalink
def get_absolute_url(self):
return ('press_links_entry_detail', (), {'slug': self.slug})
class Meta:
get_latest_by = 'pub_date'
ordering = ['-pub_date']
verbose_name = _('Press Entry')
verbose_name_plural = _('Press Entries')
def __unicode__(self):
return self.title
class Link(TranslatableModel):
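    """A translatable link (address and text) attached to a press entry."""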
link_new_page = models.BooleanField(default=False, verbose_name=_('open link in new page'))
entry = models.ForeignKey(Entry, verbose_name=_('Entry'))
translations = TranslatedFields(
link=models.CharField(max_length=255,
verbose_name=_('link address (add http:// for external link)')),
link_text=models.CharField(max_length=255,
verbose_name=_('text for link'))
)
class Meta:
verbose_name = _('Press Link')
verbose_name_plural = _('Press Links')
| iberben/django-press-links | press_links/models.py | Python | bsd-3-clause | 2,904 |
# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.accounts.models import User
from huxley.api.tests import (CreateAPITestCase, DestroyAPITestCase,
ListAPITestCase, PartialUpdateAPITestCase,
RetrieveAPITestCase)
from huxley.utils.test import TestSchools, TestUsers
class UserDetailGetTestCase(RetrieveAPITestCase):
url_name = 'api:user_detail'
def test_anonymous_user(self):
'''It should reject request from an anonymous user.'''
user = TestUsers.new_user()
response = self.get_response(user.id)
self.assertNotAuthenticated(response)
def test_other_user(self):
'''It should reject request from another user.'''
user1 = TestUsers.new_user(username='user1')
user2 = TestUsers.new_user(username='user2', password='user2')
self.client.login(username='user2', password='user2')
response = self.get_response(user1.id)
self.assertPermissionDenied(response)
def test_superuser(self):
'''It should return the correct fields for a superuser.'''
user1 = TestUsers.new_user(username='user1')
user2 = TestUsers.new_superuser(username='user2', password='user2')
self.client.login(username='user2', password='user2')
response = self.get_response(user1.id)
self.assertEqual(response.data, {
'id': user1.id,
'username': user1.username,
'first_name': user1.first_name,
'last_name': user1.last_name,
'user_type': user1.user_type,
'school': user1.school_id,
'committee': user1.committee_id})
def test_self(self):
'''It should return the correct fields for a single user.'''
school = TestSchools.new_school()
user = school.advisor
self.client.login(username=user.username, password='test')
response = self.get_response(user.id)
self.assertEqual(response.data, {
'id': user.id,
'username': user.username,
'first_name': user.first_name,
'last_name': user.last_name,
'user_type': user.user_type,
'school': {
'id': school.id,
'registered': school.registered.isoformat(),
'name': school.name,
'address': school.address,
'city': school.city,
'state': school.state,
'zip_code': school.zip_code,
'country': school.country,
'primary_name': school.primary_name,
'primary_gender': school.primary_gender,
'primary_email': school.primary_email,
'primary_phone': school.primary_phone,
'primary_type': school.primary_type,
'secondary_name': school.secondary_name,
'secondary_gender': school.secondary_gender,
'secondary_email': school.secondary_email,
'secondary_phone': school.secondary_phone,
'secondary_type': school.secondary_type,
'program_type': school.program_type,
'times_attended': school.times_attended,
'international': school.international,
'waitlist': school.waitlist,
'beginner_delegates': school.beginner_delegates,
'intermediate_delegates': school.intermediate_delegates,
'advanced_delegates': school.advanced_delegates,
'spanish_speaking_delegates': school.spanish_speaking_delegates,
'country_preferences': school.country_preference_ids,
'prefers_bilingual': school.prefers_bilingual,
'prefers_specialized_regional':
school.prefers_specialized_regional,
'prefers_crisis': school.prefers_crisis,
'prefers_alternative': school.prefers_alternative,
'prefers_press_corps': school.prefers_press_corps,
'registration_comments': school.registration_comments,
'fees_owed': float(school.fees_owed),
'fees_paid': float(school.fees_paid),
},
'committee': user.committee_id})
def test_chair(self):
'''It should have the correct fields for chairs.'''
user = TestUsers.new_user(user_type=User.TYPE_CHAIR,
committee_id=4)
self.client.login(username='testuser', password='test')
response = self.get_response(user.id)
self.assertEqual(response.data, {
'id': user.id,
'username': user.username,
'first_name': user.first_name,
'last_name': user.last_name,
'user_type': user.user_type,
'school': user.school_id,
'committee': user.committee_id})
class UserDetailDeleteTestCase(DestroyAPITestCase):
url_name = 'api:user_detail'
def setUp(self):
self.user = TestUsers.new_user(username='user1', password='user1')
def test_anonymous_user(self):
'''It should reject the request from an anonymous user.'''
response = self.get_response(self.user.id)
self.assertNotAuthenticated(response)
self.assertTrue(User.objects.filter(id=self.user.id).exists())
def test_other_user(self):
'''It should reject the request from another user.'''
TestUsers.new_user(username='user2', password='user2')
self.client.login(username='user2', password='user2')
response = self.get_response(self.user.id)
self.assertPermissionDenied(response)
self.assertTrue(User.objects.filter(id=self.user.id).exists())
def test_self(self):
'''It should allow a user to delete themself.'''
self.client.login(username='user1', password='user1')
response = self.get_response(self.user.id)
self.assertEqual(response.status_code, 204)
self.assertFalse(User.objects.filter(id=self.user.id).exists())
def test_superuser(self):
'''It should allow a superuser to delete a user.'''
TestUsers.new_superuser(username='user2', password='user2')
self.client.login(username='user2', password='user2')
response = self.get_response(self.user.id)
self.assertEqual(response.status_code, 204)
self.assertFalse(User.objects.filter(id=self.user.id).exists())
class UserDetailPatchTestCase(PartialUpdateAPITestCase):
url_name = 'api:user_detail'
params = {'first_name': 'first',
'last_name': 'last'}
def setUp(self):
self.user = TestUsers.new_user(username='user1', password='user1')
def test_anonymous_user(self):
'''An anonymous user should not be able to change information.'''
response = self.get_response(self.user.id, params=self.params)
self.assertNotAuthenticated(response)
user = User.objects.get(id=self.user.id)
self.assertEqual(user.first_name, 'Test')
self.assertEqual(user.last_name, 'User')
def test_other_user(self):
'''Another user should not be able to change information about any other user.'''
TestUsers.new_user(username='user2', password='user2')
self.client.login(username='user2', password='user2')
response = self.get_response(self.user.id, params=self.params)
self.assertPermissionDenied(response)
user = User.objects.get(id=self.user.id)
self.assertEqual(user.first_name, 'Test')
self.assertEqual(user.last_name, 'User')
def test_self(self):
'''A User should be allowed to change information about himself.'''
self.client.login(username='user1', password='user1')
response = self.get_response(self.user.id, params=self.params)
user = User.objects.get(id=self.user.id)
self.assertEqual(response.data['first_name'], user.first_name)
self.assertEqual(response.data['last_name'], user.last_name)
def test_superuser(self):
'''A superuser should be allowed to change information about a user.'''
TestUsers.new_superuser(username='user2', password='user2')
self.client.login(username='user2', password='user2')
response = self.get_response(self.user.id, params=self.params)
user = User.objects.get(id=self.user.id)
self.assertEqual(response.data['first_name'], user.first_name)
self.assertEqual(response.data['last_name'], user.last_name)
class UserListGetTestCase(ListAPITestCase):
url_name = 'api:user_list'
def test_anonymous_user(self):
'''It should reject the request from an anonymous user.'''
TestUsers.new_user(username='user1')
TestUsers.new_user(username='user2')
response = self.get_response()
self.assertNotAuthenticated(response)
def test_user(self):
'''It should reject the request from a regular user.'''
TestUsers.new_user(username='user1', password='user1')
TestUsers.new_user(username='user2')
self.client.login(username='user1', password='user1')
response = self.get_response()
self.assertPermissionDenied(response)
def test_superuser(self):
'''It should allow a superuser to list all users.'''
user1 = TestUsers.new_superuser(username='user1', password='user1')
user2 = TestUsers.new_user(username='user2')
self.client.login(username='user1', password='user1')
response = self.get_response()
self.assertEqual(response.data, [
{'id': user1.id,
'username': user1.username,
'first_name': user1.first_name,
'last_name': user1.last_name,
'user_type': user1.user_type,
'school': user1.school_id,
'committee': user1.committee_id},
{'id': user2.id,
'username': user2.username,
'first_name': user2.first_name,
'last_name': user2.last_name,
'user_type': user2.user_type,
'school': user2.school_id,
'committee': user2.committee_id}])
class UserListPostTestCase(CreateAPITestCase):
url_name = 'api:user_list'
params = {'username': 'Kunal',
'password': 'password',
'first_name': 'Kunal',
'last_name': 'Mehta'}
def test_valid(self):
params = self.get_params()
response = self.get_response(params)
user_query = User.objects.filter(id=response.data['id'])
self.assertTrue(user_query.exists())
user = User.objects.get(id=response.data['id'])
self.assertEqual(response.data, {
'id': user.id,
'username': user.username,
'first_name': user.first_name,
'last_name': user.last_name,
'user_type': User.TYPE_ADVISOR,
'school': user.school_id,
'email': user.email})
def test_empty_username(self):
response = self.get_response(params=self.get_params(username=''))
self.assertEqual(response.data, {
'username': ['This field is required.']})
def test_taken_username(self):
TestUsers.new_user(username='_Kunal', password='pass')
response = self.get_response(params=self.get_params(username='_Kunal'))
self.assertEqual(response.data, {
'username': ['This username is already taken.']})
def test_invalid_username(self):
response = self.get_response(params=self.get_params(username='>Kunal'))
self.assertEqual(response.data, {
'username': ['Usernames may contain alphanumerics, underscores, '
'and/or hyphens only.']})
def test_empty_password(self):
response = self.get_response(params=self.get_params(password=''))
self.assertEqual(response.data, {
'password': ['This field is required.']})
def test_invalid_password(self):
response = self.get_response(params=self.get_params(password='>pass'))
self.assertEqual(response.data, {
'password': ['Password contains invalid characters.']})
def test_empty_first_name(self):
response = self.get_response(params=self.get_params(first_name=''))
self.assertEqual(response.data, {
'first_name': ['This field is required.']})
def test_empty_last_name(self):
response = self.get_response(params=self.get_params(last_name=''))
self.assertEqual(response.data, {
'last_name': ['This field is required.']})
def test_username_length(self):
response = self.get_response(params=self.get_params(username='user'))
self.assertEqual(response.data, {
'username': ['Username must be at least 5 characters.']})
def test_password_length(self):
response = self.get_response(params=self.get_params(password='pass'))
self.assertEqual(response.data, {
'password': ['Password must be at least 6 characters.']})
class CurrentUserTestCase(TestCase):
def setUp(self):
self.client = Client()
self.url = reverse('api:current_user')
def get_data(self, url):
return json.loads(self.client.get(url).content)
def test_login(self):
user = TestUsers.new_user(username='lol', password='lol')
user2 = TestUsers.new_user(username='bunny', password='bunny')
credentials = {'username': 'lol', 'password': 'lol'}
response = self.client.post(self.url,
data=json.dumps(credentials),
content_type='application/json')
self.assertEqual(response.status_code, 201)
self.assertEqual(self.client.session['_auth_user_id'], user.id)
credentials = {'username': 'bunny', 'password': 'bunny'}
response = self.client.post(self.url,
data=json.dumps(credentials),
content_type='application/json')
self.assertEqual(self.client.session['_auth_user_id'], user.id)
data = json.loads(response.content)
self.assertEqual(data['detail'],
'Another user is currently logged in.')
def test_logout(self):
user = TestUsers.new_user(username='lol', password='lol')
self.client.login(username='lol', password='lol')
self.assertEqual(self.client.session['_auth_user_id'], user.id)
response = self.client.delete(self.url)
self.assertEqual(response.status_code, 204)
self.assertTrue('_auth_user_id' not in self.client.session)
def test_get(self):
data = self.get_data(self.url)
self.assertEqual(len(data.keys()), 1)
self.assertEqual(data['detail'], 'Not found')
school = TestSchools.new_school()
user = school.advisor
self.client.login(username=user.username, password='test')
data = self.get_data(self.url)
self.assertEqual(len(data.keys()), 7)
self.assertEqual(data['id'], user.id)
self.assertEqual(data['username'], user.username)
self.assertEqual(data['first_name'], user.first_name)
self.assertEqual(data['last_name'], user.last_name)
self.assertEqual(data['user_type'], User.TYPE_ADVISOR)
self.assertEqual(data['school'], {
'id': school.id,
'registered': school.registered.isoformat(),
'name': school.name,
'address': school.address,
'city': school.city,
'state': school.state,
'zip_code': school.zip_code,
'country': school.country,
'primary_name': school.primary_name,
'primary_gender': school.primary_gender,
'primary_email': school.primary_email,
'primary_phone': school.primary_phone,
'primary_type': school.primary_type,
'secondary_name': school.secondary_name,
'secondary_gender': school.secondary_gender,
'secondary_email': school.secondary_email,
'secondary_phone': school.secondary_phone,
'secondary_type': school.secondary_type,
'program_type': school.program_type,
'times_attended': school.times_attended,
'international': school.international,
'waitlist': school.waitlist,
'beginner_delegates': school.beginner_delegates,
'intermediate_delegates': school.intermediate_delegates,
'advanced_delegates': school.advanced_delegates,
'spanish_speaking_delegates': school.spanish_speaking_delegates,
'country_preferences': school.country_preference_ids,
'prefers_bilingual': school.prefers_bilingual,
'prefers_specialized_regional': school.prefers_specialized_regional,
'prefers_crisis': school.prefers_crisis,
'prefers_alternative': school.prefers_alternative,
'prefers_press_corps': school.prefers_press_corps,
'registration_comments': school.registration_comments,
'fees_owed': float(school.fees_owed),
'fees_paid': float(school.fees_paid),
})
| jmosky12/huxley | huxley/api/tests/test_user.py | Python | bsd-3-clause | 17,518 |
from django.db import models
class Entry(models.Model):
title = models.CharField(max_length=200)
date = models.DateTimeField()
class Meta:
ordering = ('date',)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return "/blog/%s/" % self.pk
class Article(models.Model):
title = models.CharField(max_length=200)
entry = models.ForeignKey(Entry)
def __unicode__(self):
return self.title
| bfirsh/syndication-view | syndication/tests/models.py | Python | bsd-3-clause | 486 |
from holoviews.element import (
ElementConversion, Points as HvPoints, Polygons as HvPolygons,
Path as HvPath
)
from .geo import (_Element, Feature, Tiles, is_geographic, # noqa (API import)
WMTS, Points, Image, Text, LineContours, RGB,
FilledContours, Path, Polygons, Shape, Dataset,
Contours, TriMesh, Graph, Nodes, EdgePaths, QuadMesh,
VectorField, Labels, HexTiles, Rectangles, Segments)
class GeoConversion(ElementConversion):
"""
GeoConversion is a very simple container object which can
be given an existing Dataset and provides methods to convert
the Dataset into most other Element types. If the requested
key dimensions correspond to geographical coordinates the
conversion interface will automatically use a geographical
    Element type while all other plots will use regular HoloViews
Elements.
"""
def __init__(self, cube):
self._element = cube
def __call__(self, *args, **kwargs):
group_type = args[0]
if 'crs' not in kwargs and issubclass(group_type, _Element):
kwargs['crs'] = self._element.crs
is_gpd = self._element.interface.datatype == 'geodataframe'
if is_gpd:
kdims = args[1] if len(args) > 1 else kwargs.get('kdims', None)
if len(args) > 1:
args = (Dataset, [])+args[2:]
else:
args = (Dataset,)
kwargs['kdims'] = []
converted = super(GeoConversion, self).__call__(*args, **kwargs)
if is_gpd:
if kdims is None: kdims = group_type.kdims
converted = converted.map(lambda x: x.clone(kdims=kdims, new_type=group_type), Dataset)
return converted
def linecontours(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(LineContours, kdims, vdims, mdims, **kwargs)
def filledcontours(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(FilledContours, kdims, vdims, mdims, **kwargs)
def image(self, kdims=None, vdims=None, mdims=None, **kwargs):
return self(Image, kdims, vdims, mdims, **kwargs)
def points(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Points if is_geographic(self._element, kdims) else HvPoints
return self(el_type, kdims, vdims, mdims, **kwargs)
def polygons(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Polygons if is_geographic(self._element, kdims) else HvPolygons
return self(el_type, kdims, vdims, mdims, **kwargs)
def path(self, kdims=None, vdims=None, mdims=None, **kwargs):
if kdims is None: kdims = self._element.kdims
el_type = Path if is_geographic(self._element, kdims) else HvPath
return self(el_type, kdims, vdims, mdims, **kwargs)
Dataset._conversion_interface = GeoConversion
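# Usage sketch (illustrative, hypothetical data): given a geographic Dataset,
#     ds = Dataset(df, kdims=['longitude', 'latitude'], vdims=['value'])
# the conversion interface picks geographic element types and forwards the
# dataset's crs automatically:
#     ds.to.points(['longitude', 'latitude'])   # geoviews Points, not HvPoints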
| ioam/geoviews | geoviews/element/__init__.py | Python | bsd-3-clause | 3,017 |
#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'Edgecast (Verizon Digital Media)'
def is_waf(self):
schemes = [
self.matchHeader(('Server', r'^ECD(.+)?')),
self.matchHeader(('Server', r'^ECS(.*)?'))
]
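    # Illustrative (assumed) header values: a response such as
    #   Server: ECS (dcb/7F83)   or   Server: ECD (ny/1234)
    # matches one of the regexes above and identifies Edgecast.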
    if any(schemes):
return True
return False | EnableSecurity/wafw00f | wafw00f/plugins/edgecast.py | Python | bsd-3-clause | 371 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-08 22:45
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('topics', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='articletopicrank',
options={'ordering': ('rank',)},
),
migrations.AlterModelOptions(
name='wordtopicrank',
options={'ordering': ('rank',)},
),
]
| GeorgiaTechDHLab/TOME | topics/migrations/0002_auto_20170308_2245.py | Python | bsd-3-clause | 537 |
from operator import attrgetter
from plyj.model.source_element import Expression
from plyj.utility import assert_type
class Name(Expression):
simple = property(attrgetter("_simple"))
value = property(attrgetter("_value"))
def __init__(self, value):
"""
Represents a name.
:param value: The name.
"""
super(Name, self).__init__()
self._fields = ['value']
self._value = None
self._simple = False
self.value = value
@value.setter
def value(self, value):
self._value = assert_type(value, str)
self._simple = "." not in value
def append_name(self, name):
self._simple = False
if isinstance(name, Name):
self._value = self._value + '.' + name._value
else:
self._value = self._value + '.' + name
@staticmethod
def ensure(se, simple):
if isinstance(se, str):
return Name(se)
if not isinstance(se, Name):
raise TypeError("Required Name, got \"{}\"".format(str(type(se))))
if simple and not se.simple:
raise TypeError("Required simple Name, got complex Name")
return se
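    # Usage sketch (illustrative): qualified names are built incrementally.
    #     n = Name('java'); n.append_name('util'); n.append_name(Name('List'))
    #     n.value -> 'java.util.List' (and n.simple becomes False)
    #     Name.ensure('Foo', True) -> a simple Name instance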
def serialize(self):
return self.value | Craxic/plyj | plyj/model/name.py | Python | bsd-3-clause | 1,255 |
from .variables import *
def Cell(node):
    # cells must stand on their own line
if node.parent.cls not in ("Assign", "Assigns"):
node.auxiliary("cell")
return "{", ",", "}"
def Assign(node):
if node.name == 'varargin':
out = "%(0)s = va_arg(varargin, " + node[0].type + ") ;"
else:
out = "%(0)s.clear() ;"
# append to cell, one by one
for elem in node[1]:
out = out + "\n%(0)s.push_back(" + str(elem) + ") ;"
return out
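# Illustrative translation (assumed output shape): for the MATLAB assignment
#     c = {1, 2}
# the Assign rule above emits roughly
#     c.clear() ;
#     c.push_back(1) ;
#     c.push_back(2) ;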
| jonathf/matlab2cpp | src/matlab2cpp/rules/_cell.py | Python | bsd-3-clause | 495 |
# -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'border' de la commande 'voile'."""
from primaires.interpreteur.masque.parametre import Parametre
class PrmBorder(Parametre):
"""Commande 'voile border'.
"""
def __init__(self):
"""Constructeur du paramètre"""
Parametre.__init__(self, "border", "haul")
self.schema = "<nombre>"
self.aide_courte = "borde la voile présente"
self.aide_longue = \
"Cette commande permet de border la voile dans la salle où " \
"vous vous trouvez. Plus la voile est bordée, plus elle " \
"est parallèle à l'âxe du navire. La voile doit être plus " \
"ou moins bordée selon l'allure du navire. Si vous voulez " \
"changer d'amure, utilisez la commande %voile% %voile:empanner%."
def ajouter(self):
"""Méthode appelée lors de l'ajout de la commande à l'interpréteur"""
nombre = self.noeud.get_masque("nombre")
nombre.proprietes["limite_sup"] = "90"
def interpreter(self, personnage, dic_masques):
"""Interprétation du paramètre"""
salle = personnage.salle
if not hasattr(salle, "voiles"):
personnage << "|err|Vous n'êtes pas sur un navire.|ff|"
return
voiles = salle.voiles
if not voiles:
personnage << "|err|Vous ne voyez aucune voile ici.|ff|"
return
voile = voiles[0]
if not voile.hissee:
personnage << "|err|Cette voile n'est pas hissée.|ff|"
else:
nombre = dic_masques["nombre"].nombre
if voile.orientation < 0:
voile.orientation += nombre
if voile.orientation > -5:
voile.orientation = -5
personnage << "Vous bordez {}.".format(voile.nom)
personnage.salle.envoyer("{{}} borde {}.".format(
voile.nom), personnage)
elif voile.orientation > 0:
voile.orientation -= nombre
if voile.orientation < 5:
voile.orientation = 5
personnage << "Vous bordez {}.".format(voile.nom)
personnage.salle.envoyer("{{}} borde {}.".format(
voile.nom), personnage)
else:
personnage << "|err|Cette voile ne peut être bordée " \
"davantage.|ff|"
| stormi/tsunami | src/secondaires/navigation/commandes/voile/border.py | Python | bsd-3-clause | 4,041 |
from corehq.apps.reports.models import HQToggle
from corehq.apps.reports.fields import ReportField
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_noop
class SubmitToggle(HQToggle):
def __init__(self, type, show, name, doc_type):
super(SubmitToggle, self).__init__(type, show, name)
self.doc_type = doc_type
class SubmissionErrorType(object):
# This is largely modeled off how the user filter works
SUCCESS = 0
PARTIAL_SUCCESS = 1
DUPLICATE = 2
OVERWRITTEN = 3
UNKNOWN_ERROR = 4
ARCHIVED = 5
doc_types = ["XFormInstance", "XFormError", "XFormDuplicate", "XFormDeprecated", "SubmissionErrorLog", "XFormArchived"]
human_readable = [ugettext_noop("Normal Form"),
ugettext_noop("Form with Errors"),
ugettext_noop("Duplicate Form"),
ugettext_noop("Overwritten Form"),
ugettext_noop("Generic Error"),
ugettext_noop("Archived Form")]
error_defaults = [False, True, False, False, True, False]
success_defaults = [True, False, False, False, False, False]
@classmethod
def display_name_by_doc_type(cls, doc_type):
return cls.display_name_by_index(cls.doc_types.index(doc_type))
@classmethod
def display_name_by_index(cls, index):
return cls.human_readable[index]
@classmethod
def doc_type_by_index(cls, index):
return cls.doc_types[index]
@classmethod
def use_error_defaults(cls):
return [SubmitToggle(i, cls.error_defaults[i], name, cls.doc_types[i]) for i, name in enumerate(cls.human_readable)]
@classmethod
def use_success_defaults(cls):
return [SubmitToggle(i, cls.success_defaults[i], name, cls.doc_types[i]) for i, name in enumerate(cls.human_readable)]
@classmethod
def use_filter(cls, filter):
return [SubmitToggle(i, unicode(i) in filter, name, cls.doc_types[i]) for i, name in enumerate(cls.human_readable)]
class SubmissionTypeField(ReportField):
slug = "submitfilter"
template = "reports/fields/submit_error_types.html"
def update_context(self):
self.context['submission_types'] = self.get_filter_toggle(self.request)
@classmethod
def get_filter_toggle(cls, request):
        filter = None
        if request.GET.get(cls.slug, ''):
            filter = request.GET.getlist(cls.slug)
if filter:
return SubmissionErrorType.use_filter(filter)
else:
return SubmissionErrorType.use_error_defaults()
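# Usage sketch (hypothetical query string): ?submitfilter=0&submitfilter=2
# enables only "Normal Form" and "Duplicate Form":
#     toggles = SubmissionErrorType.use_filter(['0', '2'])
#     [t.doc_type for t in toggles if t.show]
#     # -> ['XFormInstance', 'XFormDuplicate']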
| gmimano/commcaretest | corehq/apps/receiverwrapper/fields.py | Python | bsd-3-clause | 2,703 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('users', '0004_auto_20150428_2142'),
]
operations = [
migrations.AddField(
model_name='parentrelation',
name='signature',
field=models.CharField(max_length=255, null=True, verbose_name='sig', blank=True),
preserve_default=True,
),
]
| oskarm91/sis | apps/users/migrations/0005_parentrelation_signature.py | Python | bsd-3-clause | 488 |
# BridgeDB by Nick Mathewson.
# Copyright (c) 2007-2009, The Tor Project, Inc.
# See LICENSE for licensing information
from __future__ import print_function
import doctest
import os
import random
import sqlite3
import tempfile
import unittest
import warnings
import time
from datetime import datetime
import bridgedb.Bridges
import bridgedb.Main
import bridgedb.Dist
import bridgedb.Time
import bridgedb.Storage
import re
import ipaddr
from bridgedb.Filters import filterBridgesByIP4
from bridgedb.Filters import filterBridgesByIP6
from bridgedb.Filters import filterBridgesByOnlyIP4
from bridgedb.Filters import filterBridgesByOnlyIP6
from bridgedb.Filters import filterBridgesByTransport
from bridgedb.Filters import filterBridgesByNotBlockedIn
from bridgedb.Stability import BridgeHistory
from bridgedb.parse import addr
from bridgedb.parse import networkstatus
from math import log
def suppressWarnings():
warnings.filterwarnings('ignore', '.*tmpnam.*')
def randomIP():
if random.choice(xrange(2)):
return randomIP4()
return randomIP6()
def randomIP4():
return ipaddr.IPv4Address(random.getrandbits(32))
def randomIP4String():
return randomIP4().compressed
def randomIP6():
return ipaddr.IPv6Address(random.getrandbits(128))
def randomIP6String():
return bracketIP6(randomIP6().compressed)
def randomIPString():
if random.choice(xrange(2)):
return randomIP4String()
return randomIP6String()
def bracketIP6(ip):
"""Put brackets around an IPv6 address, just as tor does."""
return "[%s]" % ip
def random16IP():
upper = "123.123." # same 16
lower = ".".join([str(random.randrange(1,256)) for _ in xrange(2)])
return upper+lower
def randomPort():
return random.randint(1,65535)
def randomPortSpec():
"""
    returns a random port-spec string of comma-separated ports
"""
ports = []
for i in range(0,24):
ports.append(random.randint(1,65535))
ports.sort(reverse=True)
portspec = ""
for i in range(0,16):
portspec += "%d," % random.choice(ports)
portspec = portspec.rstrip(',') #remove trailing ,
return portspec
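# Illustrative output (random; shown for shape only):
#     randomPortSpec() -> "64021,61234,58777,...,2210"
# i.e. 16 comma-separated ports drawn from a descending pool of 24.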
def randomCountry():
countries = ['us', 'nl', 'de', 'cz', 'sk', 'as', 'si', 'it']
#XXX: load from geoip
return random.choice(countries)
def randomCountrySpec():
countries = ['us', 'nl', 'de', 'cz', 'sk', 'as', 'si', 'it']
#XXX: load from geoip
spec = ""
choices = []
for i in xrange(10):
choices.append(random.choice(countries))
choices = set(choices) #dedupe
choices = list(choices)
spec += ",".join(choices)
return spec
def fakeBridge(orport=8080, running=True, stable=True, or_addresses=False,
transports=False):
nn = "bridge-%s"%random.randrange(0,1000000)
ip = ipaddr.IPAddress(randomIP4())
fp = "".join([random.choice("0123456789ABCDEF") for _ in xrange(40)])
b = bridgedb.Bridges.Bridge(nn,ip,orport,fingerprint=fp)
b.setStatus(running, stable)
oraddrs = []
if or_addresses:
for i in xrange(8):
# Only add or_addresses if they are valid. Otherwise, the test
# will randomly fail if an invalid address is chosen:
address = randomIP4String()
portlist = addr.PortList(randomPortSpec())
if addr.isValidIP(address):
oraddrs.append((address, portlist,))
for address, portlist in oraddrs:
networkstatus.parseALine("{0}:{1}".format(address, portlist))
try:
portlist.add(b.or_addresses[address])
except KeyError:
pass
finally:
b.or_addresses[address] = portlist
if transports:
for i in xrange(0,8):
b.transports.append(bridgedb.Bridges.PluggableTransport(b,
random.choice(["obfs", "obfs2", "pt1"]),
randomIP(), randomPort()))
return b
def fakeBridge6(orport=8080, running=True, stable=True, or_addresses=False,
transports=False):
nn = "bridge-%s"%random.randrange(0,1000000)
ip = ipaddr.IPAddress(randomIP6())
fp = "".join([random.choice("0123456789ABCDEF") for _ in xrange(40)])
b = bridgedb.Bridges.Bridge(nn,ip,orport,fingerprint=fp)
b.setStatus(running, stable)
oraddrs = []
if or_addresses:
for i in xrange(8):
# Only add or_addresses if they are valid. Otherwise, the test
# will randomly fail if an invalid address is chosen:
address = randomIP6()
portlist = addr.PortList(randomPortSpec())
if addr.isValidIP(address):
address = bracketIP6(address)
oraddrs.append((address, portlist,))
for address, portlist in oraddrs:
networkstatus.parseALine("{0}:{1}".format(address, portlist))
try:
portlist.add(b.or_addresses[address])
except KeyError:
pass
finally:
b.or_addresses[address] = portlist
if transports:
for i in xrange(0,8):
b.transports.append(bridgedb.Bridges.PluggableTransport(b,
random.choice(["obfs", "obfs2", "pt1"]),
randomIP(), randomPort()))
return b
def fake16Bridge(orport=8080, running=True, stable=True):
nn = "bridge-%s"%random.randrange(0,1000000)
ip = random16IP()
fp = "".join([random.choice("0123456789ABCDEF") for _ in xrange(40)])
b = bridgedb.Bridges.Bridge(nn,ip,orport,fingerprint=fp)
b.setStatus(running, stable)
return b
simpleDesc = "router Unnamed %s %s 0 9030\n"\
"opt fingerprint DEAD BEEF F00F DEAD BEEF F00F DEAD BEEF F00F DEAD\n"\
"opt @purpose bridge\n"
orAddress = "or-address %s:%s\n"
def gettimestamp():
ts = time.strftime("%Y-%m-%d %H:%M:%S")
return "opt published %s\n" % ts
class RhymesWith255Category:
def contains(self, ip):
return ip.endswith(".255")
class EmailBridgeDistTests(unittest.TestCase):
def setUp(self):
self.fd, self.fname = tempfile.mkstemp()
self.db = bridgedb.Storage.Database(self.fname)
bridgedb.Storage.setGlobalDB(self.db)
self.cur = self.db._conn.cursor()
def tearDown(self):
self.db.close()
os.close(self.fd)
os.unlink(self.fname)
def testEmailRateLimit(self):
db = self.db
EMAIL_DOMAIN_MAP = {'example.com':'example.com'}
d = bridgedb.Dist.EmailBasedDistributor(
"Foo",
{'example.com': 'example.com',
'dkim.example.com': 'dkim.example.com'},
{'example.com': [], 'dkim.example.com': ['dkim']})
for _ in xrange(256):
d.insert(fakeBridge())
d.getBridgesForEmail('[email protected]', 1, 3)
self.assertRaises(bridgedb.Dist.TooSoonEmail,
d.getBridgesForEmail, '[email protected]', 1, 3)
self.assertRaises(bridgedb.Dist.IgnoreEmail,
d.getBridgesForEmail, '[email protected]', 1, 3)
def testUnsupportedDomain(self):
db = self.db
self.assertRaises(bridgedb.Dist.UnsupportedDomain,
bridgedb.Dist.normalizeEmail, '[email protected]',
{'example.com':'example.com'},
{'example.com':[]})
class IPBridgeDistTests(unittest.TestCase):
def dumbAreaMapper(self, ip):
return ip
def testBasicDist(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(256):
d.insert(fakeBridge())
n = d.getBridgesForIP("1.2.3.4", "x", 2)
n2 = d.getBridgesForIP("1.2.3.4", "x", 2)
self.assertEquals(n, n2)
def testDistWithCategories(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo",
[RhymesWith255Category()])
assert len(d.categories) == 1
for _ in xrange(256):
d.insert(fakeBridge())
for _ in xrange(256):
# Make sure that the categories do not overlap
f = lambda: ".".join([str(random.randrange(1,255)) for _ in xrange(4)])
g = lambda: ".".join([str(random.randrange(1,255)) for _ in xrange(3)] + ['255'])
n = d.getBridgesForIP(g(), "x", 10)
n2 = d.getBridgesForIP(f(), "x", 10)
assert(len(n) > 0)
assert(len(n2) > 0)
for b in n:
assert (b not in n2)
for b in n2:
assert (b not in n)
#XXX: #6175 breaks this test!
#def testDistWithPortRestrictions(self):
# param = bridgedb.Bridges.BridgeRingParameters(needPorts=[(443, 1)])
# d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Baz",
# answerParameters=param)
# for _ in xrange(32):
# d.insert(fakeBridge(443))
# for _ in range(256):
# d.insert(fakeBridge())
# for _ in xrange(32):
# i = randomIP()
# n = d.getBridgesForIP(i, "x", 5)
# count = 0
# fps = {}
# for b in n:
# fps[b.getID()] = 1
# if b.orport == 443:
# count += 1
# self.assertEquals(len(fps), len(n))
# self.assertEquals(len(fps), 5)
# self.assertTrue(count >= 1)
#def testDistWithFilter16(self):
# d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
# for _ in xrange(256):
# d.insert(fake16Bridge())
# n = d.getBridgesForIP("1.2.3.4", "x", 10)
# slash16s = dict()
# for bridge in n:
# m = re.match(r'(\d+\.\d+)\.\d+\.\d+', bridge.ip)
# upper16 = m.group(1)
# self.assertTrue(upper16 not in slash16s)
# slash16s[upper16] = True
def testDistWithFilterIP6(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(250):
d.insert(fakeBridge6(or_addresses=True))
d.insert(fakeBridge(or_addresses=True))
for i in xrange(500):
bridges = d.getBridgesForIP(randomIP4String(),
"faketimestamp",
bridgeFilterRules=[filterBridgesByIP6])
bridge = random.choice(bridges)
bridge_line = bridge.getConfigLine(addressClass=ipaddr.IPv6Address)
address, portlist = networkstatus.parseALine(bridge_line)
assert type(address) is ipaddr.IPv6Address
assert filterBridgesByIP6(random.choice(bridges))
def testDistWithFilterIP4(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(250):
d.insert(fakeBridge6(or_addresses=True))
d.insert(fakeBridge(or_addresses=True))
for i in xrange(500):
bridges = d.getBridgesForIP(randomIP4String(),
"faketimestamp",
bridgeFilterRules=[filterBridgesByIP4])
bridge = random.choice(bridges)
bridge_line = bridge.getConfigLine(addressClass=ipaddr.IPv4Address)
address, portlist = networkstatus.parseALine(bridge_line)
assert type(address) is ipaddr.IPv4Address
assert filterBridgesByIP4(random.choice(bridges))
def testDistWithFilterBoth(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(250):
d.insert(fakeBridge6(or_addresses=True))
d.insert(fakeBridge(or_addresses=True))
for i in xrange(50):
bridges = d.getBridgesForIP(randomIP4String(),
"faketimestamp", 1,
bridgeFilterRules=[
filterBridgesByIP4,
filterBridgesByIP6])
if bridges:
t = bridges.pop()
assert filterBridgesByIP4(t)
assert filterBridgesByIP6(t)
address, portlist = networkstatus.parseALine(
t.getConfigLine(addressClass=ipaddr.IPv4Address))
assert type(address) is ipaddr.IPv4Address
address, portlist = networkstatus.parseALine(
t.getConfigLine(addressClass=ipaddr.IPv6Address))
assert type(address) is ipaddr.IPv6Address
def testDistWithFilterAll(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(250):
d.insert(fakeBridge6(or_addresses=True))
d.insert(fakeBridge(or_addresses=True))
for i in xrange(5):
b = d.getBridgesForIP(randomIP4String(), "x", 1, bridgeFilterRules=[
filterBridgesByOnlyIP4, filterBridgesByOnlyIP6])
assert len(b) == 0
def testDistWithFilterBlockedCountries(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(250):
d.insert(fakeBridge6(or_addresses=True))
d.insert(fakeBridge(or_addresses=True))
for b in d.splitter.bridges:
# china blocks all :-(
for pt in b.transports:
key = "%s:%s" % (pt.address, pt.port)
b.blockingCountries[key] = set(['cn'])
for address, portlist in b.or_addresses.items():
for port in portlist:
key = "%s:%s" % (address, port)
b.blockingCountries[key] = set(['cn'])
key = "%s:%s" % (b.ip, b.orport)
b.blockingCountries[key] = set(['cn'])
for i in xrange(5):
b = d.getBridgesForIP(randomIP4String(), "x", 1, bridgeFilterRules=[
filterBridgesByNotBlockedIn("cn")])
assert len(b) == 0
b = d.getBridgesForIP(randomIP4String(), "x", 1, bridgeFilterRules=[
filterBridgesByNotBlockedIn("us")])
assert len(b) > 0
def testDistWithFilterBlockedCountriesAdvanced(self):
d = bridgedb.Dist.IPBasedDistributor(self.dumbAreaMapper, 3, "Foo")
for _ in xrange(250):
d.insert(fakeBridge6(or_addresses=True, transports=True))
d.insert(fakeBridge(or_addresses=True, transports=True))
for b in d.splitter.bridges:
# china blocks some transports
for pt in b.transports:
if random.choice(xrange(2)) > 0:
key = "%s:%s" % (pt.address, pt.port)
b.blockingCountries[key] = set(['cn'])
for address, portlist in b.or_addresses.items():
# china blocks some transports
for port in portlist:
if random.choice(xrange(2)) > 0:
key = "%s:%s" % (address, port)
b.blockingCountries[key] = set(['cn'])
key = "%s:%s" % (b.ip, b.orport)
b.blockingCountries[key] = set(['cn'])
# we probably will get at least one bridge back!
# it's pretty unlikely to lose a coin flip 250 times in a row
for i in xrange(5):
b = d.getBridgesForIP(randomIPString(), "x", 1,
bridgeFilterRules=[
filterBridgesByNotBlockedIn("cn", methodname='obfs2'),
filterBridgesByTransport('obfs2'),
])
try: assert len(b) > 0
except AssertionError:
print("epic fail")
b = d.getBridgesForIP(randomIPString(), "x", 1, bridgeFilterRules=[
filterBridgesByNotBlockedIn("us")])
assert len(b) > 0
class DictStorageTests(unittest.TestCase):
def setUp(self):
self.fd, self.fname = tempfile.mkstemp()
self.conn = sqlite3.Connection(self.fname)
def tearDown(self):
self.conn.close()
os.close(self.fd)
os.unlink(self.fname)
def testSimpleDict(self):
self.conn.execute("CREATE TABLE A ( X PRIMARY KEY, Y )")
d = bridgedb.Storage.SqliteDict(self.conn, self.conn.cursor(),
"A", (), (), "X", "Y")
self.basictests(d)
def testComplexDict(self):
self.conn.execute("CREATE TABLE B ( X, Y, Z, "
"CONSTRAINT B_PK PRIMARY KEY (X,Y) )")
d = bridgedb.Storage.SqliteDict(self.conn, self.conn.cursor(),
"B", ("X",), ("x1",), "Y", "Z")
d2 = bridgedb.Storage.SqliteDict(self.conn, self.conn.cursor(),
"B", ("X",), ("x2",), "Y", "Z")
self.basictests(d)
self.basictests(d2)
def basictests(self, d):
d["hello"] = "goodbye"
d["hola"] = "adios"
self.assertEquals(d["hola"], "adios")
d["hola"] = "hasta luego"
self.assertEquals(d["hola"], "hasta luego")
self.assertEquals(sorted(d.keys()), [u"hello", u"hola"])
self.assertRaises(KeyError, d.__getitem__, "buongiorno")
self.assertEquals(d.get("buongiorno", "ciao"), "ciao")
self.conn.commit()
d["buongiorno"] = "ciao"
del d['hola']
self.assertRaises(KeyError, d.__getitem__, "hola")
self.conn.rollback()
self.assertEquals(d["hola"], "hasta luego")
self.assertEquals(d.setdefault("hola","bye"), "hasta luego")
self.assertEquals(d.setdefault("yo","bye"), "bye")
self.assertEquals(d['yo'], "bye")
class SQLStorageTests(unittest.TestCase):
def setUp(self):
self.fd, self.fname = tempfile.mkstemp()
self.db = bridgedb.Storage.Database(self.fname)
self.cur = self.db._conn.cursor()
def tearDown(self):
self.db.close()
os.close(self.fd)
os.unlink(self.fname)
def assertCloseTo(self, a, b, delta=60):
self.assertTrue(abs(a-b) <= delta)
def testBridgeStorage(self):
db = self.db
B = bridgedb.Bridges.Bridge
t = time.time()
cur = self.cur
k1 = "aaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbb"
k2 = "abababababababababababababababababababab"
k3 = "cccccccccccccccccccccccccccccccccccccccc"
b1 = B("serv1", "1.2.3.4", 999, fingerprint=k1)
b1_v2 = B("serv1", "1.2.3.5", 9099, fingerprint=k1)
b2 = B("serv2", "2.3.4.5", 9990, fingerprint=k2)
b3 = B("serv3", "2.3.4.6", 9008, fingerprint=k3)
validRings = ["ring1", "ring2", "ring3"]
r = db.insertBridgeAndGetRing(b1, "ring1", t, validRings)
self.assertEquals(r, "ring1")
r = db.insertBridgeAndGetRing(b1, "ring10", t+500, validRings)
self.assertEquals(r, "ring1")
cur.execute("SELECT distributor, address, or_port, first_seen, "
"last_seen FROM Bridges WHERE hex_key = ?", (k1,))
v = cur.fetchone()
self.assertEquals(v,
("ring1", "1.2.3.4", 999,
bridgedb.Storage.timeToStr(t),
bridgedb.Storage.timeToStr(t+500)))
r = db.insertBridgeAndGetRing(b1_v2, "ring99", t+800, validRings)
self.assertEquals(r, "ring1")
cur.execute("SELECT distributor, address, or_port, first_seen, "
"last_seen FROM Bridges WHERE hex_key = ?", (k1,))
v = cur.fetchone()
self.assertEquals(v,
("ring1", "1.2.3.5", 9099,
bridgedb.Storage.timeToStr(t),
bridgedb.Storage.timeToStr(t+800)))
db.insertBridgeAndGetRing(b2, "ring2", t, validRings)
db.insertBridgeAndGetRing(b3, "ring3", t, validRings)
cur.execute("SELECT COUNT(distributor) FROM Bridges")
v = cur.fetchone()
self.assertEquals(v, (3,))
r = db.getEmailTime("[email protected]")
self.assertEquals(r, None)
db.setEmailTime("[email protected]", t)
db.setEmailTime("[email protected]", t+1000)
r = db.getEmailTime("[email protected]")
self.assertCloseTo(r, t)
r = db.getEmailTime("[email protected]")
self.assertCloseTo(r, t+1000)
r = db.getEmailTime("[email protected]")
self.assertEquals(r, None)
db.cleanEmailedBridges(t+200)
db.setEmailTime("[email protected]", t+5000)
r = db.getEmailTime("[email protected]")
self.assertEquals(r, None)
r = db.getEmailTime("[email protected]")
self.assertCloseTo(r, t+5000)
cur.execute("SELECT * FROM EmailedBridges")
self.assertEquals(len(cur.fetchall()), 1)
db.addBridgeBlock(b2.fingerprint, 'us')
self.assertEquals(db.isBlocked(b2.fingerprint, 'us'), True)
db.delBridgeBlock(b2.fingerprint, 'us')
self.assertEquals(db.isBlocked(b2.fingerprint, 'us'), False)
db.addBridgeBlock(b2.fingerprint, 'uk')
db.addBridgeBlock(b3.fingerprint, 'uk')
self.assertEquals(set([b2.fingerprint, b3.fingerprint]),
set(db.getBlockedBridges('uk')))
db.addBridgeBlock(b2.fingerprint, 'cn')
db.addBridgeBlock(b2.fingerprint, 'de')
db.addBridgeBlock(b2.fingerprint, 'jp')
db.addBridgeBlock(b2.fingerprint, 'se')
db.addBridgeBlock(b2.fingerprint, 'kr')
self.assertEquals(set(db.getBlockingCountries(b2.fingerprint)),
set(['uk', 'cn', 'de', 'jp', 'se', 'kr']))
self.assertEquals(db.getWarnedEmail("[email protected]"), False)
db.setWarnedEmail("[email protected]")
self.assertEquals(db.getWarnedEmail("[email protected]"), True)
db.setWarnedEmail("[email protected]", False)
self.assertEquals(db.getWarnedEmail("[email protected]"), False)
db.setWarnedEmail("[email protected]")
self.assertEquals(db.getWarnedEmail("[email protected]"), True)
db.cleanWarnedEmails(t+200)
self.assertEquals(db.getWarnedEmail("[email protected]"), False)
class ParseDescFileTests(unittest.TestCase):
def testSimpleDesc(self):
test = ""
for i in range(100):
test+= "".join(simpleDesc % (randomIP(), randomPort()))
test+=gettimestamp()
test+="router-signature\n"
bs = [b for b in bridgedb.Bridges.parseDescFile(test.split('\n'))]
self.assertEquals(len(bs), 100)
for b in bs:
b.assertOK()
def testSingleOrAddress(self):
test = ""
for i in range(100):
test+= simpleDesc % (randomIP(), randomPort())
test+= orAddress % (randomIP(),randomPort())
test+=gettimestamp()
test+= "router-signature\n"
bs = [b for b in bridgedb.Bridges.parseDescFile(test.split('\n'))]
self.assertEquals(len(bs), 100)
for b in bs:
b.assertOK()
def testMultipleOrAddress(self):
test = ""
for i in range(100):
test+= simpleDesc % (randomIPString(), randomPort())
for i in xrange(8):
test+= orAddress % (randomIPString(),randomPortSpec())
test+=gettimestamp()
test+= "router-signature\n"
bs = [b for b in bridgedb.Bridges.parseDescFile(test.split('\n'))]
self.assertEquals(len(bs), 100)
for b in bs:
b.assertOK()
def testConvolutedOrAddress(self):
test = ""
for i in range(100):
test+= simpleDesc % (randomIPString(), randomPort())
for i in xrange(8):
test+= orAddress % (randomIPString(),randomPortSpec())
test+=gettimestamp()
test+= "router-signature\n"
bs = [b for b in bridgedb.Bridges.parseDescFile(test.split('\n'))]
self.assertEquals(len(bs), 100)
for b in bs:
b.assertOK()
def testParseCountryBlockFile(self):
simpleBlock = "%s:%s %s\n"
countries = ['us', 'nl', 'de', 'cz', 'sk', 'as', 'si', 'it']
test = str()
for i in range(100):
test += simpleBlock % (randomIPString(), randomPort(),
randomCountrySpec())
test+=gettimestamp()
for a,p,c in bridgedb.Bridges.parseCountryBlockFile(test.split('\n')):
assert type(a) in (ipaddr.IPv6Address, ipaddr.IPv4Address)
assert isinstance(p, addr.PortList)
assert isinstance(c, list)
assert len(c) > 0
for y in c:
assert y in countries
#print "address: %s" % a
#print "portlist: %s" % p
#print "countries: %s" % c
class BridgeStabilityTests(unittest.TestCase):
def setUp(self):
self.fd, self.fname = tempfile.mkstemp()
self.db = bridgedb.Storage.Database(self.fname)
bridgedb.Storage.setGlobalDB(self.db)
self.cur = self.db._conn.cursor()
def tearDown(self):
self.db.close()
os.close(self.fd)
os.unlink(self.fname)
def testAddOrUpdateSingleBridgeHistory(self):
db = self.db
b = fakeBridge()
timestamp = time.time()
bhe = bridgedb.Stability.addOrUpdateBridgeHistory(b, timestamp)
assert isinstance(bhe, BridgeHistory)
assert isinstance(db.getBridgeHistory(b.fingerprint), BridgeHistory)
assert len([y for y in db.getAllBridgeHistory()]) == 1
def testDeletingSingleBridgeHistory(self):
db = self.db
b = fakeBridge()
timestamp = time.time()
bhe = bridgedb.Stability.addOrUpdateBridgeHistory(b, timestamp)
assert isinstance(bhe, BridgeHistory)
assert isinstance(db.getBridgeHistory(b.fingerprint), BridgeHistory)
db.delBridgeHistory(b.fingerprint)
assert db.getBridgeHistory(b.fingerprint) is None
assert len([y for y in db.getAllBridgeHistory()]) == 0
def testTOSA(self):
db = self.db
b = random.choice([fakeBridge,fakeBridge6])()
def timestampSeries(x):
for i in xrange(61):
yield (i+1)*60*30 + x # 30 minute intervals
now = time.time()
time_on_address = long(60*30*60) # 30 hours
downtime = 60*60*random.randint(0,4) # random hours of downtime
for t in timestampSeries(now):
bridgedb.Stability.addOrUpdateBridgeHistory(b,t)
assert db.getBridgeHistory(b.fingerprint).tosa == time_on_address
b.orport += 1
for t in timestampSeries(now + time_on_address + downtime):
bhe = bridgedb.Stability.addOrUpdateBridgeHistory(b,t)
assert db.getBridgeHistory(b.fingerprint).tosa == time_on_address + downtime
def testLastSeenWithDifferentAddressAndPort(self):
db = self.db
for i in xrange(10):
num_desc = 30
time_start = time.time()
ts = [ 60*30*(i+1) + time_start for i in xrange(num_desc) ]
b = random.choice([fakeBridge(), fakeBridge6()])
[ bridgedb.Stability.addOrUpdateBridgeHistory(b, t) for t in ts ]
# change the port
b.orport = b.orport+1
last_seen = ts[-1]
ts = [ 60*30*(i+1) + last_seen for i in xrange(num_desc) ]
[ bridgedb.Stability.addOrUpdateBridgeHistory(b, t) for t in ts ]
b = db.getBridgeHistory(b.fingerprint)
assert b.tosa == ts[-1] - last_seen
assert (long(last_seen*1000) == b.lastSeenWithDifferentAddressAndPort)
assert (long(ts[-1]*1000) == b.lastSeenWithThisAddressAndPort)
def testFamiliar(self):
# create some bridges
# XXX: slow
num_bridges = 10
num_desc = 4*48 # 30m intervals, 48 per day
time_start = time.time()
bridges = [ fakeBridge() for x in xrange(num_bridges) ]
t = time.time()
ts = [ (i+1)*60*30+t for i in xrange(num_bridges) ]
for b in bridges:
time_series = [ 60*30*(i+1) + time_start for i in xrange(num_desc) ]
[ bridgedb.Stability.addOrUpdateBridgeHistory(b, i) for i in time_series ]
assert None not in bridges
# +1 to avoid rounding errors
assert bridges[-(num_bridges/8 + 1)].familiar == True
def testDiscountAndPruneBridgeHistory(self):
""" Test pruning of old Bridge History """
if os.environ.get('TRAVIS'):
self.skipTest("Hangs on Travis-CI.")
db = self.db
# make a bunch of bridges
num_bridges = 20
time_start = time.time()
bridges = [random.choice([fakeBridge, fakeBridge6])()
for i in xrange(num_bridges)]
# run some of the bridges for the full time series
running = bridges[:num_bridges/2]
# and some that are not
expired = bridges[num_bridges/2:]
for b in running: assert b not in expired
# Solving:
# 1 discount event per 12 hours, 24 descriptors 30m apart
num_successful = random.randint(2,60)
# figure out how many intervals it will take for weightedUptime to
# decay to < 1
num_desc = int(30*log(1/float(num_successful*30*60))/(-0.05))
timeseries = [ 60*30*(i+1) + time_start for i in xrange(num_desc) ]
for i in timeseries:
for b in running:
bridgedb.Stability.addOrUpdateBridgeHistory(b, i)
if num_successful > 0:
for b in expired:
bridgedb.Stability.addOrUpdateBridgeHistory(b, i)
num_successful -= 1
# now we expect to see the bridge has been removed from history
for bridge in expired:
b = db.getBridgeHistory(bridge.fingerprint)
assert b is None
# and make sure none of the others have
for bridge in running:
b = db.getBridgeHistory(bridge.fingerprint)
assert b is not None
def testSuite():
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for klass in [ IPBridgeDistTests, DictStorageTests, SQLStorageTests,
EmailBridgeDistTests, ParseDescFileTests, BridgeStabilityTests ]:
suite.addTest(loader.loadTestsFromTestCase(klass))
for module in [ bridgedb.Bridges,
bridgedb.Main,
bridgedb.Dist,
bridgedb.Time ]:
suite.addTest(doctest.DocTestSuite(module))
return suite
def main():
suppressWarnings()
unittest.TextTestRunner(verbosity=1).run(testSuite())
| wfn/bridgedb | lib/bridgedb/Tests.py | Python | bsd-3-clause | 30,756 |
from __future__ import absolute_import, unicode_literals
from six import add_metaclass, text_type
from .event_encoder import Parameter, EventEncoder
@add_metaclass(EventEncoder)
class Event(object):
hit = Parameter('t', text_type, required=True)
category = Parameter('ec', text_type, required=True)
action = Parameter('ea', text_type, required=True)
label = Parameter('el', text_type)
value = Parameter('ev', int)
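    # Usage sketch (assumed API, inferred from the parameters above):
    #     e = Event(category='video', action='play', label='intro', value=3)
    # The EventEncoder metaclass is expected to encode these attributes under
    # the Measurement Protocol keys t/ec/ea/el/ev.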
def __init__(self, **kwargs):
self.hit = 'event'
for name, value in kwargs.items():
setattr(self, name, value)
| enthought/python-analytics | python_analytics/events.py | Python | bsd-3-clause | 583 |
"""
=======================
Generate Surface Labels
=======================
Define a label that is centered on a specific vertex in the surface mesh. Plot
that label and the focus that defines its center.
"""
print __doc__
from surfer import Brain, utils
subject_id = "fsaverage"
"""
Bring up the visualization.
"""
brain = Brain(subject_id, "lh", "inflated")
"""
First we'll identify a stereotaxic focus in the MNI coordinate system. This
might be a peak activation from a volume-based analysis.
"""
coord = [-43, 25, 24]
"""
Next we grow a label along the surface around the nearest vertex to this
coordinate in the white surface mesh. The `n_steps` argument controls the size
of the resulting label.
"""
utils.coord_to_label(subject_id, coord, label='example_data/coord',
hemi='lh', n_steps=25, map_surface="white")
brain.add_label('example_data/coord-lh.label', color="darkseagreen", alpha=.8)
"""
Now we plot the focus on the inflated surface at the vertex identified in the
previous step.
"""
brain.add_foci([coord], map_surface="white", color="mediumseagreen")
"""
We can also do this using a vertex index, perhaps defined as the peak
activation in a surface analysis. This will be more accurate than using a
volume-based focus.
"""
coord = 0
utils.coord_to_label(subject_id, coord, label='example_data/coord',
hemi='lh', n_steps=40, map_surface="white",
coord_as_vert=True)
brain.add_label('example_data/coord-lh.label', color='royalblue', alpha=.8)
"""
Now we plot the foci on the inflated surface. We will map the foci onto the
surface by finding the vertex on the "white" mesh that is closest to the
coordinate of the point we want to display.
"""
brain.add_foci([coord], map_surface="white", coords_as_verts=True,
color="mediumblue")
"""
Set the camera position to show the extent of the labels.
"""
brain.show_view(dict(elevation=40, distance=430))
| aestrivex/PySurfer | examples/plot_label_foci.py | Python | bsd-3-clause | 1,955 |
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from telemetry.page.actions import all_page_actions
from telemetry.page.actions import page_action
def _GetActionFromData(action_data):
action_name = action_data['action']
action = all_page_actions.FindClassWithName(action_name)
if not action:
logging.critical('Could not find an action named %s.', action_name)
logging.critical('Check the page set for a typo and check the error '
'log for possible Python loading/compilation errors.')
raise Exception('Action "%s" not found.' % action_name)
return action(action_data)
def GetCompoundActionFromPage(page, action_name):
if not action_name:
return []
action_data_list = getattr(page, action_name)
if not isinstance(action_data_list, list):
action_data_list = [action_data_list]
action_list = []
for subaction_data in action_data_list:
subaction_name = subaction_data['action']
if hasattr(page, subaction_name):
subaction = GetCompoundActionFromPage(page, subaction_name)
else:
subaction = [_GetActionFromData(subaction_data)]
action_list += subaction * subaction_data.get('repeat', 1)
return action_list
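# Illustrative (hypothetical page attribute): with
#     page.stress_memory = [{'action': 'scroll'}, {'action': 'wait', 'repeat': 2}]
# GetCompoundActionFromPage(page, 'stress_memory') returns the scroll action
# followed by two copies of the wait action, honoring 'repeat'.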
class Failure(Exception):
"""Exception that can be thrown from PageBenchmark to indicate an
undesired but designed-for problem."""
pass
class PageTestResults(object):
def __init__(self):
self.page_successes = []
self.page_failures = []
self.skipped_pages = []
def AddSuccess(self, page):
self.page_successes.append({'page': page})
def AddFailure(self, page, message, details):
self.page_failures.append({'page': page,
'message': message,
'details': details})
def AddSkippedPage(self, page, message, details):
self.skipped_pages.append({'page': page,
'message': message,
'details': details})
class PageTest(object):
"""A class styled on unittest.TestCase for creating page-specific tests."""
def __init__(self,
test_method_name,
action_name_to_run='',
needs_browser_restart_after_each_run=False,
discard_first_result=False):
self.options = None
try:
self._test_method = getattr(self, test_method_name)
except AttributeError:
raise ValueError, 'No such method %s.%s' % (
          self.__class__, test_method_name) # pylint: disable=E1101
self._action_name_to_run = action_name_to_run
self._needs_browser_restart_after_each_run = (
needs_browser_restart_after_each_run)
self._discard_first_result = discard_first_result
@property
def needs_browser_restart_after_each_run(self):
return self._needs_browser_restart_after_each_run
@property
def discard_first_result(self):
"""When set to True, the first run of the test is discarded. This is
useful for cases where it's desirable to have some test resource cached so
the first run of the test can warm things up. """
return self._discard_first_result
def AddCommandLineOptions(self, parser):
"""Override to expose command-line options for this benchmark.
The provided parser is an optparse.OptionParser instance and accepts all
    normal optparse arguments. The parsed options are available in Run as
self.options."""
pass
def CustomizeBrowserOptions(self, options):
"""Override to add test-specific options to the BrowserOptions object"""
pass
def CustomizeBrowserOptionsForPage(self, page, options):
"""Add options specific to the test and the given page."""
if not self.CanRunForPage(page):
return
for action in GetCompoundActionFromPage(page, self._action_name_to_run):
action.CustomizeBrowserOptions(options)
def SetUpBrowser(self, browser):
"""Override to customize the browser right after it has launched."""
pass
def CanRunForPage(self, page): #pylint: disable=W0613
"""Override to customize if the test can be ran for the given page."""
return True
def WillRunPageSet(self, tab, results):
"""Override to do operations before the page set is navigated."""
pass
def DidRunPageSet(self, tab, results):
"""Override to do operations after page set is completed, but before browser
is torn down."""
pass
def WillNavigateToPage(self, page, tab):
"""Override to do operations before the page is navigated."""
pass
def DidNavigateToPage(self, page, tab):
"""Override to do operations right after the page is navigated, but before
any waiting for completion has occurred."""
pass
def WillRunAction(self, page, tab, action):
"""Override to do operations before running the action on the page."""
pass
def DidRunAction(self, page, tab, action):
"""Override to do operations after running the action on the page."""
pass
def Run(self, options, page, tab, results):
self.options = options
compound_action = GetCompoundActionFromPage(page, self._action_name_to_run)
self._RunCompoundAction(page, tab, compound_action)
try:
self._test_method(page, tab, results)
finally:
self.options = None
def _RunCompoundAction(self, page, tab, actions):
for i, action in enumerate(actions):
prev_action = actions[i - 1] if i > 0 else None
next_action = actions[i + 1] if i < len(actions) - 1 else None
if (action.RunsPreviousAction() and
next_action and next_action.RunsPreviousAction()):
raise page_action.PageActionFailed('Consecutive actions cannot both '
'have RunsPreviousAction() == True.')
if not (next_action and next_action.RunsPreviousAction()):
action.WillRunAction(page, tab)
self.WillRunAction(page, tab, action)
try:
action.RunAction(page, tab, prev_action)
finally:
self.DidRunAction(page, tab, action)
@property
def action_name_to_run(self):
return self._action_name_to_run
| codenote/chromium-test | tools/telemetry/telemetry/page/page_test.py | Python | bsd-3-clause | 6,182 |
from django.conf.urls import url, patterns
from data import views
urlpatterns = patterns("data.views",
url(r"^$", views.IndexView.as_view()),
url(r"^a/(?P<application_external_id>[^/]{,255})\.json$", views.ApplicationInstanceListView.as_view()),
url(r"^(?P<model_external_id>[^/]{,255})\.json$", views.InstanceListView.as_view()),
url(r"^(?P<model_external_id>[^/]{,255})/(?P<instance_external_id>[^/]{,255})\.json", views.InstanceDetailView.as_view()),
) | mohawkhq/mohawk-data-platform | data/urls.py | Python | bsd-3-clause | 479 |
from setuptools import setup, find_packages
setup(
name='django-facebook-comments',
version=__import__('facebook_comments').__version__,
description='Django implementation for Facebook Graph API Comments',
long_description=open('README.md').read(),
author='ramusus',
author_email='[email protected]',
url='https://github.com/ramusus/django-facebook-comments',
download_url='http://pypi.python.org/pypi/django-facebook-comments',
license='BSD',
packages=find_packages(),
include_package_data=True,
zip_safe=False, # because we're including media that Django needs
install_requires=[
'django-facebook-api>=0.5.0',
'django-facebook-users>=0.3.0',
'django-m2m-history>=0.1.2',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| ramusus/django-facebook-comments | setup.py | Python | bsd-3-clause | 1,122 |
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('datasets', parent_package, top_path)
config.add_subpackage('volumes')
config.add_subpackage('transforms')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
| yarikoptic/NiPy-OLD | nipy/neurospin/datasets/setup.py | Python | bsd-3-clause | 390 |
"""
:Requirements: django-tagging
This module contains some additional helper tags for the django-tagging
project. Note that the functionality here might already be present in
django-tagging but perhaps with some slightly different behaviour or
usage.
"""
from django import template
from django.core.urlresolvers import reverse as url_reverse
from tagging.utils import parse_tag_input
register = template.Library()
class TagsForObjectNode(template.Node):
def __init__(self, tags_string, urlname, junctor=None, last_junctor=None):
self.tags_string = template.Variable(tags_string)
self.junctor = junctor is None and ', ' or junctor.lstrip('"').rstrip('"')
self.last_junctor = last_junctor is None and ' and ' or last_junctor.lstrip('"').rstrip('"')
self.urlname = urlname
def render(self, context):
tags = parse_tag_input(self.tags_string.resolve(context))
tags = ['<a href="%s" rel="tag">%s</a>' % (url_reverse(self.urlname, kwargs={'tag':t}), t) for t in tags]
if len(tags) > 2:
first_part = self.junctor.join(tags[:-1])
return first_part + self.last_junctor + tags[-1]
if len(tags) == 2:
return self.last_junctor.join(tags)
return self.junctor.join(tags)
@register.tag('object_tags')
def tags_for_object(parser, token):
"""
Simple tag for rendering tags of an object
Usage::
{% object_tags object.tags blog-tag ", " " and " %}
The last two arguments determine the junctor between the tag names with
the last being the last junctor being used.
"""
variables = token.split_contents()[1:]
return TagsForObjectNode(*variables)
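# Illustrative rendering for tags "django, python, web" with the default
# junctors (URLs shown hypothetically; they depend on the 'blog-tag' pattern):
#   <a href="/tags/django/" rel="tag">django</a>, <a ...>python</a> and <a ...>web</a>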
| zerok/django-zsutils | django_zsutils/templatetags/zsutils/taghelpers.py | Python | bsd-3-clause | 1,704 |
#! /usr/bin/env python
#Copyright (c) 2016, Buti Al Delail
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without
#modification, are permitted provided that the following conditions are met:
#
#* Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
#* Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
#* Neither the name of kuri_mbzirc_challenge_3 nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
#AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
#IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
#DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
#FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
#DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
#SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
#OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
#OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import rospy
import thread
import threading
import time
import mavros
import actionlib
from math import *
from mavros.utils import *
from mavros import setpoint as SP
from tf.transformations import quaternion_from_euler
from kuri_msgs.msg import *
class dropzone_landing:
def __init__(self):
self.done = False
self.done_evt = threading.Event()
self.isExploring = False
self.progress = 0.0
self.x = 0.0
self.y = 0.0
self.z = 0.0
self.currentPoseX = 0
self.currentPoseY = 0
self.currentPoseZ = 0
self.navigating = False
mavros.set_namespace('/uav_1/mavros')
# publisher for mavros/setpoint_position/local
self.pub = SP.get_pub_position_local(queue_size=10)
# subscriber for mavros/local_position/local
self.sub = rospy.Subscriber(mavros.get_topic('local_position', 'pose'),
SP.PoseStamped, self.reached)
self.objects_map = ObjectsMap()
self.client = actionlib.SimpleActionClient('TrackingAction', TrackingAction)
#client = self.client
#client = self.actionServer.client
print "Waiting for tracking server"
self.client.wait_for_server()
self.goal = TrackingGoal()
self.goal.uav_id = 1
self.client.send_goal(self.goal)
print "Waiting for result"
self.client.wait_for_result()
print "Result:"
self.objects =self.client.get_result().tracked_objects.objects
print self.objects
try:
thread.start_new_thread(self.navigate, ())
except:
fault("Error: Unable to start thread")
def navigate(self):
rate = rospy.Rate(40) # 10hz
msg = SP.PoseStamped(
header=SP.Header(
frame_id="base_footprint", # no matter, plugin don't use TF
stamp=rospy.Time.now()), # stamp should update
)
while not rospy.is_shutdown():
msg.pose.position.x = self.x
msg.pose.position.y = self.y
msg.pose.position.z = self.z
# For demo purposes we will lock yaw/heading to north.
yaw_degrees = 0 # North
yaw = radians(yaw_degrees)
quaternion = quaternion_from_euler(0, 0, yaw)
msg.pose.orientation = SP.Quaternion(*quaternion)
self.pub.publish(msg)
rate.sleep()
def setPose(self, x, y, z, delay=0, wait=True):
self.done = False
self.x = x
self.y = y
self.z = z
self.navigating = True
if wait:
rate = rospy.Rate(5)
while not self.done and not rospy.is_shutdown():
rate.sleep()
time.sleep(delay)
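    # Usage sketch (illustrative): setPose(2, -3, 5) blocks until reached()
    # reports the pose within its 0.2/0.2/0.5 m tolerances; wait=False simply
    # updates the setpoint that the navigate() thread keeps publishing.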
def takeoff(self, z, delay=0, wait=True):
diff = z - self.currentPoseZ
while not abs(diff)<0.2:
diff = z - self.currentPoseZ
if diff>0:
self.setPose(self.currentPoseX,self.currentPoseY,self.currentPoseZ + 1, 0, False)
else:
self.setPose(self.currentPoseX,self.currentPoseY,self.currentPoseZ - 0.1, 0, False)
def land(self, delay=0, wait=True):
altitude = self.currentPoseZ
while altitude > 0:
altitude = self.currentPoseZ
self.setPose(self.currentPoseX,self.currentPoseY,self.currentPoseZ - 0.5 ,2)
def reached(self, topic):
def is_near(msg, x, y, d):
rospy.logdebug("Position %s: local: %d, target: %d, abs diff: %d",
msg, x, y, abs(x - y))
return abs(x - y) < d
self.currentPoseX = topic.pose.position.x
self.currentPoseY = topic.pose.position.y
self.currentPoseZ = topic.pose.position.z
if is_near('X', topic.pose.position.x, self.x, 0.2) and \
is_near('Y', topic.pose.position.y, self.y, 0.2) and \
is_near('Z', topic.pose.position.z, self.z, 0.5):
if self.navigating:
self.done = True
self.navigating = False
self.done_evt.set()
def explore(self):
print 'explore started '
rate = rospy.Rate(30)
self.newGoal = True
        if not self.isExploring:
            # Change this later when we have a better exploration
            # self.isExploring = True
            while not self.done:
                time.sleep(1)
rospy.loginfo("Climb")
self.progress += 0.1
self.takeoff(5)
self.progress += 0.1
rospy.loginfo("Moving to Red_Object")
self.reached_object = False
red_object_id = -1
xspeed = 1
        while not self.reached_object:
self.client.send_goal(self.goal)
self.client.wait_for_result()
self.objects = self.client.get_result().tracked_objects.objects
islost = True
for obj in self.objects:
if red_object_id == -1 and (obj.color == 'RED' or obj.color == 'BLUE' or obj.color == 'GREEN'): #pick any nearby object
red_object_id = obj.object_id
if obj.object_id == red_object_id:
islost = False
print 'Moving to Drop zone', self.currentPoseX-obj.pose2.pose.position.x, self.currentPoseY-obj.pose2.pose.position.y, obj.pose.pose.position.x, obj.pose.pose.position.y
if fabs(obj.pose2.pose.position.x) < 0.01 and fabs(obj.pose2.pose.position.y) > 0.01:
print 'Moving Y'
self.setPose(self.x, self.currentPoseY+obj.pose2.pose.position.y*xspeed, self.z, 0 , False)
elif fabs(obj.pose2.pose.position.y) < 0.01 and fabs(obj.pose2.pose.position.x) > 0.01:
print 'Moving X'
self.setPose(self.currentPoseX-obj.pose2.pose.position.x*xspeed, self.y, self.z, 0 , False)
else:
print 'Moving XY'
self.setPose(self.currentPoseX-obj.pose2.pose.position.x*xspeed, self.currentPoseY+obj.pose2.pose.position.y*xspeed, self.z, 0 , True)
if fabs(obj.pose2.pose.position.x) < 0.3 and fabs(obj.pose2.pose.position.y) < 0.3 and self.z > 0.0:
print 'Moving Z'
land = 0.2
if self.z <= 3:
xspeed = 1
if self.z <= 1.5:
xspeed = 0.5
if self.z < 0.5:
land = 0.05
self.setPose(self.x, self.y, self.z - land * xspeed, 1, False)
if self.z <= 0.4:
self.reached_object = True
            if islost:
red_object_id = -1
if red_object_id == -1:
rospy.loginfo("No object in sight, exploring")
#self.setPose(self.x, self.y - 5, self.z, 1, True)
rate.sleep()
time.sleep(10)
rospy.loginfo("Picked Object, climb")
self.takeoff(1)
self.takeoff(2)
self.takeoff(3)
self.takeoff(4)
self.takeoff(5)
#self.setPose(self.x, self.y, self.z)
time.sleep(10)
rospy.loginfo("Moving to DropZone")
self.setPose(1, -21, 5) ##Go near dropzone
self.progress += 0.1
self.reached_dropzone = False
xspeed = 3
        while not self.reached_dropzone:
self.client.send_goal(self.goal)
self.client.wait_for_result()
self.objects = self.client.get_result().tracked_objects.objects
for obj in self.objects:
if obj.color == 'DROP_ZONE':
print 'Moving to Drop zone', self.currentPoseX-obj.pose2.pose.position.x, self.currentPoseY-obj.pose2.pose.position.y, obj.pose.pose.position.x, obj.pose.pose.position.y
if fabs(obj.pose2.pose.position.x) < 0.1 and fabs(obj.pose2.pose.position.y) > 0.1:
print 'Moving Y'
self.setPose(self.x, self.currentPoseY+obj.pose2.pose.position.y*xspeed, self.z, 0 , False)
elif fabs(obj.pose2.pose.position.y) < 0.1 and fabs(obj.pose2.pose.position.x) > 0.1:
print 'Moving X'
self.setPose(self.currentPoseX-obj.pose2.pose.position.x*xspeed, self.y, self.z, 0 , False)
else:
print 'Moving XY'
self.setPose(self.currentPoseX-obj.pose2.pose.position.x*xspeed, self.currentPoseY+obj.pose2.pose.position.y*xspeed, self.z, 0 , True)
if fabs(obj.pose2.pose.position.x) < 0.3 and fabs(obj.pose2.pose.position.y) < 0.3 and self.z > 1:
print 'Moving Z'
land = 0.5
if self.z <= 3:
land = 0.2
xspeed = 0.5
self.setPose(self.x, self.y, self.z - land, 1, False)
if self.z < 1.5:
self.reached_dropzone = True
rate.sleep()
self.progress += 0.1
rospy.loginfo("Landed Object, climb")
self.takeoff(7)
rospy.loginfo("Bye!")
def main(args):
'''Initializes and cleanup ros node'''
rospy.init_node('dropzone_landing', anonymous=True)
d = dropzone_landing()
d.explore()
try:
rospy.spin()
except KeyboardInterrupt:
print "Shutting down ROS Image feature detector module"
cv2.destroyAllWindows()
if __name__ == '__main__':
main(sys.argv)
| kuri-kustar/kuri_mbzirc_challenge_3 | kuri_object_tracking/scripts/object_picktest.py | Python | bsd-3-clause | 12,469 |
from setuptools import setup
setup(
name='pymail365',
version='0.1',
description='A python client for sending mail using Microsoft Office 365 rest service.',
long_description=open('README.rst').read(),
author='Mikko Hellsing',
author_email='[email protected]',
license='BSD',
url='https://github.com/aino/pymail365',
packages=['pymail365'],
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
| aino/pymail365 | setup.py | Python | bsd-3-clause | 688 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 5, transform = "Quantization", sigma = 0.0, exog_count = 100, ar_order = 12);
| antoinecarme/pyaf | tests/artificial/transf_Quantization/trend_MovingAverage/cycle_5/ar_12/test_artificial_32_Quantization_MovingAverage_5_12_100.py | Python | bsd-3-clause | 272 |
# Autogenerated by the mkresources management command 2014-11-13 23:53
from tastypie.resources import ModelResource
from tastypie.fields import ToOneField, ToManyField
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from ietf import api
from ietf.message.models import * # pyflakes:ignore
from ietf.person.resources import PersonResource
from ietf.group.resources import GroupResource
from ietf.doc.resources import DocumentResource
class MessageResource(ModelResource):
by = ToOneField(PersonResource, 'by')
related_groups = ToManyField(GroupResource, 'related_groups', null=True)
related_docs = ToManyField(DocumentResource, 'related_docs', null=True)
class Meta:
queryset = Message.objects.all()
serializer = api.Serializer()
#resource_name = 'message'
filtering = {
"id": ALL,
"time": ALL,
"subject": ALL,
"frm": ALL,
"to": ALL,
"cc": ALL,
"bcc": ALL,
"reply_to": ALL,
"body": ALL,
"content_type": ALL,
"by": ALL_WITH_RELATIONS,
"related_groups": ALL_WITH_RELATIONS,
"related_docs": ALL_WITH_RELATIONS,
}
api.message.register(MessageResource())
from ietf.person.resources import PersonResource
class SendQueueResource(ModelResource):
by = ToOneField(PersonResource, 'by')
message = ToOneField(MessageResource, 'message')
class Meta:
queryset = SendQueue.objects.all()
serializer = api.Serializer()
#resource_name = 'sendqueue'
filtering = {
"id": ALL,
"time": ALL,
"send_at": ALL,
"sent_at": ALL,
"note": ALL,
"by": ALL_WITH_RELATIONS,
"message": ALL_WITH_RELATIONS,
}
api.message.register(SendQueueResource())
| wpjesus/codematch | ietf/message/resources.py | Python | bsd-3-clause | 1,927 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 - 2022 -- Lars Heuer
# All rights reserved.
#
# License: BSD License
#
"""\
Tests if the PNG serializer does not add more colors than needed.
See also issue <https://github.com/heuer/segno/issues/62>
"""
from __future__ import unicode_literals, absolute_import
import io
import pytest
import segno
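# Each test saves the same code twice - once passing extra colour arguments
# that are redundant for that symbol (same colour as ``dark``, or modules the
# symbol doesn't have), once with only ``dark`` - and compares the raw PNG
# bytes: if the serializer added needless palette entries, the outputs would
# differ.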
def test_plte():
qr = segno.make_qr('test')
assert qr.version < 7
dark = 'red'
buff_1 = io.BytesIO()
buff_2 = io.BytesIO()
qr.save(buff_1, kind='png', dark=dark, finder_dark=dark, version_dark='green')
qr.save(buff_2, kind='png', dark=dark)
assert buff_1.getvalue() == buff_2.getvalue()
def test_plte2():
qr = segno.make_qr('test')
assert qr.version < 7
dark = 'red'
buff_1 = io.BytesIO()
buff_2 = io.BytesIO()
qr.save(buff_1, kind='png', dark=dark, finder_dark=dark, version_dark='green')
qr.save(buff_2, kind='png', dark=dark)
assert buff_1.getvalue() == buff_2.getvalue()
def test_plte3():
qr = segno.make_qr('test')
assert qr.version < 7
dark = 'red'
buff_1 = io.BytesIO()
buff_2 = io.BytesIO()
qr.save(buff_1, kind='png', dark=dark, finder_dark=dark, version_dark='green')
qr.save(buff_2, kind='png', dark=dark)
assert buff_1.getvalue() == buff_2.getvalue()
def test_plte_micro():
qr = segno.make_micro('RAIN')
dark = 'red'
buff_1 = io.BytesIO()
buff_2 = io.BytesIO()
qr.save(buff_1, kind='png', dark=dark, finder_dark=dark, alignment_dark='green')
qr.save(buff_2, kind='png', dark=dark)
assert buff_1.getvalue() == buff_2.getvalue()
def test_plte_micro2():
qr = segno.make_micro('RAIN')
dark = 'red'
buff_1 = io.BytesIO()
buff_2 = io.BytesIO()
qr.save(buff_1, kind='png', dark=dark, finder_dark=dark, dark_module='green')
qr.save(buff_2, kind='png', dark=dark)
assert buff_1.getvalue() == buff_2.getvalue()
if __name__ == '__main__':
pytest.main([__file__])
| heuer/segno | tests/test_png_plte.py | Python | bsd-3-clause | 1,978 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from cms.models import Page
from cms.utils.i18n import get_language_list
from django.db import migrations, models
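# Data migration: for every draft page attached to the BlogApp apphook, make
# sure a BlogConfig (with per-language translations) exists, then point any
# blog objects and plugins that still lack an app_config at it.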
def forwards(apps, schema_editor):
BlogConfig = apps.get_model('djangocms_blog', 'BlogConfig')
BlogConfigTranslation = apps.get_model('djangocms_blog', 'BlogConfigTranslation')
Post = apps.get_model('djangocms_blog', 'Post')
BlogCategory = apps.get_model('djangocms_blog', 'BlogCategory')
GenericBlogPlugin = apps.get_model('djangocms_blog', 'GenericBlogPlugin')
LatestPostsPlugin = apps.get_model('djangocms_blog', 'LatestPostsPlugin')
AuthorEntriesPlugin = apps.get_model('djangocms_blog', 'AuthorEntriesPlugin')
config = None
for page in Page.objects.drafts().filter(application_urls='BlogApp'):
config, created = BlogConfig.objects.get_or_create(namespace=page.application_namespace)
if not BlogConfigTranslation.objects.exists():
for lang in get_language_list():
title = page.get_title(lang)
translation = BlogConfigTranslation.objects.create(language_code=lang, master_id=config.pk, app_title=title)
if config:
for model in (Post, BlogCategory, GenericBlogPlugin, LatestPostsPlugin, AuthorEntriesPlugin):
for item in model.objects.filter(app_config__isnull=True):
item.app_config = config
item.save()
def backwards(apps, schema_editor):
# No need for backward data migration
pass
class Migration(migrations.Migration):
dependencies = [
('cms', '0004_auto_20140924_1038'),
('djangocms_blog', '0013_auto_20160201_2235'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
| skirsdeda/djangocms-blog | djangocms_blog/migrations/0014_auto_20160215_1331.py | Python | bsd-3-clause | 1,773 |
"""private_base will be populated from puppet and placed in this directory"""
import logging
import os
import dj_database_url
from lib.settings_base import CACHE_PREFIX, ES_INDEXES, KNOWN_PROXIES, LOGGING
from .. import splitstrip
import private_base as private
ENGAGE_ROBOTS = False
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = private.EMAIL_HOST
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = False
SESSION_COOKIE_SECURE = True
REDIRECT_SECRET_KEY = private.REDIRECT_SECRET_KEY
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'mysql_pool'
DATABASES['default']['sa_pool_key'] = 'master'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL)
DATABASES['slave']['ENGINE'] = 'mysql_pool'
DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave']['sa_pool_key'] = 'slave'
SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL)
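# SQLAlchemy pool settings consumed by the mysql_pool engine: at most 5
# pooled connections (plus 10 overflow), recycled every 30 seconds.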
DATABASE_POOL_ARGS = {
'max_overflow': 10,
'pool_size': 5,
'recycle': 30
}
SLAVE_DATABASES = ['slave']
CACHES = {
'default': {
'BACKEND': 'caching.backends.memcached.MemcachedCache',
# 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
# 'BACKEND': 'memcachepool.cache.UMemcacheCache',
'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION),
'TIMEOUT': 500,
'KEY_PREFIX': CACHE_PREFIX,
},
}
SECRET_KEY = private.SECRET_KEY
LOG_LEVEL = logging.DEBUG
## Celery
BROKER_URL = private.BROKER_URL
CELERY_ALWAYS_EAGER = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
NETAPP_STORAGE = private.NETAPP_STORAGE_ROOT + '/shared_storage'
MIRROR_STAGE_PATH = private.NETAPP_STORAGE_ROOT + '/public-staging'
GUARDED_ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/guarded-addons'
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
USERPICS_PATH = UPLOADS_PATH + '/userpics'
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
COLLECTION_ICONS_PATH = UPLOADS_PATH + '/collection_icons'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps'
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer'
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
HERA = []
LOGGING['loggers'].update({
'z.task': { 'level': logging.DEBUG },
'z.hera': { 'level': logging.INFO },
'z.redis': { 'level': logging.DEBUG },
'z.pool': { 'level': logging.ERROR },
})
REDIS_BACKEND = private.REDIS_BACKENDS_CACHE
REDIS_BACKENDS = {
'cache': private.REDIS_BACKENDS_CACHE,
'cache_slave': private.REDIS_BACKENDS_CACHE_SLAVE,
'master': private.REDIS_BACKENDS_MASTER,
'slave': private.REDIS_BACKENDS_SLAVE,
}
CACHE_MACHINE_USE_REDIS = True
RECAPTCHA_PUBLIC_KEY = private.RECAPTCHA_PUBLIC_KEY
RECAPTCHA_PRIVATE_KEY = private.RECAPTCHA_PRIVATE_KEY
RECAPTCHA_URL = ('https://www.google.com/recaptcha/api/challenge?k=%s' % RECAPTCHA_PUBLIC_KEY)
TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp')
PACKAGER_PATH = os.path.join(TMP_PATH, 'packager')
ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/files'
PERF_THRESHOLD = 20
SPIDERMONKEY = '/usr/bin/tracemonkey'
# Remove DetectMobileMiddleware from middleware in production.
detect = 'mobility.middleware.DetectMobileMiddleware'
csp = 'csp.middleware.CSPMiddleware'
RESPONSYS_ID = private.RESPONSYS_ID
CRONJOB_LOCK_PREFIX = 'addons-landfill'
BUILDER_SECRET_KEY = private.BUILDER_SECRET_KEY
BUILDER_VERSIONS_URL = "https://builder-addons-dev.allizom.org/repackage/sdk-versions/"
ES_HOSTS = splitstrip(private.ES_HOSTS)
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = dict((k, '%s_landfill' % v) for k, v in ES_INDEXES.items())
BUILDER_UPGRADE_URL = "https://builder-addons-dev.allizom.org/repackage/rebuild/"
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
GRAPHITE_HOST = private.GRAPHITE_HOST
GRAPHITE_PORT = private.GRAPHITE_PORT
GRAPHITE_PREFIX = private.GRAPHITE_PREFIX
CEF_PRODUCT = STATSD_PREFIX
ES_TIMEOUT = 60
EXPOSE_VALIDATOR_TRACEBACKS = True
KNOWN_PROXIES += ['10.2.83.105',
'10.2.83.106',
'10.2.83.107',
'10.8.83.200',
'10.8.83.201',
'10.8.83.202',
'10.8.83.203',
'10.8.83.204',
'10.8.83.210',
'10.8.83.211',
'10.8.83.212',
'10.8.83.213',
'10.8.83.214',
'10.8.83.215',
'10.8.83.251',
'10.8.83.252',
'10.8.83.253',
]
NEW_FEATURES = True
PERF_TEST_URL = 'http://talos-addon-master1.amotest.scl1.mozilla.com/trigger/trigger.cgi'
REDIRECT_URL = 'https://outgoing.allizom.org/v1/'
CLEANCSS_BIN = 'cleancss'
UGLIFY_BIN = 'uglifyjs'
CELERYD_TASK_SOFT_TIME_LIMIT = 240
LESS_PREPROCESS = True
XSENDFILE_HEADER = 'X-Accel-Redirect'
ALLOW_SELF_REVIEWS = True
AES_KEYS = private.AES_KEYS
| anaran/olympia | sites/landfill/settings_base.py | Python | bsd-3-clause | 5,402 |
'''
Created on 9 jan. 2013
@author: sander
'''
from bitstring import BitStream, ConstBitStream, Bits
from ipaddress import IPv4Address, IPv6Address
from pylisp.packet.ip import protocol_registry
from pylisp.packet.ip.protocol import Protocol
from pylisp.utils import checksum
import numbers
class UDPMessage(Protocol):
header_type = 17
def __init__(self, source_port=0, destination_port=0, checksum=0,
payload=''):
# Call the superclass constructor
super(UDPMessage, self).__init__(payload=payload)
self.source_port = source_port
self.destination_port = destination_port
self.checksum = checksum
def sanitize(self):
'''
Check if the current settings conform to the RFC and fix where possible
'''
# Check ports
if not isinstance(self.source_port, numbers.Integral) \
or self.source_port < 0 \
or self.source_port >= 2 ** 16:
raise ValueError('Invalid source port')
if not isinstance(self.destination_port, numbers.Integral) \
or self.destination_port < 0 \
or self.destination_port >= 2 ** 16:
raise ValueError('Invalid destination port')
# We can't calculate the checksum because we don't know enough by
# ourself to construct the pseudo-header
def generate_pseudo_header(self, source, destination):
# Calculate the length of the UDP layer
udp_length = 8 + len(bytes(self.payload))
if isinstance(source, IPv4Address) \
and isinstance(destination, IPv4Address):
# Generate an IPv4 pseudo-header
header = BitStream('uint:32=%d, '
'uint:32=%d, '
'uint:16=17, '
'uint:16=%d' % (int(source),
int(destination),
udp_length))
elif isinstance(source, IPv6Address) \
and isinstance(destination, IPv6Address):
# Generate an IPv6 pseudo-header
header = BitStream('uint:128=%d, '
'uint:128=%d, '
'uint:32=%d, '
'uint:32=17' % (int(source),
int(destination),
udp_length))
else:
raise ValueError('Source and destination must belong to the same '
'IP version')
# Return the header bytes
return header.bytes
def calculate_checksum(self, source, destination):
# Calculate the pseudo-header for the checksum calculation
pseudo_header = self.generate_pseudo_header(source, destination)
# Remember the current checksum, generate a message and restore the
# original checksum
old_checksum = self.checksum
self.checksum = 0
message = self.to_bytes()
self.checksum = old_checksum
# Calculate the checksum
my_checksum = checksum.ones_complement(pseudo_header + message)
# If the computed checksum is zero, it is transmitted as all ones (the
# equivalent in one's complement arithmetic). An all zero transmitted
# checksum value means that the transmitter generated no checksum (for
# debugging or for higher level protocols that don't care).
if my_checksum == 0:
my_checksum = 0xffff
return my_checksum
def verify_checksum(self, source, destination):
# An all zero transmitted checksum value means that the transmitter
# generated no checksum (for debugging or for higher level protocols
# that don't care).
if self.checksum == 0:
return True
return self.checksum == self.calculate_checksum(source, destination)
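    # LISP (RFC 6830) runs data encapsulation on UDP port 4341 and control
    # messages on port 4342; the helpers below choose the payload class
    # accordingly.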
def get_lisp_message(self, only_data=False, only_control=False):
# Check the UDP ports
lisp_data = (self.source_port == 4341
or self.destination_port == 4341)
lisp_control = (self.source_port == 4342
or self.destination_port == 4342)
if lisp_data and lisp_control:
raise ValueError("Cannot mix LISP data and control ports")
from pylisp.packet.lisp.control.base import ControlMessage
from pylisp.packet.lisp.data import DataPacket
if lisp_data or only_data:
if not isinstance(self.payload, DataPacket):
raise ValueError("Payload is not a LISP data packet")
return self.payload
elif lisp_control or only_control:
if not isinstance(self.payload, ControlMessage):
raise ValueError("Payload is not a LISP control message")
return self.payload
else:
raise ValueError("No LISP content found")
def get_lisp_data_packet(self):
return self.get_lisp_message(only_data=True)
def get_lisp_control_message(self):
return self.get_lisp_message(only_control=True)
@classmethod
def from_bytes(cls, bitstream):
'''
Parse the given packet and update properties accordingly
'''
packet = cls()
# Convert to ConstBitStream (if not already provided)
if not isinstance(bitstream, ConstBitStream):
if isinstance(bitstream, Bits):
bitstream = ConstBitStream(auto=bitstream)
else:
bitstream = ConstBitStream(bytes=bitstream)
# Read the source and destination ports
(packet.source_port,
packet.destination_port) = bitstream.readlist('2*uint:16')
# Store the length
length = bitstream.read('uint:16')
if length < 8:
raise ValueError('Invalid UDP length')
# Read the checksum
packet.checksum = bitstream.read('uint:16')
# And the rest is payload
payload_bytes = length - 8
packet.payload = bitstream.read('bytes:%d' % payload_bytes)
# LISP-specific handling
if packet.source_port == 4341 or packet.destination_port == 4341:
# Payload is a LISP data packet
from pylisp.packet.lisp.data import DataPacket
packet.payload = DataPacket.from_bytes(packet.payload)
elif packet.source_port == 4342 or packet.destination_port == 4342:
# Payload is a LISP control message
from pylisp.packet.lisp.control.base import ControlMessage
packet.payload = ControlMessage.from_bytes(packet.payload)
# There should be no remaining bits
if bitstream.pos != bitstream.len:
raise ValueError('Bits remaining after processing packet')
# Verify that the properties make sense
packet.sanitize()
return packet
def to_bytes(self):
'''
Create bytes from properties
'''
# Verify that the properties make sense
self.sanitize()
# Write the source and destination ports
bitstream = BitStream('uint:16=%d, '
'uint:16=%d' % (self.source_port,
self.destination_port))
# Write the length
payload_bytes = bytes(self.payload)
length = len(payload_bytes) + 8
bitstream += BitStream('uint:16=%d' % length)
# Write the checksum
bitstream += BitStream('uint:16=%d' % self.checksum)
return bitstream.bytes + payload_bytes
# Register this header type
protocol_registry.register_type_class(UDPMessage)
| steffann/pylisp | pylisp/packet/ip/udp.py | Python | bsd-3-clause | 7,685 |
# Copyright (c) 2019 Guo Yejun
#
# This file is part of FFmpeg.
#
# FFmpeg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# FFmpeg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with FFmpeg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ==============================================================================
import tensorflow as tf
import numpy as np
import sys, struct
import convert_header as header
__all__ = ['convert_from_tensorflow']
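# Output file layout (see TFConverter.dump_to_file): header string and
# version numbers, then the serialized layers, then the operand table, and
# finally the layer and operand counts.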
class Operand(object):
IOTYPE_INPUT = 1
IOTYPE_OUTPUT = 2
IOTYPE_INTERMEDIATE = IOTYPE_INPUT | IOTYPE_OUTPUT
DTYPE_FLOAT = 1
DTYPE_UINT8 = 4
index = 0
def __init__(self, name, dtype, dims):
self.name = name
self.dtype = dtype
self.dims = dims
self.iotype = 0
self.used_count = 0
self.index = Operand.index
Operand.index = Operand.index + 1
self.iotype2str = {Operand.IOTYPE_INPUT: 'in', Operand.IOTYPE_OUTPUT: 'out', Operand.IOTYPE_INTERMEDIATE: 'inout'}
self.dtype2str = {Operand.DTYPE_FLOAT: 'DT_FLOAT', Operand.DTYPE_UINT8: 'DT_UINT8'}
def add_iotype(self, iotype):
self.iotype = self.iotype | iotype
if iotype == Operand.IOTYPE_INPUT:
self.used_count = self.used_count + 1
def __str__(self):
return "{}: (name: {}, iotype: {}, dtype: {}, dims: ({},{},{},{}) used_count: {})".format(self.index,
self.name, self.iotype2str[self.iotype], self.dtype2str[self.dtype],
self.dims[0], self.dims[1], self.dims[2], self.dims[3], self.used_count)
def __lt__(self, other):
return self.index < other.index
class TFConverter:
def __init__(self, graph_def, nodes, outfile, dump4tb):
self.graph_def = graph_def
self.nodes = nodes
self.outfile = outfile
self.dump4tb = dump4tb
self.layer_number = 0
self.output_names = []
self.name_node_dict = {}
self.edges = {}
self.conv_activations = {'Relu':0, 'Tanh':1, 'Sigmoid':2, 'None':3, 'LeakyRelu':4}
self.conv_paddings = {'VALID':0, 'SAME':1}
self.converted_nodes = set()
self.conv2d_scope_names = set()
self.conv2d_scopename_inputname_dict = {}
self.op2code = {'Conv2D':1, 'DepthToSpace':2, 'MirrorPad':3, 'Maximum':4, 'MathBinary':5}
self.mathbin2code = {'Sub':0, 'Add':1, 'Mul':2, 'RealDiv':3}
self.mirrorpad_mode = {'CONSTANT':0, 'REFLECT':1, 'SYMMETRIC':2}
self.name_operand_dict = {}
def add_operand(self, name, type):
node = self.name_node_dict[name]
if name not in self.name_operand_dict:
dtype = node.attr['dtype'].type
if dtype == 0:
dtype = node.attr['T'].type
dims = [-1,-1,-1,-1]
if 'shape' in node.attr:
dims[0] = node.attr['shape'].shape.dim[0].size
dims[1] = node.attr['shape'].shape.dim[1].size
dims[2] = node.attr['shape'].shape.dim[2].size
dims[3] = node.attr['shape'].shape.dim[3].size
operand = Operand(name, dtype, dims)
            self.name_operand_dict[name] = operand
self.name_operand_dict[name].add_iotype(type)
return self.name_operand_dict[name].index
def dump_for_tensorboard(self):
graph = tf.get_default_graph()
tf.import_graph_def(self.graph_def, name="")
tf.summary.FileWriter('/tmp/graph', graph)
print('graph saved, run "tensorboard --logdir=/tmp/graph" to see it')
def get_conv2d_params(self, conv2d_scope_name):
knode = self.name_node_dict[conv2d_scope_name + '/kernel']
bnode = self.name_node_dict[conv2d_scope_name + '/bias']
if conv2d_scope_name + '/dilation_rate' in self.name_node_dict:
dnode = self.name_node_dict[conv2d_scope_name + '/dilation_rate']
else:
dnode = None
# the BiasAdd name is possible be changed into the output name,
# if activation is None, and BiasAdd.next is the last op which is Identity
if conv2d_scope_name + '/BiasAdd' in self.edges:
anode = self.edges[conv2d_scope_name + '/BiasAdd'][0]
if anode.op not in self.conv_activations:
anode = None
else:
anode = None
return knode, bnode, dnode, anode
def dump_complex_conv2d_to_file(self, node, f):
assert(node.op == 'Conv2D')
self.layer_number = self.layer_number + 1
self.converted_nodes.add(node.name)
scope_name = TFConverter.get_scope_name(node.name)
#knode for kernel, bnode for bias, dnode for dilation, anode for activation
knode, bnode, dnode, anode = self.get_conv2d_params(scope_name)
if dnode is not None:
dilation = struct.unpack('i', dnode.attr['value'].tensor.tensor_content[0:4])[0]
else:
dilation = 1
if anode is not None:
activation = anode.op
else:
activation = 'None'
padding = node.attr['padding'].s.decode("utf-8")
# conv2d with dilation > 1 generates tens of nodes, not easy to parse them, so use this tricky method.
if dilation > 1 and scope_name + '/stack' in self.name_node_dict:
if self.name_node_dict[scope_name + '/stack'].op == "Const":
padding = 'SAME'
padding = self.conv_paddings[padding]
ktensor = knode.attr['value'].tensor
filter_height = ktensor.tensor_shape.dim[0].size
filter_width = ktensor.tensor_shape.dim[1].size
in_channels = ktensor.tensor_shape.dim[2].size
out_channels = ktensor.tensor_shape.dim[3].size
kernel = np.frombuffer(ktensor.tensor_content, dtype=np.float32)
kernel = kernel.reshape(filter_height, filter_width, in_channels, out_channels)
kernel = np.transpose(kernel, [3, 0, 1, 2])
has_bias = 1
np.array([self.op2code[node.op], dilation, padding, self.conv_activations[activation], in_channels, out_channels, filter_height, has_bias], dtype=np.uint32).tofile(f)
kernel.tofile(f)
btensor = bnode.attr['value'].tensor
if btensor.tensor_shape.dim[0].size == 1:
bias = struct.pack("f", btensor.float_val[0])
else:
bias = btensor.tensor_content
f.write(bias)
input_name = self.conv2d_scopename_inputname_dict[scope_name]
input_operand_index = self.add_operand(input_name, Operand.IOTYPE_INPUT)
if anode is not None:
output_operand_index = self.add_operand(anode.name, Operand.IOTYPE_OUTPUT)
else:
output_operand_index = self.add_operand(self.edges[bnode.name][0].name, Operand.IOTYPE_OUTPUT)
np.array([input_operand_index, output_operand_index], dtype=np.uint32).tofile(f)
def dump_simple_conv2d_to_file(self, node, f):
assert(node.op == 'Conv2D')
self.layer_number = self.layer_number + 1
self.converted_nodes.add(node.name)
node0 = self.name_node_dict[node.input[0]]
node1 = self.name_node_dict[node.input[1]]
if node0.op == 'Const':
knode = node0
input_name = node.input[1]
else:
knode = node1
input_name = node.input[0]
ktensor = knode.attr['value'].tensor
filter_height = ktensor.tensor_shape.dim[0].size
filter_width = ktensor.tensor_shape.dim[1].size
in_channels = ktensor.tensor_shape.dim[2].size
out_channels = ktensor.tensor_shape.dim[3].size
if filter_height * filter_width * in_channels * out_channels == 1:
kernel = np.float32(ktensor.float_val[0])
else:
kernel = np.frombuffer(ktensor.tensor_content, dtype=np.float32)
kernel = kernel.reshape(filter_height, filter_width, in_channels, out_channels)
kernel = np.transpose(kernel, [3, 0, 1, 2])
has_bias = 0
dilation = 1
padding = node.attr['padding'].s.decode("utf-8")
np.array([self.op2code[node.op], dilation, self.conv_paddings[padding], self.conv_activations['None'],
in_channels, out_channels, filter_height, has_bias], dtype=np.uint32).tofile(f)
kernel.tofile(f)
input_operand_index = self.add_operand(input_name, Operand.IOTYPE_INPUT)
output_operand_index = self.add_operand(node.name, Operand.IOTYPE_OUTPUT)
np.array([input_operand_index, output_operand_index], dtype=np.uint32).tofile(f)
def dump_depth2space_to_file(self, node, f):
assert(node.op == 'DepthToSpace')
self.layer_number = self.layer_number + 1
block_size = node.attr['block_size'].i
np.array([self.op2code[node.op], block_size], dtype=np.uint32).tofile(f)
self.converted_nodes.add(node.name)
input_operand_index = self.add_operand(node.input[0], Operand.IOTYPE_INPUT)
output_operand_index = self.add_operand(node.name, Operand.IOTYPE_OUTPUT)
np.array([input_operand_index, output_operand_index], dtype=np.uint32).tofile(f)
def dump_mirrorpad_to_file(self, node, f):
assert(node.op == 'MirrorPad')
self.layer_number = self.layer_number + 1
mode = node.attr['mode'].s
mode = self.mirrorpad_mode[mode.decode("utf-8")]
np.array([self.op2code[node.op], mode], dtype=np.uint32).tofile(f)
pnode = self.name_node_dict[node.input[1]]
self.converted_nodes.add(pnode.name)
paddings = pnode.attr['value'].tensor.tensor_content
f.write(paddings)
self.converted_nodes.add(node.name)
input_operand_index = self.add_operand(node.input[0], Operand.IOTYPE_INPUT)
output_operand_index = self.add_operand(node.name, Operand.IOTYPE_OUTPUT)
np.array([input_operand_index, output_operand_index], dtype=np.uint32).tofile(f)
def dump_maximum_to_file(self, node, f):
assert(node.op == 'Maximum')
self.layer_number = self.layer_number + 1
ynode = self.name_node_dict[node.input[1]]
y = ynode.attr['value'].tensor.float_val[0]
np.array([self.op2code[node.op]], dtype=np.uint32).tofile(f)
np.array([y], dtype=np.float32).tofile(f)
self.converted_nodes.add(node.name)
input_operand_index = self.add_operand(node.input[0], Operand.IOTYPE_INPUT)
output_operand_index = self.add_operand(node.name, Operand.IOTYPE_OUTPUT)
np.array([input_operand_index, output_operand_index], dtype=np.uint32).tofile(f)
def dump_mathbinary_to_file(self, node, f):
self.layer_number = self.layer_number + 1
self.converted_nodes.add(node.name)
i0_node = self.name_node_dict[node.input[0]]
i1_node = self.name_node_dict[node.input[1]]
np.array([self.op2code['MathBinary'], self.mathbin2code[node.op]], dtype=np.uint32).tofile(f)
if i0_node.op == 'Const':
scalar = i0_node.attr['value'].tensor.float_val[0]
np.array([1], dtype=np.uint32).tofile(f) # broadcast: 1
np.array([scalar], dtype=np.float32).tofile(f)
np.array([0], dtype=np.uint32).tofile(f) # broadcast: 0
input_operand_index = self.add_operand(i1_node.name, Operand.IOTYPE_INPUT)
np.array([input_operand_index], dtype=np.uint32).tofile(f)
elif i1_node.op == 'Const':
scalar = i1_node.attr['value'].tensor.float_val[0]
np.array([0], dtype=np.uint32).tofile(f)
input_operand_index = self.add_operand(i0_node.name, Operand.IOTYPE_INPUT)
np.array([input_operand_index], dtype=np.uint32).tofile(f)
np.array([1], dtype=np.uint32).tofile(f)
np.array([scalar], dtype=np.float32).tofile(f)
else:
np.array([0], dtype=np.uint32).tofile(f)
input_operand_index = self.add_operand(i0_node.name, Operand.IOTYPE_INPUT)
np.array([input_operand_index], dtype=np.uint32).tofile(f)
np.array([0], dtype=np.uint32).tofile(f)
input_operand_index = self.add_operand(i1_node.name, Operand.IOTYPE_INPUT)
np.array([input_operand_index], dtype=np.uint32).tofile(f)
output_operand_index = self.add_operand(node.name, Operand.IOTYPE_OUTPUT)
np.array([output_operand_index], dtype=np.uint32).tofile(f)
def dump_layers_to_file(self, f):
for node in self.nodes:
if node.name in self.converted_nodes:
continue
# conv2d with dilation generates very complex nodes, so handle it in special
if self.in_conv2d_scope(node.name):
if node.op == 'Conv2D':
self.dump_complex_conv2d_to_file(node, f)
continue
if node.op == 'Conv2D':
self.dump_simple_conv2d_to_file(node, f)
elif node.op == 'DepthToSpace':
self.dump_depth2space_to_file(node, f)
elif node.op == 'MirrorPad':
self.dump_mirrorpad_to_file(node, f)
elif node.op == 'Maximum':
self.dump_maximum_to_file(node, f)
elif node.op == 'Sub':
self.dump_mathbinary_to_file(node, f)
elif node.op == 'Add':
self.dump_mathbinary_to_file(node, f)
elif node.op == 'Mul':
self.dump_mathbinary_to_file(node, f)
elif node.op == 'RealDiv':
self.dump_mathbinary_to_file(node, f)
def dump_operands_to_file(self, f):
operands = sorted(self.name_operand_dict.values())
for operand in operands:
#print('{}'.format(operand))
np.array([operand.index, len(operand.name)], dtype=np.uint32).tofile(f)
f.write(operand.name.encode('utf-8'))
np.array([operand.iotype, operand.dtype], dtype=np.uint32).tofile(f)
np.array([operand.dims[0], operand.dims[1], operand.dims[2], operand.dims[3]], dtype=np.uint32).tofile(f)
def dump_to_file(self):
with open(self.outfile, 'wb') as f:
f.write(header.str.encode('utf-8'))
np.array([header.major, header.minor], dtype=np.uint32).tofile(f)
self.dump_layers_to_file(f)
self.dump_operands_to_file(f)
np.array([self.layer_number, len(self.name_operand_dict)], dtype=np.uint32).tofile(f)
def generate_name_node_dict(self):
for node in self.nodes:
self.name_node_dict[node.name] = node
def generate_output_names(self):
used_names = []
for node in self.nodes:
for input in node.input:
used_names.append(input)
for node in self.nodes:
if node.name not in used_names:
self.output_names.append(node.name)
def remove_identity(self):
id_nodes = []
id_dict = {}
for node in self.nodes:
if node.op == 'Identity':
name = node.name
input = node.input[0]
id_nodes.append(node)
# do not change the output name
if name in self.output_names:
self.name_node_dict[input].name = name
self.name_node_dict[name] = self.name_node_dict[input]
del self.name_node_dict[input]
else:
id_dict[name] = input
for idnode in id_nodes:
self.nodes.remove(idnode)
for node in self.nodes:
for i in range(len(node.input)):
input = node.input[i]
if input in id_dict:
node.input[i] = id_dict[input]
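    # Build a reverse mapping from each input name to the nodes consuming it,
    # so a node's successors (e.g. BiasAdd -> activation) can be looked up.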
def generate_edges(self):
for node in self.nodes:
for input in node.input:
if input in self.edges:
self.edges[input].append(node)
else:
self.edges[input] = [node]
@staticmethod
def get_scope_name(name):
index = name.rfind('/')
if index == -1:
return ""
return name[0:index]
def in_conv2d_scope(self, name):
inner_scope = TFConverter.get_scope_name(name)
if inner_scope == "":
            return False
for scope in self.conv2d_scope_names:
index = inner_scope.find(scope)
if index == 0:
return True
return False
def generate_conv2d_scope_info(self):
# mostly, conv2d is a sub block in graph, get the scope name
for node in self.nodes:
if node.op == 'Conv2D':
scope = TFConverter.get_scope_name(node.name)
# for the case tf.nn.conv2d is called directly
if scope == '':
continue
# for the case tf.nn.conv2d is called within a scope
if scope + '/kernel' not in self.name_node_dict:
continue
self.conv2d_scope_names.add(scope)
# get the input name to the conv2d sub block
for node in self.nodes:
scope = TFConverter.get_scope_name(node.name)
if scope in self.conv2d_scope_names:
if node.op == 'Conv2D' or node.op == 'Shape':
for inp in node.input:
if TFConverter.get_scope_name(inp) != scope:
self.conv2d_scopename_inputname_dict[scope] = inp
def run(self):
self.generate_name_node_dict()
self.generate_output_names()
self.remove_identity()
self.generate_edges()
self.generate_conv2d_scope_info()
if self.dump4tb:
self.dump_for_tensorboard()
self.dump_to_file()
def convert_from_tensorflow(infile, outfile, dump4tb):
with open(infile, 'rb') as f:
# read the file in .proto format
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
nodes = graph_def.node
converter = TFConverter(graph_def, nodes, outfile, dump4tb)
converter.run()
| endlessm/chromium-browser | third_party/ffmpeg/tools/python/convert_from_tensorflow.py | Python | bsd-3-clause | 18,670 |
# -*- coding: utf-8 -*-
import os
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test import Client
from .....checkout.tests import BaseCheckoutAppTests
from .....delivery.tests import TestDeliveryProvider
from .....order import handler as order_handler
from .....payment import ConfirmationFormNeeded
from .....payment.tests import TestPaymentProvider
from .....pricing import handler as pricing_handler
from .....product import handler as product_handler
from .....product.tests import DeadParrot
from .....product.tests.pricing import FiveZlotyPriceHandler
from ..app import checkout_app
from .....cart.tests import TestCart
from .....order.tests import TestOrder
class TestPaymentProviderWithConfirmation(TestPaymentProvider):
def confirm(self, order, typ=None):
raise ConfirmationFormNeeded(action='http://test.payment.gateway.example.com')
class App(BaseCheckoutAppTests):
checkout_app = checkout_app
urls = BaseCheckoutAppTests.MockUrls(checkout_app=checkout_app)
def setUp(self):
checkout_app.cart_model = TestCart
checkout_app.order_model = TestOrder
self.parrot = DeadParrot.objects.create(slug='parrot',
species='Hyacinth Macaw')
self.dead_parrot = self.parrot.variants.create(color='blue',
looks_alive=False)
satchless_dir = os.path.join(os.path.dirname(__file__), '..', '..', '..', '..')
self.custom_settings = {
'SATCHLESS_PRODUCT_VIEW_HANDLERS': ('satchless.cart.add_to_cart_handler',),
'TEMPLATE_DIRS': (os.path.join(satchless_dir, 'category', 'templates'),
os.path.join(satchless_dir, 'order', 'templates'),
                              os.path.join(satchless_dir, 'cart', 'templates'),
os.path.join(os.path.join(os.path.dirname(__file__),
'templates')),
os.path.join(os.path.join(os.path.dirname(__file__), '..',
'templates'))),
'TEMPLATE_LOADERS': (
'django.template.loaders.filesystem.Loader',
)
}
self.original_settings = self._setup_settings(self.custom_settings)
product_handler.init_queue()
order_handler.delivery_queue = order_handler.DeliveryQueue(TestDeliveryProvider)
order_handler.payment_queue = order_handler.PaymentQueue(TestPaymentProviderWithConfirmation)
self.anon_client = Client()
self.original_pricing_handlers = settings.SATCHLESS_PRICING_HANDLERS
pricing_handler.pricing_queue = pricing_handler.PricingQueue(FiveZlotyPriceHandler)
def tearDown(self):
self._teardown_settings(self.original_settings, self.custom_settings)
product_handler.init_queue()
pricing_handler.pricing_queue = pricing_handler.PricingQueue(*self.original_pricing_handlers)
def test_checkout_view_passes_with_correct_data(self):
cart = self._get_or_create_cart_for_client(self.anon_client)
cart.replace_item(self.dead_parrot, 1)
order = self._get_or_create_order_for_client(self.anon_client)
response = self._test_status(reverse('checkout:checkout',
kwargs={'order_token':
order.token}),
client_instance=self.anon_client,
data={'email': '[email protected]'})
dg = response.context['delivery_group_forms']
data = {'billing_first_name': 'First',
'billing_last_name': 'Last',
'billing_street_address_1': 'Via Rodeo 1',
'billing_city': 'Beverly Hills',
'billing_country': 'US',
'billing_country_area': 'AZ',
'billing_phone': '555-555-5555',
'billing_postal_code': '90210'}
for g, typ, form in dg:
data[form.add_prefix('email')] = '[email protected]'
response = self._test_status(self.checkout_app.reverse('checkout',
kwargs={'order_token':
order.token}),
client_instance=self.anon_client,
status_code=302, method='post', data=data,
follow=True)
order = self.checkout_app.order_model.objects.get(pk=order.pk)
self.assertRedirects(response, reverse('checkout:confirmation',
kwargs={'order_token':
order.token}))
self.assertEqual(order.status, 'payment-pending')
def test_confirmation_view_redirects_when_order_or_payment_is_missing(self):
cart = self._get_or_create_cart_for_client(self.anon_client)
cart.replace_item(self.dead_parrot, 1)
order = self._get_or_create_order_for_client(self.anon_client)
# without payment
self._test_status(reverse('checkout:confirmation',
kwargs={'order_token': order.token}),
client_instance=self.anon_client, status_code=302)
# finish checkout view
response = self._test_status(self.checkout_app.reverse('checkout',
kwargs={'order_token':
order.token}),
client_instance=self.anon_client,
data={'email': '[email protected]'})
dg = response.context['delivery_group_forms']
data = {'billing_first_name': 'First',
'billing_last_name': 'Last',
'billing_street_address_1': 'Via Rodeo 1',
'billing_city': 'Beverly Hills',
'billing_country': 'US',
'billing_country_area': 'AZ',
'billing_phone': '555-555-5555',
'billing_postal_code': '90210'}
for g, typ, form in dg:
data[form.add_prefix('email')] = '[email protected]'
response = self._test_status(self.checkout_app.reverse('checkout',
kwargs={'order_token':
order.token}),
client_instance=self.anon_client,
status_code=302, method='post', data=data,
follow=True)
self._test_status(self.checkout_app.reverse('confirmation',
kwargs={'order_token':
order.token}),
client_instance=self.anon_client,
                          status_code=200)
| fusionbox/satchless | satchless/contrib/checkout/singlestep/tests/__init__.py | Python | bsd-3-clause | 7,298 |
import datetime
import logging
import os
import numpy as np
from matplotlib.path import Path
from matplotlib.widgets import Cursor, EllipseSelector, LassoSelector, RectangleSelector
from sastool.io.credo_cct import Exposure
from scipy.io import loadmat, savemat
from ..core.exposureloader import ExposureLoader
from ..core.filechooser import DoubleFileChooserDialog
from ..core.plotimage import PlotImageWidget
from ..core.toolwindow import ToolWindow
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class MaskEditor(ToolWindow, DoubleFileChooserDialog):
def __init__(self, *args, **kwargs):
self.mask = None
self._undo_stack = []
self._im = None
self._selector = None
self._cursor = None
self.exposureloader = None
self.plot2d = None
ToolWindow.__init__(self, *args, **kwargs)
DoubleFileChooserDialog.__init__(
self, self.widget, 'Open mask file...', 'Save mask file...', [('Mask files', '*.mat'), ('All files', '*')],
self.instrument.config['path']['directories']['mask'],
os.path.abspath(self.instrument.config['path']['directories']['mask']),
)
def init_gui(self, *args, **kwargs):
self.exposureloader = ExposureLoader(self.instrument)
self.builder.get_object('loadexposure_expander').add(self.exposureloader)
self.exposureloader.connect('open', self.on_loadexposure)
self.plot2d = PlotImageWidget()
self.builder.get_object('plotbox').pack_start(self.plot2d.widget, True, True, 0)
self.builder.get_object('toolbar').set_sensitive(False)
def on_loadexposure(self, exposureloader: ExposureLoader, im: Exposure):
if self.mask is None:
self.mask = im.mask
self._im = im
self.plot2d.set_image(im.intensity)
self.plot2d.set_mask(self.mask)
self.builder.get_object('toolbar').set_sensitive(True)
def on_new(self, button):
if self._im is None or self.mask is None:
return False
self.mask = np.ones_like(self.mask)
self.plot2d.set_mask(self.mask)
self.set_last_filename(None)
def on_open(self, button):
filename = self.get_open_filename()
if filename is not None:
mask = loadmat(filename)
self.mask = mask[[k for k in mask.keys() if not k.startswith('__')][0]]
self.plot2d.set_mask(self.mask)
def on_save(self, button):
filename = self.get_last_filename()
if filename is None:
return self.on_saveas(button)
maskname = os.path.splitext(os.path.split(filename)[1])[0]
savemat(filename, {maskname: self.mask})
def on_saveas(self, button):
filename = self.get_save_filename(None)
if filename is not None:
self.on_save(button)
def suggest_filename(self):
return 'mask_dist_{0.year:d}{0.month:02d}{0.day:02d}.mat'.format(datetime.date.today())
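    # The three selector toggles below share one pattern: grey out the rest of
    # the UI, drop the matplotlib toolbar out of zoom/pan mode, then attach a
    # matplotlib widget that calls back with the selected region.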
def on_selectcircle_toggled(self, button):
if button.get_active():
self.set_sensitive(False, 'Ellipse selection not ready',
['new_button', 'save_button', 'saveas_button', 'open_button', 'undo_button',
'selectrectangle_button', 'selectpolygon_button', 'pixelhunting_button',
'loadexposure_expander', 'close_button', self.plot2d.toolbar,
self.plot2d.settings_expander])
while self.plot2d.toolbar.mode != '':
# turn off zoom, pan, etc. modes.
self.plot2d.toolbar.zoom()
self._selector = EllipseSelector(self.plot2d.axis,
self.on_ellipse_selected,
rectprops={'facecolor': 'white', 'edgecolor': 'none', 'alpha': 0.7,
'fill': True, 'zorder': 10},
button=[1, ],
interactive=False, lineprops={'zorder': 10})
self._selector.state.add('square')
self._selector.state.add('center')
else:
assert isinstance(self._selector, EllipseSelector)
self._selector.set_active(False)
self._selector.set_visible(False)
self._selector = None
self.plot2d.replot(keepzoom=False)
self.set_sensitive(True)
def on_ellipse_selected(self, pos1, pos2):
# pos1 and pos2 are mouse button press and release events, with xdata and ydata carrying
# the two opposite corners of the bounding box of the circle. These are NOT the exact
# button presses and releases!
row = np.arange(self.mask.shape[0])[:, np.newaxis]
column = np.arange(self.mask.shape[1])[np.newaxis, :]
row0 = 0.5 * (pos1.ydata + pos2.ydata)
col0 = 0.5 * (pos1.xdata + pos2.xdata)
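        # The two corners span the diagonal of the square bounding box, so the
        # squared radius is (side/2)^2 = diagonal^2 / 8.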
r2 = ((pos2.xdata - pos1.xdata) ** 2 + (pos2.ydata - pos1.ydata) ** 2) / 8
tobemasked = (row - row0) ** 2 + (column - col0) ** 2 <= r2
self._undo_stack.append(self.mask)
if self.builder.get_object('mask_button').get_active():
self.mask &= ~tobemasked
elif self.builder.get_object('unmask_button').get_active():
self.mask |= tobemasked
elif self.builder.get_object('invertmask_button').get_active():
self.mask[tobemasked] = ~self.mask[tobemasked]
else:
pass
self.builder.get_object('selectcircle_button').set_active(False)
self.plot2d.set_mask(self.mask)
def on_selectrectangle_toggled(self, button):
if button.get_active():
self.set_sensitive(False, 'Rectangle selection not ready',
['new_button', 'save_button', 'saveas_button', 'open_button', 'undo_button',
'selectcircle_button', 'selectpolygon_button', 'pixelhunting_button',
'loadexposure_expander', 'close_button', self.plot2d.toolbar,
self.plot2d.settings_expander])
while self.plot2d.toolbar.mode != '':
# turn off zoom, pan, etc. modes.
self.plot2d.toolbar.zoom()
self._selector = RectangleSelector(self.plot2d.axis,
self.on_rectangle_selected,
rectprops={'facecolor': 'white', 'edgecolor': 'none', 'alpha': 0.7,
'fill': True, 'zorder': 10},
button=[1, ],
interactive=False, lineprops={'zorder': 10})
else:
self._selector.set_active(False)
self._selector.set_visible(False)
self._selector = None
self.plot2d.replot(keepzoom=False)
self.set_sensitive(True)
def on_rectangle_selected(self, pos1, pos2):
# pos1 and pos2 are mouse button press and release events, with xdata and ydata
# carrying the two opposite corners of the bounding box of the rectangle. These
# are NOT the exact button presses and releases!
row = np.arange(self.mask.shape[0])[:, np.newaxis]
column = np.arange(self.mask.shape[1])[np.newaxis, :]
tobemasked = ((row >= min(pos1.ydata, pos2.ydata)) & (row <= max(pos1.ydata, pos2.ydata)) &
(column >= min(pos1.xdata, pos2.xdata)) & (column <= max(pos1.xdata, pos2.xdata)))
self._undo_stack.append(self.mask)
if self.builder.get_object('mask_button').get_active():
self.mask = self.mask & (~tobemasked)
elif self.builder.get_object('unmask_button').get_active():
self.mask = self.mask | tobemasked
elif self.builder.get_object('invertmask_button').get_active():
self.mask[tobemasked] = ~self.mask[tobemasked]
else:
pass
self.builder.get_object('selectrectangle_button').set_active(False)
self.plot2d.set_mask(self.mask)
def on_selectpolygon_toggled(self, button):
if button.get_active():
self.set_sensitive(False, 'Polygon selection not ready',
['new_button', 'save_button', 'saveas_button', 'open_button', 'undo_button',
'selectrectangle_button', 'selectcircle_button', 'pixelhunting_button',
'loadexposure_expander', 'close_button', self.plot2d.toolbar,
self.plot2d.settings_expander])
while self.plot2d.toolbar.mode != '':
# turn off zoom, pan, etc. modes.
self.plot2d.toolbar.zoom()
self._selector = LassoSelector(self.plot2d.axis,
self.on_polygon_selected,
lineprops={'color': 'white', 'zorder': 10},
button=[1, ],
)
else:
self._selector.set_active(False)
self._selector.set_visible(False)
self._selector = None
self.plot2d.replot(keepzoom=False)
self.set_sensitive(True)
def on_polygon_selected(self, vertices):
path = Path(vertices)
col, row = np.meshgrid(np.arange(self.mask.shape[1]),
np.arange(self.mask.shape[0]))
points = np.vstack((col.flatten(), row.flatten())).T
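        # Flag every pixel whose centre falls inside the lasso path.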
tobemasked = path.contains_points(points).reshape(self.mask.shape)
self._undo_stack.append(self.mask)
if self.builder.get_object('mask_button').get_active():
self.mask = self.mask & (~tobemasked)
elif self.builder.get_object('unmask_button').get_active():
self.mask = self.mask | tobemasked
elif self.builder.get_object('invertmask_button').get_active():
self.mask[tobemasked] = ~self.mask[tobemasked]
else:
pass
self.plot2d.set_mask(self.mask)
self.builder.get_object('selectpolygon_button').set_active(False)
def on_mask_toggled(self, button):
pass
def on_unmask_toggled(self, button):
pass
def on_invertmask_toggled(self, button):
pass
def on_pixelhunting_toggled(self, button):
if button.get_active():
self._cursor = Cursor(self.plot2d.axis, useblit=False, color='white', lw=1)
self._cursor.connect_event('button_press_event', self.on_cursorclick)
while self.plot2d.toolbar.mode != '':
# turn off zoom, pan, etc. modes.
self.plot2d.toolbar.zoom()
else:
self._cursor.disconnect_events()
self._cursor = None
self._undo_stack.append(self.mask)
self.plot2d.replot(keepzoom=False)
def on_cursorclick(self, event):
if (event.inaxes == self.plot2d.axis) and (self.plot2d.toolbar.mode == ''):
self.mask[round(event.ydata), round(event.xdata)] ^= True
self._cursor.disconnect_events()
self._cursor = None
self.plot2d.replot(keepzoom=True)
self.on_pixelhunting_toggled(self.builder.get_object('pixelhunting_button'))
def cleanup(self):
super().cleanup()
self._undo_stack = []
def on_undo(self, button):
try:
self.mask = self._undo_stack.pop()
except IndexError:
return
self.plot2d.set_mask(self.mask)
| awacha/cct | attic/gui/tools/maskeditor.py | Python | bsd-3-clause | 11,717 |
import numpy as np
from numpy.testing import (assert_equal,
assert_almost_equal,
assert_raises)
import skimage
from skimage import data
from skimage._shared._warnings import expected_warnings
from skimage.filters.thresholding import (threshold_adaptive,
threshold_otsu,
threshold_li,
threshold_yen,
threshold_isodata,
threshold_mean,
threshold_triangle,
threshold_minimum)
class TestSimpleImage():
def setup(self):
self.image = np.array([[0, 0, 1, 3, 5],
[0, 1, 4, 3, 4],
[1, 2, 5, 4, 1],
[2, 4, 5, 2, 1],
[4, 5, 1, 0, 0]], dtype=int)
def test_otsu(self):
assert threshold_otsu(self.image) == 2
def test_otsu_negative_int(self):
image = self.image - 2
assert threshold_otsu(image) == 0
def test_otsu_float_image(self):
image = np.float64(self.image)
assert 2 <= threshold_otsu(image) < 3
def test_li(self):
assert int(threshold_li(self.image)) == 2
def test_li_negative_int(self):
image = self.image - 2
assert int(threshold_li(image)) == 0
def test_li_float_image(self):
image = np.float64(self.image)
assert 2 <= threshold_li(image) < 3
def test_li_constant_image(self):
assert_raises(ValueError, threshold_li, np.ones((10,10)))
def test_yen(self):
assert threshold_yen(self.image) == 2
def test_yen_negative_int(self):
image = self.image - 2
assert threshold_yen(image) == 0
def test_yen_float_image(self):
image = np.float64(self.image)
assert 2 <= threshold_yen(image) < 3
def test_yen_arange(self):
image = np.arange(256)
assert threshold_yen(image) == 127
def test_yen_binary(self):
image = np.zeros([2, 256], dtype=np.uint8)
image[0] = 255
assert threshold_yen(image) < 1
def test_yen_blank_zero(self):
image = np.zeros((5, 5), dtype=np.uint8)
assert threshold_yen(image) == 0
def test_yen_blank_max(self):
image = np.empty((5, 5), dtype=np.uint8)
image.fill(255)
assert threshold_yen(image) == 255
def test_isodata(self):
assert threshold_isodata(self.image) == 2
assert threshold_isodata(self.image, return_all=True) == [2]
def test_isodata_blank_zero(self):
image = np.zeros((5, 5), dtype=np.uint8)
assert threshold_isodata(image) == 0
assert threshold_isodata(image, return_all=True) == [0]
def test_isodata_linspace(self):
image = np.linspace(-127, 0, 256)
assert -63.8 < threshold_isodata(image) < -63.6
assert_almost_equal(threshold_isodata(image, return_all=True),
[-63.74804688, -63.25195312])
def test_isodata_16bit(self):
np.random.seed(0)
imfloat = np.random.rand(256, 256)
assert 0.49 < threshold_isodata(imfloat, nbins=1024) < 0.51
assert all(0.49 < threshold_isodata(imfloat, nbins=1024,
return_all=True))
def test_threshold_adaptive_generic(self):
def func(arr):
return arr.sum() / arr.shape[0]
ref = np.array(
[[False, False, False, False, True],
[False, False, True, False, True],
[False, False, True, True, False],
[False, True, True, False, False],
[ True, True, False, False, False]]
)
out = threshold_adaptive(self.image, 3, method='generic', param=func)
assert_equal(ref, out)
def test_threshold_adaptive_gaussian(self):
ref = np.array(
[[False, False, False, False, True],
[False, False, True, False, True],
[False, False, True, True, False],
[False, True, True, False, False],
[ True, True, False, False, False]]
)
out = threshold_adaptive(self.image, 3, method='gaussian')
assert_equal(ref, out)
out = threshold_adaptive(self.image, 3, method='gaussian',
param=1./3.)
assert_equal(ref, out)
def test_threshold_adaptive_mean(self):
ref = np.array(
[[False, False, False, False, True],
[False, False, True, False, True],
[False, False, True, True, False],
[False, True, True, False, False],
[ True, True, False, False, False]]
)
out = threshold_adaptive(self.image, 3, method='mean')
assert_equal(ref, out)
def test_threshold_adaptive_median(self):
ref = np.array(
[[False, False, False, False, True],
[False, False, True, False, False],
[False, False, True, False, False],
[False, False, True, True, False],
[False, True, False, False, False]]
)
out = threshold_adaptive(self.image, 3, method='median')
assert_equal(ref, out)
def test_otsu_camera_image():
camera = skimage.img_as_ubyte(data.camera())
assert 86 < threshold_otsu(camera) < 88
def test_otsu_coins_image():
coins = skimage.img_as_ubyte(data.coins())
assert 106 < threshold_otsu(coins) < 108
def test_otsu_coins_image_as_float():
coins = skimage.img_as_float(data.coins())
assert 0.41 < threshold_otsu(coins) < 0.42
def test_otsu_astro_image():
img = skimage.img_as_ubyte(data.astronaut())
with expected_warnings(['grayscale']):
assert 109 < threshold_otsu(img) < 111
def test_otsu_one_color_image():
img = np.ones((10, 10), dtype=np.uint8)
assert_raises(ValueError, threshold_otsu, img)
def test_li_camera_image():
camera = skimage.img_as_ubyte(data.camera())
assert 63 < threshold_li(camera) < 65
def test_li_coins_image():
coins = skimage.img_as_ubyte(data.coins())
assert 95 < threshold_li(coins) < 97
def test_li_coins_image_as_float():
coins = skimage.img_as_float(data.coins())
assert 0.37 < threshold_li(coins) < 0.38
def test_li_astro_image():
img = skimage.img_as_ubyte(data.astronaut())
assert 66 < threshold_li(img) < 68
def test_yen_camera_image():
camera = skimage.img_as_ubyte(data.camera())
assert 197 < threshold_yen(camera) < 199
def test_yen_coins_image():
coins = skimage.img_as_ubyte(data.coins())
assert 109 < threshold_yen(coins) < 111
def test_yen_coins_image_as_float():
coins = skimage.img_as_float(data.coins())
assert 0.43 < threshold_yen(coins) < 0.44
def test_adaptive_even_block_size_error():
img = data.camera()
assert_raises(ValueError, threshold_adaptive, img, block_size=4)
def test_isodata_camera_image():
camera = skimage.img_as_ubyte(data.camera())
threshold = threshold_isodata(camera)
assert np.floor((camera[camera <= threshold].mean() +
camera[camera > threshold].mean()) / 2.0) == threshold
assert threshold == 87
assert threshold_isodata(camera, return_all=True) == [87]
def test_isodata_coins_image():
coins = skimage.img_as_ubyte(data.coins())
threshold = threshold_isodata(coins)
assert np.floor((coins[coins <= threshold].mean() +
coins[coins > threshold].mean()) / 2.0) == threshold
assert threshold == 107
assert threshold_isodata(coins, return_all=True) == [107]
def test_isodata_moon_image():
moon = skimage.img_as_ubyte(data.moon())
threshold = threshold_isodata(moon)
assert np.floor((moon[moon <= threshold].mean() +
moon[moon > threshold].mean()) / 2.0) == threshold
assert threshold == 86
thresholds = threshold_isodata(moon, return_all=True)
for threshold in thresholds:
assert np.floor((moon[moon <= threshold].mean() +
moon[moon > threshold].mean()) / 2.0) == threshold
assert_equal(thresholds, [86, 87, 88, 122, 123, 124, 139, 140])
def test_isodata_moon_image_negative_int():
moon = skimage.img_as_ubyte(data.moon()).astype(np.int32)
moon -= 100
threshold = threshold_isodata(moon)
assert np.floor((moon[moon <= threshold].mean() +
moon[moon > threshold].mean()) / 2.0) == threshold
assert threshold == -14
thresholds = threshold_isodata(moon, return_all=True)
for threshold in thresholds:
assert np.floor((moon[moon <= threshold].mean() +
moon[moon > threshold].mean()) / 2.0) == threshold
assert_equal(thresholds, [-14, -13, -12, 22, 23, 24, 39, 40])
def test_isodata_moon_image_negative_float():
moon = skimage.img_as_ubyte(data.moon()).astype(np.float64)
moon -= 100
assert -14 < threshold_isodata(moon) < -13
thresholds = threshold_isodata(moon, return_all=True)
assert_almost_equal(thresholds,
[-13.83789062, -12.84179688, -11.84570312, 22.02148438,
23.01757812, 24.01367188, 38.95507812, 39.95117188])
def test_threshold_minimum():
camera = skimage.img_as_ubyte(data.camera())
threshold = threshold_minimum(camera)
assert threshold == 76
threshold = threshold_minimum(camera, bias='max')
assert threshold == 77
astronaut = skimage.img_as_ubyte(data.astronaut())
threshold = threshold_minimum(astronaut)
assert threshold == 117
def test_threshold_minimum_synthetic():
img = np.arange(25*25, dtype=np.uint8).reshape((25, 25))
img[0:9, :] = 50
img[14:25, :] = 250
threshold = threshold_minimum(img, bias='min')
assert threshold == 93
threshold = threshold_minimum(img, bias='mid')
assert threshold == 159
threshold = threshold_minimum(img, bias='max')
assert threshold == 225
def test_threshold_minimum_failure():
img = np.zeros((16*16), dtype=np.uint8)
assert_raises(RuntimeError, threshold_minimum, img)
def test_mean():
img = np.zeros((2, 6))
img[:, 2:4] = 1
img[:, 4:] = 2
assert(threshold_mean(img) == 1.)
def test_triangle_uint_images():
assert(threshold_triangle(np.invert(data.text())) == 151)
assert(threshold_triangle(data.text()) == 104)
assert(threshold_triangle(data.coins()) == 80)
assert(threshold_triangle(np.invert(data.coins())) == 175)
def test_triangle_float_images():
text = data.text()
int_bins = text.max() - text.min() + 1
# Set nbins to match the uint case and threshold as float.
assert(round(threshold_triangle(
        text.astype(float), nbins=int_bins)) == 104)
# Check that rescaling image to floats in unit interval is equivalent.
assert(round(threshold_triangle(text / 255., nbins=int_bins) * 255) == 104)
# Repeat for inverted image.
assert(round(threshold_triangle(
        np.invert(text).astype(float), nbins=int_bins)) == 151)
assert (round(threshold_triangle(
np.invert(text) / 255., nbins=int_bins) * 255) == 151)
def test_triangle_flip():
# Depending on the skewness, the algorithm flips the histogram.
# We check that the flip doesn't affect too much the result.
img = data.camera()
inv_img = np.invert(img)
t = threshold_triangle(inv_img)
t_inv_img = inv_img > t
t_inv_inv_img = np.invert(t_inv_img)
t = threshold_triangle(img)
t_img = img > t
# Check that most of the pixels are identical
# See numpy #7685 for a future np.testing API
unequal_pos = np.where(t_img.ravel() != t_inv_inv_img.ravel())
assert(len(unequal_pos[0]) / t_img.size < 1e-2)
if __name__ == '__main__':
np.testing.run_module_suite()
| vighneshbirodkar/scikit-image | skimage/filters/tests/test_thresholding.py | Python | bsd-3-clause | 11,985 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "content_edit_proj.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| burke-software/django-content-edit | manage.py | Python | bsd-3-clause | 260 |
# -*- coding: utf-8 -*-
# __author__ = chenchiyuan
from __future__ import division, unicode_literals, print_function
from django.core.management import BaseCommand
from applications.posts.models import Post
class Command(BaseCommand):
def handle(self, *args, **options):
posts = Post.objects.all()
delete_ids = []
for post in posts:
print(post.title)
            if len(post.title) < 4:  # assuming a title-length check was intended
continue
check_posts = Post.objects.filter(title=post.title).order_by('-rating')
dirty_ids = check_posts[1:].values_list("id", flat=True)
delete_ids.extend(dirty_ids)
        Post.objects.filter(id__in=list(set(delete_ids))).delete()
| chenchiyuan/yajiong | applications/posts/management/commands/clear_posts_same.py | Python | bsd-3-clause | 710 |
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
from saef_app.core import models
| echevemaster/saef | saef_app/core/database.py | Python | bsd-3-clause | 96 |
from django.db import models
from .managers import CRUDManager, CRUDException
class CRUDFilterModel(models.Model):
class Meta:
abstract = True
@classmethod
def verify_user_has_role(cls, user, role, request):
"""
Call user-defined auth function to determine if this user can use this role.
"""
if role in ['anonymous', 'authenticated']:
return True
elif role == "admin":
return user.is_superuser
if CRUDManager.auth_function is None:
raise CRUDException("You must define an auth_function for CRUDManagerMixin", 500)
try:
value = CRUDManager.auth_function(role, user, request)
except Exception as exc:
raise CRUDException("Your auth_function in CRUDManager threw an exception: " + str(exc), 500)
if not value:
raise CRUDException("This user is not authorized to use this role", 403)
return True
@classmethod
def role_can_perform_operation_with_filter(cls, role, operation, filter_str):
"""
For this class, make sure this role can perform this operation (with this filter)
"""
# print("Check cls ", str(cls), " role ", role, " operation ", operation, " filter_str ", filter_str)
if operation.upper() not in ['C', 'R', 'U', 'D']:
raise CRUDException("Operation must be one of: 'C', 'R', 'U', 'D'", 500)
try:
filters = CRUDManager.get_filter_set_for_model(cls)['allowed_methods'][role]
except KeyError:
# Users that are simply authenticated are not allowed:
# DUBIOUS LOGIC -- return this if filters is {'__default': None} and role is authenticated. anonymous = 401
if role == "authenticated":
raise CRUDException("You must specify a role for this endpoint in the ROLE header", 400)
# Invalid role:
else:
raise CRUDException(role + " is not a valid role", 400)
try:
allowed_methods = filters[filter_str]
except KeyError:
# print(filter_str, " not a valid filter for cls ", str(cls), ", role ", role, " -- ", filters)
raise CRUDException(filter_str + " is not a valid filter here", 400)
# print("Role: ", role, ", allowed_methods: ", str(allowed_methods), " operation: ", operation)
if allowed_methods is not None and operation.upper() in [method.upper() for method in allowed_methods]:
return True
else:
return False
@classmethod
def __get_objects(cls, user, role, operation, filters=['__default'], request=None):
"""
Return queryset that this user/role has access to (given these filters)
"""
# UNSAFE to call this function from outside of the "get_queryset_or_false" function.
# If this is not an abstract class, start with all objects, and filter down.
if hasattr(cls, 'objects'):
object_set = cls.objects.all()
else:
object_set = []
try:
for filter_str in filters:
# print("__get_objects with role ", role, " operation ", operation, " filter ", filter_str, " func: ", str(CRUDManager.get_filter_set_for_model(cls)['filter'][role][filter_str]))
object_set = CRUDManager.get_filter_set_for_model(cls)['filter'][role][filter_str](object_set, user, request)
except CRUDException:
# Elevate CRUDExceptions to be caught by middleware
raise
except Exception:
raise CRUDException("Error calling filter functions. Please see the 'QuerySet vs Manager Methods' section of the documentation.", 400)
return object_set
@classmethod
def check_for_permissions(cls, user, role, operation, request, filters=['__default']):
"""
Make sure this role can perform this operation
"""
cls.verify_user_has_role(user, role, request)
for filter_str in filters:
if not cls.role_can_perform_operation_with_filter(role, operation, filter_str):
raise CRUDException("Cannot perform this operation with this role.", status_code=403)
@classmethod
def get_queryset_or_false(cls, user, role, operation, filters=['__default'], request=None, _id=-1, lookup_field='pk'):
"""
Return queryset (and make sure this item is in the queryset)
"""
# Redundant?
cls.check_for_permissions(user, role, operation, request, filters)
# Get our objects:
object_set = cls.__get_objects(user, role, operation, filters, request)
# If this is a single-object operation, we have to have a valid ID
if operation.upper() in ['U', 'D']:
if _id == -1:
raise CRUDException("ID must be specified for Update and Delete", 400)
else:
kwargs = {'{0}'.format(lookup_field): _id}
if object_set.filter(**kwargs).count() == 0:
# It's possible that the object just doesn't exist... but we'll return a 403 to obfuscate
raise CRUDException("Cannot perform this operation on this object.", status_code=403)
# Later, we can start to perform different operations here:
# if operation == 'R':
# return object_set
# elif operation == "C":
# ....
return object_set
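# Illustrative use from a view, assuming MyModel subclasses CRUDFilterModel
# (the names here are hypothetical):
#   queryset = MyModel.get_queryset_or_false(request.user, 'admin', 'R',
#                                            request=request)
# A CRUDException raised anywhere above carries a status code and is meant to
# be converted into an HTTP response by middleware.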
| areedtomlinson/django-crud-filters | CRUDFilters/models.py | Python | mit | 5,473 |
"""frosted/checker.py.
The core functionality of frosted lives here. Implements the core checking capability and models Bindings and Scopes.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import builtins
import doctest
import itertools
import os
import pkg_resources
import sys
from pies import ast
from pies.overrides import *
from frosted import messages
PY34_GTE = sys.version_info >= (3, 4)
FROSTED_BUILTINS = set(dir(builtins) + ['__file__', '__builtins__', '__debug__', '__name__', 'WindowsError',
'__import__'] +
os.environ.get('PYFLAKES_BUILTINS', '').split(','))
def node_name(node):
"""
Convenience function: Returns node.id, or node.name, or None
"""
return hasattr(node, 'id') and node.id or hasattr(node, 'name') and node.name
class Binding(object):
"""Represents the binding of a value to a name.
The checker uses this to keep track of which names have been bound and which names have not. See Assignment for a
special type of binding that is checked with stricter rules.
"""
__slots__ = ('name', 'source', 'used')
def __init__(self, name, source):
self.name = name
self.source = source
self.used = False
def __str__(self):
return self.name
def __repr__(self):
return '<%s object %r from line %r at 0x%x>' % (self.__class__.__name__,
self.name,
self.source.lineno,
id(self))
class Importation(Binding):
"""A binding created by an import statement."""
__slots__ = ('fullName', )
def __init__(self, name, source):
self.fullName = name
name = name.split('.')[0]
super(Importation, self).__init__(name, source)
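# Illustrative: Importation('os.path', node) records fullName 'os.path' but
# binds only the top-level name 'os' in the importing scope.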
class Argument(Binding):
"""Represents binding a name as an argument."""
__slots__ = ()
class Definition(Binding):
"""A binding that defines a function or a class."""
__slots__ = ()
class Assignment(Binding):
"""Represents binding a name with an explicit assignment.
The checker will raise warnings for any Assignment that isn't used. Also, the checker does not consider assignments
in tuple/list unpacking to be Assignments, rather it treats them as simple Bindings.
"""
__slots__ = ()
class FunctionDefinition(Definition):
__slots__ = ('signature', )
def __init__(self, name, source):
super(FunctionDefinition, self).__init__(name, source)
self.signature = FunctionSignature(source)
class ClassDefinition(Definition):
__slots__ = ()
class ExportBinding(Binding):
"""A binding created by an __all__ assignment. If the names in the list
can be determined statically, they will be treated as names for export and
additional checking applied to them.
The only __all__ assignment that can be recognized is one which takes
the value of a literal list containing literal strings. For example:
__all__ = ["foo", "bar"]
Names which are imported and not otherwise used but appear in the value of
__all__ will not have an unused import warning reported for them.
"""
__slots__ = ()
def names(self):
"""Return a list of the names referenced by this binding."""
names = []
if isinstance(self.source, ast.List):
for node in self.source.elts:
if isinstance(node, ast.Str):
names.append(node.s)
return names
class Scope(dict):
importStarred = False # set to True when import * is found
def __repr__(self):
scope_cls = self.__class__.__name__
return '<%s at 0x%x %s>' % (scope_cls, id(self), dict.__repr__(self))
class ClassScope(Scope):
pass
class FunctionScope(Scope):
"""Represents the name scope for a function."""
uses_locals = False
always_used = set(['__tracebackhide__', '__traceback_info__', '__traceback_supplement__'])
def __init__(self):
Scope.__init__(self)
self.globals = self.always_used.copy()
def unusedAssignments(self):
"""Return a generator for the assignments which have not been used."""
for name, binding in self.items():
if (not binding.used and name not in self.globals
and not self.uses_locals
and isinstance(binding, Assignment)):
yield name, binding
class GeneratorScope(Scope):
pass
class ModuleScope(Scope):
pass
class FunctionSignature(object):
__slots__ = ('decorated', 'argument_names', 'default_count', 'kw_only_argument_names', 'default_count',
'kw_only_argument_names', 'kw_only_default_count', 'has_var_arg', 'has_kw_arg')
def __init__(self, node):
self.decorated = bool(any(node.decorator_list))
self.argument_names = ast.argument_names(node)
self.default_count = len(node.args.defaults)
self.kw_only_argument_names = ast.kw_only_argument_names(node)
self.kw_only_default_count = ast.kw_only_default_count(node)
self.has_var_arg = node.args.vararg is not None
self.has_kw_arg = node.args.kwarg is not None
def min_argument_count(self):
return len(self.argument_names) - self.default_count
def maxArgumentCount(self):
return len(self.argument_names)
def checkCall(self, call_node, reporter, name):
if self.decorated:
return
filledSlots = set()
filledKwOnlySlots = set()
for item, arg in enumerate(call_node.args):
if item >= len(self.argument_names):
if not self.has_var_arg:
return reporter.report(messages.TooManyArguments, call_node, name, self.maxArgumentCount())
break
filledSlots.add(item)
for kw in call_node.keywords:
slots = None
try:
argIndex = self.argument_names.index(kw.arg)
slots = filledSlots
except ValueError:
try:
argIndex = self.kw_only_argument_names.index(kw.arg)
slots = filledKwOnlySlots
except ValueError:
if self.has_kw_arg:
continue
else:
return reporter.report(messages.UnexpectedArgument, call_node, name, kw.arg)
if argIndex in slots:
return reporter.report(messages.MultipleValuesForArgument, call_node, name, kw.arg)
slots.add(argIndex)
filledSlots.update(range(len(self.argument_names) - self.default_count, len(self.argument_names)))
filledKwOnlySlots.update(range(len(self.kw_only_argument_names) - self.kw_only_default_count,
len(self.kw_only_argument_names)))
if (len(filledSlots) < len(self.argument_names) and not call_node.starargs and not call_node.kwargs):
return reporter.report(messages.TooFewArguments, call_node, name, self.min_argument_count())
if (len(filledKwOnlySlots) < len(self.kw_only_argument_names) and not call_node.kwargs):
missing_arguments = [repr(arg) for i, arg in enumerate(self.kw_only_argument_names)
if i not in filledKwOnlySlots]
return reporter.report(messages.NeedKwOnlyArgument, call_node, name, ', '.join(missing_arguments))
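# Illustrative: for a signature like `def f(a, b=1)`, checkCall reports `f()`
# as TooFewArguments (min_argument_count() == 1), `f(1, 2, 3)` as
# TooManyArguments, and `f(1, a=2)` as MultipleValuesForArgument.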
class Checker(object):
"""The core of frosted, checks the cleanliness and sanity of Python code."""
node_depth = 0
offset = None
trace_tree = False
frosted_builtins = FROSTED_BUILTINS
def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings):
self.settings = settings
self.ignore_errors = settings.get('ignore_frosted_errors', [])
self.ignore_lines = ignore_lines
file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None)
if file_specific_ignores:
self.ignore_errors += file_specific_ignores
self._node_handlers = {}
self._deferred_functions = []
self._deferred_assignments = []
self.dead_scopes = []
self.messages = []
self.filename = filename
if builtins:
self.frosted_builtins = self.frosted_builtins.union(builtins)
self.scope_stack = [ModuleScope()]
self.except_handlers = [()]
self.futures_allowed = True
self.root = tree
self.handle_children(tree)
self.run_deferred(self._deferred_functions)
self._deferred_functions = None
self.run_deferred(self._deferred_assignments)
self._deferred_assignments = None
del self.scope_stack[1:]
self.pop_scope()
self.check_dead_scopes()
self.check_plugins()
def check_plugins(self):
""" collect plugins from entry point 'frosted.plugins'
and run their check() method, passing the filename
"""
checkers = {}
for ep in pkg_resources.iter_entry_points(group='frosted.plugins'):
checkers.update({ep.name: ep.load()})
for plugin_name, plugin in checkers.items():
if self.filename != '(none)':
messages = plugin.check(self.filename)
for message, loc, args, kwargs in messages:
self.report(message, loc, *args, **kwargs)
def defer_function(self, callable):
"""Schedule a function handler to be called just before completion.
This is used for handling function bodies, which must be deferred because code later in the file might modify
the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it
will contain any new bindings added to it.
"""
self._deferred_functions.append((callable, self.scope_stack[:], self.offset))
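    # Illustrative: FUNCTIONDEF defers its runFunction handler through this
    # method, so a module-level name defined after the `def` statement is
    # already bound by the time the function body is checked.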
def defer_assignment(self, callable):
"""Schedule an assignment handler to be called just after deferred
function handlers."""
self._deferred_assignments.append((callable, self.scope_stack[:], self.offset))
def run_deferred(self, deferred):
"""Run the callables in deferred using their associated scope stack."""
for handler, scope, offset in deferred:
self.scope_stack = scope
self.offset = offset
handler()
@property
def scope(self):
return self.scope_stack[-1]
def pop_scope(self):
self.dead_scopes.append(self.scope_stack.pop())
def check_dead_scopes(self):
"""Look at scopes which have been fully examined and report names in
them which were imported but unused."""
for scope in self.dead_scopes:
export = isinstance(scope.get('__all__'), ExportBinding)
if export:
all = scope['__all__'].names()
# Look for possible mistakes in the export list
if not scope.importStarred and os.path.basename(self.filename) != '__init__.py':
undefined = set(all) - set(scope)
for name in undefined:
self.report(messages.UndefinedExport, scope['__all__'].source, name)
else:
all = []
# Look for imported names that aren't used without checking imports in namespace definition
for importation in scope.values():
if isinstance(importation, Importation) and not importation.used and importation.name not in all:
self.report(messages.UnusedImport, importation.source, importation.name)
def push_scope(self, scope_class=FunctionScope):
self.scope_stack.append(scope_class())
def push_function_scope(self): # XXX Deprecated
self.push_scope(FunctionScope)
def push_class_scope(self): # XXX Deprecated
self.push_scope(ClassScope)
def report(self, message_class, *args, **kwargs):
error_code = message_class.error_code
        if (error_code[:2] + "00" not in self.ignore_errors
                and error_code not in self.ignore_errors
                and str(message_class.error_number) not in self.ignore_errors):
kwargs['verbose'] = self.settings.get('verbose')
message = message_class(self.filename, *args, **kwargs)
if message.lineno not in self.ignore_lines:
self.messages.append(message)
def has_parent(self, node, kind):
while hasattr(node, 'parent'):
node = node.parent
if isinstance(node, kind):
return True
def get_common_ancestor(self, lnode, rnode, stop=None):
stop = stop or self.root
if lnode is rnode:
return lnode
if stop in (lnode, rnode):
return stop
if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'):
return
if (lnode.level > rnode.level):
return self.get_common_ancestor(lnode.parent, rnode, stop)
if (rnode.level > lnode.level):
return self.get_common_ancestor(lnode, rnode.parent, stop)
return self.get_common_ancestor(lnode.parent, rnode.parent, stop)
def descendant_of(self, node, ancestors, stop=None):
for ancestor in ancestors:
if self.get_common_ancestor(node, ancestor, stop) not in (stop, None):
return True
return False
def on_fork(self, parent, lnode, rnode, items):
return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent))
def different_forks(self, lnode, rnode):
"""True, if lnode and rnode are located on different forks of
IF/TRY."""
ancestor = self.get_common_ancestor(lnode, rnode)
if isinstance(ancestor, ast.If):
for fork in (ancestor.body, ancestor.orelse):
if self.on_fork(ancestor, lnode, rnode, fork):
return True
elif isinstance(ancestor, ast.Try):
body = ancestor.body + ancestor.orelse
for fork in [body] + [[hdl] for hdl in ancestor.handlers]:
if self.on_fork(ancestor, lnode, rnode, fork):
return True
elif isinstance(ancestor, ast.TryFinally):
if self.on_fork(ancestor, lnode, rnode, ancestor.body):
return True
return False
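    # Illustrative: an `x = ...` in an `if` suite and another in the matching
    # `else` suite lie on different forks, so add_binding below suppresses
    # RedefinedWhileUnused between them.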
def add_binding(self, node, value, report_redef=True):
"""Called when a binding is altered.
- `node` is the statement responsible for the change
- `value` is the optional new value, a Binding instance, associated
with the binding; if None, the binding is deleted if it exists.
- if `report_redef` is True (default), rebinding while unused will be
reported.
"""
redefinedWhileUnused = False
if not isinstance(self.scope, ClassScope):
for scope in self.scope_stack[::-1]:
existing = scope.get(value.name)
if (isinstance(existing, Importation)
and not existing.used
and (not isinstance(value, Importation) or
value.fullName == existing.fullName)
and report_redef
and not self.different_forks(node, existing.source)):
redefinedWhileUnused = True
self.report(messages.RedefinedWhileUnused,
node, value.name, existing.source)
existing = self.scope.get(value.name)
if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp):
if (existing and report_redef
and not self.has_parent(existing.source, (ast.For, ast.ListComp))
and not self.different_forks(node, existing.source)):
self.report(messages.RedefinedInListComp,
node, value.name, existing.source)
if (isinstance(existing, Definition)
and not existing.used
and not self.different_forks(node, existing.source)):
self.report(messages.RedefinedWhileUnused,
node, value.name, existing.source)
else:
self.scope[value.name] = value
def get_node_handler(self, node_class):
try:
return self._node_handlers[node_class]
except KeyError:
nodeType = str(node_class.__name__).upper()
self._node_handlers[node_class] = handler = getattr(self, nodeType)
return handler
def iter_visible_scopes(self):
outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1)
scopes = [scope for scope in outerScopes
if isinstance(scope, (FunctionScope, ModuleScope))]
if (isinstance(self.scope, GeneratorScope)
and scopes[-1] != self.scope_stack[-2]):
scopes.append(self.scope_stack[-2])
scopes.append(self.scope_stack[-1])
return iter(reversed(scopes))
def handle_node_load(self, node):
name = node_name(node)
if not name:
return
importStarred = False
for scope in self.iter_visible_scopes():
importStarred = importStarred or scope.importStarred
try:
scope[name].used = (self.scope, node)
except KeyError:
pass
else:
return
# look in the built-ins
if importStarred or name in self.frosted_builtins:
return
if name == '__path__' and os.path.basename(self.filename) == '__init__.py':
# the special name __path__ is valid only in packages
return
# protected with a NameError handler?
if 'NameError' not in self.except_handlers[-1]:
self.report(messages.UndefinedName, node, name)
def handle_node_store(self, node):
name = node_name(node)
if not name:
return
# if the name hasn't already been defined in the current scope
if isinstance(self.scope, FunctionScope) and name not in self.scope:
# for each function or module scope above us
for scope in self.scope_stack[:-1]:
if not isinstance(scope, (FunctionScope, ModuleScope)):
continue
# if the name was defined in that scope, and the name has
# been accessed already in the current scope, and hasn't
# been declared global
used = name in scope and scope[name].used
if used and used[0] is self.scope and name not in self.scope.globals:
# then it's probably a mistake
self.report(messages.UndefinedLocal,
scope[name].used[1], name, scope[name].source)
break
parent = getattr(node, 'parent', None)
if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)):
binding = Binding(name, node)
elif (parent is not None and name == '__all__' and
isinstance(self.scope, ModuleScope)):
binding = ExportBinding(name, parent.value)
else:
binding = Assignment(name, node)
if name in self.scope:
binding.used = self.scope[name].used
self.add_binding(node, binding)
def handle_node_delete(self, node):
name = node_name(node)
if not name:
return
if isinstance(self.scope, FunctionScope) and name in self.scope.globals:
self.scope.globals.remove(name)
else:
try:
del self.scope[name]
except KeyError:
self.report(messages.UndefinedName, node, name)
def handle_children(self, tree):
for node in ast.iter_child_nodes(tree):
self.handleNode(node, tree)
def is_docstring(self, node):
"""Determine if the given node is a docstring, as long as it is at the
correct place in the node tree."""
return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and
isinstance(node.value, ast.Str))
def docstring(self, node):
if isinstance(node, ast.Expr):
node = node.value
if not isinstance(node, ast.Str):
return (None, None)
# Computed incorrectly if the docstring has backslash
doctest_lineno = node.lineno - node.s.count('\n') - 1
return (node.s, doctest_lineno)
def handleNode(self, node, parent):
if node is None:
return
if self.offset and getattr(node, 'lineno', None) is not None:
node.lineno += self.offset[0]
node.col_offset += self.offset[1]
if self.trace_tree:
print(' ' * self.node_depth + node.__class__.__name__)
if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or
self.is_docstring(node)):
self.futures_allowed = False
self.node_depth += 1
node.level = self.node_depth
node.parent = parent
try:
handler = self.get_node_handler(node.__class__)
handler(node)
finally:
self.node_depth -= 1
if self.trace_tree:
print(' ' * self.node_depth + 'end ' + node.__class__.__name__)
_get_doctest_examples = doctest.DocTestParser().get_examples
def handle_doctests(self, node):
try:
docstring, node_lineno = self.docstring(node.body[0])
if not docstring:
return
examples = self._get_doctest_examples(docstring)
except (ValueError, IndexError):
# e.g. line 6 of the docstring for <string> has inconsistent
# leading whitespace: ...
return
node_offset = self.offset or (0, 0)
self.push_scope()
for example in examples:
try:
tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST)
except SyntaxError:
e = sys.exc_info()[1]
position = (node_lineno + example.lineno + e.lineno,
example.indent + 4 + (e.offset or 0))
self.report(messages.DoctestSyntaxError, node, position)
else:
self.offset = (node_offset[0] + node_lineno + example.lineno,
node_offset[1] + example.indent + 4)
self.handle_children(tree)
self.offset = node_offset
self.pop_scope()
def find_return_with_argument(self, node):
"""Finds and returns a return statment that has an argument.
Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother
checking.
"""
for item in node.body:
if isinstance(item, ast.Return) and item.value:
return item
elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'):
return_with_argument = self.find_return_with_argument(item)
if return_with_argument:
return return_with_argument
def is_generator(self, node):
"""Checks whether a function is a generator by looking for a yield
statement or expression."""
if not isinstance(node.body, list):
# lambdas can not be generators
return False
for item in node.body:
if isinstance(item, (ast.Assign, ast.Expr)):
if isinstance(item.value, ast.Yield):
return True
elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'):
if self.is_generator(item):
return True
return False
def ignore(self, node):
pass
# "stmt" type nodes
RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children
CONTINUE = BREAK = PASS = ignore
# "expr" type nodes
BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \
LIST = TUPLE = STARRED = NAMECONSTANT = handle_children
NUM = STR = BYTES = ELLIPSIS = ignore
# "slice" type nodes
SLICE = EXTSLICE = INDEX = handle_children
# expression contexts are node instances too, though being constants
LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore
# same for operators
AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \
NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore
# additional node types
COMPREHENSION = KEYWORD = handle_children
def GLOBAL(self, node):
"""Keep track of globals declarations."""
if isinstance(self.scope, FunctionScope):
self.scope.globals.update(node.names)
NONLOCAL = GLOBAL
def LISTCOMP(self, node):
# handle generators before element
for gen in node.generators:
self.handleNode(gen, node)
self.handleNode(node.elt, node)
def GENERATOREXP(self, node):
self.push_scope(GeneratorScope)
# handle generators before element
for gen in node.generators:
self.handleNode(gen, node)
self.handleNode(node.elt, node)
self.pop_scope()
SETCOMP = GENERATOREXP
def DICTCOMP(self, node):
self.push_scope(GeneratorScope)
for gen in node.generators:
self.handleNode(gen, node)
self.handleNode(node.key, node)
self.handleNode(node.value, node)
self.pop_scope()
def FOR(self, node):
"""Process bindings for loop variables."""
vars = []
def collectLoopVars(n):
if isinstance(n, ast.Name):
vars.append(n.id)
elif isinstance(n, ast.expr_context):
return
else:
for c in ast.iter_child_nodes(n):
collectLoopVars(c)
collectLoopVars(node.target)
for varn in vars:
if (isinstance(self.scope.get(varn), Importation)
# unused ones will get an unused import warning
and self.scope[varn].used):
self.report(messages.ImportShadowedByLoopVar,
node, varn, self.scope[varn].source)
self.handle_children(node)
def NAME(self, node):
"""Handle occurrence of Name (which can be a load/store/delete
access.)"""
# Locate the name in locals / function / globals scopes.
if isinstance(node.ctx, (ast.Load, ast.AugLoad)):
self.handle_node_load(node)
if (node.id == 'locals' and isinstance(self.scope, FunctionScope)
and isinstance(node.parent, ast.Call)):
# we are doing locals() call in current scope
self.scope.uses_locals = True
elif isinstance(node.ctx, (ast.Store, ast.AugStore)):
self.handle_node_store(node)
elif isinstance(node.ctx, ast.Del):
self.handle_node_delete(node)
else:
# must be a Param context -- this only happens for names in function
# arguments, but these aren't dispatched through here
raise RuntimeError("Got impossible expression context: %r" % (node.ctx,))
def CALL(self, node):
f = node.func
if isinstance(f, ast.Name):
for scope in self.iter_visible_scopes():
definition = scope.get(f.id)
if definition:
if isinstance(definition, FunctionDefinition):
definition.signature.checkCall(node, self, f.id)
break
self.handle_children(node)
def FUNCTIONDEF(self, node):
for deco in node.decorator_list:
self.handleNode(deco, node)
self.add_binding(node, FunctionDefinition(node.name, node))
self.LAMBDA(node)
if self.settings.get('run_doctests', False):
self.defer_function(lambda: self.handle_doctests(node))
def LAMBDA(self, node):
args = []
annotations = []
if PY2:
def addArgs(arglist):
for arg in arglist:
if isinstance(arg, ast.Tuple):
addArgs(arg.elts)
else:
if arg.id in args:
self.report(messages.DuplicateArgument,
node, arg.id)
args.append(arg.id)
addArgs(node.args.args)
defaults = node.args.defaults
else:
for arg in node.args.args + node.args.kwonlyargs:
annotations.append(arg.annotation)
args.append(arg.arg)
defaults = node.args.defaults + node.args.kw_defaults
# Only for Python3 FunctionDefs
is_py3_func = hasattr(node, 'returns')
for arg_name in ('vararg', 'kwarg'):
wildcard = getattr(node.args, arg_name)
if not wildcard:
continue
args.append(getattr(wildcard, 'arg', wildcard))
if is_py3_func:
if PY34_GTE:
annotations.append(wildcard.annotation)
else:
argannotation = arg_name + 'annotation'
annotations.append(getattr(node.args, argannotation))
if is_py3_func:
annotations.append(node.returns)
if PY3:
if len(set(args)) < len(args):
for (idx, arg) in enumerate(args):
if arg in args[:idx]:
self.report(messages.DuplicateArgument, node, arg)
for child in annotations + defaults:
if child:
self.handleNode(child, node)
def runFunction():
self.push_scope()
for name in args:
self.add_binding(node, Argument(name, node), report_redef=False)
if isinstance(node.body, list):
# case for FunctionDefs
for stmt in node.body:
self.handleNode(stmt, node)
else:
# case for Lambdas
self.handleNode(node.body, node)
def checkUnusedAssignments():
"""Check to see if any assignments have not been used."""
for name, binding in self.scope.unusedAssignments():
self.report(messages.UnusedVariable, binding.source, name)
self.defer_assignment(checkUnusedAssignments)
if PY2:
def checkReturnWithArgumentInsideGenerator():
"""Check to see if there are any return statements with
arguments but the function is a generator."""
if self.is_generator(node):
stmt = self.find_return_with_argument(node)
if stmt is not None:
self.report(messages.ReturnWithArgsInsideGenerator, stmt)
self.defer_assignment(checkReturnWithArgumentInsideGenerator)
self.pop_scope()
self.defer_function(runFunction)
def CLASSDEF(self, node):
"""Check names used in a class definition, including its decorators,
base classes, and the body of its definition.
Additionally, add its name to the current scope.
"""
for deco in node.decorator_list:
self.handleNode(deco, node)
for baseNode in node.bases:
self.handleNode(baseNode, node)
if not PY2:
for keywordNode in node.keywords:
self.handleNode(keywordNode, node)
self.push_scope(ClassScope)
if self.settings.get('run_doctests', False):
self.defer_function(lambda: self.handle_doctests(node))
for stmt in node.body:
self.handleNode(stmt, node)
self.pop_scope()
self.add_binding(node, ClassDefinition(node.name, node))
def ASSIGN(self, node):
self.handleNode(node.value, node)
for target in node.targets:
self.handleNode(target, node)
def AUGASSIGN(self, node):
self.handle_node_load(node.target)
self.handleNode(node.value, node)
self.handleNode(node.target, node)
def IMPORT(self, node):
for alias in node.names:
name = alias.asname or alias.name
importation = Importation(name, node)
self.add_binding(node, importation)
def IMPORTFROM(self, node):
if node.module == '__future__':
if not self.futures_allowed:
self.report(messages.LateFutureImport,
node, [n.name for n in node.names])
else:
self.futures_allowed = False
for alias in node.names:
if alias.name == '*':
self.scope.importStarred = True
self.report(messages.ImportStarUsed, node, node.module)
continue
name = alias.asname or alias.name
importation = Importation(name, node)
if node.module == '__future__':
importation.used = (self.scope, node)
self.add_binding(node, importation)
def TRY(self, node):
handler_names = []
# List the exception handlers
for handler in node.handlers:
if isinstance(handler.type, ast.Tuple):
for exc_type in handler.type.elts:
handler_names.append(node_name(exc_type))
elif handler.type:
handler_names.append(node_name(handler.type))
# Memorize the except handlers and process the body
self.except_handlers.append(handler_names)
for child in node.body:
self.handleNode(child, node)
self.except_handlers.pop()
# Process the other nodes: "except:", "else:", "finally:"
for child in ast.iter_child_nodes(node):
if child not in node.body:
self.handleNode(child, node)
TRYEXCEPT = TRY
def EXCEPTHANDLER(self, node):
# 3.x: in addition to handling children, we must handle the name of
# the exception, which is not a Name node, but a simple string.
if node.type is None:
self.report(messages.BareExcept, node)
if isinstance(node.name, str):
self.handle_node_store(node)
self.handle_children(node)
| timothycrosley/frosted | frosted/checker.py | Python | mit | 36,119 |
#!/usr/bin/env python
import os
import re
import sys
import glob
import gzip  # used by openFile for '.gz' inputs
import argparse
from copy import copy
from decimal import Decimal,InvalidOperation
number_pattern = re.compile(r"(-?\d+\.?\d*(e[+-]?\d+)?)", re.IGNORECASE)
# Search an input value for a number
def findNumber(value):
try:
return Decimal(value)
except InvalidOperation:
try:
return Decimal(number_pattern.search(value.replace(',', '')).group())
except AttributeError:
raise Exception('Value "{0}" does not contain a number'.format(value))
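# Doctest-style example (illustrative):
#   >>> findNumber('latency=12.5ms')
#   Decimal('12.5')
# The comma-stripped value is scanned with number_pattern only after a direct
# Decimal() conversion fails.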
def concatFiles(files, opts='r'):
for f in files:
for line in openFile(f, opts):
yield line
def fileRange(startFile, endFile):
startDir, startFile = os.path.split(startFile)
_, endFile = os.path.split(endFile)
if startDir == '':
files = glob.iglob('*');
else:
files = glob.iglob(os.path.expanduser(startDir) + '/*');
ret = []
for fn in files:
if startFile <= os.path.basename(fn) <= endFile:
ret.append(fn)
return sorted(ret)
def openFile(filename, opts):
if type(filename) is str:
if filename == '-':
return sys.stdin if opts == 'r' else sys.stdout
else:
return gzip.open(os.path.expanduser(filename), opts+'b') if filename.endswith('.gz') else open(os.path.expanduser(filename), opts)
    elif hasattr(filename, 'read') or hasattr(filename, 'write'):
        # already an open file-like object (the py2 builtin `file` type does
        # not exist on py3, so duck-type instead)
        return filename
else:
raise IOError('Unknown input type: %s' % type(filename))
class Header:
def __init__(self, columns = []):
self.columns = columns
def __len__(self):
return len(self.columns)
def __iter__(self):
return self.columns.__iter__()
def setCol(self, colName, index):
while len(self.columns) <= index:
self.columns.append(str(len(self.columns)))
self.columns[index] = colName
def addCol(self, colName):
col = colName
i = 1
while col in self.columns:
col = colName+str(i)
i += 1
self.columns.append(col)
return len(self.columns) - 1
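    # Illustrative: calling addCol('time') twice yields columns
    # ['time', 'time1'], since duplicate names get a numeric suffix.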
def addCols(self, colNames):
return [self.addCol(colName) for colName in colNames]
def extend(self, header):
self.addCols(header.columns)
def index(self, colName):
if colName is None:
return colName
elif colName in self.columns:
return self.columns.index(colName)
else:
try:
return int(colName)
except ValueError as e:
raise ValueError('Invalid column %s specified' % colName)
def indexes(self, colNames):
return [self.index(colName) for colName in colNames]
def name(self, index):
try:
return self.columns[int(index)]
except ValueError:
return str(index)
except IndexError:
return 'col_'+str(index)
def names(self, indexes):
return [self.name(index) for index in indexes]
def copy(self):
return Header(copy(self.columns))
class FileWriter:
def __init__(self, outputStream, reader, args, opts = 'w'):
self._outputStream = openFile(outputStream, opts)
self._delimiter = args.delimiter if args.delimiter else os.environ.get('TOOLBOX_DELIMITER', ' ')
self.write = self._firstwrite
self._header = Header()
if reader and reader.hasHeader:
if hasattr(args, 'append') and args.append:
self._header = reader.header.copy()
else:
if hasattr(args, 'group'):
self._header.addCols(reader.header.names(args.group))
if hasattr(args, 'labels'):
self._header.addCols(args.labels)
@property
def header(self):
return self._header
@property
def hasHeader(self):
return len(self._header.columns) > 0
def _firstwrite(self, chunks):
self.write = self._write
if self.hasHeader:
self.write(self._header.columns)
if len(self._header) != len(chunks):
            sys.stderr.write('Warning: number of columns in output does not match number of columns in header\n')
self.write(chunks)
def _write(self, chunks):
self._outputStream.write(self._delimiter.join(map(str, chunks))+'\n')
class FileReader:
def __init__(self, inputStream, args):
self._inputStream = openFile(inputStream, 'r')
self._delimiter = args.delimiter if args.delimiter else os.environ.get('TOOLBOX_DELIMITER', None)
header = args.header or os.environ.get('TOOLBOX_HEADER', '').lower() == 'true'
if header:
self._header = self._readHeader()
self.next = self._firstnext
else:
self._header = Header()
self.next = self._next
@property
def delimiter(self):
return self._delimiter
@property
def header(self):
return self._header
@property
def hasHeader(self):
return len(self._header.columns) > 0
def _readHeader(self):
preamble = next(self._inputStream)
return Header(preamble.strip().split(self._delimiter))
def __iter__(self):
return self
def __next__(self):
return self.next()
def _firstnext(self):
self.next = self._next
row = self.next()
if len(row) != len(self._header):
            sys.stderr.write('Warning: number of columns in input does not match number of columns in header\n')
return row
def _next(self):
return next(self._inputStream).strip().split(self._delimiter)
def readline(self):
try:
return self.next()
except StopIteration:
return None
def close(self):
self._inputStream.close()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
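# Illustrative: with --header (or TOOLBOX_HEADER=true) the first input line is
# consumed as column names, so reader.header.index('count') resolves a name to
# its position; without a header, numeric column indexes still work.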
class ParameterParser:
def __init__(self, descrip, infiles = 1, outfile = True, group = True, columns = 1, append = True, labels = None, ordered = True):
self.parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter, description=descrip)
if infiles == 0:
pass
elif infiles == 1:
self.parser.add_argument('infile', nargs='?', default='-', help='use - for stdin')
else:
self.parser.add_argument('infiles', nargs='*', default=['-'], help='use - for stdin')
if outfile:
self.parser.add_argument('outfile', nargs='?', default='-', help='use - for stdout')
if group:
self.parser.add_argument('-g', '--group', nargs='+', default=[], help='column(s) to group input by')
if columns == 1:
self.parser.add_argument('-c', '--column', default=0, help='column to manipulate')
elif columns != 0:
self.parser.add_argument('-c', '--columns', nargs='+', default=[0], help='column(s) to manipulate')
if labels:
self.parser.add_argument('-l', '--labels', nargs='+', default=labels, help='labels for the column(s)')
if append:
self.parser.add_argument('--append', action='store_true', default=False, help='keep original columns in output')
if ordered:
self.parser.add_argument('--ordered', action='store_true', default=False, help='input is sorted by group')
self.parser.add_argument('--delimiter', default=None)
self.parser.add_argument('--header', action='store_true', default=False)
def parseArgs(self):
args = self.parser.parse_args()
if hasattr(args, 'infile'):
args.infile = FileReader(args.infile, args)
elif hasattr(args, 'infiles'):
args.infiles = [FileReader(infile, args) for infile in args.infiles]
args.infile = args.infiles[0]
if hasattr(args, 'group'):
args.group_names = args.infile.header.names(args.group)
args.group = args.infile.header.indexes(args.group)
if hasattr(args, 'columns'):
args.columns_names = args.infile.header.names(args.columns)
args.columns = args.infile.header.indexes(args.columns)
if hasattr(args, 'column'):
args.column_name = args.infile.header.name(args.column)
args.column = args.infile.header.index(args.column)
return args
def getArgs(self, args):
if hasattr(args, 'outfile'):
if hasattr(args, 'infile'):
args.outfile = FileWriter(args.outfile, args.infile, args)
else:
args.outfile = FileWriter(args.outfile, None, args)
return args
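# Sketch of a typical tool built on this module (hypothetical script):
#   parser = ParameterParser('describe the tool', columns=1)
#   args = parser.getArgs(parser.parseArgs())
#   for row in args.infile:
#       args.outfile.write(row)
# parseArgs wraps the input in a FileReader and resolves column names to
# indexes; getArgs then wraps the output in a FileWriter.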
| scoky/pytools | data_tools/files.py | Python | mit | 8,781 |
# Tiger/Line country shapefiles' "statefp" field is the FIPS code.
# The following is http://www.epa.gov/enviro/html/codes/state.html
data = """
State Abbreviation FIPS Code State Name
AK 02 ALASKA
AL 01 ALABAMA
AR 05 ARKANSAS
AS 60 AMERICAN SAMOA
AZ 04 ARIZONA
CA 06 CALIFORNIA
CO 08 COLORADO
CT 09 CONNECTICUT
DC 11 DISTRICT OF COLUMBIA
DE 10 DELAWARE
FL 12 FLORIDA
GA 13 GEORGIA
GU 66 GUAM
HI 15 HAWAII
IA 19 IOWA
ID 16 IDAHO
IL 17 ILLINOIS
IN 18 INDIANA
KS 20 KANSAS
KY 21 KENTUCKY
LA 22 LOUISIANA
MA 25 MASSACHUSETTS
MD 24 MARYLAND
ME 23 MAINE
MI 26 MICHIGAN
MN 27 MINNESOTA
MO 29 MISSOURI
MS 28 MISSISSIPPI
MT 30 MONTANA
NC 37 NORTH CAROLINA
ND 38 NORTH DAKOTA
NE 31 NEBRASKA
NH 33 NEW HAMPSHIRE
NJ 34 NEW JERSEY
NM 35 NEW MEXICO
NV 32 NEVADA
NY 36 NEW YORK
OH 39 OHIO
OK 40 OKLAHOMA
OR 41 OREGON
PA 42 PENNSYLVANIA
PR 72 PUERTO RICO
RI 44 RHODE ISLAND
SC 45 SOUTH CAROLINA
SD 46 SOUTH DAKOTA
TN 47 TENNESSEE
TX 48 TEXAS
UT 49 UTAH
VA 51 VIRGINIA
VI 78 VIRGIN ISLANDS
VT 50 VERMONT
WA 53 WASHINGTON
WI 55 WISCONSIN
WV 54 WEST VIRGINIA
WY 56 WYOMING
"""
data = data.strip().split("\n")[1:]
data = [x.split("\t") for x in data]
fips2postal = {}
for postal, fips, longname in data:
fips2postal[fips] = postal
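# Illustrative lookup: fips2postal['06'] == 'CA', so a Tiger/Line record with
# statefp '06' maps back to California's postal abbreviation.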
#print data
#print fips2postal
| brendano/twitter_geo_preproc | geo2_pipeline/geocode/state_codes.py | Python | mit | 1,252 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    result = Intangible()
    result.template = "object/draft_schematic/space/armor/shared_mass_reduction_kit_mk4.iff"
    result.attribute_template_id = -1
    result.stfName("string_id_table","")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return result
| obi-two/Rebelion | data/scripts/templates/object/draft_schematic/space/armor/shared_mass_reduction_kit_mk4.py | Python | mit | 463 |
# -*- coding: utf-8 -*-
from unittest import TestCase
# from nose.tools import eq_
import numpy as np
from pysas import waveread, World
from pysas.mcep import estimate_alpha, spec2mcep_from_matrix, mcep2coef
from pysas.synthesis.mlsa import MLSAFilter
from pysas.synthesis import Synthesis
from pysas.excite import ExcitePulse
class SynthesisTest(TestCase):
def setUp(self):
signal, samplingrate, _ = waveread("test/cmu_arctic/arctic_a0001.wav")
self.world = World(samplingrate)
self.alpha = estimate_alpha(samplingrate)
self.samplingrate = samplingrate
self.signal = signal
self.f0, self.spec_mat, _ = self.world.analyze(signal)
self.ep = ExcitePulse(16000, 80, False)
self.order = 24
def test_synthesis_filter(self):
excite = self.ep.gen(self.f0)
mcep_mat = spec2mcep_from_matrix(self.spec_mat, self.order, self.alpha)
coef_mat = []
for i in range(mcep_mat.shape[0]):
coef_mat.append(mcep2coef(mcep_mat[i], 0.41))
coef_mat = np.array(coef_mat)
mlsa = MLSAFilter(self.order, self.alpha, 5)
syn = Synthesis(80, mlsa)
syn.synthesis(excite, coef_mat)
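        # Illustrative pipeline being exercised here: F0 contour -> pulse
        # excitation, spectrogram -> mel-cepstrum -> MLSA filter coefficients,
        # then frame-wise synthesis with a hop size of 80 samples.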
| shunsukeaihara/pyworld | test/test_filter.py | Python | mit | 1,201 |
"""
GUI progressbar decorator for iterators.
Includes a default (x)range iterator printing to stderr.
Usage:
>>> from tqdm_gui import tgrange[, tqdm_gui]
>>> for i in tgrange(10): #same as: for i in tqdm_gui(xrange(10))
... ...
"""
# future division is important to divide integers and get as
# a result precise floating numbers (instead of truncated int)
from __future__ import division, absolute_import
# import compatibility functions and utilities
import sys
from time import time
from ._utils import _range
# to inherit from the tqdm class
from ._tqdm import tqdm
__author__ = {"github.com/": ["casperdcl", "lrq3000"]}
__all__ = ['tqdm_gui', 'tgrange']
class tqdm_gui(tqdm): # pragma: no cover
"""
Experimental GUI version of tqdm!
"""
@classmethod
def write(cls, s, file=None, end="\n"):
"""
Print a message via tqdm_gui (just an alias for print)
"""
if file is None:
file = sys.stdout
# TODO: print text on GUI?
file.write(s)
file.write(end)
def __init__(self, *args, **kwargs):
import matplotlib as mpl
import matplotlib.pyplot as plt
from collections import deque
kwargs['gui'] = True
super(tqdm_gui, self).__init__(*args, **kwargs)
# Initialize the GUI display
if self.disable or not kwargs['gui']:
return
self.fp.write('Warning: GUI is experimental/alpha\n')
self.mpl = mpl
self.plt = plt
self.sp = None
# Remember if external environment uses toolbars
self.toolbar = self.mpl.rcParams['toolbar']
self.mpl.rcParams['toolbar'] = 'None'
self.mininterval = max(self.mininterval, 0.5)
self.fig, ax = plt.subplots(figsize=(9, 2.2))
# self.fig.subplots_adjust(bottom=0.2)
if self.total:
self.xdata = []
self.ydata = []
self.zdata = []
else:
self.xdata = deque([])
self.ydata = deque([])
self.zdata = deque([])
self.line1, = ax.plot(self.xdata, self.ydata, color='b')
self.line2, = ax.plot(self.xdata, self.zdata, color='k')
ax.set_ylim(0, 0.001)
if self.total:
ax.set_xlim(0, 100)
ax.set_xlabel('percent')
self.fig.legend((self.line1, self.line2), ('cur', 'est'),
loc='center right')
# progressbar
self.hspan = plt.axhspan(0, 0.001,
xmin=0, xmax=0, color='g')
else:
# ax.set_xlim(-60, 0)
ax.set_xlim(0, 60)
ax.invert_xaxis()
ax.set_xlabel('seconds')
ax.legend(('cur', 'est'), loc='lower left')
ax.grid()
# ax.set_xlabel('seconds')
ax.set_ylabel((self.unit if self.unit else 'it') + '/s')
if self.unit_scale:
plt.ticklabel_format(style='sci', axis='y',
scilimits=(0, 0))
ax.yaxis.get_offset_text().set_x(-0.15)
# Remember if external environment is interactive
self.wasion = plt.isinteractive()
plt.ion()
self.ax = ax
def __iter__(self):
# TODO: somehow allow the following:
# if not self.gui:
# return super(tqdm_gui, self).__iter__()
iterable = self.iterable
if self.disable:
for obj in iterable:
yield obj
return
# ncols = self.ncols
mininterval = self.mininterval
maxinterval = self.maxinterval
miniters = self.miniters
dynamic_miniters = self.dynamic_miniters
unit = self.unit
unit_scale = self.unit_scale
ascii = self.ascii
start_t = self.start_t
last_print_t = self.last_print_t
last_print_n = self.last_print_n
n = self.n
# dynamic_ncols = self.dynamic_ncols
smoothing = self.smoothing
avg_time = self.avg_time
bar_format = self.bar_format
plt = self.plt
ax = self.ax
xdata = self.xdata
ydata = self.ydata
zdata = self.zdata
line1 = self.line1
line2 = self.line2
for obj in iterable:
yield obj
# Update and print the progressbar.
# Note: does not call self.update(1) for speed optimisation.
n += 1
delta_it = n - last_print_n
# check the counter first (avoid calls to time())
if delta_it >= miniters:
cur_t = time()
delta_t = cur_t - last_print_t
if delta_t >= mininterval:
elapsed = cur_t - start_t
# EMA (not just overall average)
if smoothing and delta_t:
avg_time = delta_t / delta_it \
if avg_time is None \
else smoothing * delta_t / delta_it + \
(1 - smoothing) * avg_time
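                    # i.e. avg_time = s * (delta_t / delta_it)
                    #               + (1 - s) * avg_time_prev, with
                    # s = `smoothing`: an exponential moving average of the
                    # per-iteration time.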
# Inline due to multiple calls
total = self.total
# instantaneous rate
y = delta_it / delta_t
# overall rate
z = n / elapsed
# update line data
xdata.append(n * 100.0 / total if total else cur_t)
ydata.append(y)
zdata.append(z)
# Discard old values
# xmin, xmax = ax.get_xlim()
# if (not total) and elapsed > xmin * 1.1:
if (not total) and elapsed > 66:
xdata.popleft()
ydata.popleft()
zdata.popleft()
ymin, ymax = ax.get_ylim()
if y > ymax or z > ymax:
ymax = 1.1 * y
ax.set_ylim(ymin, ymax)
ax.figure.canvas.draw()
if total:
line1.set_data(xdata, ydata)
line2.set_data(xdata, zdata)
try:
poly_lims = self.hspan.get_xy()
except AttributeError:
self.hspan = plt.axhspan(0, 0.001, xmin=0,
xmax=0, color='g')
poly_lims = self.hspan.get_xy()
poly_lims[0, 1] = ymin
poly_lims[1, 1] = ymax
poly_lims[2] = [n / total, ymax]
poly_lims[3] = [poly_lims[2, 0], ymin]
if len(poly_lims) > 4:
poly_lims[4, 1] = ymin
self.hspan.set_xy(poly_lims)
else:
t_ago = [cur_t - i for i in xdata]
line1.set_data(t_ago, ydata)
line2.set_data(t_ago, zdata)
ax.set_title(self.format_meter(
n, total, elapsed, 0,
self.desc, ascii, unit, unit_scale,
1 / avg_time if avg_time else None, bar_format),
fontname="DejaVu Sans Mono", fontsize=11)
plt.pause(1e-9)
# If no `miniters` was specified, adjust automatically
# to the maximum iteration rate seen so far.
if dynamic_miniters:
if maxinterval and delta_t > maxinterval:
# Set miniters to correspond to maxinterval
miniters = delta_it * maxinterval / delta_t
elif mininterval and delta_t:
# EMA-weight miniters to converge
# towards the timeframe of mininterval
miniters = smoothing * delta_it * mininterval \
/ delta_t + (1 - smoothing) * miniters
else:
miniters = smoothing * delta_it + \
(1 - smoothing) * miniters
# Store old values for next call
last_print_n = n
last_print_t = cur_t
# Closing the progress bar.
# Update some internal variables for close().
self.last_print_n = last_print_n
self.n = n
self.close()
def update(self, n=1):
# if not self.gui:
# return super(tqdm_gui, self).close()
if self.disable:
return
if n < 0:
n = 1
self.n += n
delta_it = self.n - self.last_print_n # should be n?
if delta_it >= self.miniters:
# We check the counter first, to reduce the overhead of time()
cur_t = time()
delta_t = cur_t - self.last_print_t
if delta_t >= self.mininterval:
elapsed = cur_t - self.start_t
# EMA (not just overall average)
if self.smoothing and delta_t:
self.avg_time = delta_t / delta_it \
if self.avg_time is None \
else self.smoothing * delta_t / delta_it + \
(1 - self.smoothing) * self.avg_time
# Inline due to multiple calls
total = self.total
ax = self.ax
# instantaneous rate
y = delta_it / delta_t
            # overall rate
z = self.n / elapsed
# update line data
self.xdata.append(self.n * 100.0 / total
if total else cur_t)
self.ydata.append(y)
self.zdata.append(z)
# Discard old values
if (not total) and elapsed > 66:
self.xdata.popleft()
self.ydata.popleft()
self.zdata.popleft()
ymin, ymax = ax.get_ylim()
if y > ymax or z > ymax:
ymax = 1.1 * y
ax.set_ylim(ymin, ymax)
ax.figure.canvas.draw()
if total:
self.line1.set_data(self.xdata, self.ydata)
self.line2.set_data(self.xdata, self.zdata)
try:
poly_lims = self.hspan.get_xy()
except AttributeError:
self.hspan = self.plt.axhspan(0, 0.001, xmin=0,
xmax=0, color='g')
poly_lims = self.hspan.get_xy()
poly_lims[0, 1] = ymin
poly_lims[1, 1] = ymax
poly_lims[2] = [self.n / total, ymax]
poly_lims[3] = [poly_lims[2, 0], ymin]
if len(poly_lims) > 4:
poly_lims[4, 1] = ymin
self.hspan.set_xy(poly_lims)
else:
t_ago = [cur_t - i for i in self.xdata]
self.line1.set_data(t_ago, self.ydata)
self.line2.set_data(t_ago, self.zdata)
ax.set_title(self.format_meter(
self.n, total, elapsed, 0,
self.desc, self.ascii, self.unit, self.unit_scale,
1 / self.avg_time if self.avg_time else None,
self.bar_format),
fontname="DejaVu Sans Mono", fontsize=11)
self.plt.pause(1e-9)
# If no `miniters` was specified, adjust automatically to the
# maximum iteration rate seen so far.
# e.g.: After running `tqdm.update(5)`, subsequent
# calls to `tqdm.update()` will only cause an update after
# at least 5 more iterations.
if self.dynamic_miniters:
if self.maxinterval and delta_t > self.maxinterval:
self.miniters = self.miniters * self.maxinterval \
/ delta_t
elif self.mininterval and delta_t:
self.miniters = self.smoothing * delta_it \
* self.mininterval / delta_t + \
(1 - self.smoothing) * self.miniters
else:
self.miniters = self.smoothing * delta_it + \
(1 - self.smoothing) * self.miniters
# Store old values for next call
self.last_print_n = self.n
self.last_print_t = cur_t
def close(self):
# if not self.gui:
# return super(tqdm_gui, self).close()
if self.disable:
return
self.disable = True
self._instances.remove(self)
# Restore toolbars
self.mpl.rcParams['toolbar'] = self.toolbar
# Return to non-interactive mode
if not self.wasion:
self.plt.ioff()
if not self.leave:
self.plt.close(self.fig)
def tgrange(*args, **kwargs):
"""
A shortcut for tqdm_gui(xrange(*args), **kwargs).
On Python3+ range is used instead of xrange.
"""
return tqdm_gui(_range(*args), **kwargs)
| dhaase-de/dh-python-dh | dh/thirdparty/tqdm/_tqdm_gui.py | Python | mit | 13,510 |
EQUALS = 'equals'
GT = 'gt'
LT = 'lt'
IN = 'in'
OPERATOR_SEPARATOR = '__'
REVERSE_ORDER = '-'
ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1, IN: 1}
def split_to_field_and_filter_type(filter_name):
filter_split = filter_name.split(OPERATOR_SEPARATOR)
filter_type = filter_split[-1] if len(filter_split) > 0 else None
if filter_type in ALL_OPERATORS:
return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type
else:
return filter_name, None
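# Illustrative: 'price__gt' -> ('price', 'gt'), while 'name' -> ('name', None)
# because 'name' carries no recognized operator suffix.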
def split_to_field_and_order_type(field_name_with_operator):
if field_name_with_operator.startswith(REVERSE_ORDER):
return field_name_with_operator[1:], REVERSE_ORDER
else:
return field_name_with_operator, None
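# e.g. split_to_field_and_order_type('-created') -> ('created', '-')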
def transform_to_list(val):
if isinstance(val, (list, tuple)):
return val
else:
return [val] | Aplopio/rip | rip/filter_operators.py | Python | mit | 831 |
__all__ = ['checker', 'transformer', 'codegen', 'common', 'numsed', 'numsed_lib', 'opcoder', 'sedcode', 'snippet_test']
| GillesArcas/numsed | numsed/__init__.py | Python | mit | 118 |
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2015, Thomas Scholtes.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
from __future__ import (division, absolute_import, print_function,
unicode_literals)
from beets.plugins import BeetsPlugin
from beets.dbcore import types
from beets.util.confit import ConfigValueError
from beets import library
class TypesPlugin(BeetsPlugin):
@property
def item_types(self):
return self._types()
@property
def album_types(self):
return self._types()
def _types(self):
if not self.config.exists():
return {}
mytypes = {}
for key, value in self.config.items():
if value.get() == 'int':
mytypes[key] = types.INTEGER
elif value.get() == 'float':
mytypes[key] = types.FLOAT
elif value.get() == 'bool':
mytypes[key] = types.BOOLEAN
elif value.get() == 'date':
mytypes[key] = library.DateType()
else:
raise ConfigValueError(
u"unknown type '{0}' for the '{1}' field"
.format(value, key))
return mytypes
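# Example plugin configuration (illustrative field names), in beets' config.yaml:
#
#     types:
#         rating: int
#         listened: float
#         last_played: date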
| kareemallen/beets | beetsplug/types.py | Python | mit | 1,775 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from gaegraph.business_base import NodeSearch, DeleteNode
from classificacaodtm_app.commands import ListClassificacaodtmCommand, SaveClassificacaodtmCommand, UpdateClassificacaodtmCommand, \
ClassificacaodtmPublicForm, ClassificacaodtmDetailForm, ClassificacaodtmShortForm
def save_classificacaodtm_cmd(**classificacaodtm_properties):
"""
Command to save Classificacaodtm entity
:param classificacaodtm_properties: a dict of properties to save on model
:return: a Command that save Classificacaodtm, validating and localizing properties received as strings
"""
return SaveClassificacaodtmCommand(**classificacaodtm_properties)
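# Illustrative usage (the property name and the execute()/result convention are
# assumed from gaegraph's Command API):
#     cmd = save_classificacaodtm_cmd(nome='DTM-1')
#     cmd.execute()
#     saved = cmd.result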
def update_classificacaodtm_cmd(classificacaodtm_id, **classificacaodtm_properties):
"""
Command to update Classificacaodtm entity with id equals 'classificacaodtm_id'
:param classificacaodtm_properties: a dict of properties to update model
:return: a Command that update Classificacaodtm, validating and localizing properties received as strings
"""
return UpdateClassificacaodtmCommand(classificacaodtm_id, **classificacaodtm_properties)
def list_classificacaodtms_cmd():
"""
Command to list Classificacaodtm entities ordered by their creation dates
:return: a Command proceed the db operations when executed
"""
return ListClassificacaodtmCommand()
def classificacaodtm_detail_form(**kwargs):
"""
Function to get Classificacaodtm's detail form.
:param kwargs: form properties
:return: Form
"""
return ClassificacaodtmDetailForm(**kwargs)
def classificacaodtm_short_form(**kwargs):
"""
    Function to get Classificacaodtm's short form, just a subset of classificacaodtm's properties
:param kwargs: form properties
:return: Form
"""
return ClassificacaodtmShortForm(**kwargs)
def classificacaodtm_public_form(**kwargs):
"""
    Function to get Classificacaodtm's public form, just a subset of classificacaodtm's properties
:param kwargs: form properties
:return: Form
"""
return ClassificacaodtmPublicForm(**kwargs)
def get_classificacaodtm_cmd(classificacaodtm_id):
"""
    Find classificacaodtm by its id
:param classificacaodtm_id: the classificacaodtm id
:return: Command
"""
return NodeSearch(classificacaodtm_id)
def delete_classificacaodtm_cmd(classificacaodtm_id):
"""
Construct a command to delete a Classificacaodtm
:param classificacaodtm_id: classificacaodtm's id
:return: Command
"""
return DeleteNode(classificacaodtm_id)
| andersonsilvade/5semscript | Projeto/backend/apps/classificacaodtm_app/facade.py | Python | mit | 2,641 |
from django.forms import ModelForm
from appPortas.models import *
class PortaForm(ModelForm):
class Meta:
model = Porta
fields = ('descricao',)
class GrupoForm(ModelForm):
class Meta:
model = Grupo
fields = ('descricao',)
| Ednilsonpalhares/SCEFA | appPortas/forms.py | Python | mit | 350 |
"""Support for interface with a Gree climate systems."""
from __future__ import annotations
from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import COORDINATORS, DISPATCH_DEVICE_DISCOVERED, DISPATCHERS, DOMAIN
from .entity import GreeEntity
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the Gree HVAC device from a config entry."""
@callback
def init_device(coordinator):
"""Register the device."""
async_add_entities(
[
GreePanelLightSwitchEntity(coordinator),
GreeQuietModeSwitchEntity(coordinator),
GreeFreshAirSwitchEntity(coordinator),
GreeXFanSwitchEntity(coordinator),
]
)
for coordinator in hass.data[DOMAIN][COORDINATORS]:
init_device(coordinator)
hass.data[DOMAIN][DISPATCHERS].append(
async_dispatcher_connect(hass, DISPATCH_DEVICE_DISCOVERED, init_device)
)
class GreePanelLightSwitchEntity(GreeEntity, SwitchEntity):
"""Representation of the front panel light on the device."""
def __init__(self, coordinator):
"""Initialize the Gree device."""
super().__init__(coordinator, "Panel Light")
@property
def icon(self) -> str | None:
"""Return the icon for the device."""
return "mdi:lightbulb"
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return SwitchDeviceClass.SWITCH
@property
def is_on(self) -> bool:
"""Return if the light is turned on."""
return self.coordinator.device.light
async def async_turn_on(self, **kwargs):
"""Turn the entity on."""
self.coordinator.device.light = True
await self.coordinator.push_state_update()
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the entity off."""
self.coordinator.device.light = False
await self.coordinator.push_state_update()
self.async_write_ha_state()
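    # Note: state changes are optimistic - the device object is mutated locally,
    # pushed to the unit, and the new state written to HA without re-polling.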
class GreeQuietModeSwitchEntity(GreeEntity, SwitchEntity):
"""Representation of the quiet mode state of the device."""
def __init__(self, coordinator):
"""Initialize the Gree device."""
super().__init__(coordinator, "Quiet")
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return SwitchDeviceClass.SWITCH
@property
def is_on(self) -> bool:
"""Return if the state is turned on."""
return self.coordinator.device.quiet
async def async_turn_on(self, **kwargs):
"""Turn the entity on."""
self.coordinator.device.quiet = True
await self.coordinator.push_state_update()
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the entity off."""
self.coordinator.device.quiet = False
await self.coordinator.push_state_update()
self.async_write_ha_state()
class GreeFreshAirSwitchEntity(GreeEntity, SwitchEntity):
"""Representation of the fresh air mode state of the device."""
def __init__(self, coordinator):
"""Initialize the Gree device."""
super().__init__(coordinator, "Fresh Air")
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return SwitchDeviceClass.SWITCH
@property
def is_on(self) -> bool:
"""Return if the state is turned on."""
return self.coordinator.device.fresh_air
async def async_turn_on(self, **kwargs):
"""Turn the entity on."""
self.coordinator.device.fresh_air = True
await self.coordinator.push_state_update()
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the entity off."""
self.coordinator.device.fresh_air = False
await self.coordinator.push_state_update()
self.async_write_ha_state()
class GreeXFanSwitchEntity(GreeEntity, SwitchEntity):
"""Representation of the extra fan mode state of the device."""
def __init__(self, coordinator):
"""Initialize the Gree device."""
super().__init__(coordinator, "XFan")
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return SwitchDeviceClass.SWITCH
@property
def is_on(self) -> bool:
"""Return if the state is turned on."""
return self.coordinator.device.xfan
async def async_turn_on(self, **kwargs):
"""Turn the entity on."""
self.coordinator.device.xfan = True
await self.coordinator.push_state_update()
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the entity off."""
self.coordinator.device.xfan = False
await self.coordinator.push_state_update()
self.async_write_ha_state()
| rohitranjan1991/home-assistant | homeassistant/components/gree/switch.py | Python | mit | 5,377 |
from django.db import models
from django.contrib.auth.models import User
from django.utils.html import escape
from django.db.models import Q
from datetime import date
from datetime import datetime
from MessagesApp.models import Thread
from BlockPages.models import BlockPage, BlockEvent, EventComment
from SpecialInfoApp.models import Interest, HasInterest, School, HasSchool, LivingLoc, HasLivingLoc, Workplace, HasWorkplace
from PostsApp.models import Comment
import helper_functions
import settings
# Create your models here.
class UserProfile(models.Model):
#Required field
user = models.ForeignKey(User, unique=True)
#Extra info
#Enumerated data for columns that can only have a finite set of choices
RELATIONSHIP_STATUSES = (
(u'S', u'Single'),
(u'M', u'Married'),
)
GENDER_CHOICES = (
(u'M', u'Male'),
(u'F', u'Female'),
(u'B', u'Both'),
(u'U', u'Unspecified'),
)
PRIVACY_CHOICES = (
(u'p', u'Private'),
(u'P', u'Public'),
)
#Define extra fields that will form the user profile
relationship_status = models.CharField(max_length=2, choices=RELATIONSHIP_STATUSES, default=u'S')
profile_pic = models.ForeignKey('userInfo.ImageHolder', null=True)
birthday = models.DateField(null=True, blank=True)
about_me = models.TextField(null=True)
gender = models.CharField(max_length=2, choices=GENDER_CHOICES, default=u'U')
interested_in = models.CharField(max_length=2, choices=GENDER_CHOICES, default=u'U')
activity_privacy = models.CharField(max_length=2, choices=PRIVACY_CHOICES, default=u'P')
latitude = models.FloatField(null=True)
longitude = models.FloatField(null=True)
current_block = models.ForeignKey(BlockPage,null=True)
joined = models.DateTimeField(auto_now_add=True)
last_login = models.DateTimeField(auto_now_add=True)
threads = models.ManyToManyField(Thread, through='MessagesApp.ThreadMembership')
events = models.ManyToManyField('BlockPages.BlockEvent')
#Special info m2m's
interests = models.ManyToManyField(Interest, through=HasInterest)
living_locs = models.ManyToManyField(LivingLoc, through=HasLivingLoc)
workplaces = models.ManyToManyField(Workplace, through=HasWorkplace)
schools = models.ManyToManyField(School, through=HasSchool)
##########################################
#Methods used to quickly get data about user in various formats
def getName(self):
"""Returns string containing user's first and last name"""
return self.user.first_name + ' ' + self.user.last_name
def getInfo(self, user_2=None):
"""Returns dictionary object with basic info about this user"""
if user_2 == None:
relationship = 'No relation.'
elif user_2 == self.id:
relationship = 'This is you'
else:
relationship = self.getRelationshipTo(User.objects.get(pk=user_2).get_profile()).get_relationship_type_display()
if self.profile_pic == None:
profile_pic = 'default_profile.jpg'
else:
profile_pic = self.profile_pic.handle
return { 'name' : self.getName(), 'email' : self.user.email,
'gender' : self.get_gender_display(), 'similarity' : 0 if user_2 == None else self.getSimilarityTo(user_2),
'user_id' : self.user.id, 'username' : self.user.username, 'profile_pic' : profile_pic,
'thumbnail' : 'Thumbnails/' + profile_pic, 'relationship' : relationship }
def getProfile(self, user_2=None):
"""Returns dictionary object containing detailed info about user suitable for the viewProfile view"""
info = self.getInfo(user_2)
info.update({ 'relationship_status' : self.get_relationship_status_display(),
'birthday' : 'Unspecified' if self.birthday == None else self.birthday.strftime("%m/%d/%Y"),
'interested_in' : self.get_interested_in_display(), 'about_me' : self.about_me, 'first_name' : self.user.first_name,
'last_name' : self.user.last_name })
return info
def editProfile(self, request_dict):
"""Modifies info about self and self.user based on entered data in request_dict"""
#For each field capable of being edited, make a try-except block
try:
new_rel_status = request_dict['relationship']
#Validate input given RELATIONSHIP_STATUSES choices
for rel in self.RELATIONSHIP_STATUSES:
if new_rel_status == rel[0]:
self.relationship_status = rel[0]
self.save()
elif new_rel_status == rel[1]:
self.relationship_status = rel[0]
self.save()
except KeyError:
pass
#First name
try:
self.user.first_name = escape(request_dict['first_name'])
self.user.save()
except KeyError:
pass
#Last name
try:
self.user.last_name = escape(request_dict['last_name'])
self.user.save()
except KeyError:
pass
#Gender
try:
new_gender = request_dict['gender']
#Validate input given GENDER_CHOICES choices
for gen in self.GENDER_CHOICES:
if new_gender == gen[0]:
self.gender = gen[0]
self.save()
elif new_gender == gen[1]:
self.gender = gen[0]
self.save()
except KeyError:
pass
#Interested in
try:
new_interested_in = request_dict['interested_in']
#Validate input given GENDER_CHOICES choices
for gen in self.GENDER_CHOICES:
if new_interested_in == gen[0]:
self.interested_in = gen[0]
self.save()
elif new_interested_in == gen[1]:
self.interested_in = gen[0]
self.save()
except KeyError:
pass
#Birthday
try:
birthday = request_dict['birthday']
#Validate input
try:
self.birthday = date(birthday['year'], birthday['month'], birthday['day'])
self.save()
except ValueError:
#Invalid date
pass
except KeyError:
pass
#About me
try:
self.about_me = escape(request_dict['about_me'])
self.save()
except KeyError:
pass
#Not really sure how this method would fail
return { 'success' : 1 }
##########################################
#Methods relating to a user's friends
def getFriends(self):
"""
Returns list of this user's friends
"""
        try:
            friends = [e.user_2 for e in Relationship.objects.filter(user_1=self).filter(relationship_type__exact=u'F')]
            friends += [e.user_1 for e in Relationship.objects.filter(user_2=self).filter(relationship_type__exact=u'F')]
            return friends
        except AttributeError:
            return []
def getFriendRequests(self, trash=None):
"""Expects: nothing
Returns: dictionary containing list of requesting friends' info, or error message"""
        try:
            #list.extend() returns None, so build the list with += instead
            requests = [e.user_2.get_profile().getInfo(self.id) for e in Relationship.objects.filter(user_1=self).filter(relationship_type__exact=u'P')]
            requests += [e.user_1.get_profile().getInfo(self.id) for e in Relationship.objects.filter(user_2=self).filter(relationship_type__exact=u'P')]
            return { 'requests' : requests, 'success' : 1 }
        except AttributeError:
            return { 'success' : 0, 'error' : 'Error getting friend requests.' }
def getFriendDetails(self, user_2=None):
"""Returns list/array of dictionary objects for each friend w/ extra details"""
return [friend.get_profile().getInfo(user_2) for friend in self.getFriends()]
def requestFriend(self, request_dict):
"""Creates new Relationship object with u'f' to specified user"""
try:
friend = User.objects.get(pk=request_dict['user'])
except User.DoesNotExist:
return { 'success' : 0, 'error' : 'User does not exist.' }
except KeyError:
return { 'success' : 0, 'error' : 'No user specified' }
#Check to make sure there is no Relationship object already
if self.getRelationshipTo(friend) != None:
return { 'success' : 0, 'error' : 'You already have a relationship to this person.' }
#Create relationship object
Relationship(user_1=self.user, user_2=friend).save()
return { 'success' : 1 }
def getRelationshipTo(self, friend):
"""Returns verbose type of relationship between self and friend"""
try:
rel = Relationship.objects.get(Q(user_1=self.user, user_2=friend) | Q(user_2=self.user, user_1=friend))
except Relationship.DoesNotExist:
return None
return rel
def confirmFriend(self, request_dict):
"""Modifies the Relationship object between self and specified user"""
try:
friend = User.objects.get(pk=request_dict['user'])
except User.DoesNotExist:
return { 'success' : 0, 'error' : 'User does not exist.' }
except KeyError:
return { 'success' : 0, 'error' : 'No user specified' }
#Make sure a request exists
try:
request = Relationship.objects.get(user_1=friend, user_2=self.user, relationship_type__exact=u'P')
except Relationship.DoesNotExist:
#No request exists
return { 'success' : 0, 'error' : 'No friend request exists.' }
#Modify relationship type
request.relationship_type = u'F'
request.save()
return { 'success' : 1 }
def rejectFriendRequest(self, request_dict):
"""Removes Relationship object between self and specified user"""
try:
friend = User.objects.get(pk=request_dict['user'])
        except User.DoesNotExist:
            return { 'success' : 0, 'error' : 'User does not exist.' }
except KeyError:
return { 'success' : 0, 'error' : 'No user specified' }
#Make sure a request exists
try:
request = Relationship.objects.get(user_1=friend, user_2=self.user, relationship_type__exact=u'P')
except Relationship.DoesNotExist:
#No request exists
return { 'success' : 0, 'error' : 'No friend request exists.' }
#Remove relationship object
request.delete()
return { 'success' : 1 }
def removeFriend(self, request_dict):
"""Removes Relationship object between self and specified user"""
try:
friend = User.objects.get(pk=request_dict['user'])
except User.DoesNotExist:
return { 'success' : 0, 'error' : 'User does not exist.' }
except KeyError:
return { 'success' : 0, 'error' : 'No user specified' }
#Make sure a request exists
relationship = self.getRelationshipTo(friend)
if relationship == None:
return { 'success' : 0, 'error' : 'You are not friends with this user.' }
#Remove relationship object
relationship.delete()
return { 'success' : 1 }
##########################################
#Methods related to creating posts/comments
def createPost(self, request_dict):
"""
Creates a new post object to specified user
"""
try:
recipient = User.objects.get(pk=request_dict['recipient']).get_profile()
except UserProfile.DoesNotExist:
return { 'success' : 0, 'error' : 'User does not exist.' }
except KeyError:
#Assume they are posting a status
recipient = self
try:
text = request_dict['text']
if text == '':
raise KeyError
except KeyError:
return { 'success' : 0, 'error' : 'Not enough data specified.' }
if recipient == self:
#Add tag for user's current block
Post(author=self, text=text, recipient=recipient, block=self.current_block).save()
else:
#Create new Post and save it
Post(author=self, text=text, recipient=recipient).save()
return { 'success' : 1 }
def createPostComment(self, request_dict):
"""
Creates a comment on a post
"""
return self.createComment(request_dict, 'posts')
def createComment(self, request_dict, type):
"""
Creates a new comment object for the specified post
"""
try:
text = request_dict['text']
if text == '':
raise KeyError
if type == 'posts':
post = Post.objects.get(pk=request_dict['post_id'])
#Create new comment and save it
Comment(post=post, author=self, text=text).save()
else:
event = BlockEvent.objects.get(pk=request_dict['event_id'])
EventComment(event=event, author=self, text=text).save()
except Post.DoesNotExist:
return { 'success' : 0, 'error' : 'Post does not exist' }
except BlockEvent.DoesNotExist:
return { 'success' : 0, 'error' : 'Event does not exist' }
except KeyError:
return { 'success' : 0, 'error' : 'Not enough data specified' }
return { 'success' : 1 }
##########################################
#Methods related to posting/creating events/commenting on current block page
def createBlockEvent(self, request_dict):
"""
Note: no longer in use
Creates an event in the user's current block
"""
try:
title = request_dict['title']
description = request_dict['description']
duration = request_dict['duration']
location = request_dict['location']
except KeyError:
return { 'success' : 0, 'error' : 'Not enough data given' }
if title == '' or description == '' or duration == '' or location == '':
return { 'success' : 0, 'error' : 'Not enough data given' }
#Create new block event
BlockEvent(block_page=self.current_block, author=self, duration=duration, event_title=title, description=description, location=location).save()
return { 'success' : 1 }
def createEventComment(self, request_dict):
"""
Create comment on an event
"""
return self.createComment(request_dict, 'event')
def attendingEvent(self, request_dict):
"""
Adds event in request_dict to user's events m2m field
"""
#Get event in question
try:
event_id = request_dict['event_id']
event = BlockEvent.objects.get(pk=event_id)
except KeyError:
return { 'success' : 0, 'error' : 'Not enough data given' }
except BlockEvent.DoesNotExist:
return { 'success' : 0, 'error' : 'Event does not exist' }
#Add event to this user's events field
self.events.add(event)
self.save()
return { 'success' : 1 }
def getBlockActivity(self, offset=0, num_results=10):
"""
Gets info and feed for block user is in right now
"""
if self.current_block == None:
return { 'success' : 0, 'error' : 'No value for block' }
return self.current_block.getActivity(user=self, offset=offset, num_results=num_results)
def updateCurrentBlock(self, request_dict):
"""
Updates the user's current block given the (latitude, longitude) pair given in request_dict
"""
        try:
            latitude = request_dict['latitude']
            longitude = request_dict['longitude']
        except KeyError:
            return { 'success' : 0, 'error' : 'No latitude/longitude given.' }
        #Calculate x and y coordinates for block
        (x_coord, y_coord) = helper_functions.computeXY(latitude, longitude)
        #Get block for these coordinates if it exists, otherwise create it
        changed = 1
        try:
            block = BlockPage.objects.get(x_coordinate=x_coord, y_coordinate=y_coord)
            if block == self.current_block:
                changed = 0
        except BlockPage.DoesNotExist:
            block = BlockPage(x_coordinate=x_coord, y_coordinate=y_coord)
            block.save()
        #Set user's current block to this
        self.current_block = block
        #Also update their last_login value
        self.last_login = datetime.now()
        self.save()
        return { 'success' : 1, 'changed' : changed }
##########################################
#Methods related to messages
def sendNewMessage(self, request_dict):
"""Creates a new thread with specified users, subject, and initial message"""
try:
sub = escape(request_dict['subject'])
message = escape(request_dict['message'])
recipients = request_dict['recipients']
except KeyError:
#Deal with error here
return { 'success' : 0, 'error' : 'Not enough data specified.' }
#Create new thread
new_thread = Thread(subject=sub)
new_thread.save()
#Add recipients (and self) to this thread
ThreadMembership(user=self, thread=new_thread, has_been_read=True).save()
for recipient in recipients:
ThreadMembership(user=User.objects.get(username=recipient).get_profile(), thread=new_thread).save()
#Create initial message
Message(thread=new_thread, user=self, text=message).save()
return { 'success' : 1 }
def createReply(self, request_dict):
"""Creates a new message as part of the specified thread,
then returns success/error dictionary"""
try:
thread = Thread.objects.get(pk=request_dict['thread_id'])
message = escape(request_dict['message'])
except Thread.DoesNotExist:
return { 'success' : 0, 'error' : 'Thread does not exist.' }
except KeyError:
return { 'success' : 0, 'error' : 'Not enough data specified.' }
#Create new message for thread
Message(thread=thread, user=self, text=message).save()
#Set all other memberships in this thread to unread
for thread_mem in ThreadMembership.objects.filter(thread=thread):
if thread_mem.user != self:
thread_mem.has_been_read = False
thread_mem.save()
return { 'success' : 1 }
def getThreads(self, request_dict):
"""
Returns list of dictionary objects containing info about most recent threads
"""
threads = [ thread.getThreadInfo(self) for thread in self.threads.all() ]
return { 'success' : 1, 'threads' : sorted(threads, key=lambda thread: helper_functions.inverse_my_strftime(thread['last_message']['timestamp']), reverse=True)}
def numUnreadMessages(self, request_dict):
"""Returns dictionary object containing num_unread integer"""
return { 'success' : 1, 'number_unread' : len(ThreadMembership.objects.filter(user=self, has_been_read=False)) }
##########################################
#Methods related to activity feeds
def getActivity(self, requesting_user, offset=0, max_num=10):
"""
Returns list of dictionary objects containing most recent actions by this user
requesting_user contains the logged in user (or none if no one is logged in)
request_dict holds some optional info such as the max number of entries to return, and index offset
"""
#Declare list to hold all of the activity dictionary objects
all_activity = []
#Get posts to this user or from this user
all_activity.extend([ post.getDetail() for post in Post.objects.filter(Q(author=self) | Q(recipient=self)).order_by('-time_posted')[offset:max_num+offset] ])
#Get new friendships
#all_activity.extend([ relationship.getDetail() for relationship in Relationship.objects.filter(Q(user_1=self) | Q(user_2=self)).filter(relationship_type__exact=u'F').order_by('-timestamp')[offset:max_num+offset] ])
#Get changes in extra info
#all_activity.extend([ has_interest.getDetail() for has_interest in HasInterest.objects.filter(user=self).order_by('-time_added')[offset:max_num+offset] ])
#all_activity.extend([ has_school.getDetail() for has_school in HasSchool.objects.filter(user=self).order_by('-time_added')[offset:max_num+offset] ])
#all_activity.extend([ has_living_loc.getDetail() for has_living_loc in HasLivingLoc.objects.filter(user=self).order_by('-time_added')[offset:max_num+offset] ])
#all_activity.extend([ has_workplace.getDetail() for has_workplace in HasWorkplace.objects.filter(user=self).order_by('-time_added')[offset:max_num+offset] ])
#Sort all_activity by timestamp, descending
#Limit all_activity from index_offset to max_num
return sorted(all_activity, key=lambda item: helper_functions.inverse_my_strftime(item['timestamp']), reverse=True)
def getUserActivity(self, requesting_user, offset=0, max_num=150):
"""Returns dictionary containing activity for specified user"""
        return { 'success' : 1, 'info' : self.getInfo(), 'activity' : self.getActivity(requesting_user, offset, max_num) }
def getFriendFeed(self, offset=0, max_num=15):
"""Returns list composed of the user activity feeds of this user's friends"""
#Get all the activity for the user's friends
friend_feed = []
for friend in self.getFriends():
friend_feed.extend(friend.get_profile().getActivity(self, offset))
#Sort it and limit it to from offset to max_num
friend_feed = sorted(friend_feed, key=lambda item: helper_functions.inverse_my_strftime(item['timestamp']), reverse=True)[offset:max_num+offset]
return { 'success' : 1, 'activity' : friend_feed }
##########################################
#Methods related to determining similarity between users
#TODO
def getSimilarityTo(self, user_id):
"""
Returns integer (on scale of 0 to 100) representing the user's similarity to the other user
"""
if user_id == self.id:
return 100
#Get other user
try:
user = User.objects.get(pk=user_id)
except User.DoesNotExist:
return 0
#Note: intersection_score = len(intersect(set1, set2))/(len(set1)+len(set2) - len(intersect(set1, set2))) which is in [0,1]
#Get intersection of friends
        friends1 = set(self.getFriends())
        friends2 = set(user.get_profile().getFriends())
        intersect_size = len(friends1 & friends2)
        union_size = len(friends1) + len(friends2) - intersect_size
        if union_size == 0:
            return 0
        #Use float division so the score is not truncated to 0 under Python 2
        friends_score = float(intersect_size) / union_size
#Get intersection of extra info's
#Take into account metadata for extra info's
#Stopgap
return int(friends_score * 100)
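        # Worked example (illustrative): |A| = 4, |B| = 6, |A & B| = 2
        # -> 2 / (4 + 6 - 2) = 0.25 -> similarity score of 25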
##########################################
#Methods related to extra info retrieval, adding, and removal
def getInterests(self):
"""Returns list of the interests for this user"""
return [ info.getDetail() for info in HasInterest.objects.filter(user=self) ]
def getSchools(self):
"""Returns list of the schools for this user"""
return [ info.getDetail() for info in HasSchool.objects.filter(user=self) ]
def getLivingLocs(self):
"""Returns list of the living locations for this user"""
return [ info.getDetail() for info in HasLivingLoc.objects.filter(user=self) ]
def getWorkplaces(self):
"""Returns list of the workplaces for this user"""
return [ info.getDetail() for info in HasWorkplace.objects.filter(user=self) ]
def addInterest(self, request_dict):
"""Adds interest for this user, creating new Interest object if necessary"""
try:
interest_title = escape(request_dict['interest'])
except KeyError:
return { 'success' : 0, 'error' : 'No interest given.' }
#Check if interest exists already
try:
interest = Interest.objects.get(title=interest_title)
except Interest.DoesNotExist:
#Make new interest object
interest = Interest.objects.create(title=interest_title)
#Check if user already has this interest
if interest in self.interests.all():
return { 'success' : 0, 'error' : 'You already have this interest.' }
#Add this interest to this user
HasInterest.objects.create(user=self, interest=interest)
return { 'success' : 1 }
def addSchool(self, request_dict):
"""Adds school for this user, creating new School object if necessary"""
try:
school_title = escape(request_dict['school'])
started = escape(request_dict['started']) if 'started' in request_dict else None
ended = escape(request_dict['ended']) if 'ended' in request_dict else None
studied = escape(request_dict['studied']) if 'studied' in request_dict else ''
except KeyError:
return { 'success' : 0, 'error' : 'No school given.' }
#Check if school object exists already
try:
school = School.objects.get(title=school_title)
except School.DoesNotExist:
#Make new school object
school = School.objects.create(title=school_title)
#Check if user already has this school
if school in self.schools.all():
#Check if the other info for this school is the same
has_school = HasSchool.objects.get(school=school, user=self)
if (has_school.date_started == started and has_school.date_ended == ended and studied == has_school.studied):
return { 'success' : 0, 'error' : 'You have already added this school.' }
#Add this school to this user
HasSchool.objects.create(user=self, school=school, studied=studied, date_started=started, date_ended=ended)
return { 'success' : 1 }
def addLivingLoc(self, request_dict):
"""Adds living location for this user, creating new LivingLoc object if necessary"""
try:
living_loc_title = escape(request_dict['living_loc'])
started = escape(request_dict['started']) if 'started' in request_dict else None
ended = escape(request_dict['ended']) if 'ended' in request_dict else None
except KeyError:
return { 'success' : 0, 'error' : 'No living location given.' }
#Check if living_loc object exists already
try:
living_loc = LivingLoc.objects.get(title=living_loc_title)
except LivingLoc.DoesNotExist:
#Make new LivingLoc object
living_loc = LivingLoc.objects.create(title=living_loc_title)
#Check if user already has this living location
if living_loc in self.living_locs.all():
#Check if the other info for this living location is the same
has_living_loc = HasLivingLoc.objects.get(living_loc=living_loc, user=self)
if (has_living_loc.date_started == started and has_living_loc.date_ended == ended):
return { 'success' : 0, 'error' : 'You have already added this living location.' }
#Add this living_loc to this user
HasLivingLoc.objects.create(user=self, living_loc=living_loc, date_started=started, date_ended=ended)
return { 'success' : 1 }
def addWorkplace(self, request_dict):
"""Adds workplace for this user, creating new Workplace object if necessary"""
try:
workplace_title = escape(request_dict['workplace'])
started = escape(request_dict['started']) if 'started' in request_dict else None
ended = escape(request_dict['ended']) if 'ended' in request_dict else None
job = escape(request_dict['job']) if 'job' in request_dict else ''
except KeyError:
return { 'success' : 0, 'error' : 'No workplace given.' }
#Check if workplace object exists already
try:
workplace = Workplace.objects.get(title=workplace_title)
except Workplace.DoesNotExist:
#Make new Workplace object
workplace = Workplace.objects.create(title=workplace_title)
#Check if user already has this workplace
if workplace in self.workplaces.all():
#Check if the other info for this workplace is the same
has_workplace = HasWorkplace.objects.get(workplace=workplace, user=self)
if (has_workplace.date_started == started and has_workplace.date_ended == ended and has_workplace.job == job):
return { 'success' : 0, 'error' : 'You have already added this workplace.' }
#Add this workplace to this user
HasWorkplace.objects.create(user=self, workplace=workplace, job=job, date_started=started, date_ended=ended)
return { 'success' : 1 }
def removeInterest(self, request_dict):
"""Removes user's interest by the HasInterest id"""
try:
has_id = request_dict['interest']
except KeyError:
return { 'success' : 0, 'error' : 'No interest given.' }
#Make sure user actually has the info
try:
has_info = HasInterest.objects.get(pk=has_id)
except HasInterest.DoesNotExist:
return { 'success' : 0, 'error' : 'You do not have this interest.' }
#Remove the info
has_info.delete()
return { 'success' : 1 }
def removeSchool(self, request_dict):
"""Removes user's school by the HasSchool id"""
try:
has_id = request_dict['school']
except KeyError:
return { 'success' : 0, 'error' : 'No school given.' }
#Make sure user actually has the info
try:
has_info = HasSchool.objects.get(pk=has_id)
except HasSchool.DoesNotExist:
return { 'success' : 0, 'error' : 'You have not added this school.' }
#Remove the info
has_info.delete()
return { 'success' : 1 }
def removeLivingLoc(self, request_dict):
"""Removes user's living location by the HasLivingLoc id"""
try:
has_id = request_dict['living_loc']
except KeyError:
return { 'success' : 0, 'error' : 'No living location given.' }
#Make sure user actually has the info
try:
has_info = HasLivingLoc.objects.get(pk=has_id)
except HasLivingLoc.DoesNotExist:
return { 'success' : 0, 'error' : 'You have not added this living location.' }
#Remove the info
has_info.delete()
return { 'success' : 1 }
def removeWorkplace(self, request_dict):
"""Removes user's workplace by the HasWorkplace id"""
try:
has_id = request_dict['workplace']
except KeyError:
return { 'success' : 0, 'error' : 'No workplace given.' }
#Make sure user actually has the info
try:
has_info = HasWorkplace.objects.get(pk=has_id)
except HasWorkplace.DoesNotExist:
return { 'success' : 0, 'error' : 'You have not added this workplace.' }
#Remove the info
has_info.delete()
return { 'success' : 1 }
##########################################
#Methods related to notifications
def getNotifications(self):
"""Returns all entries from Notifications table for this user sorted by timestamp"""
        notification_list = []
        for notification in self.notification_set.all():
            if notification.data_type == u'P':
                #Post
                detail = Post.objects.get(pk=notification.object_id).getDetail()
            elif notification.data_type == u'M':
                #Message
                detail = Message.objects.get(pk=notification.object_id).getDetail()
            elif notification.data_type == u'C':
                #Comment
                detail = Comment.objects.get(pk=notification.object_id).getDetail()
            elif notification.data_type == u'F':
                #Friend request
                detail = Relationship.objects.get(pk=notification.object_id).getDetail()
            else:
                continue
            #dict.update() returns None, so update in place and then append
            detail.update({'type': notification.get_data_type_display()})
            notification_list.append(detail)
        #Sort newest first to match the docstring
        return sorted(notification_list, key=lambda item: helper_functions.inverse_my_strftime(item['timestamp']), reverse=True)
def pushNotification(self, data_type, object_id):
"""Adds notification for this user"""
Notification(user=self, data_type=data_type, object_id=object_id).save()
class Relationship(models.Model):
"""
Represents a relationship between two users, including type
"""
RELATIONSHIP_TYPES = (
(u'P', u'Pending Friend'),
(u'F', u'Friend'),
)
user_1 = models.ForeignKey(User, related_name='+')
user_2 = models.ForeignKey(User, related_name='+')
relationship_type = models.CharField(max_length=2, choices=RELATIONSHIP_TYPES, default=u'P')
timestamp = models.DateTimeField(auto_now_add=True)
def getDetail(self):
"""Returns dictionary object containing basic info"""
return { 'id' : self.id, 'user_1' : self.user_1.get_profile().getInfo(), 'user_2' : self.user_2.get_profile().getInfo(),
'relationship_type' : self.get_relationship_type_display(), 'type' : 'relationship',
'timestamp' : helper_functions.my_strftime(self.timestamp) }
class Notification(models.Model):
"""
Represents a notification for this user
"""
NOTIFICATION_TYPES = (
(u'P', u'Post'),
(u'M', u'Message'),
(u'C', u'Comment'),
(u'F', u'Friend Request'),
)
#TODO convert this to using generic foreign keys
user = models.ForeignKey(UserProfile)
data_type = models.CharField(max_length=2, choices=NOTIFICATION_TYPES)
    object_id = models.IntegerField() #Holds the id of the object the notification refers to
time_created = models.DateTimeField(auto_now_add=True)
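    # A possible sketch for the generic-FK TODO above (not wired in; exact
    # import paths depend on the Django version in use):
    #     from django.contrib.contenttypes.models import ContentType
    #     content_type = models.ForeignKey(ContentType)
    #     object_id = models.PositiveIntegerField()
    #     content_object = GenericForeignKey('content_type', 'object_id')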
class ImageHolder(models.Model):
"""
Manages an image, including resizing for the thumbnail
and holding the path to the thumbnail and the fullsize image
"""
file = models.ImageField(upload_to=settings.IMAGE_UPLOAD_PATH, null=True)
thumbnail = models.ImageField(upload_to=settings.THUMBNAIL_UPLOAD_PATH, null=True)
creator = models.ForeignKey(UserProfile)
handle = models.CharField(max_length=100, null=True)
caption = models.TextField(null=True)
timestamp = models.DateTimeField(auto_now_add=True)
#Import at end of file in order to avoid circular imports
from MessagesApp.models import Thread, Message, ThreadMembership
from PostsApp.models import Post
| rishabhsixfeet/Dock- | userInfo/models.py | Python | mit | 35,677 |
"""
Experiment for XGBoost + CF
Aim: To find the best tc(max_depth), mb(min_child_weight), mf(colsample_bytree * 93), ntree
tc: [13, 15, 17]
mb: [5, 7, 9]
mf: [40, 45, 50, 55, 60]
ntree: [160, 180, 200, 220, 240, 260, 280, 300, 320, 340, 360]
Averaging 20 models
Summary
Best
loss ntree
mf 40 45 50 55 60 40 45 50 55 60
tc mb
13 5 0.4471 0.4471 0.4473 0.4471 0.4476 300 300 280 280 260
7 0.4477 0.4475 0.4469 0.4472 0.4481 340 320 300 300 300
9 0.4485 0.4484 0.4487 0.4488 0.4487 360 360 340 340 340
15 5 0.4471 *0.4465* 0.4471 0.4476 0.4478 260 *260* 240 240 240
7 0.4473 0.4468 0.4473 0.4474 0.4478 300 280 260 260 260
9 0.4483 0.4480 0.4483 0.4484 0.4492 340 320 300 300 280
17 5 0.4471 0.4472 0.4474 0.4476 0.4478 240 240 220 220 200
7 0.4474 0.4470 0.4468 0.4475 0.4473 280 260 260 240 240
9 0.4481 0.4480 0.4476 0.4480 0.4486 320 300 280 260 260
Time: 1 day, 7:37:21 on i7-4790k 32G MEM GTX660
"""
import numpy as np
import scipy as sp
import pandas as pd
from sklearn.cross_validation import StratifiedKFold
from sklearn.metrics import log_loss
from datetime import datetime
import os
from sklearn.grid_search import ParameterGrid
import xgboost as xgb
from utility import *
path = os.getcwd() + '/'
path_log = path + 'logs/'
file_train = path + 'train.csv'
training = pd.read_csv(file_train, index_col = 0)
num_train = training.shape[0]
y = training['target'].values
yMat = pd.get_dummies(training['target']).values
X = training.iloc[:,:93].values
kf = StratifiedKFold(y, n_folds=5, shuffle = True, random_state = 345)
for train_idx, valid_idx in kf:
break
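# Keep only the first of the 5 stratified folds as the train/valid split.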
y_train_1 = yMat[train_idx].argmax(1)
y_train = yMat[train_idx]
y_valid = yMat[valid_idx]
X2, ignore = count_feature(X)
dtrain , dvalid= xgb.DMatrix(X2[train_idx], label = y_train_1), xgb.DMatrix(X2[valid_idx])
#
nIter = 20
nt = 360
nt_lst = range(160, 370, 20)
nt_len = len(nt_lst)
bf = .8 # subsample
sh = .1 # eta
# tc:max_depth, mb:min_child_weight, mf(max features):colsample_bytree * 93
param_grid = {'tc':[13, 15, 17], 'mb':[5, 7, 9], 'mf':[40, 45, 50, 55, 60]}
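# Grid size: 3 (tc) x 3 (mb) x 5 (mf) = 45 settings, each averaged over
# nIter = 20 seeds, with predictions scored at the 11 ntree checkpoints in nt_lst.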
scores = []
t0 = datetime.now()
for params in ParameterGrid(param_grid):
tc = params['tc']
mb = params['mb']
mf = params['mf']
cs = float(mf) / X.shape[1]
print tc, mb, mf
predAll = [np.zeros(y_valid.shape) for k in range(nt_len)]
for i in range(nIter):
seed = 112233 + i
param = {'bst:max_depth':tc, 'bst:eta':sh,'objective':'multi:softprob','num_class':9,
'min_child_weight':mb, 'subsample':bf, 'colsample_bytree':cs,
'silent':1, 'nthread':8, 'seed':seed}
plst = param.items()
bst = xgb.train(plst, dtrain, nt)
for s in range(nt_len):
ntree = nt_lst[s]
pred = bst.predict(dvalid, ntree_limit = ntree).reshape(y_valid.shape)
predAll[s] += pred
scores.append({'tc':tc, 'mb':mb, 'mf':mf, 'ntree':ntree, 'nModels':i+1, 'seed':seed,
'valid':log_loss(y_valid, pred),
'valid_avg':log_loss(y_valid, predAll[s] / (i+1))})
print scores[-4], datetime.now() - t0
df = pd.DataFrame(scores)
if os.path.exists(path_log) is False:
print 'mkdir', path_log
os.mkdir(path_log)
df.to_csv(path_log + 'exp_XGB_CF_tc_mb_mf_ntree.csv')
keys = ['tc', 'mb', 'mf', 'ntree']
grouped = df.groupby(keys)
pd.set_option('display.precision', 5)
print pd.DataFrame({'loss':grouped['valid_avg'].last().unstack().min(1),
'ntree':grouped['valid_avg'].last().unstack().idxmin(1)}).unstack()
# loss ntree
# mf 40 45 50 55 60 40 45 50 55 60
# tc mb
# 13 5 0.4471 0.4471 0.4473 0.4471 0.4476 300 300 280 280 260
# 7 0.4477 0.4475 0.4469 0.4472 0.4481 340 320 300 300 300
# 9 0.4485 0.4484 0.4487 0.4488 0.4487 360 360 340 340 340
# 15 5 0.4471 0.4465 0.4471 0.4476 0.4478 260 260 240 240 240
# 7 0.4473 0.4468 0.4473 0.4474 0.4478 300 280 260 260 260
# 9 0.4483 0.4480 0.4483 0.4484 0.4492 340 320 300 300 280
# 17 5 0.4471 0.4472 0.4474 0.4476 0.4478 240 240 220 220 200
# 7 0.4474 0.4470 0.4468 0.4475 0.4473 280 260 260 240 240
# 9 0.4481 0.4480 0.4476 0.4480 0.4486 320 300 280 260 260
print pd.DataFrame({'loss':grouped['valid'].mean().unstack().min(1),
'ntree':grouped['valid'].mean().unstack().idxmin(1)}).unstack()
# loss ntree
# mf 40 45 50 55 60 40 45 50 55 60
# tc mb
# 13 5 0.4563 0.4564 0.4564 0.4561 0.4566 280 260 260 260 240
# 7 0.4565 0.4563 0.4557 0.4561 0.4569 320 300 300 300 280
# 9 0.4571 0.4569 0.4571 0.4573 0.4570 340 340 320 300 300
# 15 5 0.4567 0.4559 0.4565 0.4571 0.4571 260 240 240 220 220
# 7 0.4565 0.4558 0.4562 0.4564 0.4568 280 260 260 260 240
# 9 0.4570 0.4567 0.4570 0.4570 0.4577 300 300 280 280 260
# 17 5 0.4568 0.4569 0.4570 0.4572 0.4574 220 220 200 200 200
# 7 0.4567 0.4563 0.4559 0.4567 0.4564 260 240 240 220 220
# 9 0.4571 0.4569 0.4565 0.4567 0.4573 280 280 260 260 240
#
criterion = df.apply(lambda x: x['tc']==15 and x['mb']==5 and x['mf']==45, axis = 1)
grouped = df[criterion].groupby('ntree')
g = grouped[['valid']].mean()
g['valid_avg'] = grouped['valid_avg'].last()
print g
# valid valid_avg
# ntree
# 160 0.461023 0.452912
# 180 0.458513 0.450111
# 200 0.456939 0.448232
# 220 0.456147 0.447141
# 240 0.455870 0.446598
# 260 0.456097 0.446525
# 280 0.456657 0.446827
# 300 0.457434 0.447327
# 320 0.458462 0.448101
# 340 0.459635 0.449036
# 360 0.460977 0.450160
ax = g.plot()
ax.set_title('XGB+CF max_depth=15\n min_child_weight=5, colsample_bytree=45/93.')
ax.set_ylabel('Logloss')
fig = ax.get_figure()
fig.savefig(path_log + 'exp_XGB_CF_tc_mb_mf_ntree.png')
| tks0123456789/kaggle-Otto | exp_XGB_CF_tc_mb_mf_ntree.py | Python | mit | 6,574 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# SPDX-License-Identifier: GPL-3.0
#
# GNU Radio Python Flow Graph
# Title: Record_RX
# Author: Justin Ried
# GNU Radio version: 3.8.1.0
from distutils.version import StrictVersion
if __name__ == '__main__':
import ctypes
import sys
if sys.platform.startswith('linux'):
try:
x11 = ctypes.cdll.LoadLibrary('libX11.so')
x11.XInitThreads()
except:
print("Warning: failed to XInitThreads()")
from PyQt5 import Qt
from gnuradio import qtgui
from gnuradio.filter import firdes
import sip
from gnuradio import blocks
from gnuradio import gr
import sys
import signal
from argparse import ArgumentParser
from gnuradio.eng_arg import eng_float, intx
from gnuradio import eng_notation
import osmosdr
import time
from gnuradio import qtgui
class Record_RX(gr.top_block, Qt.QWidget):
def __init__(self):
gr.top_block.__init__(self, "Record_RX")
Qt.QWidget.__init__(self)
self.setWindowTitle("Record_RX")
qtgui.util.check_set_qss()
try:
self.setWindowIcon(Qt.QIcon.fromTheme('gnuradio-grc'))
except:
pass
self.top_scroll_layout = Qt.QVBoxLayout()
self.setLayout(self.top_scroll_layout)
self.top_scroll = Qt.QScrollArea()
self.top_scroll.setFrameStyle(Qt.QFrame.NoFrame)
self.top_scroll_layout.addWidget(self.top_scroll)
self.top_scroll.setWidgetResizable(True)
self.top_widget = Qt.QWidget()
self.top_scroll.setWidget(self.top_widget)
self.top_layout = Qt.QVBoxLayout(self.top_widget)
self.top_grid_layout = Qt.QGridLayout()
self.top_layout.addLayout(self.top_grid_layout)
self.settings = Qt.QSettings("GNU Radio", "Record_RX")
try:
if StrictVersion(Qt.qVersion()) < StrictVersion("5.0.0"):
self.restoreGeometry(self.settings.value("geometry").toByteArray())
else:
self.restoreGeometry(self.settings.value("geometry"))
except:
pass
##################################################
# Variables
##################################################
self.samp_rate = samp_rate = 2e6
##################################################
# Blocks
##################################################
self.qtgui_freq_sink_x_0 = qtgui.freq_sink_c(
1024, #size
firdes.WIN_BLACKMAN_hARRIS, #wintype
0, #fc
samp_rate, #bw
"", #name
1
)
self.qtgui_freq_sink_x_0.set_update_time(0.10)
self.qtgui_freq_sink_x_0.set_y_axis(-140, 10)
self.qtgui_freq_sink_x_0.set_y_label('Relative Gain', 'dB')
self.qtgui_freq_sink_x_0.set_trigger_mode(qtgui.TRIG_MODE_FREE, 0.0, 0, "")
self.qtgui_freq_sink_x_0.enable_autoscale(False)
self.qtgui_freq_sink_x_0.enable_grid(False)
self.qtgui_freq_sink_x_0.set_fft_average(1.0)
self.qtgui_freq_sink_x_0.enable_axis_labels(True)
self.qtgui_freq_sink_x_0.enable_control_panel(False)
labels = ['', '', '', '', '',
'', '', '', '', '']
widths = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "dark blue"]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in range(1):
if len(labels[i]) == 0:
self.qtgui_freq_sink_x_0.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_freq_sink_x_0.set_line_label(i, labels[i])
self.qtgui_freq_sink_x_0.set_line_width(i, widths[i])
self.qtgui_freq_sink_x_0.set_line_color(i, colors[i])
self.qtgui_freq_sink_x_0.set_line_alpha(i, alphas[i])
self._qtgui_freq_sink_x_0_win = sip.wrapinstance(self.qtgui_freq_sink_x_0.pyqwidget(), Qt.QWidget)
self.top_grid_layout.addWidget(self._qtgui_freq_sink_x_0_win)
self.osmosdr_source_0 = osmosdr.source(
args="numchan=" + str(1) + " " + ''
)
self.osmosdr_source_0.set_sample_rate(samp_rate)
self.osmosdr_source_0.set_center_freq(462725000, 0)
self.osmosdr_source_0.set_freq_corr(0, 0)
self.osmosdr_source_0.set_gain(10, 0)
self.osmosdr_source_0.set_if_gain(25, 0)
self.osmosdr_source_0.set_bb_gain(16, 0)
self.osmosdr_source_0.set_antenna('', 0)
self.osmosdr_source_0.set_bandwidth(0, 0)
self.blocks_file_sink_0 = blocks.file_sink(gr.sizeof_gr_complex*1, '/root/Desktop/CV', False)
self.blocks_file_sink_0.set_unbuffered(False)
##################################################
# Connections
##################################################
self.connect((self.osmosdr_source_0, 0), (self.blocks_file_sink_0, 0))
self.connect((self.osmosdr_source_0, 0), (self.qtgui_freq_sink_x_0, 0))
def closeEvent(self, event):
self.settings = Qt.QSettings("GNU Radio", "Record_RX")
self.settings.setValue("geometry", self.saveGeometry())
event.accept()
def get_samp_rate(self):
return self.samp_rate
def set_samp_rate(self, samp_rate):
self.samp_rate = samp_rate
self.osmosdr_source_0.set_sample_rate(self.samp_rate)
self.qtgui_freq_sink_x_0.set_frequency_range(0, self.samp_rate)
def main(top_block_cls=Record_RX, options=None):
if StrictVersion("4.5.0") <= StrictVersion(Qt.qVersion()) < StrictVersion("5.0.0"):
style = gr.prefs().get_string('qtgui', 'style', 'raster')
Qt.QApplication.setGraphicsSystem(style)
qapp = Qt.QApplication(sys.argv)
tb = top_block_cls()
tb.start()
tb.show()
def sig_handler(sig=None, frame=None):
Qt.QApplication.quit()
signal.signal(signal.SIGINT, sig_handler)
signal.signal(signal.SIGTERM, sig_handler)
timer = Qt.QTimer()
timer.start(500)
timer.timeout.connect(lambda: None)
def quitting():
tb.stop()
tb.wait()
qapp.aboutToQuit.connect(quitting)
qapp.exec_()
if __name__ == '__main__':
main()
| CyberVines/Universal-Quantum-Cymatics | Record_RX.py | Python | mit | 6,310 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutIteration(Koan):
def test_iterators_are_a_type(self):
it = iter(range(1,6))
fib = 0
for num in it:
fib += num
self.assertEqual(__ , fib)
def test_iterating_with_next(self):
stages = iter(['alpha','beta','gamma'])
try:
self.assertEqual(__, next(stages))
next(stages)
self.assertEqual(__, next(stages))
next(stages)
except StopIteration as ex:
err_msg = 'Ran out of iterations'
self.assertRegexpMatches(err_msg, __)
# ------------------------------------------------------------------
def add_ten(self, item):
return item + 10
def test_map_transforms_elements_of_a_list(self):
seq = [1, 2, 3]
mapped_seq = list()
mapping = map(self.add_ten, seq)
        self.assertNotEqual(list, type(mapping))
self.assertEqual(__, type(mapping).__name__)
        # In Python 3 the built-in iterator funcs return iterable view objects
        # instead of lists
for item in mapping:
mapped_seq.append(item)
self.assertEqual(__, mapped_seq)
        # Note: iterator methods actually return objects of iter type in
        # python 3. In python 2 map() would give you a list.
def test_filter_selects_certain_items_from_a_list(self):
def is_even(item):
return (item % 2) == 0
seq = [1, 2, 3, 4, 5, 6]
even_numbers = list()
for item in filter(is_even, seq):
even_numbers.append(item)
self.assertEqual(__, even_numbers)
def test_just_return_first_item_found(self):
def is_big_name(item):
return len(item) > 4
names = ["Jim", "Bill", "Clarence", "Doug", "Eli"]
name = None
iterator = filter(is_big_name, names)
try:
name = next(iterator)
except StopIteration:
msg = 'Ran out of big names'
self.assertEqual(__, name)
# ------------------------------------------------------------------
def add(self,accum,item):
return accum + item
def multiply(self,accum,item):
return accum * item
def test_reduce_will_blow_your_mind(self):
import functools
# As of Python 3 reduce() has been demoted from a builtin function
# to the functools module.
result = functools.reduce(self.add, [2, 3, 4])
self.assertEqual(__, type(result).__name__)
# Reduce() syntax is same as Python 2
self.assertEqual(__, result)
result2 = functools.reduce(self.multiply, [2, 3, 4], 1)
self.assertEqual(__, result2)
# Extra Credit:
# Describe in your own words what reduce does.
# ------------------------------------------------------------------
def test_creating_lists_with_list_comprehensions(self):
feast = ['lambs', 'sloths', 'orangutans', 'breakfast cereals', 'fruit bats']
comprehension = [delicacy.capitalize() for delicacy in feast]
self.assertEqual(__, comprehension[0])
self.assertEqual(__, comprehension[2])
def test_use_pass_for_iterations_with_no_body(self):
for num in range(1,5):
pass
self.assertEqual(__, num)
# ------------------------------------------------------------------
def test_all_iteration_methods_work_on_any_sequence_not_just_lists(self):
        # Ranges are an iterable sequence
result = map(self.add_ten, range(1,4))
self.assertEqual(__, list(result))
try:
# Files act like a collection of lines
file = open("example_file.txt")
def make_upcase(line):
return line.strip().upper()
upcase_lines = map(make_upcase, file.readlines())
self.assertEqual(__, list(upcase_lines))
# NOTE: You can create your own collections that work with each,
# map, select, etc.
finally:
# Arg, this is ugly.
# We will figure out how to fix this later.
if file:
file.close() | caalle/Python-koans | python 3/koans/about_iteration.py | Python | mit | 4,469 |
"""Control switches."""
from datetime import timedelta
import logging
from ProgettiHWSW.relay import Relay
import async_timeout
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from . import setup_switch
from .const import DEFAULT_POLLING_INTERVAL_SEC, DOMAIN
_LOGGER = logging.getLogger(DOMAIN)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the switches from a config entry."""
board_api = hass.data[DOMAIN][config_entry.entry_id]
relay_count = config_entry.data["relay_count"]
switches = []
async def async_update_data():
"""Fetch data from API endpoint of board."""
async with async_timeout.timeout(5):
return await board_api.get_switches()
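        # The 5 s timeout keeps a hung board from stalling the coordinator,
        # which re-polls every DEFAULT_POLLING_INTERVAL_SEC.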
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="switch",
update_method=async_update_data,
update_interval=timedelta(seconds=DEFAULT_POLLING_INTERVAL_SEC),
)
await coordinator.async_refresh()
for i in range(1, int(relay_count) + 1):
switches.append(
ProgettihwswSwitch(
coordinator,
f"Relay #{i}",
setup_switch(board_api, i, config_entry.data[f"relay_{str(i)}"]),
)
)
async_add_entities(switches)
class ProgettihwswSwitch(CoordinatorEntity, SwitchEntity):
"""Represent a switch entity."""
def __init__(self, coordinator, name, switch: Relay):
"""Initialize the values."""
super().__init__(coordinator)
self._switch = switch
self._name = name
async def async_turn_on(self, **kwargs):
"""Turn the switch on."""
await self._switch.control(True)
await self.coordinator.async_request_refresh()
async def async_turn_off(self, **kwargs):
"""Turn the switch off."""
await self._switch.control(False)
await self.coordinator.async_request_refresh()
async def async_toggle(self, **kwargs):
"""Toggle the state of switch."""
await self._switch.toggle()
await self.coordinator.async_request_refresh()
@property
def name(self):
"""Return the switch name."""
return self._name
@property
def is_on(self):
"""Get switch state."""
return self.coordinator.data[self._switch.id]
| rohitranjan1991/home-assistant | homeassistant/components/progettihwsw/switch.py | Python | mit | 2,686 |
#!/usr/bin/python
#
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keystoneclient.v2_0 import client
from keystoneclient.v3 import client as keystoneclient_v3
from keystoneclient.auth import token_endpoint
from keystoneclient import session, exceptions
from charmhelpers.core.decorators import retry_on_exception
# Early versions of keystoneclient lib do not have an explicit
# ConnectionRefused
if hasattr(exceptions, 'ConnectionRefused'):
econnrefused = exceptions.ConnectionRefused
else:
econnrefused = exceptions.ConnectionError
def _get_keystone_manager_class(endpoint, token, api_version):
"""Return KeystoneManager class for the given API version
@param endpoint: the keystone endpoint to point client at
@param token: the keystone admin_token
@param api_version: version of the keystone api the client should use
    @returns a KeystoneManager instance used for interrogating keystone
"""
if api_version == 2:
return KeystoneManager2(endpoint, token)
if api_version == 3:
return KeystoneManager3(endpoint, token)
raise ValueError('No manager found for api version {}'.format(api_version))
@retry_on_exception(5, base_delay=3, exc_type=econnrefused)
def get_keystone_manager(endpoint, token, api_version=None):
"""Return a keystonemanager for the correct API version
If api_version has not been set then create a manager based on the endpoint
Use this manager to query the catalogue and determine which api version
should actually be being used. Return the correct client based on that.
Function is wrapped in a retry_on_exception to catch the case where the
keystone service is still initialising and not responding to requests yet.
XXX I think the keystone client should be able to do version
detection automatically so the code below could be greatly
simplified
@param endpoint: the keystone endpoint to point client at
@param token: the keystone admin_token
@param api_version: version of the keystone api the client should use
@returns keystonemanager class used for interrogating keystone
"""
if api_version:
return _get_keystone_manager_class(endpoint, token, api_version)
else:
if 'v2.0' in endpoint.split('/'):
manager = _get_keystone_manager_class(endpoint, token, 2)
else:
manager = _get_keystone_manager_class(endpoint, token, 3)
if endpoint.endswith('/'):
base_ep = endpoint.rsplit('/', 2)[0]
else:
base_ep = endpoint.rsplit('/', 1)[0]
svc_id = None
for svc in manager.api.services.list():
if svc.type == 'identity':
svc_id = svc.id
version = None
for ep in manager.api.endpoints.list():
if ep.service_id == svc_id and hasattr(ep, 'adminurl'):
version = ep.adminurl.split('/')[-1]
if version and version == 'v2.0':
new_ep = base_ep + "/" + 'v2.0'
return _get_keystone_manager_class(new_ep, token, 2)
elif version and version == 'v3':
new_ep = base_ep + "/" + 'v3'
return _get_keystone_manager_class(new_ep, token, 3)
else:
return manager
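# Example usage (sketch; the endpoint and token below are placeholders):
#   manager = get_keystone_manager('http://keystone:35357/v3', 'ADMIN_TOKEN')
#   admin_role_id = manager.resolve_role_id('Admin')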
class KeystoneManager(object):
def resolve_domain_id(self, name):
pass
def resolve_role_id(self, name):
"""Find the role_id of a given role"""
roles = [r._info for r in self.api.roles.list()]
for r in roles:
if name.lower() == r['name'].lower():
return r['id']
def resolve_service_id(self, name, service_type=None):
"""Find the service_id of a given service"""
services = [s._info for s in self.api.services.list()]
for s in services:
if service_type:
if (name.lower() == s['name'].lower() and
service_type == s['type']):
return s['id']
else:
if name.lower() == s['name'].lower():
return s['id']
def resolve_service_id_by_type(self, type):
"""Find the service_id of a given service"""
services = [s._info for s in self.api.services.list()]
for s in services:
if type == s['type']:
return s['id']
class KeystoneManager2(KeystoneManager):
def __init__(self, endpoint, token):
self.api_version = 2
self.api = client.Client(endpoint=endpoint, token=token)
def resolve_user_id(self, name, user_domain=None):
"""Find the user_id of a given user"""
users = [u._info for u in self.api.users.list()]
for u in users:
if name.lower() == u['name'].lower():
return u['id']
def create_endpoints(self, region, service_id, publicurl, adminurl,
internalurl):
self.api.endpoints.create(region=region, service_id=service_id,
publicurl=publicurl, adminurl=adminurl,
internalurl=internalurl)
def tenants_list(self):
return self.api.tenants.list()
def resolve_tenant_id(self, name, domain=None):
"""Find the tenant_id of a given tenant"""
tenants = [t._info for t in self.api.tenants.list()]
for t in tenants:
if name.lower() == t['name'].lower():
return t['id']
def create_tenant(self, tenant_name, description, domain='default'):
self.api.tenants.create(tenant_name=tenant_name,
description=description)
def delete_tenant(self, tenant_id):
self.api.tenants.delete(tenant_id)
def create_user(self, name, password, email, tenant_id=None,
domain_id=None):
self.api.users.create(name=name,
password=password,
email=email,
tenant_id=tenant_id)
def update_password(self, user, password):
self.api.users.update_password(user=user, password=password)
def roles_for_user(self, user_id, tenant_id=None, domain_id=None):
return self.api.roles.roles_for_user(user_id, tenant_id)
def add_user_role(self, user, role, tenant, domain):
self.api.roles.add_user_role(user=user, role=role, tenant=tenant)
class KeystoneManager3(KeystoneManager):
def __init__(self, endpoint, token):
self.api_version = 3
keystone_auth_v3 = token_endpoint.Token(endpoint=endpoint, token=token)
keystone_session_v3 = session.Session(auth=keystone_auth_v3)
self.api = keystoneclient_v3.Client(session=keystone_session_v3)
def resolve_tenant_id(self, name, domain=None):
"""Find the tenant_id of a given tenant"""
if domain:
domain_id = self.resolve_domain_id(domain)
tenants = [t._info for t in self.api.projects.list()]
for t in tenants:
if name.lower() == t['name'].lower() and \
(domain is None or t['domain_id'] == domain_id):
return t['id']
def resolve_domain_id(self, name):
"""Find the domain_id of a given domain"""
domains = [d._info for d in self.api.domains.list()]
for d in domains:
if name.lower() == d['name'].lower():
return d['id']
def resolve_user_id(self, name, user_domain=None):
"""Find the user_id of a given user"""
domain_id = None
if user_domain:
domain_id = self.resolve_domain_id(user_domain)
for user in self.api.users.list(domain=domain_id):
if name.lower() == user.name.lower():
if user_domain:
if domain_id == user.domain_id:
return user.id
else:
return user.id
def create_endpoints(self, region, service_id, publicurl, adminurl,
internalurl):
self.api.endpoints.create(service_id, publicurl, interface='public',
region=region)
self.api.endpoints.create(service_id, adminurl, interface='admin',
region=region)
self.api.endpoints.create(service_id, internalurl,
interface='internal', region=region)
def tenants_list(self):
return self.api.projects.list()
def create_domain(self, domain_name, description):
self.api.domains.create(domain_name, description=description)
def create_tenant(self, tenant_name, description, domain='default'):
domain_id = self.resolve_domain_id(domain)
self.api.projects.create(tenant_name, domain_id,
description=description)
def delete_tenant(self, tenant_id):
self.api.projects.delete(tenant_id)
def create_user(self, name, password, email, tenant_id=None,
domain_id=None):
if not domain_id:
domain_id = self.resolve_domain_id('default')
if tenant_id:
self.api.users.create(name,
domain=domain_id,
password=password,
email=email,
project=tenant_id)
else:
self.api.users.create(name,
domain=domain_id,
password=password,
email=email)
def update_password(self, user, password):
self.api.users.update(user, password=password)
def roles_for_user(self, user_id, tenant_id=None, domain_id=None):
# Specify either a domain or project, not both
if domain_id:
return self.api.roles.list(user_id, domain=domain_id)
else:
return self.api.roles.list(user_id, project=tenant_id)
def add_user_role(self, user, role, tenant, domain):
# Specify either a domain or project, not both
if domain:
self.api.roles.grant(role, user=user, domain=domain)
if tenant:
self.api.roles.grant(role, user=user, project=tenant)
def find_endpoint_v3(self, interface, service_id, region):
found_eps = []
for ep in self.api.endpoints.list():
if ep.service_id == service_id and ep.region == region and \
ep.interface == interface:
found_eps.append(ep)
return found_eps
def delete_old_endpoint_v3(self, interface, service_id, region, url):
eps = self.find_endpoint_v3(interface, service_id, region)
for ep in eps:
if getattr(ep, 'url') != url:
self.api.endpoints.delete(ep.id)
return True
return False
| konono/equlipse | openstack-install/charm/trusty/charm-keystone/hooks/manager.py | Python | mit | 11,385 |
#!/usr/bin/env python3
# Copyright (c) 2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test p2p addr-fetch connections
"""
import time
from test_framework.messages import msg_addr, CAddress, NODE_NETWORK, NODE_WITNESS
from test_framework.p2p import P2PInterface, p2p_lock
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
ADDR = CAddress()
ADDR.time = int(time.time())
ADDR.nServices = NODE_NETWORK | NODE_WITNESS
ADDR.ip = "192.0.0.8"
ADDR.port = 18444
class P2PAddrFetch(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
node = self.nodes[0]
self.log.info("Connect to an addr-fetch peer")
peer = node.add_outbound_p2p_connection(P2PInterface(), p2p_idx=0, connection_type="addr-fetch")
info = node.getpeerinfo()
assert_equal(len(info), 1)
assert_equal(info[0]['connection_type'], 'addr-fetch')
self.log.info("Check that we send getaddr but don't try to sync headers with the addr-fetch peer")
peer.sync_send_with_ping()
with p2p_lock:
assert peer.message_count['getaddr'] == 1
assert peer.message_count['getheaders'] == 0
self.log.info("Check that answering the getaddr with a single address does not lead to disconnect")
# This prevents disconnecting on self-announcements
msg = msg_addr()
msg.addrs = [ADDR]
peer.send_and_ping(msg)
assert_equal(len(node.getpeerinfo()), 1)
self.log.info("Check that answering with larger addr messages leads to disconnect")
msg.addrs = [ADDR] * 2
peer.send_message(msg)
peer.wait_for_disconnect(timeout=5)
self.log.info("Check timeout for addr-fetch peer that does not send addrs")
peer = node.add_outbound_p2p_connection(P2PInterface(), p2p_idx=1, connection_type="addr-fetch")
node.setmocktime(int(time.time()) + 301) # Timeout: 5 minutes
peer.wait_for_disconnect(timeout=5)
if __name__ == '__main__':
P2PAddrFetch().main()
| JeremyRubin/bitcoin | test/functional/p2p_addrfetch.py | Python | mit | 2,266 |
# -*- coding: utf-8 -*-
# Copyright (c) 2006-2010 Tampere University of Technology
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import httplib
import urllib
import socket
class ToolProtocolHTTP(object):
"""
    HTTP/HTTPS client for the TEMA MBT protocol. Talks to the TEMA test engine.
"""
# is client connected to the server
isConnected = False
def __init__(self):
self.host = "localhost"
self.port = 80
self.php_file = "temagui_http_proxy.php"
socket.setdefaulttimeout(1800)
def __del__(self):
if self.isConnected:
http_params = urllib.urlencode({"User" : self.username, "Message" : 'CLOSE', "Parameter" : 'Empty'})
http_data = self.__requestreply(http_params)
def __requestreply(self,message ):
""" One http(s) request/reply.
Message: Message to send string.
Returns: Reply string.
"""
http_data = ''
try:
http_connection = None
if self.protocol == "HTTP":
http_connection = httplib.HTTPConnection(self.host, self.port)
elif self.protocol == "HTTPS":
http_connection = httplib.HTTPSConnection(self.host, self.port)
else:
return ''
http_connection.connect()
http_connection.request("POST", self.php_file, message , self.http_headers)
http_response = http_connection.getresponse()
http_data = http_response.read()
http_response.close()
http_connection.close()
except Exception, e:
http_data = ''
return http_data
def init(self, host, path, port, username, protocol):
""" Initialises connection. Sends HELO.
host: Server hostname.
path: path to http proxy in server.
port: port
username: wwwgui username
protocol: http/https
returns: Reply to ACK. On error returns ''
"""
self.http_headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
self.host = host
self.php_file = "/".join(["",path,"temagui_http_proxy.php"])
self.port = port
self.username = username
self.protocol = protocol.upper()
try:
# SEND HELO
http_params = urllib.urlencode({"User" : username, "Message" : 'HELO', "Parameter" : 'Empty'})
http_data = self.__requestreply(http_params)
self.isConnected = True
lines = http_data.splitlines()
if lines != []:
message = lines.pop()
if message == "CLOSE":
http_data = ''
self.isConnected = False
except Exception, e:
self.isConnected = False
return ''
return http_data
def getKeyword(self):
""" Gets keyword from testserver.
Sends GET to testserver and waits for reply.
Returns: Reply to GET. On error return ''
"""
http_data = ''
try:
http_params = urllib.urlencode({"User" : self.username, "Message" : 'GET', "Parameter" : 'Empty'})
http_data = self.__requestreply(http_params)
lines = http_data.splitlines()
if lines != []:
message = lines.pop()
if message == "CLOSE":
self.isConnected = False
return 'ERROR'
if message == 'ERR':
# TODO: don't send ack.
http_data = self.__requestreply(http_params)
http_params = urllib.urlencode({"User" : self.username, "Message" : 'ACK', "Parameter" : 'Empty'})
http_data = self.__requestreply(http_params)
self.isConnected = False
return 'ERROR'
if not http_data.startswith("ACK"):
print http_data
return "ERROR"
else:
#http_data = http_data.partition("ACK")[2].strip()
http_data = http_data.split("ACK")[1].strip()
if http_data == '' or http_data == None:
http_data = ''
self.isConnected = False
except Exception, e:
self.isConnected = False
return http_data
def putResult(self, result):
""" Puts result to testserver.
result: True/False
returns: Reply message to PUT
"""
try:
if result:
http_params = urllib.urlencode({"User" : self.username, "Message" : 'PUT', "Parameter" : 'true'})
else:
http_params = urllib.urlencode({"User" : self.username, "Message" : 'PUT', "Parameter" : 'false'})
except Exception, e:
self.isConnected = False
return ''
try:
http_data = self.__requestreply(http_params)
lines = http_data.splitlines()
if lines != []:
message = lines.pop()
if message == "CLOSE":
self.isConnected = False
return ''
if http_data == '':
self.isConnected = False
except Exception, e:
self.isConnected = False
http_data = ''
return http_data
def log(self, msg):
""" Sends log message to testserver
returns: Reply to message.
"""
http_data = ''
try:
http_params = urllib.urlencode({"User" : self.username, "Message" : 'LOG', "Parameter" : msg })
http_data = self.__requestreply(http_params)
lines = http_data.splitlines()
if lines != []:
message = lines.pop()
if message == "CLOSE":
self.isConnected = False
return ''
if http_data == '':
self.isConnected = False
except Exception, e:
self.isConnected = False
http_data = ''
return http_data
def bye(self):
""" Sends message BYE to testserver. """
http_data = ''
try:
http_params = urllib.urlencode({"User" : self.username, "Message" : 'BYE', "Parameter" : 'None'})
http_data = self.__requestreply(http_params)
self.isConnected = False
except Exception, e:
self.isConnected = False
            return ''
        return http_data
def hasConnection(self):
return self.isConnected
if __name__ == "__main__":
    # Simple manual smoke test; the connection values below are placeholders.
    c = ToolProtocolHTTP()
    print "init -> " + c.init("localhost", "tema", 80, "testuser", "http")
    print "getKeyword -> " + c.getKeyword()
    print "putResult -> " + c.putResult(True)
    print "getKeyword -> " + c.getKeyword()
    print "putResult -> " + c.putResult(False)
    print "bye -> " + c.bye()
| tema-mbt/tema-adapterlib | adapterlib/ToolProtocolHTTP.py | Python | mit | 8,508 |
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import re
import os.path
from io import open
from setuptools import find_packages, setup
try:
from azure_bdist_wheel import cmdclass
except ImportError:
from distutils import log as logger
logger.warn("Wheel is not available, disabling bdist_wheel hook")
cmdclass = {}
# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-cognitiveservices-vision-face"
PACKAGE_PPRINT_NAME = "Cognitive Services Face"
# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')
# azure v0.x is not compatible with this package
# azure v0.x used to have a __version__ attribute (newer versions don't)
try:
import azure
try:
ver = azure.__version__
raise Exception(
'This package is incompatible with azure=={}. '.format(ver) +
'Uninstall it with "pip uninstall azure".'
)
except AttributeError:
pass
except ImportError:
pass
# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, 'version.py'), 'r') as fd:
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.rst', encoding='utf-8') as f:
readme = f.read()
with open('HISTORY.rst', encoding='utf-8') as f:
history = f.read()
setup(
name=PACKAGE_NAME,
version=version,
description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
long_description=readme + '\n\n' + history,
license='MIT License',
author='Microsoft Corporation',
author_email='[email protected]',
url='https://github.com/Azure/azure-sdk-for-python',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: MIT License',
],
zip_safe=False,
packages=find_packages(exclude=["tests"]),
install_requires=[
'msrest>=0.4.24,<2.0.0',
'azure-common~=1.1',
],
cmdclass=cmdclass
)
| lmazuel/azure-sdk-for-python | azure-cognitiveservices-vision-face/setup.py | Python | mit | 2,837 |
#coding: utf8
import unittest
#from conf import NginxConfig
from blocks import KeyValueOption, KeyOption, Block
'''
s = """server {
nameserver 123;
}"""
s = """server 123;"""
a = NginxConfig()
a.load(s)
print(a.server)
#print(a.server.nameserver)
'''
class NgKVB(Block):
kv = KeyValueOption('kv_value')
class Test(unittest.TestCase):
def test_base(self):
"""
Base tests of Block, KeyValueOption, KeyOption classes functionality
"""
a1 = NgKVB()
a2 = NgKVB()
self.assertEqual(str(a1.kv), 'kv_value')
self.assertEqual(str(a2.kv), 'kv_value')
self.assertEqual('kv' in a1._options, True)
self.assertEqual('kv' in a2._options, True)
self.assertEqual(type(a1.kv), KeyValueOption)
self.assertEqual(type(a2.kv), KeyValueOption)
a1.kv = 'kv_another_value'
self.assertEqual(type(a1.kv), KeyValueOption)
self.assertEqual(id(a1.kv) == id(KeyValueOption), False)
self.assertEqual(id(a2.kv) == id(KeyValueOption), False)
self.assertEqual(str(a1.kv), 'kv_another_value')
self.assertEqual(str(a2.kv), 'kv_value')
def test_block_attribute_inheritance(self):
"""
Tests that base class Block does not take any of child class attributes
"""
a1 = NgKVB()
self.assertEqual(hasattr(Block, '_options'), False)
self.assertEqual(a1._options, ['kv'])
self.assertEqual(hasattr(a1, 'kv'), True)
def test_block_item_assigment(self):
a1 = NgKVB()
a1['kv'] = KeyValueOption('kv_value')
self.assertEqual(a1._options, ['kv'])
self.assertEqual(type(a1['kv']), KeyValueOption)
def test_kv_option(self):
"""
Tests key-value option
"""
kv = KeyValueOption('value')
self.assertEqual(kv.render('kv_name'), '\nkv_name value;')
self.assertEqual(kv.render('kv_name', indent_level=1), '\n kv_name value;')
self.assertEqual(kv.render('kv_name', indent_level=2, indent_char='\t', indent=1), '\n\t\tkv_name value;')
def test_k_option(self):
"""
Tests key option
"""
k = KeyOption()
self.assertEqual(k.render('name'), '\nname;')
self.assertEqual(k.render('name', indent_level=1), '\n name;')
self.assertEqual(k.render('name', indent_level=2, indent_char='\t', indent=1), '\n\t\tname;')
def test_kv_block(self):
"""
Tests key-value option in block
"""
kvb = Block()
kvb.kv = KeyValueOption('value')
self.assertEqual(kvb.render('kbv_name'), '\nkbv_name {\n kv value;\n}')
def test_kv_block_initial(self):
"""
Tests initial values in key-value block and deletions of attributes
"""
kvb = NgKVB()
self.assertEqual(str(kvb.kv), 'kv_value')
self.assertEqual(kvb.kv.render('kv'), '\nkv kv_value;')
self.assertEqual(kvb.render('kvb_name'), '\nkvb_name {\n kv kv_value;\n}')
kvb.kv = 'kv_another_value'
self.assertEqual(type(kvb.kv), KeyValueOption)
self.assertEqual(str(kvb.kv), 'kv_another_value')
self.assertEqual(kvb.kv.render('kv'), '\nkv kv_another_value;')
self.assertEqual(kvb.render('kvb_name'), '\nkvb_name {\n kv kv_another_value;\n}')
del kvb.kv
self.assertEqual(hasattr(kvb, 'kv'), False)
self.assertEqual('kv' in kvb._options, False)
self.assertEqual(kvb.render('kvb_name'), '\nkvb_name {\n}')
kvb2 = NgKVB()
self.assertEqual(str(kvb2.kv), 'kv_value')
self.assertEqual(kvb2.kv.render('kv'), '\nkv kv_value;')
self.assertEqual(kvb2.render('kvb_name'), '\nkvb_name {\n kv kv_value;\n}')
if __name__ == "__main__":
unittest.main()
| FeroxTL/pynginxconfig | test.py | Python | mit | 3,830 |
#!/usr/bin/env python3
import json
import os
bundleFilesDir = 'tmp/bundleSizeDownloads'
yarnLockFile = 'yarn.lock'
packagesFile = 'package.json'
def isDividerLine(line):
# At least 80 chars, all slashes except the last (which is newline). The number is inconsistent for some reason.
return (len(line)>=80
and line.endswith("\n")
and all([c=='/' for c in line[0:-1]]))
def isSpacerLine(line):
# At least 80 chars, starting with "//", ending with "//\n", otherwise all spaces
return (len(line)>=80
and line.startswith("//") and line.endswith("//\n")
and all([c==' ' for c in line[2:-3]]))
assert isDividerLine("////////////////////////////////////////////////////////////////////////////////////\n")
assert isSpacerLine("// //\n")
def readFileLines(filename):
f = open(filename, 'r')
lines = f.readlines()
f.close()
return lines
def bundleFilesToSizeMap():
sizesByFilename = {}
for filename in os.listdir(bundleFilesDir):
lines = readFileLines('%s/%s' % (bundleFilesDir, filename))
sizesByFilename = {**unpackFile(lines), **sizesByFilename}
return sizesByFilename
def unpackFile(lines):
sizes = {}
currentFileStart = None
currentFileName = None
for i in range(0,len(lines)):
if i+4<len(lines) and isDividerLine(lines[i]) and isSpacerLine(lines[i+1]) and isSpacerLine(lines[i+3]) and isDividerLine(lines[i+4]):
if currentFileName:
fileContents = '\n'.join(lines[currentFileStart:i])
sizes[currentFileName] = len(fileContents)
currentFileStart = i+5
currentFileName = lines[i+2].strip()[2:-2].strip()
if currentFileName:
fileContents = '\n'.join(lines[currentFileStart:i])
sizes[currentFileName] = len(fileContents)
return sizes
def ancestorPaths(filename):
pathComponents = filename.split('/')
return ['.']+['/'.join(pathComponents[0:i]) for i in range(1,len(pathComponents))]
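# Example: ancestorPaths('a/b/c.js') == ['.', 'a', 'a/b'] -- every directory
# that the file's size should be attributed to, excluding the file itself.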
def sumSizesInDirectories(sizesByFilename):
sizesByDirectory = {}
for filename in sizesByFilename:
for path in ancestorPaths(filename):
sizesByDirectory[path] = sizesByDirectory[path]+sizesByFilename[filename] if path in sizesByDirectory else sizesByFilename[filename]
return sizesByDirectory
# Given the name of a yarn lockfile (yarn.lock), produce a dictionary from
# package -> array of dependencies of that package.
# The idea of this is to be able to identify when a package is depended on by
# only one other package, so that we can attribute the size of the depended-on
# package to the package that imported it.
#
#def yarnLockToDependencyGraph(lockfileName):
# dependenciesByPackage = {}
# lockfileLines = readFileLines(lockfileName)
#
# def backtrackToPackageName(lines, i):
# #TODO
# pass
# def forwardOverDependencies(lines, i):
# #TODO
# pass
#
# for i in range(0,len(lines)):
# if lockfileLines[0].strip()=='dependencies:':
# packageName = backtrackToPackageName(lines, i)
# dependencies = forwardOverDependencies(lines, i)
# if packageName in dependencies:
# dependenciesByPackage[packageName] = {**dependencies[packageName], **dependencies}
# else:
# dependenciesByPackage[packageName] = dependencies
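# A minimal working sketch of the idea described above, assuming the yarn.lock
# v1 text format: an unindented entry header ending in ':', an indented
# 'dependencies:' line, then doubly-indented '<name> "<range>"' lines. The
# function name and the format assumptions are ours, not part of this script.
def yarnLockToDependencyGraphSketch(lockfileName):
    dependenciesByPackage = {}
    currentPackage = None
    inDependencies = False
    for line in readFileLines(lockfileName):
        if line.strip() == '' or line.lstrip().startswith('#'):
            continue  # skip blanks and comments
        indent = len(line) - len(line.lstrip())
        if indent == 0 and line.rstrip().endswith(':'):
            # Entry header like: lodash@^4.17.0, lodash@^4.17.4:
            rawName = line.rstrip()[:-1].split(',')[0].strip().strip('"')
            currentPackage = rawName.rsplit('@', 1)[0]  # drop version range
            dependenciesByPackage.setdefault(currentPackage, set())
            inDependencies = False
        elif indent == 2 and line.strip() == 'dependencies:':
            inDependencies = True
        elif indent == 2:
            inDependencies = False  # version/resolved/integrity lines
        elif indent >= 4 and inDependencies and currentPackage:
            depName = line.strip().split(' ')[0].strip('"')
            dependenciesByPackage[currentPackage].add(depName)
    return dependenciesByPackage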
def packagesFileToDependencyRoots(packagesFileName):
f = open(packagesFileName, 'r')
packagesJson = json.loads(f.read())
f.close()
    return packagesJson['dependencies']
def rightalign(num, width):
return (' ' * (width-len(str(num)))) + str(num)
#def getAdjustedPackageSizes(sizesByDirectory, dependencyRoots, dependencyGraph):
# #TODO
# return {}
sizesByFilename = bundleFilesToSizeMap()
sizesByDirectory = sumSizesInDirectories(sizesByFilename)
for path in sorted(list(sizesByDirectory.keys())):
print("%s %s" % (rightalign(sizesByDirectory[path], 10), path))
| Discordius/Telescope | scripts/analyzeBundle.py | Python | mit | 4,048 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Building()
result.template = "object/building/poi/shared_tatooine_evil_nomad_small2.iff"
result.attribute_template_id = -1
result.stfName("poi_n","base_poi_building")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | anhstudios/swganh | data/scripts/templates/object/building/poi/shared_tatooine_evil_nomad_small2.py | Python | mit | 457 |
# This file is generated by /tmp/pip-kUGBJh-build/-c
# It contains system_info results at the time of building this package.
__all__ = ["get_info","show"]
lapack_opt_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]}
blas_opt_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]}
blis_info={}
openblas_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]}
openblas_lapack_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]}
lapack_mkl_info={}
blas_mkl_info={}
def get_info(name):
g = globals()
return g.get(name, g.get(name + "_info", {}))
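# Example: get_info('lapack_opt') and get_info('lapack_opt_info') both return
# the lapack_opt_info dict above; unknown names return {}.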
def show():
for name,info_dict in globals().items():
if name[0] == "_" or type(info_dict) is not type({}): continue
print(name + ":")
if not info_dict:
print(" NOT AVAILABLE")
for k,v in info_dict.items():
v = str(v)
if k == "sources" and len(v) > 200:
v = v[:60] + " ...\n... " + v[-60:]
print(" %s = %s" % (k,v))
| ryfeus/lambda-packs | Shapely_numpy/source/numpy/distutils/__config__.py | Python | mit | 1,319 |
class IDObject():
"""
Base class for all objects having unique id within the application
"""
def __init__(self, objectId):
"""
Constructor method for building IDObject
objectId - the unique objectId of the object in the application
"""
self._objectId = objectId
def getId(self):
"""
Return the object's unique id
"""
return self._objectId
def testIDObject():
obj = IDObject(133)
assert obj.getId() == 133 | rusucosmin/courses | ubb/fop/2015.Seminar.09/domain/IDObject.py | Python | mit | 514 |
'''Support module for translating strings.
This module provides several functions
for definitions, keys, and transforms.'''
__version__ = 1.3
################################################################################
import random
def definition(name=None):
'Returns a valid definition.'
random.seed(name)
definition, list_one, list_two = str(), range(256), range(256)
for index in range(256):
index_one, index_two = random.randrange(256 - index), random.randrange(256 - index)
definition += chr(list_one[index_one]) + chr(list_two[index_two])
del list_one[index_one], list_two[index_two]
return definition
def key(definition, select):
'Returns a valid key.'
key = range(256)
for index in range(256):
key[ord(definition[index * 2 + int(bool(select))])] = definition[index * 2 + int(not bool(select))]
return ''.join(key)
def transform(key, string):
'Returns a valid transformation.'
return string.translate(key)
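# Round-trip example: key(d, 0) and key(d, 1) are inverse translation tables,
# so transforming with one and then the other recovers the original string:
#   d = definition('seed')
#   assert transform(key(d, 1), transform(key(d, 0), 'secret')) == 'secret'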
################################################################################
if __name__ == '__main__':
import sys
print 'Content-Type: text/plain'
print
print file(sys.argv[0]).read()
| ActiveState/code | recipes/Python/496858_zcryptpy/recipe-496858.py | Python | mit | 1,213 |
#!/usr/bin/env python
from __future__ import print_function
import argparse
import glob
import os
import platform
import shutil
import subprocess
import sys
from lib.util import get_electron_branding, rm_rf, scoped_cwd
PROJECT_NAME = get_electron_branding()['project_name']
PRODUCT_NAME = get_electron_branding()['product_name']
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SNAPSHOT_SOURCE = os.path.join(SOURCE_ROOT, 'spec', 'fixtures', 'testsnap.js')
def main():
args = parse_args()
source_root = os.path.abspath(args.source_root)
initial_app_path = os.path.join(source_root, args.build_dir)
app_path = create_app_copy(initial_app_path)
returncode = 0
try:
with scoped_cwd(app_path):
if args.snapshot_files_dir is None:
with open(os.path.join(app_path, 'mksnapshot_args')) as f:
mkargs = f.read().splitlines()
subprocess.check_call(mkargs + [ SNAPSHOT_SOURCE ], cwd=app_path)
print('ok mksnapshot successfully created snapshot_blob.bin.')
context_snapshot = 'v8_context_snapshot.bin'
if platform.system() == 'Darwin':
if os.environ.get('TARGET_ARCH') == 'arm64':
context_snapshot = 'v8_context_snapshot.arm64.bin'
else:
context_snapshot = 'v8_context_snapshot.x86_64.bin'
context_snapshot_path = os.path.join(app_path, context_snapshot)
gen_binary = get_binary_path('v8_context_snapshot_generator', \
app_path)
genargs = [ gen_binary, \
'--output_file={0}'.format(context_snapshot_path) ]
subprocess.check_call(genargs)
print('ok v8_context_snapshot_generator successfully created ' \
+ context_snapshot)
if args.create_snapshot_only:
return 0
else:
gen_bin_path = os.path.join(args.snapshot_files_dir, '*.bin')
generated_bin_files = glob.glob(gen_bin_path)
for bin_file in generated_bin_files:
shutil.copy2(bin_file, app_path)
test_path = os.path.join(SOURCE_ROOT, 'spec', 'fixtures', \
'snapshot-items-available')
if sys.platform == 'darwin':
bin_files = glob.glob(os.path.join(app_path, '*.bin'))
app_dir = os.path.join(app_path, '{0}.app'.format(PRODUCT_NAME))
electron = os.path.join(app_dir, 'Contents', 'MacOS', PRODUCT_NAME)
bin_out_path = os.path.join(app_dir, 'Contents', 'Frameworks',
'{0} Framework.framework'.format(PROJECT_NAME),
'Resources')
for bin_file in bin_files:
shutil.copy2(bin_file, bin_out_path)
elif sys.platform == 'win32':
electron = os.path.join(app_path, '{0}.exe'.format(PROJECT_NAME))
else:
electron = os.path.join(app_path, PROJECT_NAME)
subprocess.check_call([electron, test_path])
print('ok successfully used custom snapshot.')
except subprocess.CalledProcessError as e:
print('not ok an error was encountered while testing mksnapshot.')
print(e)
returncode = e.returncode
except KeyboardInterrupt:
print('Other error')
returncode = 0
print('Returning with error code: {0}'.format(returncode))
return returncode
# Create copy of app to install custom snapshot
def create_app_copy(initial_app_path):
print('Creating copy of app for testing')
app_path = os.path.join(os.path.dirname(initial_app_path),
os.path.basename(initial_app_path)
+ '-mksnapshot-test')
rm_rf(app_path)
shutil.copytree(initial_app_path, app_path, symlinks=True)
return app_path
def get_binary_path(binary_name, root_path):
if sys.platform == 'win32':
binary_path = os.path.join(root_path, '{0}.exe'.format(binary_name))
else:
binary_path = os.path.join(root_path, binary_name)
return binary_path
def parse_args():
parser = argparse.ArgumentParser(description='Test mksnapshot')
parser.add_argument('-b', '--build-dir',
help='Path to an Electron build folder. \
Relative to the --source-root.',
default=None,
required=True)
parser.add_argument('--create-snapshot-only',
help='Just create snapshot files, but do not run test',
action='store_true')
parser.add_argument('--snapshot-files-dir',
help='Directory containing snapshot files to use \
for testing',
default=None,
required=False)
parser.add_argument('--source-root',
default=SOURCE_ROOT,
required=False)
return parser.parse_args()
if __name__ == '__main__':
sys.exit(main())
| electron/electron | script/verify-mksnapshot.py | Python | mit | 4,821 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Calendar.slug'
db.add_column('schedule_calendar', 'slug',
self.gf('django.db.models.fields.SlugField')(max_length=255, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Calendar.slug'
db.delete_column('schedule_calendar', 'slug')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'schedule.calendar': {
'Meta': {'ordering': "('-modified', '-created')", 'object_name': 'Calendar'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['schedule'] | Bionetbook/bionetbook | bnbapp/schedule/migrations/0003_auto__add_field_calendar_slug.py | Python | mit | 4,571 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# dodotable documentation build configuration file, created by
# sphinx-quickstart on Thu Sep 17 11:47:28 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from dodotable import __version__, __version_info__
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'dodotable'
copyright = '2016, Spoqa, Inc'
author = 'Kang Hyojun'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '.'.join(str(v) for v in __version_info__[:2])
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'dodotabledoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'dodotable.tex', 'dodotable Documentation',
'Kang Hyojun', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'dodotable', 'dodotable Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'dodotable', 'dodotable Documentation',
author, 'dodotable', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
'sqlalchemy': ('http://docs.sqlalchemy.org/en/latest/', None),
'flask': ('http://flask.pocoo.org/docs/', None)
}
| heejongahn/dodotable | docs/conf.py | Python | mit | 9,622 |
"""
A library of useful helper classes to the saxlib classes, for the
convenience of application and driver writers.
$Id: saxutils.py,v 1.19 2001/03/20 07:19:46 loewis Exp $
"""
import types, sys, urllib, urlparse, os, string
import handler, _exceptions, xmlreader
try:
_StringTypes = [types.StringType, types.UnicodeType]
except AttributeError: # 1.5 compatibility:UnicodeType not defined
_StringTypes = [types.StringType]
def escape(data, entities={}):
"""Escape &, <, and > in a string of data.
You can escape other strings of data by passing a dictionary as
the optional entities parameter. The keys and values must all be
strings; each key will be replaced with its corresponding value.
"""
data = string.replace(data, "&", "&")
data = string.replace(data, "<", "<")
data = string.replace(data, ">", ">")
for chars, entity in entities.items():
data = string.replace(data, chars, entity)
return data
# --- DefaultHandler
class DefaultHandler(handler.EntityResolver, handler.DTDHandler,
handler.ContentHandler, handler.ErrorHandler):
"""Default base class for SAX2 event handlers. Implements empty
methods for all callback methods, which can be overridden by
application implementors. Replaces the deprecated SAX1 HandlerBase
class."""
# --- Location
class Location:
"""Represents a location in an XML entity. Initialized by being passed
a locator, from which it reads off the current location, which is then
stored internally."""
def __init__(self, locator):
self.__col = locator.getColumnNumber()
self.__line = locator.getLineNumber()
self.__pubid = locator.getPublicId()
self.__sysid = locator.getSystemId()
def getColumnNumber(self):
return self.__col
def getLineNumber(self):
return self.__line
def getPublicId(self):
return self.__pubid
def getSystemId(self):
return self.__sysid
# --- ErrorPrinter
class ErrorPrinter:
"A simple class that just prints error messages to standard out."
def __init__(self, level=0, outfile=sys.stderr):
self._level = level
self._outfile = outfile
def warning(self, exception):
if self._level <= 0:
self._outfile.write("WARNING in %s: %s\n" %
(self.__getpos(exception),
exception.getMessage()))
def error(self, exception):
if self._level <= 1:
self._outfile.write("ERROR in %s: %s\n" %
(self.__getpos(exception),
exception.getMessage()))
def fatalError(self, exception):
if self._level <= 2:
self._outfile.write("FATAL ERROR in %s: %s\n" %
(self.__getpos(exception),
exception.getMessage()))
def __getpos(self, exception):
if isinstance(exception, _exceptions.SAXParseException):
return "%s:%s:%s" % (exception.getSystemId(),
exception.getLineNumber(),
exception.getColumnNumber())
else:
return "<unknown>"
# --- ErrorRaiser
class ErrorRaiser:
"A simple class that just raises the exceptions it is passed."
def __init__(self, level = 0):
self._level = level
def error(self, exception):
if self._level <= 1:
raise exception
def fatalError(self, exception):
if self._level <= 2:
raise exception
def warning(self, exception):
if self._level <= 0:
raise exception
# --- AttributesImpl now lives in xmlreader
from xmlreader import AttributesImpl
# --- XMLGenerator is the SAX2 ContentHandler for writing back XML
try:
import codecs
def _outputwrapper(stream,encoding):
writerclass = codecs.lookup(encoding)[3]
return writerclass(stream)
except ImportError: # 1.5 compatibility: fall back to do-nothing
def _outputwrapper(stream,encoding):
return stream
class XMLGenerator(handler.ContentHandler):
def __init__(self, out=None, encoding="iso-8859-1"):
if out is None:
import sys
out = sys.stdout
handler.ContentHandler.__init__(self)
self._out = _outputwrapper(out,encoding)
self._ns_contexts = [{}] # contains uri -> prefix dicts
self._current_context = self._ns_contexts[-1]
self._undeclared_ns_maps = []
self._encoding = encoding
# ContentHandler methods
def startDocument(self):
self._out.write('<?xml version="1.0" encoding="%s"?>\n' %
self._encoding)
def startPrefixMapping(self, prefix, uri):
self._ns_contexts.append(self._current_context.copy())
self._current_context[uri] = prefix
self._undeclared_ns_maps.append((prefix, uri))
def endPrefixMapping(self, prefix):
self._current_context = self._ns_contexts[-1]
del self._ns_contexts[-1]
def startElement(self, name, attrs):
self._out.write('<' + name)
for (name, value) in attrs.items():
self._out.write(' %s="%s"' % (name, escape(value)))
self._out.write('>')
def endElement(self, name):
self._out.write('</%s>' % name)
def startElementNS(self, name, qname, attrs):
if name[0] is None:
name = name[1]
elif self._current_context[name[0]] is None:
# default namespace
name = name[1]
else:
name = self._current_context[name[0]] + ":" + name[1]
self._out.write('<' + name)
for k,v in self._undeclared_ns_maps:
if k is None:
self._out.write(' xmlns="%s"' % v)
else:
self._out.write(' xmlns:%s="%s"' % (k,v))
self._undeclared_ns_maps = []
for (name, value) in attrs.items():
name = self._current_context[name[0]] + ":" + name[1]
self._out.write(' %s="%s"' % (name, escape(value)))
self._out.write('>')
def endElementNS(self, name, qname):
# XXX: if qname is not None, we better use it.
# Python 2.0b2 requires us to use the recorded prefix for
# name[0], though
if name[0] is None:
qname = name[1]
elif self._current_context[name[0]] is None:
qname = name[1]
else:
qname = self._current_context[name[0]] + ":" + name[1]
self._out.write('</%s>' % qname)
def characters(self, content):
self._out.write(escape(content))
def ignorableWhitespace(self, content):
self._out.write(content)
def processingInstruction(self, target, data):
self._out.write('<?%s %s?>' % (target, data))
# --- ContentGenerator is the SAX1 DocumentHandler for writing back XML
class ContentGenerator(XMLGenerator):
def characters(self, str, start, end):
# In SAX1, characters receives start and end; in SAX2, it receives
# a string. For plain strings, we may want to use a buffer object.
return XMLGenerator.characters(self, str[start:start+end])
# --- XMLFilterImpl
class XMLFilterBase(xmlreader.XMLReader):
"""This class is designed to sit between an XMLReader and the
client application's event handlers. By default, it does nothing
but pass requests up to the reader and events on to the handlers
unmodified, but subclasses can override specific methods to modify
the event stream or the configuration requests as they pass
through."""
# ErrorHandler methods
def error(self, exception):
self._err_handler.error(exception)
def fatalError(self, exception):
self._err_handler.fatalError(exception)
def warning(self, exception):
self._err_handler.warning(exception)
# ContentHandler methods
def setDocumentLocator(self, locator):
self._cont_handler.setDocumentLocator(locator)
def startDocument(self):
self._cont_handler.startDocument()
def endDocument(self):
self._cont_handler.endDocument()
def startPrefixMapping(self, prefix, uri):
self._cont_handler.startPrefixMapping(prefix, uri)
def endPrefixMapping(self, prefix):
self._cont_handler.endPrefixMapping(prefix)
def startElement(self, name, attrs):
self._cont_handler.startElement(name, attrs)
def endElement(self, name):
self._cont_handler.endElement(name)
def startElementNS(self, name, qname, attrs):
self._cont_handler.startElementNS(name, qname, attrs)
def endElementNS(self, name, qname):
self._cont_handler.endElementNS(name, qname)
def characters(self, content):
self._cont_handler.characters(content)
def ignorableWhitespace(self, chars):
self._cont_handler.ignorableWhitespace(chars)
def processingInstruction(self, target, data):
self._cont_handler.processingInstruction(target, data)
def skippedEntity(self, name):
self._cont_handler.skippedEntity(name)
# DTDHandler methods
def notationDecl(self, name, publicId, systemId):
self._dtd_handler.notationDecl(name, publicId, systemId)
def unparsedEntityDecl(self, name, publicId, systemId, ndata):
self._dtd_handler.unparsedEntityDecl(name, publicId, systemId, ndata)
# EntityResolver methods
def resolveEntity(self, publicId, systemId):
self._ent_handler.resolveEntity(publicId, systemId)
# XMLReader methods
def parse(self, source):
self._parent.setContentHandler(self)
self._parent.setErrorHandler(self)
self._parent.setEntityResolver(self)
self._parent.setDTDHandler(self)
self._parent.parse(source)
def setLocale(self, locale):
self._parent.setLocale(locale)
def getFeature(self, name):
return self._parent.getFeature(name)
def setFeature(self, name, state):
self._parent.setFeature(name, state)
def getProperty(self, name):
return self._parent.getProperty(name)
def setProperty(self, name, value):
self._parent.setProperty(name, value)
# FIXME: remove this backward compatibility hack when not needed anymore
XMLFilterImpl = XMLFilterBase
# --- BaseIncrementalParser
class BaseIncrementalParser(xmlreader.IncrementalParser):
"""This class implements the parse method of the XMLReader
interface using the feed, close and reset methods of the
IncrementalParser interface as a convenience to SAX 2.0 driver
writers."""
def parse(self, source):
source = prepare_input_source(source)
self.prepareParser(source)
self._cont_handler.startDocument()
# FIXME: what about char-stream?
inf = source.getByteStream()
buffer = inf.read(16384)
while buffer != "":
self.feed(buffer)
buffer = inf.read(16384)
self.close()
self.reset()
self._cont_handler.endDocument()
def prepareParser(self, source):
"""This method is called by the parse implementation to allow
the SAX 2.0 driver to prepare itself for parsing."""
raise NotImplementedError("prepareParser must be overridden!")
# --- Utility functions
def prepare_input_source(source, base = ""):
"""This function takes an InputSource and an optional base URL and
returns a fully resolved InputSource object ready for reading."""
if type(source) in _StringTypes:
source = xmlreader.InputSource(source)
elif hasattr(source, "read"):
f = source
source = xmlreader.InputSource()
source.setByteStream(f)
if hasattr(f, "name"):
source.setSystemId(f.name)
if source.getByteStream() is None:
sysid = source.getSystemId()
if os.path.isfile(sysid):
basehead = os.path.split(os.path.normpath(base))[0]
source.setSystemId(os.path.join(basehead, sysid))
f = open(sysid, "rb")
else:
source.setSystemId(urlparse.urljoin(base, sysid))
f = urllib.urlopen(source.getSystemId())
source.setByteStream(f)
return source
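# Example: prepare_input_source("doc.xml", base="/srv/xml/app.xml") returns an
# InputSource whose system id is resolved relative to the base and whose byte
# stream is already open for reading.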
# ===========================================================================
#
# DEPRECATED SAX 1.0 CLASSES
#
# ===========================================================================
# --- AttributeMap
class AttributeMap:
"""An implementation of AttributeList that takes an (attr,val) hash
and uses it to implement the AttributeList interface."""
def __init__(self, map):
self.map=map
def getLength(self):
return len(self.map.keys())
def getName(self, i):
try:
return self.map.keys()[i]
except IndexError,e:
return None
def getType(self, i):
return "CDATA"
def getValue(self, i):
try:
if type(i)==types.IntType:
return self.map[self.getName(i)]
else:
return self.map[i]
except KeyError,e:
return None
def __len__(self):
return len(self.map)
def __getitem__(self, key):
if type(key)==types.IntType:
return self.map.keys()[key]
else:
return self.map[key]
def items(self):
return self.map.items()
def keys(self):
return self.map.keys()
def has_key(self,key):
return self.map.has_key(key)
def get(self, key, alternative=None):
return self.map.get(key, alternative)
def copy(self):
return AttributeMap(self.map.copy())
def values(self):
return self.map.values()
# --- Event broadcasting object
class EventBroadcaster:
"""Takes a list of objects and forwards any method calls received
to all objects in the list. The attribute list holds the list and
can freely be modified by clients."""
class Event:
"Helper objects that represent event methods."
def __init__(self,list,name):
self.list=list
self.name=name
def __call__(self,*rest):
for obj in self.list:
apply(getattr(obj,self.name), rest)
def __init__(self,list):
self.list=list
def __getattr__(self,name):
return self.Event(self.list,name)
def __repr__(self):
return "<EventBroadcaster instance at %d>" % id(self)
# --- ESIS document handler
import saxlib
class ESISDocHandler(saxlib.HandlerBase):
"A SAX document handler that produces naive ESIS output."
def __init__(self,writer=sys.stdout):
self.writer=writer
def processingInstruction (self,target, remainder):
"""Receive an event signalling that a processing instruction
has been found."""
self.writer.write("?"+target+" "+remainder+"\n")
def startElement(self,name,amap):
"Receive an event signalling the start of an element."
self.writer.write("("+name+"\n")
for a_name in amap.keys():
self.writer.write("A"+a_name+" "+amap[a_name]+"\n")
def endElement(self,name):
"Receive an event signalling the end of an element."
self.writer.write(")"+name+"\n")
def characters(self,data,start_ix,length):
"Receive an event signalling that character data has been found."
self.writer.write("-"+data[start_ix:start_ix+length]+"\n")
# --- XML canonizer
class Canonizer(saxlib.HandlerBase):
"A SAX document handler that produces canonized XML output."
def __init__(self,writer=sys.stdout):
self.elem_level=0
self.writer=writer
def processingInstruction (self,target, remainder):
if not target=="xml":
self.writer.write("<?"+target+" "+remainder+"?>")
def startElement(self,name,amap):
self.writer.write("<"+name)
a_names=amap.keys()
a_names.sort()
for a_name in a_names:
self.writer.write(" "+a_name+"=\"")
self.write_data(amap[a_name])
self.writer.write("\"")
self.writer.write(">")
self.elem_level=self.elem_level+1
def endElement(self,name):
self.writer.write("</"+name+">")
self.elem_level=self.elem_level-1
def ignorableWhitespace(self,data,start_ix,length):
self.characters(data,start_ix,length)
def characters(self,data,start_ix,length):
if self.elem_level>0:
self.write_data(data[start_ix:start_ix+length])
def write_data(self,data):
"Writes datachars to writer."
data=string.replace(data,"&","&")
data=string.replace(data,"<","<")
data=string.replace(data,"\"",""")
data=string.replace(data,">",">")
data=string.replace(data,chr(9),"	")
data=string.replace(data,chr(10)," ")
data=string.replace(data,chr(13)," ")
self.writer.write(data)
# --- mllib
class mllib:
"""A re-implementation of the htmllib, sgmllib and xmllib interfaces as a
SAX DocumentHandler."""
# Unsupported:
# - setnomoretags
# - setliteral
# - translate_references
# - handle_xml
# - handle_doctype
# - handle_charref
# - handle_entityref
# - handle_comment
# - handle_cdata
# - tag_attributes
def __init__(self):
self.reset()
def reset(self):
import saxexts # only used here
self.parser=saxexts.XMLParserFactory.make_parser()
self.handler=mllib.Handler(self.parser,self)
self.handler.reset()
def feed(self,data):
self.parser.feed(data)
def close(self):
self.parser.close()
def get_stack(self):
return self.handler.get_stack()
# --- Handler methods (to be overridden)
def handle_starttag(self,name,method,atts):
method(atts)
def handle_endtag(self,name,method):
method()
def handle_data(self,data):
pass
def handle_proc(self,target,data):
pass
def unknown_starttag(self,name,atts):
pass
def unknown_endtag(self,name):
pass
def syntax_error(self,message):
pass
# --- The internal handler class
class Handler(saxlib.DocumentHandler,saxlib.ErrorHandler):
"""An internal class to handle SAX events and translate them to mllib
events."""
def __init__(self,driver,handler):
self.driver=driver
self.driver.setDocumentHandler(self)
self.driver.setErrorHandler(self)
self.handler=handler
self.reset()
def get_stack(self):
return self.stack
def reset(self):
self.stack=[]
# --- DocumentHandler methods
def characters(self, ch, start, length):
self.handler.handle_data(ch[start:start+length])
def endElement(self, name):
if hasattr(self.handler,"end_"+name):
self.handler.handle_endtag(name,
getattr(self.handler,"end_"+name))
else:
self.handler.unknown_endtag(name)
del self.stack[-1]
def ignorableWhitespace(self, ch, start, length):
self.handler.handle_data(ch[start:start+length])
def processingInstruction(self, target, data):
self.handler.handle_proc(target,data)
def startElement(self, name, atts):
self.stack.append(name)
if hasattr(self.handler,"start_"+name):
self.handler.handle_starttag(name,
getattr(self.handler,
"start_"+name),
atts)
else:
self.handler.unknown_starttag(name,atts)
# --- ErrorHandler methods
def error(self, exception):
self.handler.syntax_error(str(exception))
def fatalError(self, exception):
raise RuntimeError(str(exception))
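# Example (illustrative): subclassing mllib; methods named start_<tag> and
# end_<tag> are dispatched automatically by the internal Handler.
#
#   class MyParser(mllib):
#       def start_title(self, atts): print "title opened"
#       def end_title(self): print "title closed"
#
#   p = MyParser()
#   p.feed("<doc><title>Hi</title></doc>")
#   p.close()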
| Integral-Technology-Solutions/ConfigNOW | Lib/xml/sax/saxutils.py | Python | mit | 20,106 |
# Import a whole load of stuff
from System.IO import *
from System.Drawing import *
from System.Runtime.Remoting import *
from System.Threading import *
from System.Windows.Forms import *
from System.Xml.Serialization import *
from System import *
from Analysis.EDM import *
from DAQ.Environment import *
from EDMConfig import *
def saveBlockConfig(path, config):
fs = FileStream(path, FileMode.Create)
s = XmlSerializer(BlockConfig)
s.Serialize(fs,config)
fs.Close()
def loadBlockConfig(path):
fs = FileStream(path, FileMode.Open)
s = XmlSerializer(BlockConfig)
bc = s.Deserialize(fs)
fs.Close()
return bc
def writeLatestBlockNotificationFile(cluster, blockIndex):
fs = FileStream(Environs.FileSystem.Paths["settingsPath"] + "\\BlockHead\\latestBlock.txt", FileMode.Create)
sw = StreamWriter(fs)
sw.WriteLine(cluster + "\t" + str(blockIndex))
sw.Close()
fs.Close()
def checkYAGAndFix():
interlockFailed = hc.YAGInterlockFailed;
if (interlockFailed):
bh.StopPattern();
bh.StartPattern();
def printWaveformCode(bc, name):
print(name + ": " + str(bc.GetModulationByName(name).Waveform.Code) + " -- " + str(bc.GetModulationByName(name).Waveform.Inverted))
def prompt(text):
sys.stdout.write(text)
return sys.stdin.readline().strip()
def measureParametersAndMakeBC(cluster, eState, bState, rfState, scramblerV, probePolAngle, pumpPolAngle):
fileSystem = Environs.FileSystem
print("Measuring parameters ...")
bh.StopPattern()
hc.UpdateRFPowerMonitor()
hc.UpdateRFFrequencyMonitor()
bh.StartPattern()
hc.UpdateBCurrentMonitor()
hc.UpdateVMonitor()
hc.UpdateI2AOMFreqMonitor()
print("V plus: " + str(hc.CPlusMonitorVoltage * hc.CPlusMonitorScale))
print("V minus: " + str(hc.CMinusMonitorVoltage * hc.CMinusMonitorScale))
print("Bias: " + str(hc.BiasCurrent))
print("B step: " + str(abs(hc.FlipStepCurrent)))
print("DB step: " + str(abs(hc.CalStepCurrent)))
# load a default BlockConfig and customise it appropriately
settingsPath = fileSystem.Paths["settingsPath"] + "\\BlockHead\\"
bc = loadBlockConfig(settingsPath + "default.xml")
bc.Settings["cluster"] = cluster
bc.Settings["eState"] = eState
bc.Settings["bState"] = bState
bc.Settings["rfState"] = rfState
bc.Settings["phaseScramblerV"] = scramblerV
bc.Settings["probePolarizerAngle"] = probePolAngle
bc.Settings["pumpPolarizerAngle"] = pumpPolAngle
bc.Settings["ePlus"] = hc.CPlusMonitorVoltage * hc.CPlusMonitorScale
bc.Settings["eMinus"] = hc.CMinusMonitorVoltage * hc.CMinusMonitorScale
bc.GetModulationByName("B").Centre = (hc.BiasCurrent)/1000
bc.GetModulationByName("B").Step = abs(hc.FlipStepCurrent)/1000
bc.GetModulationByName("DB").Step = abs(hc.CalStepCurrent)/1000
# these next 3, seemingly redundant, lines are to preserve backward compatibility
bc.GetModulationByName("B").PhysicalCentre = (hc.BiasCurrent)/1000
bc.GetModulationByName("B").PhysicalStep = abs(hc.FlipStepCurrent)/1000
bc.GetModulationByName("DB").PhysicalStep = abs(hc.CalStepCurrent)/1000
bc.GetModulationByName("RF1A").Centre = hc.RF1AttCentre
bc.GetModulationByName("RF1A").Step = hc.RF1AttStep
bc.GetModulationByName("RF1A").PhysicalCentre = hc.RF1PowerCentre
bc.GetModulationByName("RF1A").PhysicalStep = hc.RF1PowerStep
bc.GetModulationByName("RF2A").Centre = hc.RF2AttCentre
bc.GetModulationByName("RF2A").Step = hc.RF2AttStep
bc.GetModulationByName("RF2A").PhysicalCentre = hc.RF2PowerCentre
bc.GetModulationByName("RF2A").PhysicalStep = hc.RF2PowerStep
bc.GetModulationByName("RF1F").Centre = hc.RF1FMCentre
bc.GetModulationByName("RF1F").Step = hc.RF1FMStep
bc.GetModulationByName("RF1F").PhysicalCentre = hc.RF1FrequencyCentre
bc.GetModulationByName("RF1F").PhysicalStep = hc.RF1FrequencyStep
bc.GetModulationByName("RF2F").Centre = hc.RF2FMCentre
bc.GetModulationByName("RF2F").Step = hc.RF2FMStep
bc.GetModulationByName("RF2F").PhysicalCentre = hc.RF2FrequencyCentre
bc.GetModulationByName("RF2F").PhysicalStep = hc.RF2FrequencyStep
bc.GetModulationByName("LF1").Centre = hc.FLPZTVoltage
bc.GetModulationByName("LF1").Step = hc.FLPZTStep
bc.GetModulationByName("LF1").PhysicalCentre = hc.I2LockAOMFrequencyCentre
bc.GetModulationByName("LF1").PhysicalStep = hc.I2LockAOMFrequencyStep
# generate the waveform codes
print("Generating waveform codes ...")
eWave = bc.GetModulationByName("E").Waveform
eWave.Name = "E"
lf1Wave = bc.GetModulationByName("LF1").Waveform
lf1Wave.Name = "LF1"
ws = WaveformSetGenerator.GenerateWaveforms( (eWave, lf1Wave), ("B","DB","PI","RF1A","RF2A","RF1F","RF2F") )
bc.GetModulationByName("B").Waveform = ws["B"]
bc.GetModulationByName("DB").Waveform = ws["DB"]
bc.GetModulationByName("PI").Waveform = ws["PI"]
bc.GetModulationByName("RF1A").Waveform = ws["RF1A"]
bc.GetModulationByName("RF2A").Waveform = ws["RF2A"]
bc.GetModulationByName("RF1F").Waveform = ws["RF1F"]
bc.GetModulationByName("RF2F").Waveform = ws["RF2F"]
# change the inversions of the static codes E and LF1
bc.GetModulationByName("E").Waveform.Inverted = WaveformSetGenerator.RandomBool()
bc.GetModulationByName("LF1").Waveform.Inverted = WaveformSetGenerator.RandomBool()
# print the waveform codes
# printWaveformCode(bc, "E")
# printWaveformCode(bc, "B")
# printWaveformCode(bc, "DB")
# printWaveformCode(bc, "PI")
# printWaveformCode(bc, "RF1A")
# printWaveformCode(bc, "RF2A")
# printWaveformCode(bc, "RF1F")
# printWaveformCode(bc, "RF2F")
# printWaveformCode(bc, "LF1")
# store e-switch info in block config
print("Storing E switch parameters ...")
bc.Settings["eRampDownTime"] = hc.ERampDownTime
bc.Settings["eRampDownDelay"] = hc.ERampDownDelay
bc.Settings["eBleedTime"] = hc.EBleedTime
bc.Settings["eSwitchTime"] = hc.ESwitchTime
bc.Settings["eRampUpTime"] = hc.ERampUpTime
bc.Settings["eRampUpDelay"] = hc.ERampUpDelay
# this is for legacy analysis compatibility
bc.Settings["eDischargeTime"] = hc.ERampDownTime + hc.ERampDownDelay
bc.Settings["eChargeTime"] = hc.ERampUpTime + hc.ERampUpDelay
# store the E switch asymmetry in the block
bc.Settings["E0PlusBoost"] = hc.E0PlusBoost
return bc
# lock gains
# microamps of current per volt of control input
kSteppingBiasCurrentPerVolt = 1000.0
# max change in the b-bias voltage per block
kBMaxChange = 0.05
# volts of rf*a input required per cal's worth of offset
kRFAVoltsPerCal = 3.2
kRFAMaxChange = 0.1
# volts of rf*f input required per cal's worth of offset
kRFFVoltsPerCal = 8
kRFFMaxChange = 0.1
def updateLocks(bState):
pmtChannelValues = bh.DBlock.ChannelValues[0]
	# note the trailing comma: ("SIG",) is a one-element tuple
sigIndex = pmtChannelValues.GetChannelIndex(("SIG",))
sigValue = pmtChannelValues.GetValue(sigIndex)
bIndex = pmtChannelValues.GetChannelIndex(("B",))
bValue = pmtChannelValues.GetValue(bIndex)
#bError = pmtChannelValues.GetError(bIndex)
dbIndex = pmtChannelValues.GetChannelIndex(("DB",))
dbValue = pmtChannelValues.GetValue(dbIndex)
#dbError = pmtChannelValues.GetError(dbIndex)
rf1aIndex = pmtChannelValues.GetChannelIndex(("RF1A","DB"))
rf1aValue = pmtChannelValues.GetValue(rf1aIndex)
#rf1aError = pmtChannelValues.GetError(rf1aIndex)
rf2aIndex = pmtChannelValues.GetChannelIndex(("RF2A","DB"))
rf2aValue = pmtChannelValues.GetValue(rf2aIndex)
#rf2aError = pmtChannelValues.GetError(rf2aIndex)
rf1fIndex = pmtChannelValues.GetChannelIndex(("RF1F","DB"))
rf1fValue = pmtChannelValues.GetValue(rf1fIndex)
#rf1fError = pmtChannelValues.GetError(rf1fIndex)
rf2fIndex = pmtChannelValues.GetChannelIndex(("RF2F","DB"))
rf2fValue = pmtChannelValues.GetValue(rf2fIndex)
#rf2fError = pmtChannelValues.GetError(rf2fIndex)
lf1Index = pmtChannelValues.GetChannelIndex(("LF1",))
lf1Value = pmtChannelValues.GetValue(lf1Index)
#lf1Error = pmtChannelValues.GetError(lf1Index)
lf1dbIndex = pmtChannelValues.GetChannelIndex(("LF1","DB"))
lf1dbValue = pmtChannelValues.GetValue(lf1dbIndex)
print "SIG: " + str(sigValue)
print "B: " + str(bValue) + " DB: " + str(dbValue)
print "RF1A: " + str(rf1aValue) + " RF2A: " + str(rf2aValue)
print "RF1F: " + str(rf1fValue) + " RF2F: " + str(rf2fValue)
print "LF1: " + str(lf1Value) + " LF1.DB: " + str(lf1dbValue)
# B bias lock
# the sign of the feedback depends on the b-state
if bState:
feedbackSign = 1
else:
feedbackSign = -1
deltaBias = - (1.0/8.0) * feedbackSign * (hc.CalStepCurrent * (bValue / dbValue)) / kSteppingBiasCurrentPerVolt
deltaBias = windowValue(deltaBias, -kBMaxChange, kBMaxChange)
print "Attempting to change stepping B bias by " + str(deltaBias) + " V."
newBiasVoltage = windowValue( hc.SteppingBiasVoltage - deltaBias, 0, 5)
hc.SetSteppingBBiasVoltage( newBiasVoltage )
# RFA locks
deltaRF1A = - (1.0/3.0) * (rf1aValue / dbValue) * kRFAVoltsPerCal
deltaRF1A = windowValue(deltaRF1A, -kRFAMaxChange, kRFAMaxChange)
print "Attempting to change RF1A by " + str(deltaRF1A) + " V."
newRF1A = windowValue( hc.RF1AttCentre - deltaRF1A, hc.RF1AttStep, 5 - hc.RF1AttStep)
hc.SetRF1AttCentre( newRF1A )
#
deltaRF2A = - (1.0/3.0) * (rf2aValue / dbValue) * kRFAVoltsPerCal
deltaRF2A = windowValue(deltaRF2A, -kRFAMaxChange, kRFAMaxChange)
print "Attempting to change RF2A by " + str(deltaRF2A) + " V."
newRF2A = windowValue( hc.RF2AttCentre - deltaRF2A, hc.RF2AttStep, 5 - hc.RF2AttStep )
hc.SetRF2AttCentre( newRF2A )
# RFF locks
deltaRF1F = - (1.0/4.0) * (rf1fValue / dbValue) * kRFFVoltsPerCal
deltaRF1F = windowValue(deltaRF1F, -kRFFMaxChange, kRFFMaxChange)
print "Attempting to change RF1F by " + str(deltaRF1F) + " V."
newRF1F = windowValue( hc.RF1FMCentre - deltaRF1F, hc.RF1FMStep, 5 - hc.RF1FMStep)
hc.SetRF1FMCentre( newRF1F )
#
deltaRF2F = - (1.0/4.0) * (rf2fValue / dbValue) * kRFFVoltsPerCal
deltaRF2F = windowValue(deltaRF2F, -kRFFMaxChange, kRFFMaxChange)
print "Attempting to change RF2F by " + str(deltaRF2F) + " V."
newRF2F = windowValue( hc.RF2FMCentre - deltaRF2F, hc.RF2FMStep, 5 - hc.RF2FMStep )
hc.SetRF2FMCentre( newRF2F )
# Laser frequency lock (-ve multiplier in f0 mode and +ve in f1)
deltaLF1 = 1.25 * (lf1Value / dbValue) # I think this should be +ve (but that doesn't work)
deltaLF1 = windowValue(deltaLF1, -0.1, 0.1)
print "Attempting to change LF1 by " + str(deltaLF1) + " V."
newLF1 = windowValue( hc.FLPZTVoltage - deltaLF1, hc.FLPZTStep, 5 - hc.FLPZTStep )
hc.SetFLPZTVoltage( newLF1 )
def windowValue(value, minValue, maxValue):
if ( (value < maxValue) & (value > minValue) ):
return value
else:
if (value < minValue):
return minValue
else:
return maxValue
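# For example: windowValue(7.2, 0, 5) returns 5, windowValue(-0.3, 0, 5)
# returns 0, and values already inside the window pass through unchanged.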
kTargetRotationPeriod = 10
kReZeroLeakageMonitorsPeriod = 10
r = Random()
def EDMGo():
# Setup
f = None
fileSystem = Environs.FileSystem
dataPath = fileSystem.GetDataDirectory(fileSystem.Paths["edmDataPath"])
settingsPath = fileSystem.Paths["settingsPath"] + "\\BlockHead\\"
print("Data directory is : " + dataPath)
print("")
suggestedClusterName = fileSystem.GenerateNextDataFileName()
sm.SelectProfile("Scan B")
# User inputs data
cluster = prompt("Cluster name [" + suggestedClusterName +"]: ")
if cluster == "":
cluster = suggestedClusterName
print("Using cluster " + suggestedClusterName)
eState = hc.EManualState
print("E-state: " + str(eState))
bState = hc.BManualState
print("B-state: " + str(bState))
rfState = hc.RFManualState
print("rf-state: " + str(rfState))
# this is to make sure the B current monitor is in a sensible state
hc.UpdateBCurrentMonitor()
# randomise Ramsey phase
scramblerV = 0.724774 * r.NextDouble()
hc.SetScramblerVoltage(scramblerV)
# randomise polarizations
probePolAngle = 360.0 * r.NextDouble()
hc.SetProbePolarizerAngle(probePolAngle)
pumpPolAngle = 360.0 * r.NextDouble()
hc.SetPumpPolarizerAngle(pumpPolAngle)
bc = measureParametersAndMakeBC(cluster, eState, bState, rfState, scramblerV, probePolAngle, pumpPolAngle)
# loop and take data
blockIndex = 0
maxBlockIndex = 10000
while blockIndex < maxBlockIndex:
print("Acquiring block " + str(blockIndex) + " ...")
# save the block config and load into blockhead
print("Saving temp config.")
bc.Settings["clusterIndex"] = blockIndex
tempConfigFile ='%(p)stemp%(c)s_%(i)s.xml' % {'p': settingsPath, 'c': cluster, 'i': blockIndex}
saveBlockConfig(tempConfigFile, bc)
System.Threading.Thread.Sleep(500)
print("Loading temp config.")
bh.LoadConfig(tempConfigFile)
# take the block and save it
print("Running ...")
bh.AcquireAndWait()
print("Done.")
blockPath = '%(p)s%(c)s_%(i)s.zip' % {'p': dataPath, 'c': cluster, 'i': blockIndex}
bh.SaveBlock(blockPath)
print("Saved block "+ str(blockIndex) + ".")
# give mma a chance to analyse the block
print("Notifying Mathematica and waiting ...")
writeLatestBlockNotificationFile(cluster, blockIndex)
System.Threading.Thread.Sleep(5000)
print("Done.")
# increment and loop
File.Delete(tempConfigFile)
checkYAGAndFix()
blockIndex = blockIndex + 1
updateLocks(bState)
# randomise Ramsey phase
scramblerV = 0.724774 * r.NextDouble()
hc.SetScramblerVoltage(scramblerV)
# randomise polarizations
probePolAngle = 360.0 * r.NextDouble()
hc.SetProbePolarizerAngle(probePolAngle)
pumpPolAngle = 360.0 * r.NextDouble()
hc.SetPumpPolarizerAngle(pumpPolAngle)
bc = measureParametersAndMakeBC(cluster, eState, bState, rfState, scramblerV, probePolAngle, pumpPolAngle)
hc.StepTarget(1)
# do things that need periodically doing
# if ((blockIndex % kTargetRotationPeriod) == 0):
# print("Rotating target.")
# hc.StepTarget(10)
pmtChannelValues = bh.DBlock.ChannelValues[0]
dbIndex = pmtChannelValues.GetChannelIndex(("DB",))
dbValue = pmtChannelValues.GetValue(dbIndex)
if (dbValue < 8.4):
print("Dodgy spot target rotation.")
hc.StepTarget(5)
if ((blockIndex % kReZeroLeakageMonitorsPeriod) == 0):
print("Recalibrating leakage monitors.")
hc.EnableEField( False )
System.Threading.Thread.Sleep(10000)
hc.EnableBleed( True )
System.Threading.Thread.Sleep(1000)
hc.EnableBleed( False )
System.Threading.Thread.Sleep(5000)
hc.CalibrateIMonitors()
hc.EnableEField( True )
bh.StopPattern()
def run_script():
EDMGo()
| jstammers/EDMSuite | EDMScripts/EDMLoop_neg_slope.py | Python | mit | 14,423 |
model_search = "http://api.nytimes.com/svc/search/v2/" + \
"articlesearch.response-format?" + \
"[q=search term&" + \
"fq=filter-field:(filter-term)&additional-params=values]" + \
"&api-key=9key"
"""http://api.nytimes.com/svc/search/v2/articlesearch.json?q=terrorism+OR+terrorist
&begin_date=19900102&end_date=19900103&sort=newest&api-key=
key"""
search = "http://api.nytimes.com/svc/search/v2/" + \
"articlesearch.json?" + \
"[q=terror]" + \
"&api-key=key"
precise_search = "http://api.nytimes.com/svc/search/v2/" + \
"articlesearch.json"
terms = "?q=terrorism+OR+terrorist"
api = "&api-key=key"
print(precise_search + terms + api)
"""
Aggressive for-looping to overcome the ten-article limit: search each key word per day, concat the JSONs into a pandas DataFrame, and eventually write a CSV.
"""
months_list = ["%.2d" % i for i in range(1,2)]
days_list = ["%.2d" % i for i in range(1,32)]
json_files = []
print(months_list)
for x in months_list:
month_s = x
month_e = x
for y in days_list:
day_s = y
day_e = str(int(y)+1).zfill(2)
year_s = "1990"
year_e = "1990"
start = year_s + month_s + day_s
end = year_e + month_e + day_e
dates = "&begin_date="+start+"&end_date="+end+"&sort=newest"
#print(start + " "+end + "\n" +dates)
r = requests.get(precise_search+terms+dates+api)
original_json = json.loads(r.text)
response_json = original_json['response']
json_file = response_json['docs']
json_files.append(json_file)
frames = []
for x in json_files:
df = pd.DataFrame.from_dict(x)
frames.append(df)
#print(frames)
result = pd.concat(frames)
result
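# To finish the pipeline described above, the combined frame could be
# written out (hypothetical filename):
# result.to_csv('nyt_terrorism_199001.csv', index=False)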
| polypmer/scrape | new-york-times/nytimes-scrape.py | Python | mit | 1,833 |
# This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from flask import session
from indico.core.db import db
from indico.modules.events.logs import EventLogKind, EventLogRealm
from indico.modules.events.tracks import logger
from indico.modules.events.tracks.models.groups import TrackGroup
from indico.modules.events.tracks.models.tracks import Track
from indico.modules.events.tracks.settings import track_settings
def create_track(event, data):
track = Track(event=event)
track.populate_from_dict(data)
db.session.flush()
logger.info('Track %r created by %r', track, session.user)
event.log(EventLogRealm.management, EventLogKind.positive, 'Tracks',
'Track "{}" has been created.'.format(track.title), session.user)
return track
def update_track(track, data):
track.populate_from_dict(data)
db.session.flush()
logger.info('Track %r modified by %r', track, session.user)
track.event.log(EventLogRealm.management, EventLogKind.change, 'Tracks',
'Track "{}" has been modified.'.format(track.title), session.user)
def delete_track(track):
db.session.delete(track)
logger.info('Track deleted by %r: %r', session.user, track)
def update_program(event, data):
track_settings.set_multi(event, data)
logger.info('Program of %r updated by %r', event, session.user)
event.log(EventLogRealm.management, EventLogKind.change, 'Tracks', 'The program has been updated', session.user)
def create_track_group(event, data):
track_group = TrackGroup()
track_group.event = event
track_group.populate_from_dict(data)
db.session.flush()
logger.info('Track group %r created by %r', track_group, session.user)
event.log(EventLogRealm.management, EventLogKind.positive, 'Track Groups',
'Track group "{}" has been created.'.format(track_group.title), session.user)
def update_track_group(track_group, data):
track_group.populate_from_dict(data)
db.session.flush()
logger.info('Track group %r updated by %r', track_group, session.user)
track_group.event.log(EventLogRealm.management, EventLogKind.positive, 'Track Groups',
'Track group "{}" has been updated.'.format(track_group.title), session.user)
def delete_track_group(track_group):
db.session.delete(track_group)
logger.info('Track group deleted by %r: %r', session.user, track_group)
| mvidalgarcia/indico | indico/modules/events/tracks/operations.py | Python | mit | 2,614 |
from pytest import approx, raises
from fastats.maths.gamma import gammaln
def test_gamma_ints():
assert gammaln(10) == approx(12.801827480081469, rel=1e-6)
assert gammaln(5) == approx(3.1780538303479458, rel=1e-6)
assert gammaln(19) == approx(36.39544520803305, rel=1e-6)
def test_gamma_floats():
assert gammaln(3.141) == approx(0.8271155090776673, rel=1e-6)
assert gammaln(8.8129) == approx(10.206160943471318, rel=1e-6)
assert gammaln(12.001) == approx(17.50475055100354, rel=1e-6)
assert gammaln(0.007812) == approx(4.847635060148693, rel=1e-6)
assert gammaln(86.13) == approx(296.3450079998172, rel=1e-6)
def test_gamma_negative():
raises(AssertionError, gammaln, -1)
raises(AssertionError, gammaln, -0.023)
raises(AssertionError, gammaln, -10.9)
if __name__ == '__main__':
import pytest
pytest.main([__file__])
| dwillmer/fastats | tests/maths/test_gamma.py | Python | mit | 878 |
from i3pystatus import IntervalModule
from i3pystatus.core.util import internet, require
from datetime import datetime
from urllib.request import urlopen
import json
import re
GEOLOOKUP_URL = 'http://api.wunderground.com/api/%s/geolookup%s/q/%s.json'
STATION_QUERY_URL = 'http://api.wunderground.com/api/%s/%s/q/%s.json'
class Wunderground(IntervalModule):
'''
This module retrieves weather data using the Weather Underground API.
.. note::
A Weather Underground API key is required to use this module, you can
sign up for a developer API key free at
https://www.wunderground.com/weather/api/
A developer API key is allowed 500 queries per day, and no more than 10
in a given minute. Therefore, it is recommended to be conservative when
setting the update interval.
Valid values for ``location_code`` include:
* **State/City_Name** - CA/San_Francisco
* **Country/City** - France/Paris
* **Geolocation by IP** - autoip
* **Zip or Postal Code** - 60616
* **ICAO Airport Code** - icao:LAX
* **Latitude/Longitude** - 41.8301943,-87.6342619
* **Personal Weather Station (PWS)** - pws:KILCHICA30
When not using a ``pws`` or ``icao`` station ID, the location will be
queried, and the closest station will be used. For a list of PWS
station IDs, visit the following URL:
http://www.wunderground.com/weatherstation/ListStations.asp
.. _weather-usage-wunderground:
.. rubric:: Usage example
.. code-block:: python
from i3pystatus import Status
from i3pystatus.weather import wunderground
status = Status()
status.register(
'weather',
format='{condition} {current_temp}{temp_unit}{icon}[ Hi: {high_temp}] Lo: {low_temp}',
colorize=True,
backend=wunderground.Wunderground(
api_key='dbafe887d56ba4ad',
location_code='pws:MAT645',
units='imperial',
),
)
status.run()
See :ref:`here <weather-formatters>` for a list of formatters which can be
used.
'''
interval = 300
settings = (
('api_key', 'Weather Underground API key'),
('location_code', 'Location code from wunderground.com'),
('units', '\'metric\' or \'imperial\''),
('use_pws', 'Set to False to use only airport stations'),
('forecast', 'Set to ``True`` to check forecast (generates one '
'additional API request per weather update). If set to '
'``False``, then the ``low_temp`` and ``high_temp`` '
'formatters will be set to empty strings.'),
)
required = ('api_key', 'location_code')
api_key = None
location_code = None
units = 'metric'
use_pws = True
forecast = False
# These will be set once weather data has been checked
station_id = None
forecast_url = None
@require(internet)
def api_request(self, url):
'''
Execute an HTTP POST to the specified URL and return the content
'''
with urlopen(url) as content:
try:
content_type = dict(content.getheaders())['Content-Type']
charset = re.search(r'charset=(.*)', content_type).group(1)
except AttributeError:
charset = 'utf-8'
response = json.loads(content.read().decode(charset))
try:
raise Exception(response['response']['error']['description'])
except KeyError:
pass
return response
@require(internet)
def geolookup(self):
'''
Use the location_code to perform a geolookup and find the closest
station. If the location is a pws or icao station ID, no lookup will be
        performed.
'''
if self.station_id is None:
try:
for no_lookup in ('pws', 'icao'):
sid = self.location_code.partition(no_lookup + ':')[-1]
if sid:
self.station_id = self.location_code
return
except AttributeError:
# Numeric or some other type, either way we'll just stringify
# it below and perform a lookup.
pass
extra_opts = '/pws:0' if not self.use_pws else ''
api_url = GEOLOOKUP_URL % (self.api_key,
extra_opts,
self.location_code)
response = self.api_request(api_url)
station_type = 'pws' if self.use_pws else 'airport'
try:
stations = response['location']['nearby_weather_stations']
nearest = stations[station_type]['station'][0]
except (KeyError, IndexError):
raise Exception('No locations matched location_code %s'
% self.location_code)
if self.use_pws:
nearest_pws = nearest.get('id', '')
if not nearest_pws:
raise Exception('No id entry for station')
self.station_id = 'pws:%s' % nearest_pws
else:
nearest_airport = nearest.get('icao', '')
if not nearest_airport:
raise Exception('No icao entry for station')
self.station_id = 'icao:%s' % nearest_airport
@require(internet)
def get_forecast(self):
'''
If configured to do so, make an API request to retrieve the forecast
data for the configured/queried weather station, and return the low and
high temperatures. Otherwise, return two empty strings.
'''
if self.forecast:
query_url = STATION_QUERY_URL % (self.api_key,
'forecast',
self.station_id)
try:
response = self.api_request(query_url)['forecast']
response = response['simpleforecast']['forecastday'][0]
except (KeyError, IndexError, TypeError):
raise Exception('No forecast data found for %s' % self.station_id)
unit = 'celsius' if self.units == 'metric' else 'fahrenheit'
low_temp = response.get('low', {}).get(unit, '')
high_temp = response.get('high', {}).get(unit, '')
return low_temp, high_temp
else:
return '', ''
@require(internet)
def weather_data(self):
'''
Query the configured/queried station and return the weather data
'''
# If necessary, do a geolookup to set the station_id
self.geolookup()
query_url = STATION_QUERY_URL % (self.api_key,
'conditions',
self.station_id)
try:
response = self.api_request(query_url)['current_observation']
self.forecast_url = response.pop('ob_url', None)
except KeyError:
raise Exception('No weather data found for %s' % self.station_id)
low_temp, high_temp = self.get_forecast()
if self.units == 'metric':
temp_unit = 'c'
speed_unit = 'kph'
distance_unit = 'km'
pressure_unit = 'mb'
else:
temp_unit = 'f'
speed_unit = 'mph'
distance_unit = 'mi'
pressure_unit = 'in'
def _find(key, data=None):
data = data or response
return data.get(key, 'N/A')
try:
observation_time = int(_find('observation_epoch'))
except TypeError:
observation_time = 0
return dict(
city=_find('city', response['observation_location']),
condition=_find('weather'),
observation_time=datetime.fromtimestamp(observation_time),
current_temp=_find('temp_' + temp_unit),
low_temp=low_temp,
high_temp=high_temp,
temp_unit='°' + temp_unit.upper(),
feelslike=_find('feelslike_' + temp_unit),
dewpoint=_find('dewpoint_' + temp_unit),
wind_speed=_find('wind_' + speed_unit),
wind_unit=speed_unit,
wind_direction=_find('wind_dir'),
wind_gust=_find('wind_gust_' + speed_unit),
pressure=_find('pressure_' + pressure_unit),
pressure_unit=pressure_unit,
pressure_trend=_find('pressure_trend'),
visibility=_find('visibility_' + distance_unit),
visibility_unit=distance_unit,
humidity=_find('relative_humidity').rstrip('%'),
uv_index=_find('uv'),
)
| eBrnd/i3pystatus | i3pystatus/weather/wunderground.py | Python | mit | 8,899 |
"""
NBConvert Preprocessor for sanitizing HTML rendering of notebooks.
"""
from bleach import (
ALLOWED_ATTRIBUTES,
ALLOWED_STYLES,
ALLOWED_TAGS,
clean,
)
from traitlets import (
Any,
Bool,
List,
Set,
Unicode,
)
from .base import Preprocessor
class SanitizeHTML(Preprocessor):
# Bleach config.
attributes = Any(
config=True,
default_value=ALLOWED_ATTRIBUTES,
help="Allowed HTML tag attributes",
)
tags = List(
Unicode(),
config=True,
default_value=ALLOWED_TAGS,
help="List of HTML tags to allow",
)
styles = List(
Unicode(),
config=True,
default_value=ALLOWED_STYLES,
help="Allowed CSS styles if <style> tag is whitelisted"
)
strip = Bool(
config=True,
default_value=False,
help="If True, remove unsafe markup entirely instead of escaping"
)
strip_comments = Bool(
config=True,
default_value=True,
help="If True, strip comments from escaped HTML",
)
# Display data config.
safe_output_keys = Set(
config=True,
default_value={
'metadata', # Not a mimetype per-se, but expected and safe.
'text/plain',
'text/latex',
'application/json',
'image/png',
'image/jpeg',
},
help="Cell output mimetypes to render without modification",
)
sanitized_output_types = Set(
config=True,
default_value={
'text/html',
'text/markdown',
},
help="Cell output types to display after escaping with Bleach.",
)
def preprocess_cell(self, cell, resources, cell_index):
"""
Sanitize potentially-dangerous contents of the cell.
Cell Types:
raw:
Sanitize literal HTML
markdown:
Sanitize literal HTML
code:
Sanitize outputs that could result in code execution
"""
if cell.cell_type == 'raw':
# Sanitize all raw cells anyway.
# Only ones with the text/html mimetype should be emitted
# but erring on the side of safety maybe.
cell.source = self.sanitize_html_tags(cell.source)
return cell, resources
elif cell.cell_type == 'markdown':
cell.source = self.sanitize_html_tags(cell.source)
return cell, resources
elif cell.cell_type == 'code':
cell.outputs = self.sanitize_code_outputs(cell.outputs)
return cell, resources
def sanitize_code_outputs(self, outputs):
"""
Sanitize code cell outputs.
Removes 'text/javascript' fields from display_data outputs, and
runs `sanitize_html_tags` over 'text/html'.
"""
for output in outputs:
# These are always ascii, so nothing to escape.
if output['output_type'] in ('stream', 'error'):
continue
data = output.data
to_remove = []
for key in data:
if key in self.safe_output_keys:
continue
elif key in self.sanitized_output_types:
self.log.info("Sanitizing %s" % key)
data[key] = self.sanitize_html_tags(data[key])
else:
# Mark key for removal. (Python doesn't allow deletion of
# keys from a dict during iteration)
to_remove.append(key)
for key in to_remove:
self.log.info("Removing %s" % key)
del data[key]
return outputs
def sanitize_html_tags(self, html_str):
"""
Sanitize a string containing raw HTML tags.
"""
return clean(
html_str,
tags=self.tags,
attributes=self.attributes,
styles=self.styles,
strip=self.strip,
strip_comments=self.strip_comments,
)
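# Minimal usage sketch (assuming a standard nbconvert config object 'c'):
# enable the preprocessor on an exporter, e.g.
#
#   c.HTMLExporter.preprocessors = ['nbconvert.preprocessors.SanitizeHTML']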
| sserrot/champion_relationships | venv/Lib/site-packages/nbconvert/preprocessors/sanitize.py | Python | mit | 4,070 |
from collections import deque
from typing import List
class Solution:
def containVirus(self, grid: List[List[int]]) -> int:
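        # Strategy (summary): give each infected region a set number, merging
        # regions that touch; count the uninfected cells each region threatens;
        # wall off (contain) the most threatening region; let the rest spread
        # one step; repeat until no region threatens any cell.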
current_set_number = 1
grid_set = [[0 for i in range(len(grid[0]))] for j in range(len(grid))]
set_grid = {}
threaten = {}
def getAdjacentCellsSet(row, col) -> List[int]:
answer = []
if row != 0 and grid_set[row-1][col] != 0 and grid_set[row-1][col] not in answer:
answer.append(grid_set[row-1][col])
if col != 0 and grid_set[row][col-1] != 0 and grid_set[row][col-1] not in answer:
answer.append(grid_set[row][col-1])
if row != len(grid)-1 and grid_set[row+1][col] != 0 and grid_set[row+1][col] not in answer:
answer.append(grid_set[row+1][col])
if col != len(grid[0])-1 and grid_set[row][col+1] != 0 and grid_set[row][col+1] not in answer:
answer.append(grid_set[row][col+1])
if -1 in answer:
answer.remove(-1)
if grid_set[row][col] in answer:
answer.remove(grid_set[row][col])
return answer
# Merge all regions to the first one.
def merge(regions: List[int]):
merge_to = regions[0]
for i in range(1, len(regions)):
for x, y in set_grid[regions[i]]:
grid_set[x][y] = merge_to
set_grid[merge_to] += set_grid[regions[i]]
del set_grid[regions[i]]
if regions[i] in threaten:
del threaten[regions[i]]
for i in range(len(grid)):
for j in range(len(grid[0])):
if grid[i][j] == 1:
adjacent_sets = getAdjacentCellsSet(i, j)
set_number = 0
if len(adjacent_sets) == 0:
set_number = current_set_number
current_set_number += 1
elif len(adjacent_sets) == 1:
set_number = adjacent_sets[0]
else: # Merge
merge(adjacent_sets)
set_number = adjacent_sets[0]
grid_set[i][j] = set_number
if set_number not in set_grid:
set_grid[set_number] = []
set_grid[set_number].append((i, j))
def adjacentThreatened(x, y):
answer = []
if x != 0 and grid_set[x-1][y] == 0:
answer.append((x-1, y))
if y != 0 and grid_set[x][y-1] == 0:
answer.append((x, y-1))
if x != len(grid_set)-1 and grid_set[x+1][y] == 0:
answer.append((x+1, y))
if y != len(grid_set[0])-1 and grid_set[x][y+1] == 0:
answer.append((x, y+1))
return answer
def threatenCells():
for i in set_grid:
if i == 0 or i == -1:
continue
threatened = set()
for x, y in set_grid[i]:
threatened = threatened.union(adjacentThreatened(x, y))
threaten[i] = len(threatened)
def contain(set_number):
wall = 0
for x, y in set_grid[set_number]:
grid_set[x][y] = -1
if x != 0 and grid_set[x-1][y] == 0:
wall += 1
if y != 0 and grid_set[x][y-1] == 0:
wall += 1
if x != len(grid_set)-1 and grid_set[x+1][y] == 0:
wall += 1
if y != len(grid_set[0])-1 and grid_set[x][y+1] == 0:
wall += 1
del set_grid[set_number]
del threaten[set_number]
return wall
def spread():
to_spread = deque()
for _, v in set_grid.items():
to_spread.extend(v)
while len(to_spread) > 0:
x, y = to_spread.popleft()
current_set = grid_set[x][y]
if x != 0 and grid_set[x-1][y] == 0:
grid_set[x-1][y] = current_set
set_grid[current_set].append((x-1, y))
adj = getAdjacentCellsSet(x-1, y)
merge([current_set]+adj)
if y != 0 and grid_set[x][y-1] == 0:
grid_set[x][y-1] = current_set
set_grid[current_set].append((x, y-1))
adj = getAdjacentCellsSet(x, y-1)
merge([current_set]+adj)
if x != len(grid_set)-1 and grid_set[x+1][y] == 0:
grid_set[x+1][y] = current_set
set_grid[current_set].append((x+1, y))
adj = getAdjacentCellsSet(x+1, y)
merge([current_set]+adj)
if y != len(grid_set[0])-1 and grid_set[x][y+1] == 0:
grid_set[x][y+1] = current_set
set_grid[current_set].append((x, y+1))
adj = getAdjacentCellsSet(x, y+1)
merge([current_set]+adj)
answer = 0
threatenCells()
# print(grid_set)
# print(answer)
while len(threaten) != 0:
# print(threaten)
largest_infected = sorted(
threaten.items(), key=lambda x: x[1], reverse=True)[0]
answer += contain(largest_infected[0])
spread()
# print(grid_set)
# print(answer)
threatenCells()
return answer
| jianjunz/online-judge-solutions | leetcode/0750-contain-virus.py | Python | mit | 5,673 |
class Solution(object):
def validWordSquare(self, words):
"""
:type words: List[str]
:rtype: bool
"""
if words is None or len(words) == 0:
return True
ls = len(words)
for i in range(ls):
for j in range(1, len(words[i])):
if j >= ls:
return False
if i >= len(words[j]):
return False
if words[i][j] != words[j][i]:
return False
return True
# def validWordSquare(self, words):
# # https://discuss.leetcode.com/topic/63423/1-liner-python/2
# # The map(None, ...) transposes the "matrix", filling missing spots with None
# return map(None, *words) == map(None, *map(None, *words))
| qiyuangong/leetcode | python/422_Valid_Word_Square.py | Python | mit | 805 |
# coding=utf-8
"""
The NfsCollector collects nfs utilization metrics using /proc/net/rpc/nfs.
#### Dependencies
* /proc/net/rpc/nfs
"""
import diamond.collector
import os
class NfsCollector(diamond.collector.Collector):
PROC = '/proc/net/rpc/nfs'
def get_default_config_help(self):
config_help = super(NfsCollector, self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(NfsCollector, self).get_default_config()
config.update({
'enabled': False,
'path': 'nfs'
})
return config
def collect(self):
"""
Collect stats
"""
if os.access(self.PROC, os.R_OK):
results = {}
# Open file
file = open(self.PROC)
for line in file:
line = line.split()
if line[0] == 'net':
results['net.packets'] = line[1]
results['net.udpcnt'] = line[2]
results['net.tcpcnt'] = line[3]
results['net.tcpconn'] = line[4]
elif line[0] == 'rpc':
results['rpc.calls'] = line[1]
results['rpc.retrans'] = line[2]
results['rpc.authrefrsh'] = line[3]
elif line[0] == 'proc2':
results['v2.null'] = line[1]
results['v2.getattr'] = line[2]
results['v2.setattr'] = line[3]
results['v2.root'] = line[4]
results['v2.lookup'] = line[5]
results['v2.readlink'] = line[6]
results['v2.read'] = line[7]
results['v2.wrcache'] = line[8]
results['v2.write'] = line[9]
results['v2.create'] = line[10]
results['v2.remove'] = line[11]
results['v2.rename'] = line[12]
results['v2.link'] = line[13]
results['v2.symlink'] = line[14]
results['v2.mkdir'] = line[15]
results['v2.rmdir'] = line[16]
results['v2.readdir'] = line[17]
results['v2.fsstat'] = line[18]
elif line[0] == 'proc3':
results['v3.null'] = line[1]
results['v3.getattr'] = line[2]
results['v3.setattr'] = line[3]
results['v3.lookup'] = line[4]
results['v3.access'] = line[5]
results['v3.readlink'] = line[6]
results['v3.read'] = line[7]
results['v3.write'] = line[8]
results['v3.create'] = line[9]
results['v3.mkdir'] = line[10]
results['v3.symlink'] = line[11]
results['v3.mknod'] = line[12]
results['v3.remove'] = line[13]
results['v3.rmdir'] = line[14]
results['v3.rename'] = line[15]
results['v3.link'] = line[16]
results['v3.readdir'] = line[17]
results['v3.readdirplus'] = line[18]
results['v3.fsstat'] = line[19]
results['v3.fsinfo'] = line[20]
results['v3.pathconf'] = line[21]
results['v3.commit'] = line[22]
elif line[0] == 'proc4':
results['v4.null'] = line[1]
results['v4.read'] = line[2]
results['v4.write'] = line[3]
results['v4.commit'] = line[4]
results['v4.open'] = line[5]
results['v4.open_conf'] = line[6]
results['v4.open_noat'] = line[7]
results['v4.open_dgrd'] = line[8]
results['v4.close'] = line[9]
results['v4.setattr'] = line[10]
results['v4.fsinfo'] = line[11]
results['v4.renew'] = line[12]
results['v4.setclntid'] = line[13]
results['v4.confirm'] = line[14]
results['v4.lock'] = line[15]
results['v4.lockt'] = line[16]
results['v4.locku'] = line[17]
results['v4.access'] = line[18]
results['v4.getattr'] = line[19]
results['v4.lookup'] = line[20]
results['v4.lookup_root'] = line[21]
results['v4.remove'] = line[22]
results['v4.rename'] = line[23]
results['v4.link'] = line[24]
results['v4.symlink'] = line[25]
results['v4.create'] = line[26]
results['v4.pathconf'] = line[27]
results['v4.statfs'] = line[28]
results['v4.readlink'] = line[29]
results['v4.readdir'] = line[30]
try:
results['v4.server_caps'] = line[31]
except IndexError:
pass
try:
results['v4.delegreturn'] = line[32]
except IndexError:
pass
try:
results['v4.getacl'] = line[33]
except IndexError:
pass
try:
results['v4.setacl'] = line[34]
except IndexError:
pass
try:
results['v4.fs_locations'] = line[35]
except IndexError:
pass
try:
results['v4.rel_lkowner'] = line[36]
except IndexError:
pass
try:
results['v4.exchange_id'] = line[37]
except IndexError:
pass
try:
results['v4.create_ses'] = line[38]
except IndexError:
pass
try:
results['v4.destroy_ses'] = line[39]
except IndexError:
pass
try:
results['v4.sequence'] = line[40]
except IndexError:
pass
try:
results['v4.get_lease_t'] = line[41]
except IndexError:
pass
try:
results['v4.reclaim_comp'] = line[42]
except IndexError:
pass
try:
results['v4.layoutget'] = line[43]
except IndexError:
pass
try:
results['v4.layoutcommit'] = line[44]
except IndexError:
pass
try:
results['v4.layoutreturn'] = line[45]
except IndexError:
pass
try:
results['v4.getdevlist'] = line[46]
except IndexError:
pass
try:
results['v4.getdevinfo'] = line[47]
except IndexError:
pass
try:
results['v4.ds_write'] = line[48]
except IndexError:
pass
try:
results['v4.ds_commit'] = line[49]
except IndexError:
pass
try:
results['v4.getdevlist'] = line[50]
except IndexError:
pass
# Close File
file.close()
for stat in results.keys():
metric_name = '.' + stat
metric_value = long(float(results[stat]))
metric_value = self.derivative(metric_name, metric_value)
self.publish(metric_name, metric_value)
return True
return False
| datafiniti/Diamond | src/collectors/nfs/nfs.py | Python | mit | 8,613 |
# This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from sqlalchemy.ext.declarative import declared_attr
from indico.core.db import db
from indico.util.locators import locator_property
from indico.util.string import format_repr, return_ascii
class SessionType(db.Model):
__tablename__ = 'session_types'
@declared_attr
def __table_args__(cls):
return (db.Index('ix_uq_session_types_event_id_name_lower', cls.event_id, db.func.lower(cls.name),
unique=True),
{'schema': 'events'})
id = db.Column(
db.Integer,
primary_key=True
)
event_id = db.Column(
db.Integer,
db.ForeignKey('events.events.id'),
index=True,
nullable=False
)
name = db.Column(
db.String,
nullable=False
)
code = db.Column(
db.String,
nullable=False,
default=''
)
is_poster = db.Column(
db.Boolean,
nullable=False,
default=False
)
event = db.relationship(
'Event',
lazy=True,
backref=db.backref(
'session_types',
cascade='all, delete-orphan',
lazy=True
)
)
# relationship backrefs:
# - sessions (Session.type)
@return_ascii
def __repr__(self):
return format_repr(self, 'id', _text=self.name)
@locator_property
def locator(self):
return dict(self.event.locator, session_type_id=self.id)
| mvidalgarcia/indico | indico/modules/events/sessions/models/types.py | Python | mit | 1,690 |
"""
Minimal example showing the use of the RightMarginMode.
"""
import logging
logging.basicConfig(level=logging.DEBUG)
import sys
from pyqode.qt import QtWidgets
from pyqode.core.api import CodeEdit
from pyqode.core.backend import server
from pyqode.core.modes import RightMarginMode
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
editor = CodeEdit()
editor.backend.start(server.__file__)
editor.resize(800, 600)
margin = editor.modes.append(RightMarginMode())
margin.position = 4
editor.file.open(__file__)
editor.show()
app.exec_()
editor.close()
del editor
del app
| zwadar/pyqode.core | examples/modes/right_margin.py | Python | mit | 640 |
import os
import json
from nose.tools import assert_equal
from .project import load_lsdsng
from .utils import temporary_file
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
def _test_load_store_instrument(source_lsdsng, lsdinst_path, original_index):
proj = load_lsdsng(source_lsdsng)
proj.song.instruments.import_from_file(0x2a, lsdinst_path)
target_instr = proj.song.instruments[0x2a]
original_instr = proj.song.instruments[original_index]
assert_equal(original_instr, target_instr)
with temporary_file() as tmpfile:
original_instr.export_to_file(tmpfile)
with open(tmpfile, 'r') as fp:
saved_inst = json.load(fp)
with open(lsdinst_path, 'r') as fp:
original_inst = json.load(fp)
assert_equal(original_inst, saved_inst)
def test_load_store_wave_instrument():
_test_load_store_instrument(
os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST.lsdsng'),
os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST_0x00_wave.lsdinst'),
0x00)
def test_load_store_pulse_instrument():
_test_load_store_instrument(
os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST.lsdsng'),
os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST_0x03_pulse.lsdinst'),
0x03)
def test_load_store_kit_instrument():
_test_load_store_instrument(
os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST.lsdsng'),
os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST_0x16_kit.lsdinst'),
0x16)
def test_load_store_noise_instrument():
_test_load_store_instrument(
os.path.join(SCRIPT_DIR, 'test_data', 'ANNARKTE.lsdsng'),
os.path.join(SCRIPT_DIR, 'test_data', 'ANNARKTE_0x06_noise.lsdinst'),
0x06)
def test_load_store_arduinoboy():
_test_load_store_instrument(
os.path.join(SCRIPT_DIR, 'test_data', 'ARDBOYxx.lsdsng'),
os.path.join(SCRIPT_DIR, 'test_data', 'MIDI.lsdinst'),
0x01)
| alexras/pylsdj | pylsdj/test_instrument.py | Python | mit | 1,954 |
"""
# A Better Where
WHERE2 is a near-linear time top-down clustering algorithm.
WHERE2 updates an older WHERE with new Python tricks.
## Standard Header Stuff
"""
from __future__ import division,print_function
import sys
sys.dont_write_bytecode = True
from lib import *
from nasa93 import *
"""
## Dimensionality Reduction with Fastmap
Project data in N dimensions down to a single dimension connecting
two distant points. Divide that data at the median of those projections.
"""
def fastmap(m,data):
"Divide data.dat into two using distance to two distant items."
import random
random.seed(1)
one = any(data) # 1) pick anything
west = furthest(m,one,data) # 2) west is as far as you can go from anything
east = furthest(m,west,data) # 3) east is as far as you can go from west
c = dist(m,west,east)
# now find everyone's distance
lst = []
for one in data:
a = dist(m,one,west)
b = dist(m,one,east)
x = (a*a + c*c - b*b)/(2*c) # cosine rule
y = max(0, a**2 - x**2)**0.5 # not used, here for a demo
lst += [(x,one)]
lst = sorted(lst)
mid = len(lst)//2
wests = map(second,lst[:mid])
easts = map(second,lst[mid:])
return wests,west, easts,east,c
def gt(x,y): return x > y
def lt(x,y): return x < y
"""
In the above:
+ _m_ is some model that generates candidate
solutions that we wish to niche.
+ _(west,east)_ are not _the_ most distant points
(that would require _N*N_ distance
calculations). But they are at least very distant
to each other.
This code needs some helper functions. _Dist_ uses
the standard Euclidean measure. Note that you tune
what it uses to define the niches (decisions or
objectives) using the _what_ parameter:
"""
def dist(m,i,j,
what = lambda m: m.decisions):
"Euclidean distance 0 <= d <= 1 between decisions"
n = len(i.cells)
deltas = 0
for c in what(m):
n1 = norm(m, c, i.cells[c])
n2 = norm(m, c, j.cells[c])
inc = (n1-n2)**2
deltas += inc
n += abs(m.w[c])
return deltas**0.5 / n**0.5
"""
The _Dist_ function normalizes all the raw values zero to one.
"""
def norm(m,c,val) :
"Normalizes val in col c within model m 0..1"
return (val- m.lo[c]) / (m.hi[c]- m.lo[c]+ 0.0001)
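"""
For example (illustrative numbers): if column _c_ of model _m_ spans
10..20, then _norm(m,c,15)_ is roughly 0.5. The 0.0001 in the denominator
guards against division by zero when a column's _lo_ equals its _hi_.
"""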
"""
Now we can define _furthest_:
"""
def furthest(m,i,all,
init = 0,
better = gt):
"find which of all is furthest from 'i'"
out,d= i,init
for j in all:
if i == j: continue
tmp = dist(m,i,j)
if better(tmp,d):
out,d = j,tmp
return out
"""
And of course, _closest_:
"""
def closest(m,i,all):
return furthest(m,i,all,init=10**32,better=lt)
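"""
A quick sketch of how these are used (assuming a loaded model _m_):
    row = m._rows[0]
    far = furthest(m, row, m._rows) # most distant other row
    near = closest(m, row, m._rows) # nearest other row
"""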
"""
## WHERE2 = Recursive Fastmap
WHERE2 finds everyone else's distance from the poles
and divides the data at the median point of those
distances. This all stops if:
+ Any division has _tooFew_ solutions (say,
less than _sqrt_ of the total number of
solutions).
+ Something has gone horribly wrong and you are
recursing _tooDeep_
This code is controlled by the options in [_The_ settings](settingspy). For
example, if _The.pruning_ is true, we may ignore
some sub-tree (this process is discussed, later on).
Also, if _The.verbose_ is true, the _show_
function prints out a little tree showing the
progress (and to print indents in that tree, we use
the string _The.b4_). For example, here's WHERE2
dividing 93 examples from NASA93.
---| _where |-----------------
93
|.. 46
|.. |.. 23
|.. |.. |.. 11
|.. |.. |.. |.. 5.
|.. |.. |.. |.. 6.
|.. |.. |.. 12
|.. |.. |.. |.. 6.
|.. |.. |.. |.. 6.
|.. |.. 23
|.. |.. |.. 11
|.. |.. |.. |.. 5.
|.. |.. |.. |.. 6.
|.. |.. |.. 12
|.. |.. |.. |.. 6.
|.. |.. |.. |.. 6.
|.. 47
|.. |.. 23
|.. |.. |.. 11
|.. |.. |.. |.. 5.
|.. |.. |.. |.. 6.
|.. |.. |.. 12
|.. |.. |.. |.. 6.
|.. |.. |.. |.. 6.
|.. |.. 24
|.. |.. |.. 12
|.. |.. |.. |.. 6.
|.. |.. |.. |.. 6.
|.. |.. |.. 12
|.. |.. |.. |.. 6.
|.. |.. |.. |.. 6.
WHERE2 returns clusters, where each cluster contains
multiple solutions.
"""
def where2(m, data, lvl=0, up=None):
node = o(val=None,_up=up,_kids=[])
def tooDeep(): return lvl > The.what.depthMax
def tooFew() : return len(data) < The.what.minSize
def show(suffix):
if The.verbose:
print(The.what.b4*lvl,len(data),
suffix,' ; ',id(node) % 1000,sep='')
if tooDeep() or tooFew():
show(".")
node.val = data
else:
show("")
wests,west, easts,east,c = fastmap(m,data)
node.update(c=c,east=east,west=west)
goLeft, goRight = maybePrune(m,lvl,west,east)
if goLeft:
node._kids += [where2(m, wests, lvl+1, node)]
if goRight:
node._kids += [where2(m, easts, lvl+1, node)]
return node
"""
## An Experimental Extension
Lately I've been experimenting with a system that
prunes as it divides the data. GALE checks for
domination between the poles and ignores data in
halves with a dominated pole. This means that for
_N_ solutions we only ever have to evaluate
_2*log(N)_ of them, which is useful if each
evaluation takes a long time.
The niches found in this way
contain non-dominated poles; i.e. they are
approximations to the Pareto frontier.
Preliminary results show that this is a useful
approach but you should treat those results with a
grain of salt.
In any case, this code supports that pruning as an
optional extra (and is enabled using the
_slots.pruning_ flag). In summary, this code says if
the scores for the poles are more different that
_slots.wriggle_ and one pole has a better score than
the other, then ignore the other pole.
"""
def maybePrune(m,lvl,west,east):
"Usually, go left then right, unless dominated."
goLeft, goRight = True,True # default
if The.prune and lvl >= The.what.depthMin:
sw = scores(m, west)
se = scores(m, east)
if abs(sw - se) > The.wriggle: # big enough to consider
if se > sw: goLeft = False # no left
if sw > se: goRight = False # no right
return goLeft, goRight
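"""
For example (illustrative numbers): with _The.wriggle=0.1_, _sw=0.3_ and
_se=0.5_, the gap 0.2 is big enough to consider and east scores higher,
so _goLeft_ becomes false: the west half is pruned and only the east
half is explored.
"""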
"""
Note that I do not allow pruning until we have
descended at least _slots.depthMin_ into the tree.
### Model-specific Stuff
WHERE2 talks to models via the the following model-specific variables:
+ _m.cols_: list of indices in a list
+ _m.names_: a list of names for each column.
+ _m.decisions_: the subset of cols relating to decisions.
+ _m.objectives_: the subset of cols relating to objectives.
+ _m.eval(m,eg)_: function for computing variables from _eg_.
+ _m.lo[c]_ : the lowest value in column _c_.
+ _m.hi[c]_ : the highest value in column _c_.
+ _m.w[c]_: the weight for each column. Usually equal to one.
If an objective and if we are minimizing that objective, then the weight is negative.
### Model-general stuff
Using the model-specific stuff, WHERE2 defines some
useful general functions.
"""
def some(m,x) :
"with variable x of model m, pick one value at random"
return m.lo[x] + by(m.hi[x] - m.lo[x])
def scores(m,it):
"Score an individual."
if not it.scored:
m.eval(m,it)
new, w = 0, 0
for c in m.objectives:
val = it.cells[c]
w += abs(m.w[c])
tmp = norm(m,c,val)
if m.w[c] < 0:
tmp = 1 - tmp
new += (tmp**2)
it.score = (new**0.5) / (w**0.5)
it.scored = True
return it.score
"""
## Tree Code
Tools for manipulating the tree returned by _where2_.
### Primitive: Walk the nodes
"""
def nodes(tree,seen=None,steps=0):
if seen is None: seen=[]
if tree:
if not id(tree) in seen:
seen.append(id(tree))
yield tree,steps
for kid in tree._kids:
for sub,steps1 in nodes(kid,seen,steps+1):
yield sub,steps1
"""
### Return nodes that are leaves
"""
def leaves(tree,seen=None,steps=0):
for node,steps1 in nodes(tree,seen,steps):
if not node._kids:
yield node,steps1
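"""
For example (sketch): counting every row stored at the leaves of a tree
returned by _where2_:
    n = sum(len(node.val) for node,_ in leaves(tree))
"""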
"""
### Return nodes nearest to furthest
"""
def neighbors(leaf,seen=None,steps=-1):
"""Walk the tree from 'leaf' increasingly
distant leaves. """
if seen is None: seen=[]
for down,steps1 in leaves(leaf,seen,steps+1):
yield down,steps1
if leaf:
for up,steps1 in neighbors(leaf._up, seen,steps+1):
yield up,steps1
"""
### Return nodes in Groups, Closest to Furthest
"""
def around(leaf, f=lambda x: x):
tmp,last = [], None
for node,dist in neighbors(leaf):
if dist > 0:
if dist == last:
tmp += [f(node)]
else:
if tmp:
yield last,tmp
tmp = [f(node)]
last = dist
if tmp:
yield last,tmp
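"""
_around_ yields (distance, [items]) pairs, closest group first. For
example (sketch), grouping neighboring leaves by their short ids as in
the demo below:
    for d,group in around(leaf, f=lambda z: id(z) % 1000):
        print(d, group)
"""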
"""
## Demo Code
### Code Showing the scores
"""
#@go
def _scores():
m = nasa93()
out = []
for row in m._rows:
scores(m,row)
out += [(row.score, [row.cells[c] for c in m.objectives])]
for s,x in sorted(out):
print(s,x)
"""
### Code Showing the Distances
"""
#@go
def _distances(m=nasa93):
m=m()
seed(The.seed)
for i in m._rows:
j = closest(m,i, m._rows)
k = furthest(m,i, m._rows)
idec = [i.cells[c] for c in m.decisions]
jdec = [j.cells[c] for c in m.decisions]
kdec = [k.cells[c] for c in m.decisions]
print("\n",
gs(idec), g(scores(m,i)),"\n",
gs(jdec),"closest ", g(dist(m,i,j)),"\n",
gs(kdec),"furthest", g(dist(m,i,k)))
"""
### A Demo for Where2.
"""
"""
@go
def _where(m=nasa93):
m= m()
seed(1)
told=N()
for r in m._rows:
s = scores(m,r)
told += s
global The
The=defaults().update(verbose = True,
minSize = len(m._rows)**0.5,
prune = False,
wriggle = 0.3*told.sd())
tree = where2(m, m._rows)
n=0
for node,_ in leaves(tree):
m = len(node.val)
#print(m,' ',end="")
n += m
print(id(node) % 1000, ' ',end='')
for near,dist in neighbors(node):
print(dist,id(near) % 1000,' ',end='')
print("")
print(n)
filter = lambda z: id(z) % 1000
for node,_ in leaves(tree):
print(filter(node),
[x for x in around(node,filter)])
"""
| rahlk/WarnPlan | warnplan/commons/tools/axe/where2.py | Python | mit | 10,028 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class VirtualNetworkPeering(SubResource):
"""Peerings in a virtual network resource.
:param id: Resource ID.
:type id: str
:param allow_virtual_network_access: Whether the VMs in the linked virtual
network space would be able to access all the VMs in local Virtual network
space.
:type allow_virtual_network_access: bool
:param allow_forwarded_traffic: Whether the forwarded traffic from the VMs
in the remote virtual network will be allowed/disallowed.
:type allow_forwarded_traffic: bool
:param allow_gateway_transit: If gateway links can be used in remote
virtual networking to link to this virtual network.
:type allow_gateway_transit: bool
:param use_remote_gateways: If remote gateways can be used on this virtual
network. If the flag is set to true, and allowGatewayTransit on remote
peering is also true, virtual network will use gateways of remote virtual
network for transit. Only one peering can have this flag set to true. This
flag cannot be set if virtual network already has a gateway.
:type use_remote_gateways: bool
:param remote_virtual_network: The reference of the remote virtual
network.
:type remote_virtual_network: :class:`SubResource
<azure.mgmt.network.v2017_03_01.models.SubResource>`
:param peering_state: The status of the virtual network peering. Possible
values are 'Initiated', 'Connected', and 'Disconnected'. Possible values
include: 'Initiated', 'Connected', 'Disconnected'
:type peering_state: str or :class:`VirtualNetworkPeeringState
<azure.mgmt.network.v2017_03_01.models.VirtualNetworkPeeringState>`
:param provisioning_state: The provisioning state of the resource.
:type provisioning_state: str
:param name: The name of the resource that is unique within a resource
group. This name can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'allow_virtual_network_access': {'key': 'properties.allowVirtualNetworkAccess', 'type': 'bool'},
'allow_forwarded_traffic': {'key': 'properties.allowForwardedTraffic', 'type': 'bool'},
'allow_gateway_transit': {'key': 'properties.allowGatewayTransit', 'type': 'bool'},
'use_remote_gateways': {'key': 'properties.useRemoteGateways', 'type': 'bool'},
'remote_virtual_network': {'key': 'properties.remoteVirtualNetwork', 'type': 'SubResource'},
'peering_state': {'key': 'properties.peeringState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, id=None, allow_virtual_network_access=None, allow_forwarded_traffic=None, allow_gateway_transit=None, use_remote_gateways=None, remote_virtual_network=None, peering_state=None, provisioning_state=None, name=None, etag=None):
super(VirtualNetworkPeering, self).__init__(id=id)
self.allow_virtual_network_access = allow_virtual_network_access
self.allow_forwarded_traffic = allow_forwarded_traffic
self.allow_gateway_transit = allow_gateway_transit
self.use_remote_gateways = use_remote_gateways
self.remote_virtual_network = remote_virtual_network
self.peering_state = peering_state
self.provisioning_state = provisioning_state
self.name = name
self.etag = etag
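
# A minimal construction sketch (illustrative only; the resource ID below
# is a hypothetical placeholder, not a real subscription):
#
#   peering = VirtualNetworkPeering(
#       allow_virtual_network_access=True,
#       allow_forwarded_traffic=False,
#       use_remote_gateways=False,
#       remote_virtual_network=SubResource(
#           id='/subscriptions/<sub>/resourceGroups/<rg>'
#              '/providers/Microsoft.Network/virtualNetworks/<remote-vnet>'),
#   )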
| SUSE/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_03_01/models/virtual_network_peering.py | Python | mit | 4,140 |
##############################################################################
# Copyright (c) 2000-2016 Ericsson Telecom AB
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
#
# Contributors:
# Balasko, Jeno
# Delic, Adam
#
##############################################################################
import xml.etree.ElementTree as ET
tree = ET.parse('project_hierarchy_graph.xml')
root = tree.getroot()
f = open('project_hierarchy_graph.dot', 'w')
f.write("digraph PROJECT_HIERARCHY_GRAPH {\n")
for project in root:
for reference in project:
f.write(project.attrib['name'])
f.write(" -> ")
f.write(reference.attrib['name'])
f.write(";\n")
f.write("}\n")
f.close()
# use this to generate graph:
# > dot -Tpng project_hierarchy_graph.dot -o project_hierarchy_graph.png
| BenceJanosSzabo/titan.core | etc/scripts/tpd_graph_xml2dot.py | Python | epl-1.0 | 978 |
#!/usr/bin/env python3
# This is a simple command line script that can be used to back up the
# TACTIC database. It is independent of TACTIC, so it can be run on
# servers where TACTIC is not installed alongside the database.
import datetime
import os
import time
import subprocess
import tacticenv
from pyasm.common import Environment
from pyasm.security import Batch
# Location of zip executable
#ZIP_EXE = "C:\\Users\\user\\Documents\\backups\\7za920\\7za.exe"
ZIP_EXE = "zip"
# Location of all back-up types
BACKUP_DIR = "/spt/tactic/tactic_temp/"
# Locations of different backup types
DB_DIR = "backup_db"
PROJECT_DIR = "backup_tactic"
ASSETS_DIR = "backup_assets"
# Location of TACTIC src code
TACTIC_DIR = "/spt/tactic/tactic/"
class DatabaseBackup(object):
def execute(my):
base_dir = "%s%s" % (BACKUP_DIR, DB_DIR)
now = datetime.datetime.now()
date = now.strftime("%Y%m%d_%H%M")
file_name = 'tacticDatabase_%s.sql' % date
path = "%s/%s" % (base_dir, file_name)
print("Backing up database to: [%s]" % path)
        # Check that base_dir exists and is writable.
if not os.path.exists(base_dir):
os.mkdir(base_dir)
# Create backup, and if successful, prune old
# backups.
try:
cmd = 'pg_dumpall -U postgres -c > %s' % path
os.system(cmd)
except Exception as e:
print("Could not run database backup: %s" % e)
else:
cmd = PruneBackup()
cmd.execute(base_dir, 30)
#cmd = 'gzip -f %s' % path
#os.system(cmd)
class ProjectBackup(object):
def execute(my):
base_dir = "%s%s" % (BACKUP_DIR, PROJECT_DIR)
zip_exe = ZIP_EXE
now = datetime.datetime.now()
date = now.strftime("%Y%m%d_%H%M")
file_path = '%s/tactic_%s.zip' % (base_dir, date)
        # Check that base_dir exists and is writable.
if not os.path.exists(base_dir):
os.mkdir(base_dir)
# Create backup, and if successful, prune old
# backups.
try:
subprocess.call([zip_exe, "-r", file_path, TACTIC_DIR])
except Exception as e:
print("Could not zip project directory. %s" % e)
else:
cmd = PruneBackup()
cmd.execute(base_dir, 1)
class AssetsBackup(object):
def execute(my):
base_dir = "%s%s" % (BACKUP_DIR, ASSETS_DIR)
asset_dir = Environment.get_asset_dir()
zip_exe = ZIP_EXE
now = datetime.datetime.now()
date = now.strftime("%Y%m%d_%H%M")
file_path = '%s/assets_%s.zip' % (base_dir, date)
        # Check that base_dir exists and is writable.
if not os.path.exists(base_dir):
os.mkdir(base_dir)
# Create backup, and if successful, prune old
# backups.
try:
subprocess.call([zip_exe, "-r", file_path, asset_dir])
except Exception as e:
print("Could not zip assets directory: %s" % e)
else:
cmd = PruneBackup()
cmd.execute(base_dir, 3)
class PruneBackup(object):
def execute(my, directory, days):
'''Removes files in directory older than specified days.'''
dir = directory
print("Pruning backup files older than [%s] days" % days)
today = datetime.datetime.today()
files = os.listdir(dir)
for file in files:
path = "%s/%s" % (dir, file)
(mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(path)
ctime = datetime.datetime.fromtimestamp(ctime)
if today - ctime > datetime.timedelta(days=days):
os.unlink(path)
if __name__ == '__main__':
'''
# TODO
os.system("vacuumdb -U postgres --all --analyze")
'''
Batch()
cmd = DatabaseBackup()
cmd.execute()
cmd = AssetsBackup()
cmd.execute()
cmd = ProjectBackup()
#cmd.execute()
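
# In a typical deployment this script is scheduled via cron, e.g.
# (illustrative entry; the path is hypothetical):
#   0 3 * * * python /spt/tactic/scripts/tactic_backup.py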
| Southpaw-TACTIC/TACTIC | src/install/backup/tactic_backup.py | Python | epl-1.0 | 4,220 |
"""A library of helper functions for the CherryPy test suite."""
import datetime
import io
import logging
import os
import re
import subprocess
import sys
import time
import unittest
import warnings
import portend
import pytest
import six
from cheroot.test import webtest
import cherrypy
from cherrypy._cpcompat import text_or_bytes, HTTPSConnection, ntob
from cherrypy.lib import httputil
from cherrypy.lib import gctools
log = logging.getLogger(__name__)
thisdir = os.path.abspath(os.path.dirname(__file__))
serverpem = os.path.join(os.getcwd(), thisdir, 'test.pem')
class Supervisor(object):
"""Base class for modeling and controlling servers during testing."""
def __init__(self, **kwargs):
        for k, v in kwargs.items():
            if k == 'port':
                # coerce the port to int once, so the generic setattr
                # below stores the converted value
                v = int(v)
            setattr(self, k, v)
def log_to_stderr(msg, level):
return sys.stderr.write(msg + os.linesep)
class LocalSupervisor(Supervisor):
"""Base class for modeling/controlling servers which run in the same
process.
When the server side runs in a different process, start/stop can dump all
state between each test module easily. When the server side runs in the
same process as the client, however, we have to do a bit more work to
ensure config and mounted apps are reset between tests.
"""
using_apache = False
using_wsgi = False
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
cherrypy.server.httpserver = self.httpserver_class
        # This is perhaps the wrong place for this call, but this is the
        # only place I've found so far that I KNOW is early enough to set it.
cherrypy.config.update({'log.screen': False})
engine = cherrypy.engine
if hasattr(engine, 'signal_handler'):
engine.signal_handler.subscribe()
if hasattr(engine, 'console_control_handler'):
engine.console_control_handler.subscribe()
def start(self, modulename=None):
"""Load and start the HTTP server."""
if modulename:
# Unhook httpserver so cherrypy.server.start() creates a new
# one (with config from setup_server, if declared).
cherrypy.server.httpserver = None
cherrypy.engine.start()
self.sync_apps()
def sync_apps(self):
"""Tell the server about any apps which the setup functions mounted."""
pass
def stop(self):
td = getattr(self, 'teardown', None)
if td:
td()
cherrypy.engine.exit()
servers_copy = list(six.iteritems(getattr(cherrypy, 'servers', {})))
for name, server in servers_copy:
server.unsubscribe()
del cherrypy.servers[name]
class NativeServerSupervisor(LocalSupervisor):
"""Server supervisor for the builtin HTTP server."""
httpserver_class = 'cherrypy._cpnative_server.CPHTTPServer'
using_apache = False
using_wsgi = False
def __str__(self):
return 'Builtin HTTP Server on %s:%s' % (self.host, self.port)
class LocalWSGISupervisor(LocalSupervisor):
"""Server supervisor for the builtin WSGI server."""
httpserver_class = 'cherrypy._cpwsgi_server.CPWSGIServer'
using_apache = False
using_wsgi = True
def __str__(self):
return 'Builtin WSGI Server on %s:%s' % (self.host, self.port)
def sync_apps(self):
"""Hook a new WSGI app into the origin server."""
cherrypy.server.httpserver.wsgi_app = self.get_app()
def get_app(self, app=None):
"""Obtain a new (decorated) WSGI app to hook into the origin server."""
if app is None:
app = cherrypy.tree
if self.validate:
try:
from wsgiref import validate
except ImportError:
warnings.warn(
'Error importing wsgiref. The validator will not run.')
else:
# wraps the app in the validator
app = validate.validator(app)
return app
def get_cpmodpy_supervisor(**options):
from cherrypy.test import modpy
sup = modpy.ModPythonSupervisor(**options)
sup.template = modpy.conf_cpmodpy
return sup
def get_modpygw_supervisor(**options):
from cherrypy.test import modpy
sup = modpy.ModPythonSupervisor(**options)
sup.template = modpy.conf_modpython_gateway
sup.using_wsgi = True
return sup
def get_modwsgi_supervisor(**options):
from cherrypy.test import modwsgi
return modwsgi.ModWSGISupervisor(**options)
def get_modfcgid_supervisor(**options):
from cherrypy.test import modfcgid
return modfcgid.ModFCGISupervisor(**options)
def get_modfastcgi_supervisor(**options):
from cherrypy.test import modfastcgi
return modfastcgi.ModFCGISupervisor(**options)
def get_wsgi_u_supervisor(**options):
cherrypy.server.wsgi_version = ('u', 0)
return LocalWSGISupervisor(**options)
class CPWebCase(webtest.WebCase):
script_name = ''
scheme = 'http'
available_servers = {'wsgi': LocalWSGISupervisor,
'wsgi_u': get_wsgi_u_supervisor,
'native': NativeServerSupervisor,
'cpmodpy': get_cpmodpy_supervisor,
'modpygw': get_modpygw_supervisor,
'modwsgi': get_modwsgi_supervisor,
'modfcgid': get_modfcgid_supervisor,
'modfastcgi': get_modfastcgi_supervisor,
}
default_server = 'wsgi'
@classmethod
def _setup_server(cls, supervisor, conf):
v = sys.version.split()[0]
log.info('Python version used to run this test script: %s' % v)
log.info('CherryPy version: %s' % cherrypy.__version__)
if supervisor.scheme == 'https':
ssl = ' (ssl)'
else:
ssl = ''
log.info('HTTP server version: %s%s' % (supervisor.protocol, ssl))
log.info('PID: %s' % os.getpid())
cherrypy.server.using_apache = supervisor.using_apache
cherrypy.server.using_wsgi = supervisor.using_wsgi
if sys.platform[:4] == 'java':
cherrypy.config.update({'server.nodelay': False})
if isinstance(conf, text_or_bytes):
parser = cherrypy.lib.reprconf.Parser()
conf = parser.dict_from_file(conf).get('global', {})
else:
conf = conf or {}
baseconf = conf.copy()
baseconf.update({'server.socket_host': supervisor.host,
'server.socket_port': supervisor.port,
'server.protocol_version': supervisor.protocol,
'environment': 'test_suite',
})
if supervisor.scheme == 'https':
# baseconf['server.ssl_module'] = 'builtin'
baseconf['server.ssl_certificate'] = serverpem
baseconf['server.ssl_private_key'] = serverpem
# helper must be imported lazily so the coverage tool
# can run against module-level statements within cherrypy.
# Also, we have to do "from cherrypy.test import helper",
# exactly like each test module does, because a relative import
# would stick a second instance of webtest in sys.modules,
# and we wouldn't be able to globally override the port anymore.
if supervisor.scheme == 'https':
webtest.WebCase.HTTP_CONN = HTTPSConnection
return baseconf
@classmethod
def setup_class(cls):
''
# Creates a server
conf = {
'scheme': 'http',
'protocol': 'HTTP/1.1',
'port': 54583,
'host': '127.0.0.1',
'validate': False,
'server': 'wsgi',
}
supervisor_factory = cls.available_servers.get(
conf.get('server', 'wsgi'))
if supervisor_factory is None:
raise RuntimeError('Unknown server in config: %s' % conf['server'])
supervisor = supervisor_factory(**conf)
# Copied from "run_test_suite"
cherrypy.config.reset()
baseconf = cls._setup_server(supervisor, conf)
cherrypy.config.update(baseconf)
setup_client()
if hasattr(cls, 'setup_server'):
# Clear the cherrypy tree and clear the wsgi server so that
# it can be updated with the new root
cherrypy.tree = cherrypy._cptree.Tree()
cherrypy.server.httpserver = None
cls.setup_server()
# Add a resource for verifying there are no refleaks
# to *every* test class.
cherrypy.tree.mount(gctools.GCRoot(), '/gc')
cls.do_gc_test = True
supervisor.start(cls.__module__)
cls.supervisor = supervisor
@classmethod
def teardown_class(cls):
''
if hasattr(cls, 'setup_server'):
cls.supervisor.stop()
do_gc_test = False
def test_gc(self):
if not self.do_gc_test:
return
self.getPage('/gc/stats')
try:
self.assertBody('Statistics:')
        except Exception:
            # Failures occur intermittently. See #1420
            pass
def prefix(self):
return self.script_name.rstrip('/')
def base(self):
if ((self.scheme == 'http' and self.PORT == 80) or
(self.scheme == 'https' and self.PORT == 443)):
port = ''
else:
port = ':%s' % self.PORT
return '%s://%s%s%s' % (self.scheme, self.HOST, port,
self.script_name.rstrip('/'))
def exit(self):
sys.exit()
def getPage(self, url, headers=None, method='GET', body=None,
protocol=None, raise_subcls=None):
"""Open the url. Return status, headers, body.
`raise_subcls` must be a tuple with the exceptions classes
or a single exception class that are not going to be considered
a socket.error regardless that they were are subclass of a
socket.error and therefore not considered for a connection retry.
"""
if self.script_name:
url = httputil.urljoin(self.script_name, url)
return webtest.WebCase.getPage(self, url, headers, method, body,
protocol, raise_subcls)
def skip(self, msg='skipped '):
pytest.skip(msg)
def assertErrorPage(self, status, message=None, pattern=''):
"""Compare the response body with a built in error page.
The function will optionally look for the regexp pattern,
within the exception embedded in the error page."""
# This will never contain a traceback
page = cherrypy._cperror.get_error_page(status, message=message)
# First, test the response body without checking the traceback.
# Stick a match-all group (.*) in to grab the traceback.
def esc(text):
return re.escape(ntob(text))
epage = re.escape(page)
epage = epage.replace(
esc('<pre id="traceback"></pre>'),
esc('<pre id="traceback">') + b'(.*)' + esc('</pre>'))
m = re.match(epage, self.body, re.DOTALL)
if not m:
self._handlewebError(
'Error page does not match; expected:\n' + page)
return
# Now test the pattern against the traceback
if pattern is None:
# Special-case None to mean that there should be *no* traceback.
if m and m.group(1):
self._handlewebError('Error page contains traceback')
else:
if (m is None) or (
not re.search(ntob(re.escape(pattern), self.encoding),
m.group(1))):
msg = 'Error page does not contain %s in traceback'
self._handlewebError(msg % repr(pattern))
date_tolerance = 2
def assertEqualDates(self, dt1, dt2, seconds=None):
"""Assert abs(dt1 - dt2) is within Y seconds."""
if seconds is None:
seconds = self.date_tolerance
if dt1 > dt2:
diff = dt1 - dt2
else:
diff = dt2 - dt1
if not diff < datetime.timedelta(seconds=seconds):
raise AssertionError('%r and %r are not within %r seconds.' %
(dt1, dt2, seconds))
def _test_method_sorter(_, x, y):
"""Monkeypatch the test sorter to always run test_gc last in each suite."""
if x == 'test_gc':
return 1
if y == 'test_gc':
return -1
if x > y:
return 1
if x < y:
return -1
return 0
unittest.TestLoader.sortTestMethodsUsing = _test_method_sorter
def setup_client():
"""Set up the WebCase classes to match the server's socket settings."""
webtest.WebCase.PORT = cherrypy.server.socket_port
webtest.WebCase.HOST = cherrypy.server.socket_host
if cherrypy.server.ssl_certificate:
CPWebCase.scheme = 'https'
# --------------------------- Spawning helpers --------------------------- #
class CPProcess(object):
pid_file = os.path.join(thisdir, 'test.pid')
config_file = os.path.join(thisdir, 'test.conf')
config_template = """[global]
server.socket_host: '%(host)s'
server.socket_port: %(port)s
checker.on: False
log.screen: False
log.error_file: r'%(error_log)s'
log.access_file: r'%(access_log)s'
%(ssl)s
%(extra)s
"""
error_log = os.path.join(thisdir, 'test.error.log')
access_log = os.path.join(thisdir, 'test.access.log')
def __init__(self, wait=False, daemonize=False, ssl=False,
socket_host=None, socket_port=None):
self.wait = wait
self.daemonize = daemonize
self.ssl = ssl
self.host = socket_host or cherrypy.server.socket_host
self.port = socket_port or cherrypy.server.socket_port
def write_conf(self, extra=''):
if self.ssl:
serverpem = os.path.join(thisdir, 'test.pem')
ssl = """
server.ssl_certificate: r'%s'
server.ssl_private_key: r'%s'
""" % (serverpem, serverpem)
else:
ssl = ''
conf = self.config_template % {
'host': self.host,
'port': self.port,
'error_log': self.error_log,
'access_log': self.access_log,
'ssl': ssl,
'extra': extra,
}
with io.open(self.config_file, 'w', encoding='utf-8') as f:
f.write(six.text_type(conf))
def start(self, imports=None):
"""Start cherryd in a subprocess."""
portend.free(self.host, self.port, timeout=1)
args = [
'-m',
'cherrypy',
'-c', self.config_file,
'-p', self.pid_file,
]
r"""
Command for running cherryd server with autoreload enabled
Using
```
['-c',
"__requires__ = 'CherryPy'; \
import pkg_resources, re, sys; \
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]); \
sys.exit(\
pkg_resources.load_entry_point(\
'CherryPy', 'console_scripts', 'cherryd')())"]
```
doesn't work as it's impossible to reconstruct the `-c`'s contents.
Ref: https://github.com/cherrypy/cherrypy/issues/1545
"""
if not isinstance(imports, (list, tuple)):
imports = [imports]
for i in imports:
if i:
args.append('-i')
args.append(i)
if self.daemonize:
args.append('-d')
env = os.environ.copy()
# Make sure we import the cherrypy package in which this module is
# defined.
grandparentdir = os.path.abspath(os.path.join(thisdir, '..', '..'))
if env.get('PYTHONPATH', ''):
env['PYTHONPATH'] = os.pathsep.join(
(grandparentdir, env['PYTHONPATH']))
else:
env['PYTHONPATH'] = grandparentdir
self._proc = subprocess.Popen([sys.executable] + args, env=env)
if self.wait:
self.exit_code = self._proc.wait()
else:
portend.occupied(self.host, self.port, timeout=5)
# Give the engine a wee bit more time to finish STARTING
if self.daemonize:
time.sleep(2)
else:
time.sleep(1)
def get_pid(self):
if self.daemonize:
return int(open(self.pid_file, 'rb').read())
return self._proc.pid
def join(self):
"""Wait for the process to exit."""
if self.daemonize:
return self._join_daemon()
self._proc.wait()
def _join_daemon(self):
try:
try:
# Mac, UNIX
os.wait()
except AttributeError:
# Windows
try:
pid = self.get_pid()
except IOError:
# Assume the subprocess deleted the pidfile on shutdown.
pass
else:
os.waitpid(pid, 0)
except OSError:
x = sys.exc_info()[1]
if x.args != (10, 'No child processes'):
raise
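
# A minimal CPProcess usage sketch (illustrative; assumes an importable
# module 'myapp' that mounts a CherryPy application at import time):
#
#   p = CPProcess(wait=False)
#   p.write_conf()
#   p.start(imports='myapp')
#   ...
#   p.join()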
| Southpaw-TACTIC/TACTIC | 3rd_party/python2/site-packages/cherrypy/test/helper.py | Python | epl-1.0 | 17,316 |
__doc__ = """Random number array generators for numarray.
This package was ported to numarray from Numeric's RandomArray and
provides functions to generate numarray of random numbers.
"""
from RandomArray2 import *
| fxia22/ASM_xf | PythonD/site_python/numarray/random_array/__init__.py | Python | gpl-2.0 | 218 |
# Copyright 2011 Google Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Unit tests for nss_cache/util/timestamps.py."""
__author__ = '[email protected] (Jamie Wilkinson)'
import os
import shutil
import tempfile
import time
import unittest
import mox
from nss_cache.util import timestamps
class TestTimestamps(mox.MoxTestBase):
def setUp(self):
super(TestTimestamps, self).setUp()
self.workdir = tempfile.mkdtemp()
def tearDown(self):
super(TestTimestamps, self).tearDown()
shutil.rmtree(self.workdir)
  def testReadTimestampEpoch(self):
ts_filename = os.path.join(self.workdir, 'tsr')
ts_file = open(ts_filename, 'w')
ts_file.write('1970-01-01T00:00:01Z\n')
ts_file.close()
ts = timestamps.ReadTimestamp(ts_filename)
self.assertEqual(time.gmtime(1), ts)
def testReadTimestamp(self):
# TZ=UTC date -d @1306428781
# Thu May 26 16:53:01 UTC 2011
ts_filename = os.path.join(self.workdir, 'tsr')
ts_file = open(ts_filename, 'w')
ts_file.write('2011-05-26T16:53:01Z\n')
ts_file.close()
ts = timestamps.ReadTimestamp(ts_filename)
self.assertEqual(time.gmtime(1306428781), ts)
def testReadTimestampInFuture(self):
ts_filename = os.path.join(self.workdir, 'tsr')
ts_file = open(ts_filename, 'w')
ts_file.write('2011-05-26T16:02:00Z')
ts_file.close()
now = time.gmtime(1)
self.mox.StubOutWithMock(time, 'gmtime')
time.gmtime().AndReturn(now)
self.mox.ReplayAll()
ts = timestamps.ReadTimestamp(ts_filename)
self.assertEqual(now, ts)
def testWriteTimestamp(self):
ts_filename = os.path.join(self.workdir, 'tsw')
good_ts = time.gmtime(1)
timestamps.WriteTimestamp(good_ts, ts_filename)
self.assertEqual(good_ts, timestamps.ReadTimestamp(ts_filename))
ts_file = open(ts_filename, 'r')
self.assertEqual('1970-01-01T00:00:01Z\n', ts_file.read())
if __name__ == '__main__':
unittest.main()
| UPPMAX/nsscache | nss_cache/util/timestamps_test.py | Python | gpl-2.0 | 2,605 |
#!/usr/bin/pythonTest
# -*- coding: utf-8 -*-
#
# Web functions want links
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# The GNU General Public License is available from:
# The Free Software Foundation, Inc.
# 51 Franklin Street, Fifth Floor
# Boston MA 02110-1301 USA
#
# http://www.gnu.org/licenses/gpl.html
#
# Copyright 2015-2016 Rick Graves
#
def getUniqueLinks( sReadFile, sOutFile ):
#
from File.Get import getListFromFileLines
from File.Write import QuickDumpLines
#
from Web.Address import getHostPathTuple, getDomainOffURL
from Web.Test import isURL
#
lLines = getListFromFileLines( sReadFile )
#
setLinks= frozenset( filter( isURL, lLines ) )
#
#
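    # decorate-sort-undecorate: key each URL by (registered domain, path),
    # sort on the key, then strip the key off again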
lDecorate = [ ( getHostPathTuple( sURL ), sURL ) for sURL in setLinks ]
#
lDecorate = [ ( ( getDomainOffURL( t[0][0] ), t[0][1] ), t[1] ) for t in lDecorate ]
#
lDecorate.sort()
#
lLinks = [ t[1] for t in lDecorate ]
#
QuickDumpLines( lLinks, sOutFile )
if __name__ == "__main__":
#
from os.path import join
from sys import argv
#
from six import print_ as print3
#
from Dir.Get import sTempDir
from File.Test import isFileThere
from Utils.Result import sayTestResult
#
lProblems = []
#
args = argv[ 1 : ]
#
sReadFile = join( sTempDir, 'LotsOfLinks.txt' )
sOutFile = join( sTempDir, 'UniqueLinks.txt' )
#
    if args:
        #
        sReadFile = args[0]
        #
        if len( args ) > 1:
            #
            sOutFile = args[1]
            #
        #
    #
    if isFileThere( sReadFile ):
        #
        getUniqueLinks( sReadFile, sOutFile )
        #
    else:
        #
        print3( 'Usage: WantLinks [inputFile [, outputFile] ]' )
        print3( 'default inputFile {temp dir}LotsOfLinks.txt' )
        print3( 'default outputFile {temp dir}UniqueLinks.txt' )
        #
    #
if False:
#
lProblems.append( 'getDotQuad4IspTester()' )
#
#
#
sayTestResult( lProblems ) | netvigator/myPyPacks | pyPacks/Web/WantLinks.py | Python | gpl-2.0 | 2,653 |
import BaseHTTPServer
import cgi
import ctypes
import os
import sys
import threading
from PySide import QtGui
import MaxPlus
PORT = 8000
class MyThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.exiting = False
address = ('localhost', PORT)
self.server = BaseHTTPServer.HTTPServer(address, MyHandler)
self._stop = threading.Event()
def run(self):
self.server.serve_forever()
def stop(self):
self.server.server_close()
self.server.shutdown()
self._stop.set()
class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
        rootdir = os.path.join(os.path.dirname(__file__), 'html')
try:
if self.path == '/':
self.path = '/index.html'
if self.path.endswith('.html'):
self.send_response(200)
                self.send_header('Content-type', 'text/html')
self.end_headers()
f = open(rootdir + self.path)
self.wfile.write(f.read())
f.close()
return
except IOError:
self.send_error(404, 'file not found')
def do_POST(self):
if self.path=="/cmd":
form = cgi.FieldStorage(
fp=self.rfile,
headers=self.headers,
environ={'REQUEST_METHOD':'POST',
'CONTENT_TYPE':self.headers['Content-Type'],
})
self.send_response(301)
self.send_header('Location', '/')
self.end_headers()
try:
MaxPlus.Core.EvalMAXScript(form["cmd"].value)
MaxPlus.ViewportManager_ForceCompleteRedraw()
except:
print "Needs to be run from a 3ds max instance"
return
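
# The /cmd endpoint accepts MAXScript in a form field; e.g. from any HTTP
# client (illustrative): curl -d "cmd=box()" http://localhost:8000/cmd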
class MyWindow(QtGui.QWidget):
def __init__(self, parent=None):
super(MyWindow, self).__init__(parent)
self.setWindowTitle('Simple 3ds Max webserver')
self.resize(200,50)
self.btn_run = QtGui.QPushButton('Run')
layout = QtGui.QVBoxLayout()
layout.addWidget(self.btn_run)
self.setLayout(layout)
self.btn_run.clicked.connect(self.run)
self.serverThread = None
def run(self):
if not self.serverThread:
print "Serving at port", PORT
self.btn_run.setText('Stop...')
self.serverThread = MyThread()
self.serverThread.start()
else:
print "Stopping webserver"
self.btn_run.setText('Run')
self.serverThread.stop()
self.serverThread = None
def closeEvent(self, *args, **kwargs):
if self.serverThread:
print "Stopping webserver"
self.btn_run.setText('Run')
self.serverThread.stop()
self.serverThread = None
class _GCProtector(object):
controls = []
if __name__ == '__main__':
app = QtGui.QApplication.instance()
if not app:
app = QtGui.QApplication([])
window = MyWindow()
_GCProtector.controls.append(window)
window.show()
capsule = window.effectiveWinId()
ctypes.pythonapi.PyCObject_AsVoidPtr.restype = ctypes.c_void_p
ctypes.pythonapi.PyCObject_AsVoidPtr.argtypes = [ctypes.py_object]
ptr = ctypes.pythonapi.PyCObject_AsVoidPtr(capsule)
MaxPlus.Win32.Set3dsMaxAsParentWindow(ptr)
| maxwellalive/YCDIVFX_MaxPlus | Examples/simplewebserver.py | Python | gpl-2.0 | 3,452 |
########################################################################
#
# File Name: HTMLButtonElement
#
# Documentation: http://docs.4suite.com/4DOM/HTMLButtonElement.html
#
### This file is automatically generated by GenerateHtml.py.
### DO NOT EDIT!
"""
WWW: http://4suite.com/4DOM e-mail: [email protected]
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.com/COPYRIGHT for license and copyright information
"""
import string
from xml.dom import Node
from xml.dom.html.HTMLElement import HTMLElement
class HTMLButtonElement(HTMLElement):
def __init__(self, ownerDocument, nodeName="BUTTON"):
HTMLElement.__init__(self, ownerDocument, nodeName)
### Attribute Methods ###
def _get_accessKey(self):
return self.getAttribute("ACCESSKEY")
def _set_accessKey(self, value):
self.setAttribute("ACCESSKEY", value)
def _get_disabled(self):
return self.hasAttribute("DISABLED")
def _set_disabled(self, value):
if value:
self.setAttribute("DISABLED", "DISABLED")
else:
self.removeAttribute("DISABLED")
def _get_form(self):
parent = self.parentNode
while parent:
if parent.nodeName == "FORM":
return parent
parent = parent.parentNode
return None
def _get_name(self):
return self.getAttribute("NAME")
def _set_name(self, value):
self.setAttribute("NAME", value)
def _get_tabIndex(self):
value = self.getAttribute("TABINDEX")
if value:
return int(value)
return 0
def _set_tabIndex(self, value):
self.setAttribute("TABINDEX", str(value))
def _get_type(self):
return self.getAttribute("TYPE")
def _get_value(self):
return self.getAttribute("VALUE")
def _set_value(self, value):
self.setAttribute("VALUE", value)
### Attribute Access Mappings ###
_readComputedAttrs = HTMLElement._readComputedAttrs.copy()
_readComputedAttrs.update({
"accessKey" : _get_accessKey,
"disabled" : _get_disabled,
"form" : _get_form,
"name" : _get_name,
"tabIndex" : _get_tabIndex,
"type" : _get_type,
"value" : _get_value
})
_writeComputedAttrs = HTMLElement._writeComputedAttrs.copy()
_writeComputedAttrs.update({
"accessKey" : _set_accessKey,
"disabled" : _set_disabled,
"name" : _set_name,
"tabIndex" : _set_tabIndex,
"value" : _set_value
})
_readOnlyAttrs = filter(lambda k,m=_writeComputedAttrs: not m.has_key(k),
HTMLElement._readOnlyAttrs + _readComputedAttrs.keys())
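
# Usage sketch (assumes an existing 4DOM HTML document 'doc'):
#   button = doc.createElement('BUTTON')   # yields an HTMLButtonElement
#   button.accessKey = 'k'                 # dispatched to _set_accessKey
#   key = button.accessKey                 # dispatched to _get_accessKey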
| carvalhomb/tsmells | guess/src/Lib/xml/dom/html/HTMLButtonElement.py | Python | gpl-2.0 | 2,860 |
# -*- coding: utf-8 -*-
# MouseTrap
#
# Copyright 2009 Flavio Percoco Premoli
#
# This file is part of mouseTrap.
#
# MouseTrap is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License v2 as published
# by the Free Software Foundation.
#
# mouseTrap is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with mouseTrap. If not, see <http://www.gnu.org/licenses/>.
""" Common MouseTrap Functions. """
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2008 Flavio Percoco Premoli."
__license__ = "GPLv2"
import os
import re
def get_py_list(dirlist):
"""
Checks for .py files on directories in dirlist
and removes the extensions.
Arguments:
- dirlist: The directories list.
"""
if not type(dirlist) is list:
dirlist = [dirlist]
reg = re.compile(r'([A-Za-z0-9]+)\.py$', re.DOTALL)
group = []
for dir in dirlist:
if not os.path.isdir(dir):
continue
        group.append([ mod[0]
                       for mod in [ reg.findall(f)
                                    for f in os.listdir("%s/" % dir)
                                    if "handler" not in f ]
                       if mod ])

    return [x for l in group for x in l]
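
# Example (hypothetical directory contents):
#   get_py_list(["/usr/share/mousetrap/scripts"])
#   -> ['colors', 'joystick']   # extensions stripped, handler modules skipped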
| lhotchkiss/mousetrap | src/mousetrap/app/commons.py | Python | gpl-2.0 | 1,477 |
import ppc_commands
ppc_model = 'ppc440gx'
funcs = {}
ppc_commands.setup_local_functions(ppc_model, funcs)
class_funcs = { ppc_model: funcs }
ppc_commands.enable_generic_ppc_commands(ppc_model)
ppc_commands.enable_4xx_tlb_commands(ppc_model)
ppc_commands.enable_440_tlb_commands(ppc_model)
| iniverno/RnR-LLC | simics-3.0-install/simics-3.0.31/amd64-linux/lib/python/mod_ppc440gx_turbo_commands.py | Python | gpl-2.0 | 293 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-22 14:09
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('eighth', '0036_eighthscheduledactivity_administrative'),
('eighth', '0037_auto_20160307_2342'),
]
| jacobajit/ion | intranet/apps/eighth/migrations/0038_merge.py | Python | gpl-2.0 | 329 |