JavierGarciaD/AlgoTrader | algotrader/common/utilities.py
"""
Some utilities used across the program
"""
import datetime
import os
import pathlib
from functools import wraps
from itertools import tee, islice, chain
def rename_files(dir_path):
"""
Rename all files for a given extension within a folder
"""
dir_path = pathlib.Path(dir_path)
counter = 0
all_files = dir_path.glob('**/*.gz')
for old_path in dir_path.glob('**/*.gz'):
# get the components of the path
parts = pathlib.Path(old_path).parts
parent = pathlib.Path(old_path).parent
# Construct new file path
wk = parts[-1]
yr = parts[-2]
sym = parts[-3]
new_name = sym + '_' + yr + '_' + wk
new_path = parent / new_name
# Rename
os.rename(old_path, new_path)
counter += 1
print('Doing {} out of {}'.format(counter, len(list(all_files))))
def zero_bytes_files(dir_path, action=None):
    """
    Perform action on small size files within a directory

    Args:
        dir_path:
        action: 'print', 'delete', None

    Returns: list of file paths
    """
    zeros = []
    dir_path = pathlib.Path(dir_path)
    for each_file in dir_path.glob('**/*.gz'):
        print('Checking file: {}'.format(each_file))
        # size in bytes; `<` is assumed here (the dump read `==`, which would
        # only match files of exactly 100000 bytes rather than "small" files)
        if os.stat(each_file).st_size < 100000:
            zeros.append(each_file)
    if action is None:
        print('Done !!!')
    elif action == 'print':
        print(zeros)
    elif action == 'delete':
        for to_delete in zeros:
            os.remove(to_delete)
            print('File deleted: {}'.format(to_delete))
    return zeros
def iter_islast(iterable):
    """
    Generates pairs where the first element is an item from the iterable
    source and the second element is a boolean flag indicating if it is the
    last item in the sequence.

    https://code.activestate.com/recipes/392015-finding-the-last-item-in-a-loop/

    Returns: (item, islast)
    """
    it = iter(iterable)
    prev = next(it)
    for item in it:
        yield prev, False
        prev = item
    yield prev, True
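
# Illustrative:
#   >>> list(iter_islast("abc"))
#   [('a', False), ('b', False), ('c', True)]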
def previous_and_next(some_iterable):
    """
    Generates tuples with three consecutive elements of an iterable
    source where the first element is the previous element of the iteration,
    the second element is the current element and the last is the next.

    https://stackoverflow.com/a/1012089/3512107

    Returns: (previous, item, next)
    """
    prevs, items, nexts = tee(some_iterable, 3)
    prevs = chain([None], prevs)
    nexts = chain(islice(nexts, 1, None), [None])
    return zip(prevs, items, nexts)
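
# Illustrative:
#   >>> list(previous_and_next([1, 2, 3]))
#   [(None, 1, 2), (1, 2, 3), (2, 3, None)]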
def fn_timer(function):
    """
    Define a decorator that measures the elapsed time in running the function.

    http://www.marinamele.com/7-tips-to-time-python-scripts-and-control-memory-and-cpu-usage

    Returns: print the elapsed time
    """
    @wraps(function)
    def function_timer(*args, **kwargs):
        t0 = datetime.datetime.now()
        result = function(*args, **kwargs)
        t1 = datetime.datetime.now()
        print("Total time running {}: {}".format(function.__name__, t1 - t0))
        return result
    return function_timer
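
# Illustrative usage (the decorated function name is hypothetical):
#   @fn_timer
#   def crunch():
#       return sum(range(10**6))
#   crunch()  # prints e.g. "Total time running crunch: 0:00:00.031415"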

ruipgpinheiro/subuser | logic/subuserCommands/subuser-dry-run.py

#!/usr/bin/env python
# This file should be compatible with both Python 2 and 3.
# If it is not, please file a bug report.

try:
  import pathConfig
except ImportError:
  pass
#external imports
import sys
import os
#internal imports
import subuserlib.classes.user
import subuserlib.profile

##############################################################
helpString = """
Display the command which would be issued to launch Docker if you were to run this subuser.

For example:

 $ subuser dry-run iceweasel

Will display the command used to launch the subuser iceweasel.

Please note, this is only a rough approximation for debugging purposes and there is no guarantee that the command displayed here would actually work.
"""

#################################################################################################

def dryRunTestSetup():
  import sys,os,getpass
  os.getuid = lambda: 1000
  getpass.getuser = lambda: "travis"

@subuserlib.profile.do_cprofile
def dryRun(args):
  """
  Print the command that would have been run if this wasn't a dry run.

  >>> dry_run = __import__("subuser-dry-run")
  >>> dry_run.dryRunTestSetup()
  >>> subuser = __import__("subuser-subuser")
  >>> remove_old_images = __import__("subuser-remove-old-images")

  If we dry run the basic foo test subuser, we will see the generated pre-run Dockerfile and also the docker command that will launch our subuser.

  >>> dry_run.dryRun(["foo"])
  The image will be prepared using the Dockerfile:
  FROM 2
  RUN useradd --uid=1000 travis ;export exitstatus=$? ; if [ $exitstatus -eq 4 ] ; then echo uid exists ; elif [ $exitstatus -eq 9 ]; then echo username exists. ; else exit $exitstatus ; fi
  RUN test -d /home/travis || mkdir /home/travis && chown travis /home/travis
  <BLANKLINE>
  The command to launch the image is:
  docker 'run' '--rm' '-i' '-e' 'HOME=/home/travis' '--workdir=/home/travis' '--net=none' '--user=1000' '--hostname' '<random-hostname>' '--entrypoint' '/usr/bin/foo' '3'

  Running subusers installed through temporary repositories works as well. Here, we add a subuser named bar, run it, and then remove it again.

  >>> subuser.subuser(["add","bar","--accept","bar@file:///home/travis/remote-test-repo"])
  Adding subuser bar bar@file:///home/travis/remote-test-repo
  Adding new temporary repository file:///home/travis/remote-test-repo
  Verifying subuser configuration.
  Verifying registry consistency...
  Unregistering any non-existant installed images.
  bar would like to have the following permissions:
  Description: bar
  Maintainer: fred
  Executable: /usr/bin/bar
  A - Accept and apply changes
  E - Apply changes and edit result
  A
  Checking if images need to be updated or installed...
  Checking if subuser bar is up to date.
  Installing bar ...
  Building...
  Building...
  Building...
  Successfully built 4
  Building...
  Building...
  Building...
  Successfully built 5
  Installed new image <5> for subuser bar
  Running garbage collector on temporary repositories...

  The actual dry-run call.

  >>> dry_run.dryRun(["bar"])
  The image will be prepared using the Dockerfile:
  FROM 5
  RUN useradd --uid=1000 travis ;export exitstatus=$? ; if [ $exitstatus -eq 4 ] ; then echo uid exists ; elif [ $exitstatus -eq 9 ]; then echo username exists. ; else exit $exitstatus ; fi
  RUN test -d /home/travis || mkdir /home/travis && chown travis /home/travis
  <BLANKLINE>
  The command to launch the image is:
  docker 'run' '--rm' '-i' '-e' 'HOME=/home/travis' '--workdir=/home/travis' '--net=none' '--user=1000' '--hostname' '<random-hostname>' '--entrypoint' '/usr/bin/bar' '6'

  Cleanup.

  >>> subuser.subuser(["remove","bar"])
  Removing subuser bar
  If you wish to remove the subusers image, issue the command $ subuser remove-old-images
  Verifying subuser configuration.
  Verifying registry consistency...
  Unregistering any non-existant installed images.
  Running garbage collector on temporary repositories...

  >>> remove_old_images.removeOldImages([])
  Removing unneeded image 5 : bar@file:///home/travis/remote-test-repo
  Verifying subuser configuration.
  Verifying registry consistency...
  Unregistering any non-existant installed images.
  Running garbage collector on temporary repositories...
  Removing uneeded temporary repository: file:///home/travis/remote-test-repo
  """
  if len(args) == 0 or {"help","-h","--help"} & set(args):
    print(helpString)
    sys.exit()
  subuserName = args[0]
  argsToPassToImage = args[1:]

  user = subuserlib.classes.user.User()
  if subuserName in user.getRegistry().getSubusers():
    subuser = user.getRegistry().getSubusers()[subuserName]
    print("The image will be prepared using the Dockerfile:")
    print(subuser.getRunReadyImage().generateImagePreparationDockerfile())
    print("The command to launch the image is:")
    print(subuser.getRuntime(os.environ).getPrettyCommand(argsToPassToImage))
  else:
    sys.exit(subuserName + " not found.\n"+helpString+"\n The following subusers are available for use:"+str(user.getRegistry().getSubusers().keys()))

if __name__ == "__main__":
  dryRun(sys.argv[1:])

noironetworks/aci-integration-module | aim/tests/unit/tools/cli/test_db_config.py

# Copyright (c) 2016 Cisco Systems
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from aim import config
from aim.tests.unit.tools.cli import test_shell as base


class TestDBConfig(base.TestShell):

    def setUp(self):
        super(TestDBConfig, self).setUp()
        self.manager = config.ConfigManager(self.ctx, '')

    def test_aim_db_config(self):
        # aim config
        result = self.run_command('config')
        self.assertTrue('Usage:' in result.output)

    def test_config_update_no_host(self):
        self.run_command('config update')
        self.assertEqual(
            ['1.1.1.1', '1.1.1.2', '1.1.1.3'],
            self.manager.get_option('apic_hosts', 'apic'))

    def test_replace_all_no_host(self):
        self.run_command('config replace')
        self.assertEqual(
            ['1.1.1.1', '1.1.1.2', '1.1.1.3'],
            self.manager.get_option('apic_hosts', 'apic'))
        self.run_command('config replace', config_file='aim.conf.test.2')
        self.assertEqual(
            ['1.1.1.4', '1.1.1.5', '1.1.1.6'],
            self.manager.get_option('apic_hosts', 'apic'))

    def test_set_default_values(self):
        self.run_command('config replace', config_file='aim.conf.test.empty')
        # All default values are set, can be useful for first time
        # installations
        self.assertEqual(
            0.0, self.manager.get_option('agent_polling_interval', 'aim'))
        self.assertEqual(
            [], self.manager.get_option('apic_hosts', 'apic'))

dmccloskey/SBaaS_MFA | SBaaS_MFA/stage02_isotopomer_analysis_query.py

#SBaaS
from .stage02_isotopomer_analysis_postgresql_models import *

from SBaaS_base.sbaas_base import sbaas_base
from SBaaS_base.sbaas_base_query_update import sbaas_base_query_update
from SBaaS_base.sbaas_base_query_drop import sbaas_base_query_drop
from SBaaS_base.sbaas_base_query_initialize import sbaas_base_query_initialize
from SBaaS_base.sbaas_base_query_insert import sbaas_base_query_insert
from SBaaS_base.sbaas_base_query_select import sbaas_base_query_select
from SBaaS_base.sbaas_base_query_delete import sbaas_base_query_delete

from SBaaS_base.sbaas_template_query import sbaas_template_query


class stage02_isotopomer_analysis_query(sbaas_template_query):

    def initialize_supportedTables(self):
        '''Set the supported tables dict for ...
        '''
        tables_supported = {'data_stage02_isotopomer_analysis': data_stage02_isotopomer_analysis,
                            };
        self.set_supportedTables(tables_supported);

    ## Query from data_stage02_isotopomer_analysis
    # query simulation_id
    def get_simulationID_analysisID_dataStage02IsotopomerAnalysis(self,analysis_id_I):
        '''Query simulations that are used for the analysis'''
        try:
            data = self.session.query(data_stage02_isotopomer_analysis.simulation_id).filter(
                data_stage02_isotopomer_analysis.analysis_id.like(analysis_id_I),
                data_stage02_isotopomer_analysis.used_.is_(True)).group_by(
                data_stage02_isotopomer_analysis.simulation_id).order_by(
                data_stage02_isotopomer_analysis.simulation_id.asc()).all();
            simulation_ids_O = [];
            if data:
                for d in data:
                    simulation_ids_O.append(d.simulation_id);
            return simulation_ids_O;
        except SQLAlchemyError as e:
            print(e);

    def add_data_stage02_isotopomer_analysis(self, data_I):
        '''add rows of data_stage02_isotopomer_analysis'''
        if data_I:
            for d in data_I:
                try:
                    data_add = data_stage02_isotopomer_analysis(d
                        #d['analysis_id'],d['simulation_id'],
                        #d['used_'],
                        #d['comment_']
                        );
                    self.session.add(data_add);
                except SQLAlchemyError as e:
                    print(e);
            self.session.commit();

    def update_data_stage02_isotopomer_analysis(self,data_I):
        #TODO:
        '''update rows of data_stage02_isotopomer_analysis'''
        if data_I:
            for d in data_I:
                try:
                    data_update = self.session.query(data_stage02_isotopomer_analysis).filter(
                        data_stage02_isotopomer_analysis.id.like(d['id'])
                        ).update(
                        {
                            'analysis_id': d['analysis_id'],
                            'simulation_id': d['simulation_id'],
                            'used_': d['used_'],
                            'comment_': d['comment_']},
                        synchronize_session=False);
                except SQLAlchemyError as e:
                    print(e);
            self.session.commit();

    def initialize_datastage02_isotopomer_analysis(self):
        try:
            data_stage02_isotopomer_analysis.__table__.create(self.engine,True);
        except SQLAlchemyError as e:
            print(e);

    def drop_datastage02_isotopomer_analysis(self):
        try:
            data_stage02_isotopomer_analysis.__table__.drop(self.engine,True);
        except SQLAlchemyError as e:
            print(e);

    def reset_datastage02_isotopomer_analysis(self,analysis_id_I = None):
        try:
            if analysis_id_I:
                reset = self.session.query(data_stage02_isotopomer_analysis).filter(data_stage02_isotopomer_analysis.analysis_id.like(analysis_id_I)).delete(synchronize_session=False);
                self.session.commit();
        except SQLAlchemyError as e:
            print(e);

Yellowen/Barmaan | setup.py

#!/usr/bin/env python
# -----------------------------------------------------------------------------
# Barmaan - is a very simple, easy to use yet powerful monitoring tool.
# Copyright (C) 2013-2014 Yellowen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# -----------------------------------------------------------------------------

from setuptools import setup, find_packages

setup(name='Barmaan',
      version='2.67.0',
      description='Barmaan is a very simple, easy to use yet powerful monitoring tool.',
      author='Sameer Rahmani, Shervin Ara,',
      author_email='[email protected], [email protected]',
      url='http://barmaan.yellowen.com/',
      download_url="http://barmaan.yellowen.com/downloads/",
      keywords="Monitoring",
      license='GPL v2',
      packages=find_packages(),
      install_requires=['Twisted',],
      classifiers=[
          'Development Status :: 5 - Production/Stable',
          'Environment :: Console',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: GNU General Public License (GPL)',
          'Operating System :: POSIX :: Linux',
          'Programming Language :: Python',
          'Topic :: Software Development :: Libraries :: Python Modules',
          'Topic :: Utilities',
      ]
      )

radamizell/WallApp | location/models.py

from __future__ import unicode_literals
from django.db import models
from django.contrib.gis.db import models
from django.core.urlresolvers import reverse
from django.contrib.gis.geos import Point
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.contenttypes.fields import GenericRelation

from star_ratings.models import Rating
from constrainedfilefield.fields import ConstrainedFileField
import magic

from .validators import MimetypeValidator


class Places(models.Model):
    title = models.CharField(max_length=100)
    latitude = models.FloatField(null=True, blank=True,)
    longitude = models.FloatField(null=True, blank=True,)
    location = models.PointField(null=True, srid=4326, default=Point(27, -38))
    objects = models.GeoManager()
    sound = ConstrainedFileField(max_upload_size=4194304,)
    prefered_radius = models.IntegerField(default=5, help_text="in kilometers")
    rating = GenericRelation(Rating, related_query_name='foos')
    usersave = models.CharField(max_length=100)

    def __str__(self):
        return self.title

    def save(self, *args, **kwargs):
        if self.latitude and self.longitude:
            self.location = Point(self.longitude, self.latitude)
        super(Places, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return reverse('posts:detail', kwargs={'id': self.id})

    # def clean_file(self):
    #     file = self.cleaned_data.get("sound", False)
    #     filetype = magic.from_buffer(file.read())
    #     if not "audio/mpeg" in filetype:
    #         raise ValidationError("File is not XML.")
    #     return file

26fe/jsonstat.py | jsonstat-tests/test_dataset_parsing.py

# -*- coding: utf-8 -*-
# This file is part of https://github.com/26fe/jsonstat.py
# Copyright (C) 2016-2021 gf <[email protected]>
# See LICENSE file

# stdlib
import os

# external modules
import pytest

# jsonstat
import jsonstat

fixture_dir = os.path.join(os.path.dirname(__file__), "fixtures")


@pytest.fixture(scope='module')
def json_missing_value():
    return '''
    {
        "label" : "three dimensions"
    }
    '''


@pytest.fixture(scope='module')
def json_empty_value():
    return '''
    {
        "label" : "three dimensions",
        "value" : []
    }
    '''


@pytest.fixture(scope='module')
def json_missing_dimension():
    return '''
    {
        "label" : "three dimensions",
        "value" : [1,2]
    }
    '''


@pytest.fixture(scope='module')
def json_incorrect_data_size():
    return '''
    {
        "label" : "Unemployment rate in the OECD countries 2003-2014",
        "source" : "Economic Outlook No 92 - December 2012 - OECD Annual Projections",
        "value" : [1, 2, 3, 4],
        "dimension" : {
            "id" : ["area", "year"],
            "size" : [4, 12],
            "area" : {
                "category" : { "index" : { "AU" : 0, "AT" : 1, "BE" : 2, "CA" : 3 } }
            },
            "year" : {
                "category" : {
                    "index" : {
                        "2003" : 0, "2004" : 1, "2005" : 2, "2006" : 3, "2007" : 4,
                        "2008" : 5, "2009" : 6,
                        "2010" : 7, "2011" : 8, "2012" : 9, "2013" : 10, "2014" : 11
                    }
                }
            }
        }
    }
    '''


#
# test exceptions
#

def test_exception_not_valid():
    dataset = jsonstat.JsonStatDataSet("canada")
    with pytest.raises(jsonstat.JsonStatException):
        dataset.data(year="2003", area="CA")


def test_empty_value(json_empty_value):
    dataset = jsonstat.JsonStatDataSet("canada")
    with pytest.raises(jsonstat.JsonStatMalformedJson) as excinfo:
        dataset.from_string(json_empty_value)
    expected = "dataset 'canada': field 'value' is empty"
    assert expected == str(excinfo.value)


def test_missing_value_field(json_missing_value):
    dataset = jsonstat.JsonStatDataSet("canada")
    with pytest.raises(jsonstat.JsonStatMalformedJson) as excinfo:
        dataset.from_string(json_missing_value)
    expected = "dataset 'canada': missing 'value' key"
    assert expected == str(excinfo.value)


def test_missing_dimension(json_missing_dimension):
    dataset = jsonstat.JsonStatDataSet("canada")
    with pytest.raises(jsonstat.JsonStatMalformedJson) as excinfo:
        dataset.from_string(json_missing_dimension)
    expected = "dataset 'canada': missing 'dimension' key"
    assert expected == str(excinfo.value)


def test_exception_dataset_size(json_incorrect_data_size):
    dataset = jsonstat.JsonStatDataSet("canada")
    with pytest.raises(jsonstat.JsonStatException) as excinfo:
        dataset.from_string(json_incorrect_data_size)
    expected = "dataset 'canada': size 4 is different from calculate size 48 by dimension"
    assert expected == str(excinfo.value)


def test_exception_no_existent_dimension():
    json_pathname = os.path.join(fixture_dir, "dataset", "dataset_unemployment_v1.json")
    dataset = jsonstat.JsonStatDataSet("canada")
    dataset.from_file(json_pathname)
    assert dataset.dimension("year").did == "year"

    with pytest.raises(jsonstat.JsonStatException) as excinfo:
        dataset.dimension("not existent dim")
    expected = "dataset 'canada': unknown dimension 'not existent dim' know dimensions ids are: serie, year, area"
    assert expected == str(excinfo.value)


#
# test
#

def test_name():
    dataset = jsonstat.JsonStatDataSet("canada")
    assert dataset.name == "canada"


def test_dimensions():
    json_pathname = os.path.join(fixture_dir, "dataset", "dataset_unemployment_v1.json")
    dataset = jsonstat.JsonStatDataSet("canada")
    dataset.from_file(json_pathname)
    assert len(dataset.dimensions()) == 3


def test_info():
    json_pathname = os.path.join(fixture_dir, "dataset", "dataset_unemployment_v1.json")
    dataset = jsonstat.JsonStatDataSet("canada")
    dataset.from_file(json_pathname)
    expected = (
        "name: 'canada'\n"
        "label: 'Unemployment rate in the OECD countries'\n"
        "source: 'Unemployment rate in the OECD countries'\n"
        "size: 12\n"
        "+-----+-------+--------------------------------+------+------+\n"
        "| pos | id | label | size | role |\n"
        "+-----+-------+--------------------------------+------+------+\n"
        "| 0 | serie | serie | 1 | |\n"
        "| 1 | year | 2012-2014 | 3 | time |\n"
        "| 2 | area | OECD countries, EU15 and total | 4 | geo |\n"
        "+-----+-------+--------------------------------+------+------+"
    )
    assert expected, dataset.__str__()


#
# test
#   dataset.data()
#   dataset.value()
#   dataset.status()
#

def test_data_with_three_dimensions():
    json_pathname = os.path.join(fixture_dir, "dataset", "three_dim_v1.json")
    dataset = jsonstat.JsonStatDataSet()
    dataset.from_file(json_pathname)

    data = dataset.data(one="one_1", two="two_1", three="three_1")
    assert data.value == 111
    assert data.status is None
    data = dataset.data(one="one_2", two="two_2", three="three_2")
    assert data.value == 222

    # using a bit different file
    json_pathname = os.path.join(fixture_dir, "dataset", "three_dim_size_as_string_v1.json")
    dataset = jsonstat.JsonStatDataSet()
    dataset.from_file(json_pathname)

    data = dataset.data(one="one_1", two="two_1", three="three_1")
    assert data.value == 111
    assert data.status is None
    data = dataset.data(one="one_2", two="two_2", three="three_2")
    assert data.value == 222

    # using a bit different file
    json_pathname = os.path.join(fixture_dir, "dataset", "three_dim_size_as_string_v2.json")
    dataset = jsonstat.JsonStatDataSet()
    dataset.from_file(json_pathname)

    data = dataset.data(one="one_1", two="two_1", three="three_1")
    assert data.value == 111
    assert data.status is None
    data = dataset.data(one="one_2", two="two_2", three="three_2")
    assert data.value == 222


def test_data_with_unemployment():
    json_pathname = os.path.join(fixture_dir, "dataset", "dataset_unemployment_v1.json")
    dataset = jsonstat.JsonStatDataSet("canada")
    dataset.from_file(json_pathname)

    data = dataset.data(area="AU", year="2012")
    assert data.value == 11

    # using label Australia instead of index AU
    data = dataset.data(area="Australia", year="2012")
    assert data.value == 11

    # using dictionary
    data = dataset.data({'area': "Australia", 'year': "2012"})
    assert data.value == 11
    data = dataset.data({'area': "AU", 'year': "2012"})
    assert data.value == 11
    data = dataset.data({"OECD countries, EU15 and total": "AU", 'year': '2012'})
    assert data.value == 11

    data = dataset.data(area="BE", year="2014")
    assert data.value == 33
    assert data.status is None


def test_data_with_oecd_canada():
    json_pathname = os.path.join(jsonstat._examples_dir, "www.json-stat.org", "oecd-canada.json")
    collection = jsonstat.JsonStatCollection()
    collection.from_file(json_pathname)
    oecd = collection.dataset('oecd')

    data = oecd.data(concept='UNR', area='AU', year='2004')
    assert 5.39663128 == data.value
    assert data.status is None

    # first position with status at idx 10
    dcat = {'concept': 'UNR', 'area': 'AU', 'year': '2013'}
    data = oecd.data(dcat)
    assert data.status == "e"
    data = oecd.data(10)
    assert data.status == "e"
    data = oecd.data([0, 0, 10])
    assert data.status == "e"


#
# test dataset indexes transform functions
#

def test_dcat_to_lint():
    json_pathname = os.path.join(jsonstat._examples_dir, "www.json-stat.org", "oecd-canada.json")
    collection = jsonstat.JsonStatCollection()
    collection.from_file(json_pathname)
    oecd = collection.dataset('oecd')
    dcat = {'concept': 'UNR', 'area': 'AU', 'year': '2013'}
    lint = oecd.dcat_to_lint(dcat)
    assert lint == [0, 0, 10]
    idx = oecd.lint_as_idx(lint)
    assert idx == 10


def test_idx_as_lint():
    json_pathname = os.path.join(jsonstat._examples_dir, "www.json-stat.org", "oecd-canada.json")
    collection = jsonstat.JsonStatCollection()
    collection.from_file(json_pathname)
    oecd = collection.dataset('oecd')
    lint = oecd.idx_as_lint(10)
    assert lint == [0, 0, 10]


#
# enumeration function
# all_pos test
#

def test_all_pos():
    json_pathname = os.path.join(fixture_dir, "dataset", "dataset_unemployment_v1.json")
    dataset = jsonstat.JsonStatDataSet("canada")
    dataset.from_file(json_pathname)
    result = list(dataset.all_pos())
    # first digit is serie, always 0
    # second digit is year from 0 to 2
    # third digit is area from 0 to 3
    # order is ["serie", "year", "area"]
    expected = [[0, 0, 0], [0, 0, 1], [0, 0, 2], [0, 0, 3],  # second digit 0
                [0, 1, 0], [0, 1, 1], [0, 1, 2], [0, 1, 3],  # second digit 1
                [0, 2, 0], [0, 2, 1], [0, 2, 2], [0, 2, 3]]  # second digit 2
    assert result == expected


def test_all_pos_reorder():
    json_pathname = os.path.join(fixture_dir, "dataset", "dataset_unemployment_v1.json")
    dataset = jsonstat.JsonStatDataSet("canada")
    dataset.from_file(json_pathname)
    result = list(dataset.all_pos(order=["area", "year", "serie"]))
    # first digit is serie, always 0
    # second digit is year from 0 to 2
    # third digit is area from 0 to 3
    # first changing digit is year (second digit)
    expected = [[0, 0, 0], [0, 1, 0], [0, 2, 0],  # last digit 0
                [0, 0, 1], [0, 1, 1], [0, 2, 1],  # last digit 1
                [0, 0, 2], [0, 1, 2], [0, 2, 2],  # last digit 2
                [0, 0, 3], [0, 1, 3], [0, 2, 3]]  # last digit 3
    assert result == expected


def test_all_pos_with_block():
    json_pathname = os.path.join(fixture_dir, "dataset", "dataset_unemployment_v1.json")
    dataset = jsonstat.JsonStatDataSet("canada")
    dataset.from_file(json_pathname)

    result = list(dataset.all_pos({"area": "IT"}))
    expected = [[0, 0, 3], [0, 1, 3], [0, 2, 3]]
    assert result == expected

    dataset.generate_all_vec(area="IT")
    result = list(dataset.all_pos({"year": "2014"}))
    expected = [[0, 2, 0], [0, 2, 1], [0, 2, 2], [0, 2, 3]]
    assert result == expected
    dataset.generate_all_vec(year='2014')


def test_all_pos_with_three_dim():
    json_pathname = os.path.join(fixture_dir, "dataset", "three_dim_v1.json")
    dataset = jsonstat.JsonStatDataSet()
    dataset.from_file(json_pathname)

    # test 1
    result = list(dataset.all_pos({'one': 'one_1'}))
    expected = [
        [0, 0, 0], [0, 0, 1], [0, 0, 2], [0, 0, 3],
        [0, 1, 0], [0, 1, 1], [0, 1, 2], [0, 1, 3],
        [0, 2, 0], [0, 2, 1], [0, 2, 2], [0, 2, 3]]
    assert result == expected

    # test 2
    dataset.generate_all_vec(one='one_1')
    result = list(dataset.all_pos({"two": "two_2"}))
    expected = [
        [0, 1, 0], [0, 1, 1], [0, 1, 2], [0, 1, 3],
        [1, 1, 0], [1, 1, 1], [1, 1, 2], [1, 1, 3]
    ]
    assert result == expected

TalLinzen/russian-preps | code/python/constants.py

# -*- coding: utf-8 -*-
# Author: Tal Linzen <[email protected]>
# License: BSD (3-clause)
# Linzen, Kasyanenko, & Gouskova (2013). (Lexical and phonological
# variation in Russian prepositions, Phonology 30(3).)
import os
project_dir = os.environ['RUSS_PREPS_ROOT']
paradigm_file = os.path.join(project_dir, 'resources', 'paradigms.txt')
yandex_shelf_file = os.path.join(project_dir, 'results', 'yandex.shelf')
rnc_shelf_file = os.path.join(project_dir, 'results', 'rnc.shelf')
default_csv_dir = os.path.join(project_dir, 'csv')
automatically_stressed_vowels = u'ё'
vowels = automatically_stressed_vowels + u'яюаеиоыуэ'
consontants = u'шртпщсдфгчклжхцвбнм'
znaks = [u'ь', u'ъ']
unvoiced_stops = u'птк'
voiced_stops = u'бдг'
unvoiced_fricatives = u'сфшщцчх'
voiced_fricatives = u'звж'
nasals = u'мн'
liquids = u'лp'
# Selkirk, Elizabeth (1984). On the major class features and syllable theory.
# Should we have the same sonority for palatalized consonants?
selkirk_sonority_scale = [unvoiced_stops, voiced_stops, unvoiced_fricatives,
                          voiced_fricatives, nasals, liquids]
s = {'cases': ['inst', 'gen'],
     'variants': [u'с', u'со'],
     'transcribed_variants': ['s', 'so']}

v = {'cases': ['acc', 'prep'],
     'variants': [u'в', u'во'],
     'transcribed_variants': ['v', 'vo']}

k = {'cases': ['dat'],
     'variants': [u'к', u'ко'],
     'transcribed_variants': ['k', 'ko']}
# NOTE: this helper referenced `self` at module level in the dump (it was
# presumably lifted from a class); rewritten here as a module-level function
# over the constants above.
def build_sonority_dict():
    sonority = {}
    for group_index, group in enumerate(selkirk_sonority_scale):
        for consonant in group:
            sonority[consonant] = group_index
    return sonority
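
# Illustrative usage (ranks follow selkirk_sonority_scale order, 0 = least sonorous):
#   sonority = build_sonority_dict()
#   sonority[u'п']  # -> 0 (unvoiced stop)
#   sonority[u'з']  # -> 3 (voiced fricative)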

berkmancenter/mediacloud | apps/common/src/python/mediawords/test/text.py

import colorama
import difflib
import re

from mediawords.util.perl import decode_object_from_bytes_if_needed

colorama.init()


class TestCaseTextUtilities(object):

    @staticmethod
    def __normalize_text(text: str) -> str:
        """Normalize text by stripping whitespace and such."""
        text = text.replace("\r\n", "\n")
        text = re.sub(r'\s+', ' ', text)
        text = text.strip()
        return text

    @staticmethod
    def __colorize_difflib_ndiff_line_output(diff_line: str) -> str:
        """Colorize a single line of difflib.ndiff() output by adding some ANSI colors."""
        if diff_line.startswith('+'):
            diff_line = colorama.Fore.GREEN + diff_line + colorama.Fore.RESET
        elif diff_line.startswith('-'):
            diff_line = colorama.Fore.RED + diff_line + colorama.Fore.RESET
        elif diff_line.startswith('^'):
            diff_line = colorama.Fore.BLUE + diff_line + colorama.Fore.RESET
        return diff_line

    # noinspection PyPep8Naming
    def assertTextEqual(self, got_text: str, expected_text: str, msg: str = None) -> None:
        """An equality assertion for two texts.

        For the purposes of this function, a valid ordered sequence type is one
        which can be indexed, has a length, and has an equality operator.

        Args:
            got_text: First text to be compared (e.g. received from a tested function).
            expected_text: Second text (e.g. the one that is expected from a tested function).
            msg: Optional message to use on failure instead of a list of differences.
        """
        got_text = decode_object_from_bytes_if_needed(got_text)
        expected_text = decode_object_from_bytes_if_needed(expected_text)
        msg = decode_object_from_bytes_if_needed(msg)

        if got_text is None:
            raise TypeError("Got text is None.")
        if expected_text is None:
            raise TypeError("Expected text is None.")

        got_text = self.__normalize_text(got_text)
        expected_text = self.__normalize_text(expected_text)

        if got_text == expected_text:
            return

        got_words = got_text.split()
        expected_words = expected_text.split()

        if got_words == expected_words:
            return

        if msg is None:
            differences = []
            for diff_line in difflib.ndiff(expected_words, got_words):
                diff_line = self.__colorize_difflib_ndiff_line_output(diff_line=diff_line)
                differences.append(diff_line)
            msg = " ".join(differences)

        raise AssertionError(msg)
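
# Minimal usage sketch (illustrative; `MyTextTest` is a hypothetical test class):
#
#   class MyTextTest(TestCaseTextUtilities):
#       def test_equal_after_normalization(self):
#           # Passes: whitespace differences are normalized away.
#           self.assertTextEqual("foo   bar\nbaz", "foo bar baz")
#
# On a real mismatch, the AssertionError carries a colorized word-level diff
# built from difflib.ndiff().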

juanlealz/mm2md | mm2md.py

#!/usr/bin/env python
# -*- coding: UTF8 -*-

from xml.etree import ElementTree
from sys import argv


# Recursive
def print_node (e, header_depth=0, bullet=None, bullet_depth=0, multinode_paragraph=False):
    #parse icons
    icons=[]
    for icon in e.findall("icon"):
        icons.append(icon.attrib.get("BUILTIN"))
    icons=set(icons)

    #multi-node paragraph and bullets
    if "bullets" in icons:
        next_bullet="-"
    elif "numbers" in icons:
        next_bullet="1."
    else:
        next_bullet=None
    if "multi-node_paragraph" in icons:
        next_multinode_paragraph=True
    else:
        next_multinode_paragraph=False

    #document title
    if header_depth==0:
        print "---"
        print "title: ",
        print e.attrib.get("TEXT").encode('UTF8')
        print "...\n"
        for node in e.findall("node"):
            print_node(node, header_depth+1, multinode_paragraph=next_multinode_paragraph)
    #comments
    elif "comments" in icons:
        pass
    elif "comment" in icons:
        for node in e.findall("node"):
            print_node(node, header_depth, bullet, bullet_depth, multinode_paragraph=next_multinode_paragraph)
        if "multi-node_paragraph" in icons and not multinode_paragraph:
            print "\n\n",
    #heading
    elif "heading" in icons:
        print "#"*header_depth,
        print e.attrib.get("TEXT").encode('UTF8'),
        print "\n\n",
        for node in e.findall("node"):
            print_node(node, header_depth+1, bullet=next_bullet, bullet_depth=bullet_depth, multinode_paragraph=next_multinode_paragraph)
    #bullet-list start
    elif bullet is None and ("bullets" in icons or "numbers" in icons):
        print e.attrib.get("TEXT").encode('UTF8'),
        print "\n\n",
        for node in e.findall("node"):
            print_node(node, header_depth, bullet=next_bullet, bullet_depth=bullet_depth, multinode_paragraph=next_multinode_paragraph)
        print "\n",
    #bullet-list item
    elif bullet is not None:
        print " "*bullet_depth+bullet,
        if e.attrib.get("TEXT") is None:
            print ""
        else:
            print e.attrib.get("TEXT").encode('UTF8'),
            if not "multi-node_paragraph" in icons:
                print "\n",
        if next_bullet is None and not "multi-node_paragraph" in icons:
            next_bullet="-"
        for node in e.findall("node"):
            print_node(node, header_depth, bullet=next_bullet, bullet_depth=bullet_depth+1, multinode_paragraph=next_multinode_paragraph)
        if "multi-node_paragraph" in icons:
            print "\n",
    #multi-node paragraph header
    elif "multi-node_paragraph" in icons:
        print e.attrib.get("TEXT").encode('UTF8'),
        print " ",
        for node in e.findall("node"):
            print_node(node, header_depth, bullet=next_bullet, bullet_depth=bullet_depth, multinode_paragraph=next_multinode_paragraph)
        if not multinode_paragraph:
            print "\n\n",
    #multi-node paragraph item
    elif multinode_paragraph:
        print e.attrib.get("TEXT").encode('UTF8'),
        for node in e.findall("node"):
            print_node(node, header_depth, bullet=None, bullet_depth=bullet_depth, multinode_paragraph=True)
    #implicit bullet-list start
    elif e.find("node") is not None:
        next_bullet="-"
        print e.attrib.get("TEXT").encode('UTF8'),
        print "\n\n",
        for node in e.findall("node"):
            print_node(node, header_depth, bullet=next_bullet, bullet_depth=bullet_depth, multinode_paragraph=next_multinode_paragraph)
        print "\n",
        return
    #one-node paragraph
    else:
        print e.attrib.get("TEXT").encode('UTF8'),
        print "\n\n",


#Start
et = ElementTree.parse(argv[1])
print_node(et.find("node"))
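
# Illustrative usage (FreeMind .mm file in, Markdown out on stdout):
#   $ python mm2md.py mindmap.mm > mindmap.md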

Talos4757/NVIDIABot | driverStation.py

'''
Copyright (c) 2014, Rishi Desai
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import Tkinter
import tkMessageBox
import socket
import pickle
import pygame

top = Tkinter.Tk()
joyFrame = Tkinter.Frame(top)
noJoyFrame = Tkinter.Frame(top)
port = 8081
host = "10.99.99.2"
#host = "192.168.1.83"
pygame.init()
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#j =0;
s.bind(("", 0))
started = False

def startSession():
    global started
    started = True
    s.sendto(pickle.dumps(started), (host, port))
    # change wait to 2 after done testing
    top.after(200, sendJoystickVal)

def endSession():
    global started
    started = False
    #s.bind(("", 0))
    s.sendto(pickle.dumps(started), (host, port))
    #top.destroy()

def closeProgram():
    s.close()
    top.destroy()

sessionStart = Tkinter.Button(top, text="Start Session", command=startSession)
sessionEnd = Tkinter.Button(top, text="End Session", command=endSession)
programClose = Tkinter.Button(top, text="Close Program", command=closeProgram)

def isJoystick():
    return pygame.joystick.get_count() > 0

def whileJoyCon():
    if(isJoystick()):
        sessionStart.config(state="normal")
        sessionStart.pack()
        sessionEnd.config(state="normal")
        sessionEnd.pack()
        programClose.config(state="normal")
        programClose.pack()
        howTo = Tkinter.Text(top)
        howTo.insert(Tkinter.INSERT, "Press Start on the Joystick or end session to stop the program")
        howTo.pack()
    else:
        print isJoystick()
        sessionStart.config(state="disable")
        sessionStart.pack()
        sessionEnd.config(state="disable")
        sessionEnd.pack()
        programClose.config(state="normal")
        programClose.pack()
        noJoy = Tkinter.Text(top)
        noJoy.insert(Tkinter.INSERT, "No Joystick Connected. Please connect a Joystick and Restart the program")
        noJoy.pack()

def sendJoystickVal():
    #print isJoy
    #if(isJoystick):
    pygame.event.pump()
    j = pygame.joystick.Joystick(0)
    j.init()
    xAxis = j.get_axis(1)
    yAxis = j.get_axis(3)
    i = 1
    button = -1;
    for i in range(j.get_numbuttons()):
        if(j.get_button(i) == True):
            button = i
            break
    data = [started, xAxis, -yAxis, button]
    s.sendto(pickle.dumps(data), (host, port))
    print data
    #change wait to 2 after done testing
    top.after(200, sendJoystickVal)

whileJoyCon()
#print started
#if(started):
#    top.after(2000, sendJoystickVal)
top.mainloop()
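
# Illustrative sketch of a matching receiver (not part of this file; assumes
# the robot end listens on the same UDP port and unpacks the same pickle payloads):
#
#   import pickle, socket
#   sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#   sock.bind(("", 8081))
#   while True:
#       payload, addr = sock.recvfrom(4096)
#       data = pickle.loads(payload)
#       if isinstance(data, bool):    # startSession()/endSession() send bare bools
#           if not data:
#               break
#           continue
#       started, x_axis, y_axis, button = data  # sendJoystickVal() sends a 4-item list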

JesusAMR/ProgramasUNI | rJorge.py

#! /usr/bin/python
num = raw_input("Enter a number to convert to Roman numerals: ")
numero = str(num)
lim = len(numero)
dicc = {
    # Format: [1,     5]
    #         [1*10,  5*10]
    #         [10*10, 50*10]
    #         ......
    "1": ["I", "V"],
    "2": ["X", "L"],
    "3": ["C", "D"],
    "4": ["M", "v"]
}
i = 0
romano = ""
chain = str(dicc[str(lim)])[1]
while(lim != 0):
    if(numero[i] == "1" or numero[i] == "2" or numero[i] == "3"):
        romano = romano + dicc[str(lim)][0] * int(numero[i])
        lim = lim - 1
        i = i + 1
    elif(numero[i] == "6" or numero[i] == "7" or numero[i] == "8"):
        romano = romano + dicc[str(lim)][1] + dicc[str(lim)][0] * (int(numero[i]) - 5)
        lim = lim - 1
        i = i + 1
    elif(numero[i] == "5"):
        romano = romano + dicc[str(lim)][1]
        lim = lim - 1
        i = i + 1
    elif(numero[i] == "4"):
        romano = romano + dicc[str(lim)][0] + dicc[str(lim)][1]
        lim = lim - 1
        i = i + 1
    elif(numero[i] == "9"):
        romano = romano + dicc[str(lim)][0] + dicc[str(lim + 1)][0]
        lim = lim - 1
        i = i + 1
    elif(numero[i] == "0"):
        lim = lim - 1
        i = i + 1
print(romano)
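
# Illustrative run: entering 1994 prints MCMXCIV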

mitocw/xsiftx | xsiftx/lti/util.py

"""
Utility support functions
"""
from xsiftx.util import get_sifters


class LTIException(Exception):
    """
    Custom LTI exception for proper handling
    of LTI specific errors
    """
    pass


class LTIRoleException(Exception):
    """
    Exception class for when LTI user doesn't have the
    right role.
    """
    pass


class InvalidAPIUsage(Exception):
    """
    API Error handler to return helpful json when problems occur.
    Stolen right from the flask docs
    """
    status_code = 400

    def __init__(self, message, status_code=None, payload=None):
        """
        Setup class with optional arguments for returning later
        """
        Exception.__init__(self)
        self.message = message
        if status_code is not None:
            self.status_code = status_code
        self.payload = payload

    def to_dict(self):
        """
        Aggregate properties into dictionary for use
        in returning jsonified errors.
        """
        exception_dict = dict(self.payload or ())
        exception_dict['message'] = self.message
        return exception_dict
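
# Minimal sketch of wiring InvalidAPIUsage into a Flask app (illustrative;
# `app` is a hypothetical Flask instance; the pattern follows the Flask docs
# this class was adapted from):
#
#   from flask import jsonify
#
#   @app.errorhandler(InvalidAPIUsage)
#   def handle_invalid_usage(error):
#       response = jsonify(error.to_dict())
#       response.status_code = error.status_code
#       return response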
def get_allowed_sifters(consumer, as_dict=False):
    """
    Returns a list of sifter names allowed by the client
    """
    all_sifters = get_sifters()
    sifters = {}
    allowed_sifters = consumer.get('allowed_sifters', None)
    if allowed_sifters:
        for sifter in all_sifters.keys():
            if sifter in allowed_sifters:
                sifters[sifter] = all_sifters[sifter]
    else:
        sifters = all_sifters

    if not as_dict:
        return sifters.keys()
    else:
        return sifters
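
# Illustrative call (hypothetical consumer dict; assumes get_sifters()
# returns a mapping of sifter names to sifter executables):
#   consumer = {'allowed_sifters': ['grades']}
#   get_allowed_sifters(consumer)                # -> ['grades'] (when installed)
#   get_allowed_sifters(consumer, as_dict=True)  # -> {'grades': <sifter>}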

ctogle/dilapidator | src/dilap/BROKEN/infrastructure/infragraph.py

import dilap.core.base as db
import dilap.core.vector as dpv
import dilap.core.tools as dpr
import dilap.core.lsystem as dls
import dilap.mesh.tools as dtl
import dilap.mesh.pointset as dps

import dilap.infrastructure.graphnode as gnd
import dilap.infrastructure.graphedge as geg
import dilap.infrastructure.graphregion as grg
import dilap.infrastructure.infralsystem as ifl

import matplotlib.pyplot as plt
import random as rm

import pdb


class graph(db.base):

    def plot_regions(self,ax = None):
        if ax is None:ax = dtl.plot_axes()
        pdb.set_trace()

    def plot(self,ax = None):
        if ax is None:ax = dtl.plot_axes()
        for n in self.nodes:
            if not n is None:
                n.plot(ax)
        for eg in self.edges:
            if not eg is None:
                eg.plot(ax)
        ax.set_xlim([-100,100])
        ax.set_ylim([-100,100])
        ax.set_zlim([-100,100])
        return ax

    def plot_xy(self,ax = None):
        if ax is None:ax = dtl.plot_axes_xy()
        for n in self.nodes:
            if not n is None:
                n.plot_xy(ax)
        for eg in self.edges:
            if not eg is None:
                eg.plot_xy(ax)
        ax.set_aspect('equal')
        return ax

    def __str__(self):
        st = '\tinfragraph with:\n\t'
        st += str(self._count_nodes())+'\tnodes\n\t'
        st += str(self._count_edges())+'\tedges\n\t'
        return st

    def _count_nodes(self):
        ncnt = 0
        for x in range(self.nodecount):
            if self.nodes[x]:ncnt += 1
        return ncnt

    def _count_edges(self):
        ecnt = 0
        for x in range(self.edgecount):
            eg = self.edges[x]
            if self.edges[x]:ecnt += 1
        return ecnt

    # verify graph is correct where possible
    def _update(self):
        for nd in self.nodes:
            if not nd is None:
                nd._spikes(self)
        for eg in self.edges:
            if not eg is None:
                eg._place_road(self)
        self._regions()

    def __init__(self,**kwargs):
        self.nodes = []
        self.nodes_lookup = {}
        self.edges = []
        self.edges_lookup = {}
        self.nodecount = 0
        self.edgecount = 0

    # given an edge e, direction 0 or 1, and cw or ccw
    # return the forward path of e
    def _loopwalk(self,ie,d,w):
        def complete(inp):
            i1,i2 = inp[0],inp[1]
            cnt = len(inp)
            for x in range(1,cnt-1):
                if inp[x] == i1:
                    if inp[x+1] == i2:
                        return inp[:x+1]
        if d:inpath = [ie.one.key(),ie.two.key()]
        else:inpath = [ie.two.key(),ie.one.key()]
        while True:
            ekey = (inpath[-2],inpath[-1])
            e = self.edges[self.edges_lookup[ekey]]
            nx = e._walk(inpath[-1],w)
            if nx is None:
                inpath.append(inpath[-2])
                nx = e._walk(inpath[-3],w)
            nxndkey = self.nodes[nx].key()
            #if ie.two.key() == (0.0,100.0,0.0):
            #    print('going',nxndkey,inpath)
            #    pdb.set_trace()
            res = complete(inpath)
            if not res is None:return res
            #if inpath[-1] == inpath[0] and nxndkey == inpath[1]:return inpath
            #if inpath.count(ie.one.key()) > 1 and nxndkey == inpath[1]:
            #if inpath.count(ie.one.key()) > 1 and inpath.count(ie.two.key()) > 1:
            #    return inpath
            else:inpath.append(nxndkey)

    # return a collection of points outlining all edge loops in the graph
    def _edge_loops(self):
        edgelloops = []
        edgerloops = []
        edgestodo = self.edges[:]
        while edgestodo:
            e = edgestodo.pop(0)
            ewalkrcw = self._loopwalk(e,1,1)
            ewalkrccw = self._loopwalk(e,0,0)
            ewalklccw = self._loopwalk(e,0,1)
            ewalklcw = self._loopwalk(e,1,0)
            if set(ewalkrcw) == set(ewalkrccw):
                #print('closed loop!',len(edgestodo))
                rloop = tuple(ewalkrcw)
            else:
                print('unclosed loop!',len(edgestodo))
                pdb.set_trace()
                rloop = tuple(ewalkrccw[::-1][:-1]+ewalkrcw[1:])
            if set(ewalklccw) == set(ewalklcw):
                #print('closed loop!',len(edgestodo))
                lloop = tuple(ewalklccw)
            else:
                print('unclosed loop!',len(edgestodo))
                pdb.set_trace()
                lloop = tuple(ewalklccw[::-1][:-1]+ewalklcw[1:])
            rlloop = lloop[::-1]
            if not dpr.cyclic_permutation(rlloop,rloop):
                edgelloops.append(lloop)
                edgerloops.append(rloop)
        #pdb.set_trace()
        return edgelloops,edgerloops

    # eloop is a list of node keys which are connected in a loop by edges
    # side is either 0 (right) or 1 (left) relative to the first edge
    # in the loop - other edges must be handled carefully
    def _edge_loop_points(self,eloop,side):
        elcnt = len(eloop)
        looppts = []
        ne = self.edges[self.edges_lookup[eloop[0],eloop[1]]]
        if side == 0:
            looppts.extend(ne.rbpts)
            lnkey = ne.two.key()
        elif side == 1:
            looppts.extend(ne.lbpts)
            lnkey = ne.one.key()
        le = ne
        for elx in range(2,elcnt+1):
            elx1,elx2 = elx-1,elx if elx < elcnt else 0
            nekey = (eloop[elx1],eloop[elx2])
            if nekey[0] == nekey[1]:return looppts
            ne = self.edges[self.edges_lookup[nekey]]
            nelooppts = self._find_road_points(looppts[-1],le,ne)
            looppts.extend(nelooppts)
            le = ne
        return looppts

    # given the last and next edge, and the last point in a loop
    # properly return the set of road points which connects
    def _find_road_points(self,tip,le,ne):
        # create the shortest line segment from el1 to el2
        def tiptail(el1,el2):
            d1 = dpv.distance(el1[ 0],el2[ 0])
            d2 = dpv.distance(el1[ 0],el2[-1])
            d3 = dpv.distance(el1[-1],el2[ 0])
            d4 = dpv.distance(el1[-1],el2[-1])
            md = min(d1,d2,d3,d4)
            if md == d1:return el1[ 0],el2[ 0]
            elif md == d2:return el1[ 0],el2[-1]
            elif md == d3:return el1[-1],el2[ 0]
            elif md == d4:return el1[-1],el2[-1]
        def closer(p,r,l):
            if dpv.distance(p,r) < dpv.distance(p,l):return r
            else:return l
        '''#
        ax = dtl.plot_axes_xy()
        ax = dtl.plot_point_xy(tip,ax)
        ax = dtl.plot_edges_xy(le.rpts,ax)
        ax = dtl.plot_edges_xy(ne.rpts,ax)
        s1,s2 = tiptail(le.rpts,ne.rpts)
        ax = dtl.plot_edges_xy([s1,s2],ax,lw = 5.0)
        ax = dtl.plot_edges_xy([tip,closer(tip,ne.rbpts[0],ne.rbpts[-1])],ax)
        ax = dtl.plot_edges_xy([tip,closer(tip,ne.lbpts[0],ne.lbpts[-1])],ax)
        plt.show()
        '''#
        '''#
        this function is verrrrry sloppy.... rewrite it....
        '''#
        def same_side(lp):
            lp0d = dpv.distance(lp[ 0],tip)
            lp1d = dpv.distance(lp[-1],tip)
            lpt = lp[0] if lp0d < lp1d else lp[-1]
            s1,s2 = tiptail(le.rpts,ne.rpts)
            segsect = dpr.segments_intersect_noncolinear(s1,s2,lpt,tip)
            if not segsect:return lpt
        def connect_end(lp,lpt):
            d1,d2 = dpv.distance(lp[0],lpt),dpv.distance(lp[-1],lpt)
            if d1 < d2:return lp[:]
            else:return lp[::-1]
        if le is ne:
            if tip in le.rbpts:return connect_end(ne.lbpts,tip)
            else:return connect_end(ne.rbpts,tip)
        else:
            lrpt = same_side(ne.rbpts)
            llpt = same_side(ne.lbpts)
            if lrpt is None and llpt is None:
                lsd = dpv.distance(tip,ne.lbpts[ 0])
                led = dpv.distance(tip,ne.lbpts[-1])
                rsd = dpv.distance(tip,ne.rbpts[ 0])
                red = dpv.distance(tip,ne.rbpts[-1])
                sxs = dpr.order_ascending([lsd,led,rsd,red])
                nelooppts = None
                for sx in sxs:
                    if sx == 0 and not tip in ne.lbpts:nelooppts = ne.lbpts[:]
                    elif sx == 1 and not tip in ne.lbpts:nelooppts = ne.lbpts[:-1]
                    elif sx == 2 and not tip in ne.rbpts:nelooppts = ne.rbpts[:]
                    elif sx == 3 and not tip in ne.rbpts:nelooppts = ne.rbpts[:-1]
                    if not nelooppts is None:break
                return nelooppts
            if not lrpt is None:return connect_end(ne.rbpts,lrpt)
            else:return connect_end(ne.lbpts,llpt)

    # return a collection of points outlining all nodes/edges in the graph
    def _edge_loop_boundaries(self):
        def uniq_loop(eloops,elp):
            uniq = True
            for elps in eloops:
                for x in range(len(elps)):
                    p = elps[x]
                    for y in range(len(elp)):
                        q = elp[y]
                        if p.near(q):return False
            return True
        edgelloops,edgerloops = self._edge_loops()
        rperms = {}
        lperms = {}
        for ex in range(len(edgelloops)):
            lloop,rloop = edgelloops[ex],edgerloops[ex]
            rkey = rloop[:-1]
            isperm = False
            for rps in rperms:
                if dpr.cyclic_permutation(rkey,rps):isperm = True;break
            if not isperm:rperms[rkey] = self._edge_loop_points(rloop,0)
            lkey = lloop[:-1]
            isperm = False
            for lps in lperms:
                if dpr.cyclic_permutation(lkey,lps):isperm = True;break
            if not isperm:lperms[lkey] = self._edge_loop_points(lloop,1)
        eloops = []
        for el in lperms:
            elp = [v for v in lperms[el]]
            if uniq_loop(eloops,elp):eloops.append(elp)
        for el in rperms:
            elp = [v for v in rperms[el]]
            if uniq_loop(eloops,elp):eloops.append(elp)
        return self._rank_edge_loops(eloops)

    # determine how the loops are arranged based on containment
    # so that they can be properly triangulated
    def _rank_edge_loops(self,eloops):
        bedgeloops = {}
        #ax = dtl.plot_axes_xy()
        #ax = self.plot_xy(ax)
        #for bedge in eloops:ax = dtl.plot_edges_xy(bedge,ax)
        #plt.show()
        containments = [[] for el in eloops]
        for elx in range(len(eloops)):
            elp = tuple(eloops[elx])
            for elxo in range(len(eloops)):
                if elxo == elx:continue
                elpo = tuple(eloops[elxo])
                isect = dpr.concaves_intersect(elp,elpo)
                elins = dpr.inconcave_xy(elpo[0],elp)
                if isect:raise ValueError
                elif elins:containments[elx].append(elxo)
        looplook = {'king':[],'interiors':[]}
        for elx in range(len(eloops)):
            cont = containments[elx]
            if cont:looplook['king'].append(eloops[elx])
            else:looplook['interiors'].append(eloops[elx])
        return looplook

    # provide a polygon for the terrain
    #
    # provide a polygon for the road
    #
    # the terrain runs from convex bound to the loop that contains
    # all other loops
    # the terrain also contains the interiors of all loops of road
    #
    # the road extends from the loop that contains all others to the
    # collection of all other loops of road
    #
    # assume the graph is connected? fix if not?
    # calculate polygons representing regions to place terrain
    def _regions(self):
        rpts = []
        for eg in self.edges:rpts.extend([x.copy() for x in eg.rpts])
        convexbnd = dpr.pts_to_convex_xy(rpts)
        convexbnd = dpr.inflate(convexbnd,50)
        eloops = self._edge_loop_boundaries()
        self.tpolygons = [(tuple(convexbnd),(tuple(eloops['king'][0]),))]+\
            [(tuple(i),()) for i in eloops['interiors']]
        self.rpolygons = [(eloops['king'][0],tuple(eloops['interiors']))]

    # add a new node to the graph or existing node index
    # ndkey is a tuple(x,y,layer)
    def _add_node(self,ndkey):
        if ndkey in self.nodes_lookup:
            nd = self.nodes[self.nodes_lookup[ndkey]]
            if not nd is None:return nd.index
        nx,ny,nl = ndkey
        newnode = gnd.node(dpv.vector(nx,ny,20*nl),layer = nl)
        newnode.index = self.nodecount
        self.nodes.append(newnode)
        self.nodes_lookup[ndkey] = newnode.index
        self.nodecount += 1
        return newnode.index

    # delete an existing node from the graph
    def _del_node(self,ndkey):
        if ndkey in self.nodes_lookup:
            nd = self.nodes[self.nodes_lookup[ndkey]]
            if nd is None:return
            for ekey in self.edges_lookup:
                if nd.index in ekey:
                    self._del_edge(*ekey)
            self.nodes[nd.index] = None
            del self.nodes_lookup[nd.key()]

    # add a new edge to the graph, or return existing index
    def _add_edge(self,ndkey1,ndkey2,**kwargs):
        if ndkey1 in self.nodes_lookup:
            nd1 = self.nodes[self.nodes_lookup[ndkey1]]
        else:nd1 = self.nodes[self._add_node(ndkey1)]
        if ndkey2 in self.nodes_lookup:
            nd2 = self.nodes[self.nodes_lookup[ndkey2]]
        else:nd2 = self.nodes[self._add_node(ndkey2)]
        newedge = geg.edge(nd1,nd2,**kwargs)
        newedge.index = self.edgecount
        ndir1,ndir2 = newedge._directions()
        newedge.one.connect(ndir1,newedge.two)
        newedge.two.connect(ndir2,newedge.one)
        self.edges_lookup[(ndkey1,ndkey2)] = newedge.index
        self.edges_lookup[(ndkey2,ndkey1)] = newedge.index
        self.edges.append(newedge)
        self.edgecount += 1
        return newedge.index

    # add a new edges to the graph, return indicies
    def _add_edges(self,ndkeys,**kwargs):
        edgexs = []
        for kdx in range(1,len(ndkeys)):
            ndkey1,ndkey2 = ndkeys[kdx-1],ndkeys[kdx]
            edgexs.append(self._add_edge(ndkey1,ndkey2,**kwargs))
        return edgexs

    # delete an existing edge from the graph
    def _del_edge(self,ndkey1,ndkey2):
        ekey = (ndkey1,ndkey2)
        if not ekey in self.edges_lookup:return
        edge = self.edges[self.edges_lookup[ekey]]
        edge.one.disconnect(edge.two)
        edge.two.disconnect(edge.one)
        del self.edges_lookup[ekey]
        del self.edges_lookup[ekey[::-1]]
        self.edges[edge.index] = None

    # delete existing nodes from the graph and replace all
    # connectivity with new edges to a new node
    # ndxs is a list of existing nodes indices being merged
    # nndx is the index of the new node which replaces them
    def _merge_nodes(self,ndxs,nndx,**kwargs):
        mnds = [self.nodes[x] for x in ndxs if not x == nndx]
        for ndx in ndxs:
            if ndx == nndx:continue
            for ndrk in list(self.nodes[ndx].ring.keys()):
                ekey = (ndrk,ndx)
                if ekey in list(self.edges_lookup.keys()):
                    eg = self.edges_lookup[ekey]
                    if eg is None:continue
                    iterp = self.edges[eg].interpolated
                    self._del_edge(*ekey)
                    if not ndrk in ndxs:
                        nd1,nd2 = self.nodes[nndx],self.nodes[ndrk]
                        newedge = edge(nd1,nd2,interpolated = iterp)
                        #newedge = edge(nd1,nd2,**kwargs)
                        self._add_edge(newedge)
            self._del_node(ndx)

    # return index of closest node to p within e, or None
    def _find_node(self,p,e):
        nps = [nd.p for nd in self.nodes]
        ndx = dpv.find_closest(p,nps,self.nodecount,1.0)
        if self.nodes[ndx].p.neighborhood(p,e):return ndx

    # return indices of all nodes within e of p
    def _find_nodes(self,p,e):
        within = []
        for ndx in range(self.nodecount):
            nd = self.nodes[ndx]
            if nd is None:continue
            if nd.p.neighborhood(p,e):
                within.append(ndx)
        return within

    # return index of closest node within a cone,
    # cone is pointed from o towards p,
    # has halfangle e, and ends at p,
    # or return None if no node exists
    def _find_node_cone(self,o,p,e):
        ca = dpr.deg(dpv.angle_from_xaxis_xy(dpv.v1_v2(o,p).normalize()))
        #best,margin = None,dpv.distance(o,p)
        best,margin = None,100000000000000000
        for ndx in range(self.nodecount):
            nd = self.nodes[ndx]
            tn = dpv.v1_v2(o,nd.p).normalize()
            npa = dpr.deg(dpv.angle_from_xaxis_xy(tn))
            if adist(ca,npa) < e:
                ndd = dpv.distance(o,nd.p)
                if ndd < margin:
                    best = ndx
                    margin = ndd
        return best

    # add a new edge to the graph, or return existing index
    # this function should do this safely, so the resulting graph
    # does not carry improper intersections!
    # return None if the desired edge could not be created properly
    def _edge(self,nkey1,nkey2,**kwargs):
        n1 = self.nodes_lookup[nkey1]
        n2 = self.nodes_lookup[nkey2]
        existing = self._find_edge(n1,n2)
        if not existing is None:return existing
        else:
            nd1,nd2 = self.nodes[n1],self.nodes[n2]
            newedge = edge(nd1,nd2,**kwargs)
            ipts = []
            ilys = []
            for edx in range(self.edgecount):
                eg = self.edges[edx]
                if eg is None:continue
                ipt = eg._tangents_intersect(newedge)
                if not ipt is None:
                    ily = eg._layers_intersect(newedge)
                    ilycnt = len(ily)
                    if ilycnt > 1:raise ValueError
                    if type(ipt) is type(tuple()):
                        if ilycnt == 0:
                            continue
                        print('overlapping intersection!')
                        return None
                        #pdb.set_trace()
                    if ily:
                        l = ily[0]
                        ilys.append(l)
                    elif eg.one.layer == eg.two.layer:
                        l = eg.one.layer
                        if newedge.one.layer == newedge.two.layer:
                            iptndxxs = self._node(node(ipt,layer = newedge.one.layer))
                            print('shit: just like you said')
                            ilys.append(newedge.one.layer)
                        else:
                            print('shit: layer ambiguity')
                            pdb.set_trace()
                    else:
                        print('shit: layer ambiguity')
                        pdb.set_trace()
                    iptndxs = self._node(node(ipt,layer = l))
                    iptndx = iptndxs[l]
                    self._split_edge(eg.one.index,eg.two.index,iptndx)
                    ipts.append(ipt)
            if not ipts:return self._add_edge(newedge,**kwargs)
            newedgexs = []
            ipts.insert(0,nd1.p)
            ilys.insert(0,nd1.layer)
            ipts.append(nd2.p)
            ilys.append(nd2.layer)
            siptxs = dpv.proximity_order_xy(nd1.p,ipts)
            for iptx in range(1,len(ipts)):
                ipt1,ipt2 = ipts[siptxs[iptx-1]],ipts[siptxs[iptx]]
                ily1,ily2 = ilys[siptxs[iptx-1]],ilys[siptxs[iptx]]
                n1 = self.nodes_lookup[(ipt1.x,ipt1.y,ily1)]
                n2 = self.nodes_lookup[(ipt2.x,ipt2.y,ily2)]
                nd1,nd2 = self.nodes[n1],self.nodes[n2]
                print('illlys',ilys,ily1,ily2)
                #pdb.set_trace()
                newedge = edge(nd1,nd2,**kwargs)
                newedgexs.append(self._add_edge(newedge,**kwargs))
            return newedgexs

    # return index of edge within connecting ndx1,ndx2, or None
    def _find_edge(self,ndx1,ndx2):
        if (ndx1,ndx2) in self.edges_lookup:
            return self.edges_lookup[(ndx1,ndx2)]

    # remove existing edge from ndx1 to ndx2
    # add two new edges, connecting n to ndx1 and to ndx2
    def _split_edge(self,ndx1,ndx2,newndx,**kwargs):
        sekey = self.edges_lookup[(ndx1,ndx2)]
        if not sekey is None:
            sedge = self.edges[sekey]
            kwargs['interpolated'] = sedge.interpolated
        else:
            print('IM BULLSHITTING OVER HERE')
            return
        self._del_edge(ndx1,ndx2)
        nd1,nd2 = self.nodes[ndx1],self.nodes[newndx]
        if nd1.p.near(nd2.p):pdb.set_trace()
        newedge = edge(nd1,nd2,**kwargs)
        self._add_edge(newedge)
        nd1,nd2 = self.nodes[ndx2],self.nodes[newndx]
        if nd1.p.near(nd2.p):pdb.set_trace()
        newedge = edge(nd1,nd2,**kwargs)
        self._add_edge(newedge)


def smatter():
    g = graph()
    g._node(node(dpv.vector(0,0,0)))
    for x in range(100):
        ndx = rm.choice(range(g.nodecount))
        rcnt = len(g.nodes[ndx].ring)
        if rcnt == 0:
            ndd = dpv.xhat.copy()
            ndd.rotate_z(dpr.rad(rm.choice(range(360))))
            ndd.scale_u(rm.choice([100,200,300]))
        elif rcnt == 1:
            ndir = next(iter(g.nodes[ndx].ring.values()))
            nda = ndir+180 if ndir < 180 else ndir - 180
            ndd = dpv.xhat.copy().rotate_z(dpr.rad(nda))
            ndd.scale_u(rm.choice([100,200,300]))
        elif rcnt == 2:
            r1,r2 = tuple(g.nodes[ndx].ring.values())
            mpt = (r1+r2)/2.0
            nda = mpt+180 if mpt < 180 else mpt - 180
            ndd = dpv.xhat.copy().rotate_z(dpr.rad(nda))
            ndd.scale_u(rm.choice([100,200,300]))
        elif rcnt == 3:
            t1,t2,t3 = tuple(g.nodes[ndx].ring.values())
            d1,d2,d3 = adist(t1,t2),adist(t2,t3),adist(t3,t1)
            if d1 > d2 and d1 > d3:nda = (t1+t2)/2.0
            elif d2 > d1 and d2 > d3:nda = (t2+t3)/2.0
            elif d3 > d1 and d3 > d2:nda = (t3+t1)/2.0
            ndd = dpv.xhat.copy().rotate_z(dpr.rad(nda))
            ndd.scale_u(rm.choice([100,200,300]))
        elif rcnt == 4:
            print('this node cannot be more connected!',ndx)
        #g._extrude_safe(ndx,ndd)
        g._extrude(ndx,ndd)
    #g._update()
    #ax = g.plot_xy()
    #ax.set_aspect('equal')
    #plt.show()
    return g


def ramp():
    g = graph()
    g._add_edge((-100,-100,0),(0,-100,0),interpolated = False)
    g._add_edge((0,-100,0),(100,0,0))
    g._add_edge((100,0,0),(0,100,0))
    g._add_edge((0,100,0),(-100,0,1))
    g._add_edge((-100,0,1),(0,-100,1))
    g._add_edge((0,-100,1),(100,-100,1),interpolated = False)
    g._add_edge((100,-100,1),(200,-100,1))
    g._add_edge((200,-100,1),(300,0,0))
    g._add_edge((300,0,0),(300,100,0))
    return g


def hairpin():
    g = graph()
    g._add_edge((0,0,0),(100,50,0))
    g._add_edge((100,50,0),(0,100,0))
    g._add_edge((0,100,0),(100,150,0))
    #g._add_edge((0,0,0),(-50,100,1))
    #g._add_edge((-50,100,2),(100,150,3))
    return g


def circle():
    g = graph()
    g._add_edge((0,0,0),(50,50,0),interpolated = False)
    #g._add_edge((0,0,0),(50,50,0))
    g._add_edge((50,50,0),(0,100,0),interpolated = False)
    #g._add_edge((50,50,0),(0,100,0))
    g._add_edge((0,100,0),(-50,50,0),interpolated = True)
    #g._add_edge((0,100,0),(-50,50,0))
    g._add_edge((-50,50,0),(0,0,0),interpolated = True)
    #g._add_edge((-50,50,0),(0,0,0))
    return g


def newcastle():
    g = graph()
    l,w = 200,200
    g._add_edge((0,0,0),(l*0.5,w*0.5,0),interpolated = True)
    g._add_edge((l*0.5,w*0.5,0),(0,w,0),interpolated = True)
    g._add_edge((0,w,0),(-l*0.5,l*0.5,0),interpolated = True)
    g._add_edge((-l*0.5,w*0.5,0),(0,0,0),interpolated = True)
    g._add_edge((0,0,0),(0,w*0.5,0),interpolated = True)
    return g


def eight():
    g = graph()
    r = 100
    g._add_edge((0,0,0),(r,0,0),interpolated = True)
    g._add_edge((r,0,0),(2*r,0,0),interpolated = True)
    g._add_edge((2*r,0,0),(2*r,r,0),interpolated = True)
    g._add_edge((2*r,r,0),(r,r,0),interpolated = True)
    g._add_edge((r,r,0),(0,r,0),interpolated = True)
    g._add_edge((0,r,0),(0,0,0),interpolated = True)
    g._add_edge((r,r,0),(r,0,0),interpolated = True)
    g._update()
    #ax = dtl.plot_axes()
    #ax = g.plot(ax)
    #ax.set_zlim([0,40])
    #plt.show()
    return g


def clover():
    g = graph()
    r = 100
    g._add_edge((0,0,0),( r,0,0),interpolated = True)
    g._add_edge((0,0,0),(-r,0,0),interpolated = True)
    g._add_edge((0,0,0),(0, r,0),interpolated = True)
    g._add_edge((0,0,0),(0,-r,0),interpolated = True)
    g._add_edge(( r,0,0),(2*r,-r,0),interpolated = True)
    g._add_edge((2*r,-r,0),(3*r,0,0),interpolated = True)
    g._add_edge((3*r,0,0),(2*r,r,0),interpolated = True)
    g._add_edge((2*r,r,0),(r,0,0),interpolated = True)
    g._update()
    #ax = dtl.plot_axes()
    #ax = g.plot(ax)
    #ax.set_zlim([0,40])
    #plt.show()
    return g


def opass():
    g = graph()
    bnd = dpr.square(100,100)
    oprgn1 = grg.overpass(bnd)
    oprgn1._graph(g)
    bnd = dpr.square(100,100,dpv.vector(500,0,0),dpr.rad(75))
    oprgn2 = grg.overpass(bnd)
    oprgn2._graph(g)
    bnd = dpr.square(100,100,dpv.vector(250,200,0),dpr.rad(35))
    oprgn3 = grg.overpass(bnd)
    oprgn3._graph(g)
    oprgn1._connect(oprgn2,g)
    oprgn2._connect(oprgn3,g)
    oprgn3._connect(oprgn1,g)
    g._update()
    ax = dtl.plot_axes()
    ax = oprgn1.plot(ax)
    ax = oprgn2.plot(ax)
    ax = oprgn3.plot(ax)
    ax = g.plot(ax)
    ax.set_zlim([0,40])
    plt.show()
    return g


def generate_graph():
    #g = graph()
    #g = smatter()
    #g = lsystem_graph()
    g = ramp()
    #g = opass()
    #g = hairpin()
    g._update()
    ax = g.plot()
    plt.show()
    return g
| mit | -7,481,076,039,353,380,000 | 33.39498 | 86 | 0.528095 | false |
carrdelling/topcoder | dp/prime_soccer.py | 1 | 2017 | """
Problem name: PrimeSoccer
Class: SRM 422, Division I Level One
Description: https://community.topcoder.com/stat?c=problem_statement&pm=10240
"""
import math
import numpy as np
def is_prime(a):
""" Check (in O(N)) whether a is prime or not """
if a < 2:
return False
for i in range(2, int(math.sqrt(a)) +1):
if a % i == 0:
return False
return True
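# e.g. is_prime(17) -> True, is_prime(18) -> False (illustrative values)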
def get_distribution(skill, rounds, dist):
""" Computes the full distribution of possible scores given a number of
rounds left and a skill value for the team
"""
if rounds == 0:
return dist
dist[19-rounds] = dist[18-rounds] * skill
for score in sorted(dist, reverse=True)[1:-1]:
prob = (dist[score] * (1.0 - skill)) + (dist[score-1] * skill)
dist[score] = prob
dist[0] *= (1.0 - skill)
return get_distribution(skill, rounds - 1, dist)
def prime_score(skill):
""" Compute the probability that a team reaches a prime result given its
skill score
"""
dist = {0: 1.0 - skill, 1: skill}
dist = get_distribution(skill, 17, dist)
prime = 0.0
composite = 0.0
for score in dist:
if is_prime(score):
prime += dist[score]
else:
composite += dist[score]
return prime / (prime + composite)
def solve(args):
""" Compute the prime probability for each team skill, and aggregate them
"""
team_a, team_b = args
prime_a = prime_score(team_a / 100)
prime_b = prime_score(team_b / 100)
return prime_a + ((1.0 - prime_a) * prime_b)
if __name__ == "__main__":
test_cases = [((50, 50), 0.5265618908306351),
((100, 100), 0.0),
((12, 89), 0.6772047168840167)
]
for index, case in enumerate(test_cases):
output = solve(case[0])
assert np.isclose(output, case[1]), 'Case {} failed: {} != {}'.format(
index, output, case[1])
else:
print('All tests OK')
| apache-2.0 | 3,778,689,198,419,414,500 | 21.411111 | 78 | 0.5647 | false |
enthought/python-analytics | python_analytics/tests/test_events.py | 1 | 3549 | from __future__ import absolute_import, unicode_literals
import unittest
from six import text_type
from ..event_encoder import CustomDimension, CustomMetric
from ..events import Event
class TestEvent(unittest.TestCase):
def test_event_no_label_value(self):
# Given
category = 'category'
action = 'action'
event = Event(
category=category, action=action)
expected = {
't': 'event',
'ec': category,
'ea': action,
}
# When
event_dict = event.encode()
# Then
self.assertEqual(event_dict, expected)
def test_event_label_no_value(self):
# Given
category = 'category'
action = 'action'
label = 'an-event-label'
event = Event(
category=category, action=action, label=label)
expected = {
't': 'event',
'ec': category,
'ea': action,
'el': label,
}
# When
event_dict = event.encode()
# Then
self.assertEqual(event_dict, expected)
def test_event_value_no_label(self):
# Given
category = 'category'
action = 'action'
value = 42
event = Event(
category=category, action=action, value=value)
expected = {
't': 'event',
'ec': category,
'ea': action,
'ev': value,
}
# When
event_dict = event.encode()
# Then
self.assertEqual(event_dict, expected)
def test_event_label_value(self):
# Given
category = 'category'
action = 'action'
label = 'Another event!'
value = 42
event = Event(
category=category, action=action, label=label, value=value)
expected = {
't': 'event',
'ec': category,
'ea': action,
'el': label,
'ev': value,
}
# When
event_dict = event.encode()
# Then
self.assertEqual(event_dict, expected)
def test_event_dimensions(self):
# Given
class MyEvent(Event):
some_dimension = CustomDimension(1)
dimension_value = 'some-value'
category = 'category'
action = 'action'
label = 'Another event!'
value = 42
event = MyEvent(
category=category, action=action, label=label, value=value,
some_dimension=dimension_value)
expected = {
't': 'event',
'ec': category,
'ea': action,
'el': label,
'ev': value,
'cd1': dimension_value,
}
# When
event_dict = event.encode()
# Then
self.assertEqual(event_dict, expected)
def test_event_metrics(self):
# Given
class MyEvent(Event):
some_metric = CustomMetric(5)
metric_value = 28
category = 'category'
action = 'action'
label = 'Another event!'
value = 42
event = MyEvent(
category=category, action=action, label=label, value=value,
some_metric=metric_value)
expected = {
't': 'event',
'ec': category,
'ea': action,
'el': label,
'ev': value,
'cm5': metric_value,
}
# When
event_dict = event.encode()
# Then
self.assertEqual(event_dict, expected)
| bsd-3-clause | -7,698,650,168,134,521,000 | 23.142857 | 71 | 0.494224 | false |
hookehu/utility | editors/studio/main.py | 1 | 3082 | #-*- coding:utf-8 -*-
import wx
import os
import setting
class MyFrame(wx.Frame):
"""We simple derive a new class of Frame"""
def __init__(self, parent, title):
wx.Frame.__init__(self, parent, title=title,size=(600,600))
self.cur_frame = None
self.init_panels()
self.init_menu()
self.init_statusbar()
self.Show(True)
self.Bind(wx.EVT_SIZE, self.on_size)
def on_size(self, evt):
if self.cur_frame:
self.cur_frame.SetSize(self.Size)
def init_panels(self):
#self.tree_panel = TreePanel(self)
pass
def gen_on_menu(self, container, k):
        def func(event):
container.on_menu(k)
return func
def init_menu(self):
filemenu = wx.Menu()
for k, v in setting.APPS.items():
menu = filemenu.Append(wx.ID_ANY, k, " ")
print menu
self.Bind(wx.EVT_MENU, self.gen_on_menu(self, k), menu)
        menu_exit = filemenu.Append(wx.ID_ANY, "Exit", "Terminate the program")
        filemenu.AppendSeparator()
        menu_about = filemenu.Append(wx.ID_ANY, "About", "Information about this program")  # set up the menu entries
        menuBar = wx.MenuBar()
        menuBar.Append(filemenu, u"Editor")
        self.SetMenuBar(menuBar)  # create the menu bar
        self.Bind(wx.EVT_MENU, self.on_exit, menu_exit)  # connect the menu event to its handler
def init_statusbar(self):
        self.CreateStatusBar()  # create the status bar at the bottom of the window
    def on_about(self,e):  # handler for the About menu item
        dlg = wx.MessageDialog(self, "A small text editor", "About sample Editor", wx.OK)  # create a dialog with an OK button
        dlg.ShowModal()  # show the dialog
        dlg.Destroy()  # destroy it when done
def on_exit(self,e):
self.Close(True)
def on_menu(self, key):
pkg = setting.APPS.get(key, None)
print key, pkg
if pkg:
p = __import__(pkg)
if self.cur_frame:
self.cur_frame.Close()
self.cur_frame = None
self.cur_frame = p.init(self)
def on_open(self,e):
"""open a file"""
self.dirname = ''
        dlg = wx.FileDialog(self, "Choose a file", self.dirname, "", "*.*", wx.FD_OPEN)  # open a file-chooser dialog
if dlg.ShowModal() == wx.ID_OK:
self.filename = dlg.GetFilename()
self.dirname = dlg.GetDirectory()
self.address = os.path.join(self.dirname,self.filename)
f = open(self.address,"r")
            file = (f.read()).decode(encoding='utf-8')  # decode so non-ASCII text is read correctly
f.close()
self.control.Clear()
            self.control.AppendText(file)  # show the opened file's contents in the text area
dlg.Destroy()
def on_save(self, e):
        data = (self.control.GetValue()).encode(encoding="utf-8")  # encode so non-ASCII text is stored correctly
        f = open(self.address, 'w')
        f.write(data)
        f.close()  # write the text area's contents and close the file
        dlg = wx.MessageDialog(self, u"File saved successfully", u"Message", wx.OK)
dlg.ShowModal()
dlg.Destroy()
if __name__ == "__main__":
app = wx.App(False)
    frame = MyFrame(None, 'Editor')
app.MainLoop() | gpl-2.0 | 8,455,450,927,402,620,000 | 28.393617 | 100 | 0.622013 | false |
alirizakeles/tendenci | tendenci/apps/base/management/commands/downgrade_user.py | 1 | 1864 | import sys
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import Group, Permission, User
from django.core.exceptions import ObjectDoesNotExist
class Command(BaseCommand):
"""
Downgrades a user to just a regular user
This command does the following to the user account:
* Removes them from all groups
* Removes all user level permissions
* Sets is_staff to 0
* Sets is_superuser to 0
* Sets is_active to 1
* Removes them from all tendenci user_groups
"""
def add_arguments(self, parser):
parser.add_argument('--username',
dest='username',
required=True,
help='Username of the user account being downgraded')
def handle(self, *args, **options):
from tendenci.apps.user_groups.models import GroupMembership
verbosity = options['verbosity']
username = options['username']
if not username:
raise CommandError('downgrade_user: --username parameter is required')
# get the user
try:
u = User.objects.get(username=username)
except ObjectDoesNotExist:
print 'User with username (%s) could not be found' % username
return
# Remove the user from all groups
u.groups.clear()
# Remove all user-level permissions
u.user_permissions.clear()
# Reset permission bits
u.is_staff = False
u.is_superuser = False
u.is_active = True
u.save()
# Remove the tendenci group permissions
group_memberships = GroupMembership.objects.filter(member=u)
for m in group_memberships:
m.delete()
if verbosity >= 2:
print 'Done downgrading user (%s).' % u
| gpl-3.0 | -4,922,462,935,471,569,000 | 29.557377 | 82 | 0.626073 | false |
FederatedAI/FATE | examples/pipeline/feldman_verifiable_sum/pipeline-feldman-verifiable-sum.py | 1 | 3525 | #
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.backend.pipeline import PipeLine
from pipeline.component import Reader
from pipeline.component import DataIO
from pipeline.component import FeldmanVerifiableSum
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config
from pipeline.runtime.entity import JobParameters
def main(config="../../config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
parties = config.parties
guest = parties.guest[0]
hosts = parties.host
backend = config.backend
work_mode = config.work_mode
guest_train_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
host_train_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
# initialize pipeline
pipeline = PipeLine()
# set job initiator
pipeline.set_initiator(role="guest", party_id=guest)
# set participants information
pipeline.set_roles(guest=guest, host=hosts)
# define Reader components to read in data
reader_0 = Reader(name="reader_0")
# configure Reader for guest
reader_0.get_party_instance(role="guest", party_id=guest).component_param(table=guest_train_data)
# configure Reader for host
reader_0.get_party_instance(role="host", party_id=hosts).component_param(table=host_train_data)
dataio_0 = DataIO(name="dataio_0")
# get and configure DataIO party instance of guest
dataio_0.get_party_instance(role="guest", party_id=guest).component_param(with_label=False, output_format="dense")
# get and configure DataIO party instance of host
dataio_0.get_party_instance(role="host", party_id=hosts).component_param(with_label=False)
# define FeldmanVerifiableSum components
feldmanverifiablesum_0 = FeldmanVerifiableSum(name="feldmanverifiablesum_0")
feldmanverifiablesum_0.get_party_instance(role="guest", party_id=guest).component_param(sum_cols=[1, 2, 3], q_n=6)
feldmanverifiablesum_0.get_party_instance(role="host", party_id=hosts).component_param(sum_cols=[1, 2, 3], q_n=6)
# add components to pipeline, in order of task execution.
pipeline.add_component(reader_0)
pipeline.add_component(dataio_0, data=Data(data=reader_0.output.data))
pipeline.add_component(feldmanverifiablesum_0, data=Data(data=dataio_0.output.data))
# compile pipeline once finished adding modules, this step will form conf and dsl files for running job
pipeline.compile()
# fit model
job_parameters = JobParameters(backend=backend, work_mode=work_mode)
pipeline.fit(job_parameters)
if __name__ == "__main__":
parser = argparse.ArgumentParser("PIPELINE DEMO")
parser.add_argument("-config", type=str,
help="config file")
args = parser.parse_args()
if args.config is not None:
main(args.config)
else:
main()
| apache-2.0 | 3,601,522,424,176,903,000 | 37.736264 | 118 | 0.718865 | false |
amluto/time_domain_tools | src/td_analysis.py | 1 | 6199 | # Time domain tools for CASPER
# Copyright (C) 2011 Massachusetts Institute of Technology
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# version 2 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
"""Time-domain analysis functions"""
from __future__ import division
import scipy.signal
import math
import numpy
import weakref
def _asfloat(x):
"""Internal helper to coerce an array to floating-point."""
if isinstance(x, numpy.ndarray) and x.dtype == numpy.float64:
return x
else:
return numpy.asarray(x, numpy.float64)
def _as_float_or_complex(x):
"""Internal helper to coerce an array to floating-point or complex."""
if (isinstance(x, numpy.ndarray)
and x.dtype in (numpy.float64, numpy.complex128)):
return x
else:
return numpy.asarray(x, numpy.complex128)
_time_coord_cache = weakref.WeakValueDictionary()
def _time_coord(fs, num_samples, offset = 0, dec = 1):
spec = (num_samples, offset, dec)
try:
return _time_coord_cache[spec]
except KeyError:
ret = numpy.arange(offset, num_samples + offset, dec) / fs
ret.setflags(write = False)
ret = ret[:] # Make sure it never becomes writeable.
_time_coord_cache[spec] = ret
return ret
def mean_power(signal):
signal = _as_float_or_complex(signal)
if signal.ndim != 1:
raise TypeError, 'signal must be one-dimensional'
# This is the least inefficient way I can think of.
return numpy.linalg.norm(signal, ord = 2)**2 / len(signal)
class ToneEstimate(object):
def __init__(self, estimator, f, data, tone):
self.__est = estimator
self.__fs = estimator._fs
self.__f = f
self.__datalen = len(data)
self.__t_data = None
self.__t_tone = None
self.tone = tone
self.total_power = mean_power(data)
self.inband_power = mean_power(self.tone) / 2
self.inband_noise = ((self.total_power - self.inband_power)
/ (1 - estimator.fractional_band)
* estimator.fractional_band)
self.est_tone_power = self.inband_power - self.inband_noise
# We compute t_data and t_tone as lazily as possible.
@property
def t_data(self):
if self.__t_data is None:
self.__t_data = _time_coord(self.__fs, self.__datalen)
return self.__t_data
@property
def t_tone(self):
if self.__t_tone is None:
self.__t_tone = _time_coord(self.__fs,
len(self.tone) * self.__est._dec,
self.__est.offset, self.__est._dec)
return self.__t_tone
class ToneEstimator(object):
def __init__(self, fs, bw):
self._fs = fs
self._bw = bw
self._dec = 1 # Decimation factor
# Generate a symmetric FIR filter.
# Some scipy versions give a bogus warning. Ignore it.
self._nyquist = fs / 2
cutoff = bw / self._nyquist
firlen = 10.0 / cutoff
# Round to a power of 2 (so fftconvolve can be super fast)
firlen = 2**int(numpy.ceil(numpy.log2(firlen)))
old_err = numpy.seterr(invalid='ignore')
self._filter = scipy.signal.firwin(
firlen,
cutoff = cutoff)
numpy.seterr(**old_err)
self.offset = (len(self._filter) - 1) / 2
self.fractional_band = bw / self._nyquist
def estimate_tone(self, f, data):
"""Returns a ToneEstimate for the cosine wave at frequency f.
Note that the mean square of the tone is *twice* the mean square of
the original cosine wave."""
f = float(f)
data = _asfloat(data)
if data.ndim != 1:
raise TypeError, 'data must be one-dimensional'
baseband = 2 * data * numpy.exp(-2j * math.pi * f / self._fs
* numpy.arange(0, len(data)))
if len(data) < len(self._filter):
            raise ValueError(
                'You need at least %d samples for specified bandwidth'
                % len(self._filter))
tone = scipy.signal.fftconvolve(baseband, self._filter, mode='valid')
if self._dec != 1:
tone = tone[::self._dec]
return ToneEstimate(self, f, data, tone)
class ToneDecimatingEstimator(ToneEstimator):
def __init__(self, fs, bw):
super(ToneDecimatingEstimator, self).__init__(fs, bw)
cutoff = self._bw / self._nyquist
self._dec = int(2.0 / cutoff) # Oversample by 2 to minimize aliasing.
def estimate_tone(self, f, data):
"""Returns a ToneEstimate for the cosine wave at frequency f.
Note that the mean square of the tone is *twice* the mean square of
the original cosine wave."""
f = float(f)
data = _asfloat(data)
if data.ndim != 1:
raise TypeError, 'data must be one-dimensional'
baseband = 2 * data * numpy.exp(-2j * math.pi * f / self._fs
* numpy.arange(0, len(data)))
if len(data) < len(self._filter):
            raise ValueError(
                'You need at least %d samples for specified bandwidth'
                % len(self._filter))
valid_len = (len(data) - len(self._filter) + self._dec) // self._dec
tone = numpy.zeros(valid_len, dtype = baseband.dtype)
for i in xrange(valid_len):
pos = self._dec * i
tone[i] = numpy.dot(self._filter,
baseband[pos:pos+len(self._filter)])
return ToneEstimate(self, f, data, tone)
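# Illustrative usage (not part of the original module; the signal below is an
# assumption): estimate the power of a 1 kHz tone in data sampled at 100 kS/s.
#     import numpy
#     fs = 100e3
#     t = numpy.arange(int(fs)) / fs
#     data = numpy.cos(2 * math.pi * 1e3 * t)
#     est = ToneEstimator(fs, bw=100.0).estimate_tone(1e3, data)
#     est.est_tone_power   # ~0.5, the mean square of a unit-amplitude cosine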
| gpl-2.0 | 2,539,962,466,386,287,600 | 34.626437 | 77 | 0.589127 | false |
Pharylon/PiClock | clock.py | 1 | 1209 | import datetime
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
#GPIO.cleanup()
ypins = [17, 18, 27, 22, 23, 24, 25]
xpins = [5, 6, 12]
def setArray(myInt, array):
asBinary = "{0:b}".format(myInt).zfill(7)
for i in range(0, 7):
if (asBinary[i] == "0"):
array[i] = False
else:
array[i] = True
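# Example (illustrative): setArray(5, a) leaves a == [False, False, False,
# False, True, False, True] -- the 7-bit binary form of 5, MSB first.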
for i in xpins:
GPIO.setup(i, GPIO.IN)
#GPIO.output(i, False)
for i in ypins:
GPIO.setup(i, GPIO.IN)
#GPIO.output(i, False)
grid = [[0 for x in range(7)] for x in range(3)]
'''
GPIO.setup(17, GPIO.OUT)
GPIO.setup(5, GPIO.OUT)
GPIO.output(17, False)
GPIO.output(5, True)
time.sleep(1)
'''
while True:
now = datetime.datetime.now()
setArray(now.hour, grid[0])
setArray(now.minute, grid[1])
setArray(now.second, grid[2])
for i in range(0, 7):
for j in range(0, 3):
if (grid[j][i]):
GPIO.setup(xpins[j], GPIO.OUT)
GPIO.setup(ypins[i], GPIO.OUT)
GPIO.output(xpins[j], True)
GPIO.output(ypins[i], False)
GPIO.setup(xpins[j], GPIO.IN)
GPIO.setup(ypins[i], GPIO.IN)
GPIO.cleanup()
| mit | -8,457,110,341,301,701,000 | 19.981818 | 49 | 0.543424 | false |
hongta/practice-python | data_structures/tree/binary_search_tree.py | 1 | 5713 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from tree_node import TreeNode
class BinarySearchTree(object):
def __init__(self):
        self._root = None
##################
## Iterator method
def __iter__(self):
current = self._find_minmum(self._root)
# and then, until we have reached the end:
while current is not None:
yield current
# in order to get from one Node to the next one:
current = self.successor(current)
def _replace_with(self, old_node, new_node):
if not old_node:
return False
if old_node.parent:
if old_node.parent.left == old_node:
old_node.parent.set_children(left=new_node)
else:
old_node.parent.set_children(right=new_node)
else:
if new_node:
new_node.parent = None
self._root = new_node
return True
def insert(self, k, payload=None):
# tree is empty construct the tree
if not self._root:
self._root= TreeNode(k,payload)
else:
self._insert(self._root, k, payload)
def _insert(self, tree_node, k, payload=None):
if not tree_node:
return TreeNode(k, payload)
if k < tree_node.key:
tree_node.set_children(left=self._insert(tree_node.left, k, payload))
elif k > tree_node.key:
tree_node.set_children(right=self._insert(tree_node.right, k, payload))
else:
tree_node.payload = payload
return tree_node
def remove_node(self, node):
        if node is None:
return
node.key = node.payload = node.left = node.right = node.parent = None
del node
def delete(self, k):
node = self.search(k)
if not node:
return
p = node.parent
if node.left and node.right:
# if the node has two children, we replace the node's key and payload
# with minnum of the right substree
min_on_right = self._find_minmum(node.right)
min_parent = min_on_right.parent
node.key = min_on_right.key
node.payload = min_on_right.payload
if min_on_right != node.right:
                # promote min's right child to be min's parent's left child
min_parent.set_children(left=min_on_right.right)
else:
node.set_children(right=min_on_right.right)
self.remove_node(min_on_right)
else:
# if the node has 0-1 child, we delete this node
old_node = node
if not node.left and not node.right:
# no child
node = None
elif node.left:
# has one left child
node.left.parent = p
node = node.left
elif node.right:
# has one right child
node.right.parent = p
node = node.right
if not p:
#trying to delete root node
self._root = node
else:
if p.left == old_node:
p.left = node
else:
p.right = node
self.remove_node(old_node)
def find_minnum(self):
return self._find_minmum(self._root)
def _find_minmum(self, node):
if not node:
return None
while node.left:
node = node.left
return node
def find_maxmum(self):
return self._find_maxmum(self._root)
def _find_maxmum(self, node):
if not node:
return None
while node.right:
node = node.right
return node
def traverse(self):
return self._traverse(self._root)
# Python 2 version
def _traverse(self, node):
if node:
if node.left:
for n in self._traverse(node.left):
yield n
yield node
if node.right:
for n in self._traverse(node.right):
yield n
# Python 3 version
# def _traverse(self, node):
# if node:
# yield from self._traverse(node.left)
# yield node
# yield from self._traverse(node.right)
def successor(self, node):
if not node:
return None
if node.right:
return self._find_minmum(node.right)
p = node.parent
while p and p.right == node:
node = p
p = p.parent
return p
def predecessor(self, node):
if not node:
return None
if node.left:
return self._find_maxmum(node.left)
p = node.parent
while p and p.left == node:
node = p
p = p.parent
return p
    def height(self):
        def h(n):
            return 0 if not n else 1 + max(h(n.left), h(n.right))
        return h(self._root)
def search(self, k):
return self._search(self._root, k)
def _search(self, node, k):
if not node:
return None
if k == node.key:
return node
if k < node.key:
return self._search(node.left, k)
else:
return self._search(node.right, k)
    def count(self):
        return sum(1 for _ in self.traverse())
if __name__ == "__main__":
t = BinarySearchTree()
# t.insert(3)
# t.insert(8)
# t.insert(12)
# t.insert(1)
# t.insert(15)
# t.insert(7)
data = [30, 25, 49, 35, 68, 33, 34, 38, 40, 37, 36]
for i in data:
t.insert(i)
for v in t.traverse():
print v.key
d = t._find_maxmum(t._root)
while d:
print d.key
d = t.successor(d)
| mit | 3,723,569,289,260,304,000 | 25.086758 | 83 | 0.501663 | false |
evite/nudge | nudge/automagic/scribes/python_stubs.py | 1 | 3903 | #
# Copyright (C) 2011 Evite LLC
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from nudge.automagic.scribes.default import DefaultGenerator, get_template
from nudge.utils import Dict, breakup_path
class PythonStubGenerator(DefaultGenerator):
extension = 'py'
template = get_template('python.txt')
def _prepare_data(self, project):
def arg_string(endpoint):
args = []
args.extend([arg_repr(arg) for arg in endpoint.sequential])
args.extend([arg_repr(arg, True) for arg in endpoint.named])
return ', '.join(args)
def arg_repr(arg, named=False):
if named:
return '='.join([str(arg.name), str(None)])
return arg.name
modules = {}
for section in project.sections:
for ep in section.endpoints:
# -- module_name and class_name can both be ''. They'll be put
# in the default module as simple functions
# -- module_name..function_name means a module level function
# in the given module
# -- something.otherthing = default module, something =
# class_name, otherthing = function_name
# -- something = default module, module level function called
# something
module, class_name, function = breakup_path(ep.function_name)
current = (modules.setdefault(module,{})
.setdefault(class_name, {})
.setdefault(function,
Dict({'sequential':[],
'named':{}}))
)
# Preserve order...it's super important
if len(ep.sequential) > len(current.sequential):
current.sequential = ep.sequential
func_desc = dict([(arg.name, arg) for arg in current.named])
current.named.update(func_desc)
del project['sections']
module_list = []
for module, classes in modules.iteritems():
module_dict = Dict({
'module_name':module,
'classes':[],
'project':project
})
for class_name, endpoints in classes.iteritems():
data = [{'function_name':name, 'args':arg_string(args)}
for name, args in endpoints.iteritems()]
class_name = class_name or False
class_desc = [{"name":class_name, "endpoints":data}]
module_dict.classes.extend(class_desc)
module_list.append(Dict(module_dict))
return module_list
def generate(self, project):
module_list = self._prepare_data(Dict(project))
for module in module_list:
# functions without modules go into the default file
output_file = self.output_file
# otherwise they go into their specified module file
if module.module_name:
output_file = self._filepath(module.module_name)
self._render_and_write(self.template, module, output_file)
| lgpl-2.1 | 3,267,725,642,107,230,700 | 44.383721 | 79 | 0.578786 | false |
joelverhagen/PingdomBackup | pingdombackup/tool.py | 1 | 3294 | import argparse
import sys
import logging
import pkg_resources
from . import __version__
from .PingdomBackup import PingdomBackup
def tool_main():
# this is done without ArgumentParser so required args are not enforced
if '-v' in sys.argv or '--version' in sys.argv:
print(__version__)
sys.exit(0)
# initialize the parser
parser = argparse.ArgumentParser(
prog='pingdombackup',
description='Backup Pingdom result logs to a SQLite database.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
argument_default=argparse.SUPPRESS)
# meta arguments
parser.add_argument('-v', '--version',
dest='version', action='store_true', default=False,
help='show the version and exit')
# required arguments
parser.add_argument('-e', '--email',
dest='email', required=True,
help='your Pingdom email address (used for logging in)')
parser.add_argument('-p', '--password',
dest='password', required=True,
help='your Pingdom password (used for logging in)')
parser.add_argument('-a', '--app-key',
dest='app_key', required=True,
help='a valid Pingdom API application key, see: https://my.pingdom.com/account/appkeys')
parser.add_argument('-d', '--db-path',
dest='db_path', default='pingdom.db',
help='a path to the SQLite database used for storage')
# conditionally required arguments
parser.add_argument('-n', '--check-name',
dest='check_name', default=None,
help='the name of the check to update')
# optional arguments
    parser.add_argument('--offline-check',
dest='offline_check', action='store_true', default=False,
help='get the check ID by name from the database, instead of the Pingdom API')
parser.add_argument('--no-update-results',
dest='no_update_results', action='store_true', default=False,
help='do not update the results for the specified check')
parser.add_argument('--update-probes',
dest='update_probes', action='store_true', default=False,
help='update the probes')
parser.add_argument('--update-checks',
dest='update_checks', action='store_true', default=False,
help='update the checks for your account')
parser.add_argument('--verbose',
dest='verbose', action='store_true', default=False,
help='trace progress')
# parse
args = parser.parse_args()
if not args.no_update_results and args.check_name is None:
parser.error('-n/--check-name is required when updating results')
if args.verbose:
logger = logging.getLogger('PingdomBackup')
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.INFO)
pb = PingdomBackup(args.email, args.password, args.app_key, args.db_path)
if args.update_probes:
pb.update_probes()
if args.update_checks or (not args.no_update_results and not args.offline_check):
pb.update_checks()
if not args.no_update_results:
check = pb.get_check_by_name(args.check_name)
if check is None:
parser.error('no check with name "{0}" was found'.format(args.check_name))
pb.update_results(check)
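# Illustrative invocation (credential values are placeholders):
#   pingdombackup -e [email protected] -p secret -a APPKEY -n "My Check" -d pingdom.db --verbose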
if __name__ == '__main__':
tool_main()
| mit | -6,555,140,197,749,762,000 | 36.431818 | 96 | 0.649059 | false |
z0rr0/eshop | shop/sales/admin.py | 1 | 1097 | from django.contrib import admin
from .models import Category, Product, Order
class ProductAdmin(admin.ModelAdmin):
"""docstring for ProductAdmin"""
list_display = ('name', 'category', 'price')
search_fields = ('name', 'desc')
list_filter = ('category', 'modified')
class OrderAdmin(admin.ModelAdmin):
"""docstring for OrderAdmin"""
def order_products(order):
names = []
for ps in order.productset_set.all():
names.append("{0} [{1}]".format(ps.product.name, ps.number))
return '; '.join(names)
def total(order):
return order.total()
def make_sent(self, request, queryset):
queryset.update(status=1)
def make_received(self, request, queryset):
queryset.update(status=2)
list_display = ('id', 'status', 'customer', order_products, total, 'modified')
search_fields = ('desc',)
list_filter = ('status', 'modified', 'created')
actions = ('make_sent', 'make_received')
admin.site.register(Category)
admin.site.register(Product, ProductAdmin)
admin.site.register(Order, OrderAdmin)
| mit | -7,115,111,287,846,792,000 | 28.648649 | 82 | 0.64722 | false |
domthu/gasistafelice | gasistafelice/base/backends.py | 1 | 5773 | from django.db.models import Model
from django.contrib.contenttypes.models import ContentType
import permissions.utils
from permissions.models import ObjectPermission
from gasistafelice.auth.models import GlobalPermission
class DummyBackend(object):
"""A dummy authorization backend intended only for development purposes.
Using this backend, permission checks always succeed ! ;-)
"""
supports_object_permissions = True
supports_anonymous_user = True
supports_inactive_user = True
def authenticate(self, username, password):
return None
def has_perm(self, user_obj, perm, obj=None):
return True
class ObjectPermissionsBackend(object):
"""An authorization backend for Django for role-based permission checking.
Support global (per-model) and local (per-instance) Permissions.
Use it together with the default ModelBackend like this:
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'gasistafelice.base.backends.ObjectPermissionsBackend',
)
Then you can use it like:
user.has_perm("view", your_object)
where `your_object` can be a ContentType instance (if you want to check global permissions)
or a model instance (if you want to check local permissions).
"""
supports_object_permissions = True
supports_anonymous_user = True
supports_inactive_user = True
def authenticate(self, username, password):
return None
def get_group_permissions(self, user_obj, obj=None):
"""
Returns the set of Permissions (locals and globals) this User has been granted
through his/her Groups (via the Roles assigned to them).
If the `obj` argument is a model (actually, a ContentType instance), all (global) Permissions for that model are returned.
If the `obj` argument is a model instance all (local) Permissions for that instance are returned.
"""
# iterate on each Group the User belongs to
roles = []
groups = user_obj.groups.all()
for group in groups:
roles.extend(permissions.utils.get_roles(group))
if isinstance(obj, ContentType): # `obj` is a model class, so check for global Permissions for this model
perms = GlobalPermission.objects.filter(content_type=obj, role__in=roles)
elif isinstance(obj, Model) : # `obj` is a model instance, so check for local Permissions for this instance
ct = ContentType.objects.get_for_model(obj)
perms = ObjectPermission.objects.filter(content_type=ct, content_id=obj.id, role__in=roles)
else: # `obj` is neither a model class nor a model instance (e.g. obj == None), so listing Permissions is meaningless
raise TypeError, "Can't get permissions for the provided object."
return perms
def get_all_permissions(self, user_obj, obj=None):
"""
Returns the set of all Permissions (locals or globals) this User has been granted
(directly, via Roles assigned to him/her, or indirectly via those assigned to the Groups he/she belongs to).
If the `obj` argument is a model (actually, a ContentType instance), all (global) Permissions for that model are returned.
If the `obj` argument is a model instance all (local) Permissions for that instance are returned.
"""
# retrieve all the Roles assigned to the User (directly or indirectly)
roles = permissions.utils.get_roles(user_obj)
if isinstance(obj, ContentType): # `obj` is a model class, so check for global Permissions for this model
perms = GlobalPermission.objects.filter(content_type=obj, role__in=roles)
elif isinstance(obj, Model) : # `obj` is a model instance, so check for local Permissions for this instance
ct = ContentType.objects.get_for_model(obj)
perms = ObjectPermission.objects.filter(content_type=ct, content_id=obj.id, role__in=roles)
else: # `obj` is neither a model class nor a model instance (e.g. obj == None), so listing Permissions is meaningless
raise TypeError, "Can't get permissions for the provided object."
return perms
def has_perm(self, user_obj, perm, obj=None):
"""Checks whether a User has a global (local) Permission on a model (model instance).
        This should be the primary method to check whether a User has a certain Permission.
Parameters
==========
perm
The codename of the Permission which should be checked.
user_obj
The User for which the Permission should be checked.
obj
The Object (either a model or model instance) for which the Permission should be checked.
"""
        # an anonymous or inactive User has no Permissions
        if user_obj.is_anonymous() or not user_obj.is_active:
return False
if isinstance(obj, ContentType): # `obj` is a model class, so check for global Permissions for this model
return perm in self.get_all_permissions(user_obj, obj)
elif isinstance(obj, Model) : # `obj` is a model instance, so check for local Permissions for this instance
return permissions.utils.has_permission(obj, user_obj, perm)
else: # `obj` is neither a model class nor a model instance (e.g. obj == None), so Permissions check is meaningless
raise TypeError, "Can't check permissions for the provided object." | agpl-3.0 | 7,847,195,928,386,982,000 | 44.464567 | 138 | 0.652347 | false |
tuxpiper/cloudcast | tests/template1.rsc.py | 1 | 2291 | '''
Test/sample cloudcast template
@author: David Losada Carballo <[email protected]>
'''
from cloudcast.template import *
from cloudcast.library import stack_user
from _context import stack
from cloudcast.iscm.cfninit import CfnInit
from cloudcast.iscm.shell import Shell
keyName = "ec2deploy"
PreciseAMIs = Mapping({
"us-east-1" : { "ebs": "ami-0b9c9f62", "instance": "ami-6f969506" },
"us-west-2" : { "ebs": "ami-6335a453", "instance": "ami-4933a279" }
})
# SQS queues example
SQSQueue1 = Resource("AWS::SQS::Queue")
SQSQueue2 = Resource("AWS::SQS::Queue")
AnInstance = EC2Instance(
ImageId = PreciseAMIs.find(AWS.Region, "ebs"),
InstanceType = stack.env['instance_type'],
KeyName = keyName,
iscm = [
CfnInit(
stack_user_key=stack_user.CloudFormationStackUserKey,
configs=[
{
"commands" : {
"apt-update" : {
"command" : "apt-get update"
}
}
},
{
"packages": {
"apt": { "git": [] }
},
"files": {
"/etc/myqueues.json": {
"content" : {
"Queue1": SQSQueue1['QueueName'],
"Queue2": SQSQueue2['QueueName']
},
"mode": "000644",
"owner": "root",
"group": "root"
}
}
},
{
"users": {
"awsuser": {
"uid": "1001",
"homeDir" : "/home/user"
}
}
}
],
),
Shell(
shell_vars = {
'APP_AWS_KEY_ID': stack_user.CloudFormationStackUserKey,
'APP_AWS_SECRET_KEY': stack_user.CloudFormationStackUserKey["SecretAccessKey"]
},
scripts = [
Shell.runScript("scripts/sample-script.sh"),
]
)
]
)
| mit | 5,224,452,919,540,645,000 | 28.371795 | 94 | 0.405063 | false |
SymbiFlow/prjxray | utils/mergedb.py | 1 | 3632 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017-2020 The Project X-Ray Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
import os, sys, re
from prjxray import util
TAG_PART_RE = re.compile(r"^[a-zA-Z][0-9a-zA-Z_]*(\[[0-9]+\])?$")
def check_tag_name(tag):
'''
Checks if the tag name given by the used conforms to the valid fasm
name rules.
>>> check_tag_name("CELL.feature19.ENABLED")
True
>>> check_tag_name("FEATURE")
True
>>> check_tag_name("TAG.")
False
>>> check_tag_name(".TAG")
False
>>> check_tag_name("CELL..FEATURE")
False
>>> check_tag_name("CELL.3ENABLE")
False
>>> check_tag_name("FEATURE.12.ON")
False
'''
for part in tag.split("."):
if not len(part) or TAG_PART_RE.match(part) is None:
return False
return True
def run(fn_ins, fn_out, strict=False, track_origin=False, verbose=False):
# tag to bits
entries = {}
# tag to (bits, line)
tags = dict()
# bits to (tag, line)
bitss = dict()
for fn_in in fn_ins:
for line, (tag, bits, mode, origin) in util.parse_db_lines(fn_in):
line = line.strip()
            assert mode is None or mode == "always", "strict: got ill defined line: %s" % (
                line, )
if not check_tag_name(tag):
assert not strict, "strict: Invalid tag name '{}'".format(tag)
if tag in tags:
orig_bits, orig_line, orig_origin = tags[tag]
if orig_bits != bits:
print(
"WARNING: got duplicate tag %s" % (tag, ),
file=sys.stderr)
print(" Orig line: %s" % orig_line, file=sys.stderr)
print(" New line : %s" % line, file=sys.stderr)
assert not strict, "strict: got duplicate tag"
origin = os.path.basename(os.getcwd())
if track_origin and orig_origin != origin:
origin = orig_origin + "," + origin
if bits in bitss:
orig_tag, orig_line = bitss[bits]
if orig_tag != tag:
print(
"WARNING: got duplicate bits %s" % (bits, ),
file=sys.stderr)
print(" Orig line: %s" % orig_line, file=sys.stderr)
print(" New line : %s" % line, file=sys.stderr)
assert not strict, "strict: got duplicate bits"
if track_origin and origin is None:
origin = os.path.basename(os.getcwd())
entries[tag] = (bits, origin)
tags[tag] = (bits, line, origin)
if bits != None:
bitss[bits] = (tag, line)
util.write_db_lines(fn_out, entries, track_origin)
def main():
import argparse
parser = argparse.ArgumentParser(description="Combine multiple .db files")
util.db_root_arg(parser)
parser.add_argument('--verbose', action='store_true', help='')
parser.add_argument('--track_origin', action='store_true', help='')
parser.add_argument('--out', help='')
parser.add_argument('ins', nargs='+', help='Last takes precedence')
args = parser.parse_args()
run(
args.ins,
args.out,
strict=int(os.getenv("MERGEDB_STRICT", "1")),
track_origin=args.track_origin,
verbose=args.verbose)
if __name__ == '__main__':
main()
| isc | 1,326,039,727,474,506,500 | 30.310345 | 95 | 0.535793 | false |
Project-Bonfire/Bonfire | Scripts/NoC_and_TB_gen/network_entity.py | 1 | 1875 | # Copyright (C) 2016 Siavoosh Payandeh Azad, Behrad Niazmand
def generate_entity(noc_file, network_dime_x, network_dime_y, vc):
"""
noc_file: string : path to the network file
network_dime_x: integer : No. of nodes along X axis
network_dime_y: integer : No. of nodes along Y axis
"""
noc_file.write("entity network_" + str(network_dime_x) +
"x" + str(network_dime_y) + " is\n")
noc_file.write(" generic (DATA_WIDTH: integer := 32);\n")
noc_file.write("port (reset: in std_logic; \n")
noc_file.write("\tclk: in std_logic; \n")
noc_file.write("\t--------------\n")
for i in range(network_dime_x * network_dime_y):
noc_file.write("\t--------------\n")
noc_file.write("\tRX_L_" + str(i) +
": in std_logic_vector (DATA_WIDTH-1 downto 0);\n")
noc_file.write("\tcredit_out_L_" + str(i) +
", valid_out_L_" + str(i) + ": out std_logic;\n")
noc_file.write("\tcredit_in_L_" + str(i) +
", valid_in_L_" + str(i) + ": in std_logic;\n")
if vc:
noc_file.write("\tcredit_out_vc_L_" + str(i) +
", valid_out_vc_L_" + str(i) + ": out std_logic;\n")
noc_file.write("\tcredit_in_vc_L_" + str(i) +
", valid_in_vc_L_" + str(i) + ": in std_logic;\n")
if i == network_dime_x * network_dime_y-1:
noc_file.write("\tTX_L_" + str(i) +
": out std_logic_vector (DATA_WIDTH-1 downto 0)\n")
else:
noc_file.write("\tTX_L_" + str(i) +
": out std_logic_vector (DATA_WIDTH-1 downto 0);\n")
noc_file.write("\n ); \n")
noc_file.write("end network_" + str(network_dime_x) +
"x" + str(network_dime_y) + "; \n")
| gpl-3.0 | -4,288,631,835,275,140,600 | 43.642857 | 80 | 0.482133 | false |
aerialhedgehog/VyPy | trunk/VyPy/data/scaling/Linear.py | 1 | 1454 |
# ----------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------
from ScalingFunction import ScalingFunction
# ----------------------------------------------------------------------
# Linear Scaling Function
# ----------------------------------------------------------------------
class Linear(ScalingFunction):
def __init__(self,scale,center=0.0):
""" o / scl ==> (o-center)/scale
o * scl ==> (o*scale)+center
"""
self.scale = scale
self.center = center
def set_scaling(self,other):
return (other-self.center)/self.scale
def unset_scaling(self,other):
return other*self.scale + self.center
def set_scaling_gradient(self,other):
return other/self.scale
def unset_scaling_gradient(self,other):
return other*self.scale
# ----------------------------------------------------------------------
# Module Tests
# ----------------------------------------------------------------------
if __name__ == '__main__':
import numpy as np
s = Linear(10.0,0.0)
a = 10.0
b = np.array([1,20,3.])
print a
print b
a = a / s
b = b / s
print a
print b
a = a * s
b = b * s
print a
print b | bsd-3-clause | 8,101,156,460,664,302,000 | 23.103448 | 72 | 0.343879 | false |
johnboxall/django_kiss | kiss/storage/base.py | 1 | 1786 | from django.utils.encoding import force_unicode, StrAndUnicode
from django.utils import simplejson
class Event(StrAndUnicode):
def __init__(self, event):
self.event = event
def _prepare(self):
self.event = simplejson.dumps(self.event)
def __unicode__(self):
return self.event
class BaseStorage(object):
def __init__(self, request, *args, **kwargs):
self.request = request
self._queued_events = []
self.used = False
self.added_new = False
super(BaseStorage, self).__init__(*args, **kwargs)
def __len__(self):
return len(self._loaded_events) + len(self._queued_events)
def __iter__(self):
self.used = True
if self._queued_events:
self._loaded_events.extend(self._queued_events)
self._queued_events = []
return iter(self._loaded_events)
@property
def _loaded_events(self):
if not hasattr(self, '_loaded_data'):
events, all_retrieved = self._get()
self._loaded_data = events or []
return self._loaded_data
def _prepare_events(self, events):
for event in events:
event._prepare()
def update(self, response):
self._prepare_events(self._queued_events)
if self.used:
return self._store(self._queued_events, response)
elif self.added_new:
events = self._loaded_events + self._queued_events
return self._store(events, response)
def add(self, event):
self.added_new = True
self._queued_events.append(Event(event))
def _get(self, *args, **kwargs):
raise NotImplementedError()
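    # A concrete storage backend (illustrative sketch, not part of this module)
    # implements _get and _store, e.g. against the Django session:
    #     class SessionStorage(BaseStorage):
    #         def _get(self):
    #             return self.request.session.get('kiss'), True
    #         def _store(self, events, response):
    #             self.request.session['kiss'] = events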
def _store(self, events, response, *args, **kwargs):
raise NotImplementedError() | mit | 4,487,836,395,715,369,000 | 28.295082 | 66 | 0.595745 | false |
SamProtas/PALiquor | scraper2.py | 1 | 4999 | from bs4 import BeautifulSoup
import sqlite3
import os
PROJECT_ROOT = os.path.dirname(os.path.realpath(__file__))
DATABASE1 = os.path.join(PROJECT_ROOT, 'dbs', 'licensees.db')
conn = sqlite3.connect(DATABASE1)
c = conn.cursor()
try:
    c.execute('DROP TABLE licensees')
    c.execute('DROP TABLE cases')
except sqlite3.OperationalError:
    pass  # tables do not exist yet on the first run
finally:
c.execute('''CREATE TABLE licensees (lid INT PRIMARY KEY NOT NULL UNIQUE,
name TEXT,
address TEXT,
zipcode INT,
trade_name TEXT,
license_no TEXT,
license_type TEXT,
license_type_title TEXT,
license_type_code TEXT,
status TEXT,
tavern_gaming_status TEXT,
original_owner TEXT,
current_owner TEXT,
latitude REAL,
longitude REAL)''')
c.execute('PRAGMA foreign_keys = ON')
c.execute('''CREATE TABLE cases (case_id INTEGER PRIMARY KEY,
lid INT NOT NULL,
penalty TEXT,
penalty_text TEXT,
fine INT,
initiation_date TEXT,
FOREIGN KEY(lid) REFERENCES licensees(lid))''')
fileroot = 'saved_html/licensee_source_'
filetail = '.html'
for x in range(114):
filenum = str(x+1).zfill(4)
filename = fileroot + filenum + filetail
print filename
page = open(filename)
soup = BeautifulSoup(page,'html.parser')
page.close()
main_content = soup.find(id="MainContent_UpdatePanel1").find("tbody")
rows = main_content.find_all('tr')
print 'Number of rows:'
print len(rows)
headercount = 0
locator = None
rowcount = 0
for row in rows:
rowcount+=1
attributes = row.attrs
if 'style' in attributes: #Identify headers of licensee
locatorrow = rowcount
if attributes['style'] == 'background-color:#800000': #Identify licensee
general_info={}
cases = []
casenum = 0
headercount+=1
locator = row.find('font').text
else: #Identifies sub-header of licensee
locator = row.find(class_='heading').text
if (locator == 'GENERAL LICENSEE INFORMATION' or locator == 'OWNER ISSUE DATES') and rowcount != locatorrow:
cells = row.find_all('td')
for cell in cells:
heading_title = cell.find(class_="fieldHeading")
the_data = cell.find(class_="data")
if heading_title and the_data:
if heading_title.text[:-1] == 'Address':
contents = the_data.contents
contents = [x for x in contents if x.string != None]
general_info[heading_title.text[:-1]] = " ".join(contents)
general_info['Zipcode'] = int(contents[-1][0:5])
elif heading_title.text[:-1] == 'License Type':
contents = the_data.text.split('-')
license_type_title = "-".join(contents[0:-1]).strip()
license_type_code = contents[-1].strip()[1:-1].strip()
general_info['License Type Title'] = license_type_title
general_info['License Type Code'] = license_type_code
general_info[heading_title.text[:-1]] = the_data.text
else:
general_info[heading_title.text[:-1]] = the_data.text
if locator == 'CITATION CASE INFORMATION(Click on the Case Number(s) for Citation detail)' and rowcount != locatorrow:
cells = row.find_all('td')
for cell in cells:
heading_title = cell.find(class_="fieldHeading").text[:-1]
if heading_title == 'Penalty':
penalty = cell.find(class_="data").text
penalty_split = penalty.split('-')
penalty_text = " ".join(penalty_split[0:-1]).strip()
if len(penalty_split) > 1:
fine = int(penalty_split[-1].strip()[2:-4])
else:
fine = None
if heading_title == 'Initiation Date':
initiation_date = cell.find(class_="data").text
cases.append({'penalty':penalty, 'penalty_text':penalty_text, 'fine':fine, 'initiation_date':initiation_date})
penalty = None
initiation_date = None
if locator == 'APPLICATION CASE INFORMATION' and rowcount == locatorrow:
c.execute('''INSERT INTO licensees (lid, name, address, zipcode, trade_name, license_no,
license_type, license_type_title, license_type_code, status, tavern_gaming_status,
original_owner, current_owner)
VALUES
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''',
[general_info['LID'], general_info['Name'],
general_info['Address'], general_info['Zipcode'],
general_info['Trade Name'], general_info['License No'],
general_info['License Type'],
general_info['License Type Title'],
general_info['License Type Code'],
general_info['Status'],
general_info['Tavern Gaming Status'],
general_info['Original Owner'],
general_info['Current Owner']])
if cases:
for case in cases:
c.execute('''INSERT INTO cases (lid, penalty, penalty_text, fine, initiation_date) VALUES (?, ?, ?, ?, ?)''',
[general_info['LID'], case['penalty'], case['penalty_text'], case['fine'],case['initiation_date']])
print 'HeaderCount'
print headercount
conn.commit()
c.close()
| gpl-2.0 | -5,075,766,108,287,510,000 | 32.10596 | 120 | 0.618324 | false |
masayang/py_redis | tests/test_integration.py | 1 | 5706 | import unittest
from mock import patch
from redis import StrictRedis
from ..samples import Tweet, TwitterUser, ScoreBoard
from ..pydis.redis_settings import redis_config
class TestIntegration(unittest.TestCase):
def setUp(self):
self.sr = StrictRedis(
host=redis_config['host'],
port=redis_config['port'],
db=redis_config['db'],
password=redis_config['password']
)
def tearDown(self):
self.cleanup('TwitterUser')
self.cleanup('Tweet')
self.cleanup('RedisDict')
self.cleanup('RedisList')
self.cleanup('@masayang')
self.cleanup('test_board')
def cleanup(self, prefix):
for key in self.sr.keys(prefix + '*'):
self.sr.delete(key)
def test_user_creation(self):
masayang = TwitterUser(
twitter='@masayang'
)
guest = TwitterUser(
twitter='@guest'
)
masayang['friends'].add(guest)
self.assertEquals(masayang.id, 'TwitterUser:@masayang')
self.assertEquals(masayang['friends'].members(), set(['TwitterUser:@guest']))
masayang['friends'].add(guest)
self.assertEquals(masayang['friends'].members(), set(['TwitterUser:@guest']))
raw_masayang = self.sr.hgetall("TwitterUser:@masayang")
self.assertEquals(raw_masayang, {b'twitter': b'@masayang'})
raw_masayang_friends = self.sr.smembers("TwitterUser:@masayang:friends")
self.assertEquals(raw_masayang_friends, set([b'TwitterUser:@guest']))
masayang['score_board'].add('2017-01', 100)
self.assertEqual(len(masayang['score_board']), 1)
self.assertEqual(masayang['score_board'].rank('2017-01'), 0)
@patch('os.urandom')
def test_user_and_tweet_creation(self, urandom):
urandom.return_value = b'\xd8X\xfa@\x97\x90\x00dr'
masayang = TwitterUser(
twitter='@masayang'
)
tweet = Tweet(
tweet='test message'
)
masayang['tweets'].append(tweet)
self.assertEquals(masayang.id, 'TwitterUser:@masayang')
result = []
for t in masayang['tweets']:
result.append(t.id)
self.assertEqual(result, [u'Tweet:2Fj6QJeQAGRy'])
class TestScoreBoard(unittest.TestCase):
def setUp(self):
self.sr = StrictRedis(
host=redis_config['host'],
port=redis_config['port'],
db=redis_config['db'],
password=redis_config['password']
)
self.board = ScoreBoard(id="test_board", init_items=[
{'2017-01': 100},
{'2017-02': 12},
{'2017-03': 222}
])
def tearDown(self):
self.cleanup('test_board')
def cleanup(self, prefix):
for key in self.sr.keys(prefix + '*'):
self.sr.delete(key)
def test_len(self):
self.assertEquals(len(self.board), 3)
def test_count(self):
self.assertEquals(self.board.count(0, 1000), 3)
self.assertEquals(self.board.count(100, 1000), 2)
self.assertEquals(self.board.count(200, 1000), 1)
def test_rank(self):
self.assertEquals(self.board.rank('2017-01'), 1)
self.assertEquals(self.board.rank('2017-02'), 0)
self.assertEquals(self.board.rank('2017-03'), 2)
def test_revrank(self):
self.assertEquals(self.board.revrank('2017-01'), 1)
self.assertEquals(self.board.revrank('2017-02'), 2)
self.assertEquals(self.board.revrank('2017-03'), 0)
def test_score(self):
self.assertEquals(self.board.score('2017-01'), 100)
self.assertEquals(self.board.score('2017-02'), 12)
self.assertEquals(self.board.score('2017-03'), 222)
def test_rem(self):
self.board.rem('2017-01')
self.assertEquals(len(self.board), 2)
self.assertIsNone(self.board.rank('2017-01'))
def test_remrangebyrank(self):
self.board.remrangebyrank(0, 1)
self.assertEqual(len(self.board), 1)
self.assertIsNone(self.board.score('2017-01'))
self.assertIsNone(self.board.score('2017-02'))
self.assertEquals(self.board.score('2017-03'), 222)
def test_remrangebyscore(self):
self.board.remrangebyscore(0, 99)
self.assertEqual(len(self.board), 2)
self.assertEqual(self.board.score('2017-01'), 100)
self.assertIsNone(self.board.score('2017-02'))
self.assertEquals(self.board.score('2017-03'), 222)
def test_range(self):
range = self.board.range(0, 1)
self.assertEquals(range, [('2017-02', 12), ('2017-01', 100)])
range = self.board.range(0, 1, desc=True)
self.assertEquals(range, [('2017-03', 222), ('2017-01', 100)])
def test_revrange(self):
range = self.board.revrange(0, 1)
self.assertEquals(range, [('2017-03', 222), ('2017-01', 100)])
def test_rangebyscore(self):
range = self.board.rangebyscore(0, 99)
self.assertEquals(range, [('2017-02', 12)])
range = self.board.rangebyscore(0, 100)
self.assertEquals(range, [('2017-02', 12), ('2017-01', 100)])
def test_revrangebyscore(self):
range = self.board.revrangebyscore(99, 0)
self.assertEquals(range, [('2017-02', 12)])
range = self.board.revrangebyscore(100, 0)
self.assertEquals(range, [('2017-01', 100), ('2017-02', 12)])
def test_incrby(self):
self.board.incrby('2017-01', 1000)
self.assertEqual(self.board.score('2017-01'), 1100)
| bsd-3-clause | -77,184,398,952,983,840 | 33.006135 | 85 | 0.583947 | false |
google/flax | examples/linen_design_test/linear_regression.py | 1 | 1341 | # Copyright 2021 The Flax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import jax
from jax import numpy as jnp, random, lax, jit
from flax import linen as nn
from dense import Dense
X = jnp.ones((1, 10))
Y = jnp.ones((5,))
model = Dense(features=5)
@jit
def predict(params):
return model.apply({'params': params}, X)
@jit
def loss_fn(params):
return jnp.mean(jnp.abs(Y - predict(params)))
@jit
def init_params(rng):
mlp_variables = model.init({'params': rng}, X)
return mlp_variables['params']
# Get initial parameters
params = init_params(jax.random.PRNGKey(42))
print("initial params", params)
# Run SGD.
for i in range(50):
loss, grad = jax.value_and_grad(loss_fn)(params)
print(i, "loss = ", loss, "Yhat = ", predict(params))
lr = 0.03
params = jax.tree_multimap(lambda x, d: x - lr * d, params, grad)
| apache-2.0 | 2,824,153,034,652,094,000 | 26.9375 | 74 | 0.710664 | false |
maxdl/Synapse.py | synapse/point.py | 1 | 13079 | import sys
from . import geometry
from .err_warn import ProfileError, profile_warning, profile_message
def lazy_property(fn):
"""Decorator that makes a property lazily evaluated.
From https://stevenloria.com/lazy-properties/.
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazy_property(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
return _lazy_property
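# Minimal usage sketch (hypothetical class, not part of this module): the
# decorated function body runs once, on first attribute access, and the result
# is cached on the instance under the '_lazy_'-prefixed attribute name.
#
#     class Circle:
#         def __init__(self, r):
#             self.r = r
#         @lazy_property
#         def area(self):
#             return 3.14159 * self.r ** 2   # computed once, then cached
#
#     c = Circle(2.0)
#     c.area   # triggers the computation and stores it as c._lazy_area
#     c.area   # served from the cached attribute, fn is not called again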
class PointList(list):
def __init__(self, pointli, ptype, profile):
super().__init__()
try:
self.extend([Point(p.x, p.y, ptype, profile) for p in pointli])
except (AttributeError, IndexError):
raise TypeError('not a list of Point elements')
class Point(geometry.Point):
def __init__(self, x=None, y=None, ptype="", profile=None):
if isinstance(x, geometry.Point):
geometry.Point.__init__(self, x.x, x.y)
else:
geometry.Point.__init__(self, x, y)
self.profile = profile
if self.profile is not None:
self.opt = self.profile.opt
else:
self.opt = None
self.discard = False
self.ptype = ptype
self.cluster = None
self.is_within_psd = None
self.lateral_dist_psd = None
self.norm_lateral_dist_psd = None
self.nearest_psd = None
self.is_associated_with_psd = None
self.associated_psd = None
self.nearest_neighbour_dist = None
self.nearest_neighbour_point = geometry.Point()
self.nearest_lateral_neighbour_dist = None
self.nearest_lateral_neighbour_point = geometry.Point()
self.nearest_neighbour = geometry.Point()
def determine_stuff(self):
def mark_to_discard(msg):
if self.ptype == 'particle': # don't warn if random points
profile_message("Discarding particle at %s: %s" % (self, msg))
self.discard = True
self.profile.n_discarded[self.ptype] += 1
return
if self.dist_to_posel is None:
mark_to_discard("Could not project on postsynaptic element")
return
if self.dist_to_prsel is None:
mark_to_discard("Could not project on presynaptic element")
return
if not self.is_within_shell:
mark_to_discard("Located outside the shell")
return
if self.is_within_hole:
mark_to_discard("Located within a profile hole")
return
__ = self.lateral_location
__ = self.strict_lateral_location
__ = self.axodendritic_location
__ = self.is_within_postsynaptic_membrane_shell
__ = self.is_postsynaptic_membrane_associated
__ = self.is_presynaptic_membrane_associated
self.get_lateral_dist_psd()
self.get_psd_association()
@lazy_property
def dist_to_posel(self):
"""Return perpendicular distance to postsynaptic element membrane"""
return self.perpend_dist(self.profile.posel, posloc=self.profile.posloc)
@lazy_property
def dist_to_prsel(self):
"""Return perpendicular distance to postsynaptic element membrane"""
if len(self.profile.prsel) > 0:
return self.perpend_dist(self.profile.prsel, posloc=self.profile.posloc)
else:
return None
@lazy_property
def is_within_hole(self):
""" Determine whether self is inside a profile hole
"""
for h in self.profile.holeli:
if self.is_within_polygon(h):
return True
return False
@lazy_property
def is_within_shell(self):
"""Determine whether self is within shell"""
return (self.dist_to_posel is not None and
abs(self.dist_to_posel) <= geometry.to_pixel_units(self.opt.shell_width,
self.profile.pixelwidth))
@lazy_property
def is_within_postsynaptic_membrane_shell(self):
return (self.dist_to_posel is not None
and abs(self.dist_to_posel) <=
geometry.to_pixel_units(self.opt.shell_width,
self.profile.pixelwidth))
@lazy_property
def lateral_location(self):
""" Determines the lateral location, defined as the location
along the mediolateral synaptic axis of the projection of
the point on the postsynaptic membrane.
Assumes that PSDs and the postsynaptic membrane have been determined.
"""
# A point is classified as synaptic if its projection on the
# postsynaptic membrane is within the spatial resolution limit
# of the postsynaptic membrane
for psd in self.profile.psdli:
if (self.lateral_dist_syn(self.profile.posel, psd.posm) -
geometry.to_pixel_units(self.opt.spatial_resolution,
self.profile.pixelwidth) <= psd.posm.length() / 2):
return "synaptic"
# If not synaptic but still within the extent of the synaptic
# membrane
if (self.lateral_dist_syn(self.profile.posel, self.profile.total_posm) <
self.profile.total_posm.length() / 2):
return "within perforation"
# Otherwise it may be perisynaptic, defined here as being
# outside the synapse but within half a diameter of the nearest
# PSD... or perhaps it should be a fixed distance, e g 200 nm?
# I don't want to determine which PSD is closest and which edge
# of that PSD faces the extrasynapse, so I just determine the
# distance to the nearest edge; if that is an internal edge,
# the point will have been identified as within perforation
# above.
for psd in self.profile.psdli:
if (self.lateral_dist_syn(self.profile.posel, psd.posm) -
geometry.to_pixel_units(self.opt.spatial_resolution,
self.profile.pixelwidth) <= psd.posm.length()):
return "perisynaptic"
# If none of the above
return "extrasynaptic"
@lazy_property
def strict_lateral_location(self):
""" Determines the lateral location, defined as the location
along the mediolateral synaptic axis of the projection of
the particle on the postsynaptic membrane. Does not take
spatial resolution into account, such that a particle is
considered synaptic only when strictly projectable on the
postsynaptic membrane.
"""
# A point is classified as synaptic if its projection on the
# postsynaptic membrane is within the postsynaptic membrane
for psd in self.profile.psdli:
if self.lateral_dist_syn(self.profile.posel, psd.posm) <= psd.posm.length() / 2.0:
return "synaptic"
# If not synaptic but still within the extent of the synaptic
# membrane
if (self.lateral_dist_syn(self.profile.posel, self.profile.total_posm) <
self.profile.total_posm.length() / 2.0):
return "within perforation"
# Otherwise it may be perisynaptic, defined here as being
# outside the synapse but within half a diameter of the nearest
# PSD... or perhaps it should be a fixed distance, e g 200 nm?
# I don't want to determine which PSD is closest and which edge
# of that PSD faces the extrasynapse, so I just determine the
# distance to the nearest edge; if that is an internal edge,
# the point will have been identified as within perforation
# above.
for psd in self.profile.psdli:
if self.lateral_dist_syn(self.profile.posel, psd.posm) <= psd.posm.length():
return "perisynaptic"
# If none of the above
return "extrasynaptic"
@lazy_property
def axodendritic_location(self):
""" Determines the particle's location along the axodendritic axis.
"""
if self.dist_to_posel >= 0:
return "postsynaptic"
elif self.dist_to_prsel is None: # if there is no presynaptic membrane,
return "not postsynaptic" # that's about all we can say
elif self.dist_to_prsel <= 0:
return "presynaptic"
else:
return "neither pre- or postsynaptic"
def get_lateral_dist_psd(self):
mindist = sys.maxsize
nearest_psd = None
for psd in self.profile.psdli:
d = self.lateral_dist_syn(self.profile.posel, psd.posm) - psd.posm.length() / 2
if d < mindist:
mindist = d
nearest_psd = psd
if not nearest_psd:
raise ProfileError(self.profile,
"could not determine lateral distance to a PSD of particle at %s"
% self)
mindist = self.lateral_dist_syn(self.profile.posel, nearest_psd.posm)
normdist = mindist / (nearest_psd.posm.length() / 2)
self.lateral_dist_psd = mindist
self.norm_lateral_dist_psd = normdist
self.nearest_psd = nearest_psd
def get_psd_association(self):
if self.is_within_psd is not None:
return
is_within_psd = False
is_associated_with_psd = False
associated_psd = None
mindist = sys.maxsize
for psd in self.profile.psdli:
if self.is_within_polygon(psd.psdposm):
is_within_psd = True
is_associated_with_psd = True
associated_psd = psd
break
dist = self.perpend_dist_closed_path(psd.psdposm, dont_care_if_on_or_off_seg=True)
if dist <= geometry.to_pixel_units(self.opt.spatial_resolution,
self.profile.pixelwidth):
is_associated_with_psd = True
if dist < mindist:
associated_psd = psd
mindist = dist
self.is_within_psd = is_within_psd
self.is_associated_with_psd = is_associated_with_psd
self.associated_psd = associated_psd
@lazy_property
def is_postsynaptic_membrane_associated(self):
if (self.dist_to_posel is not None and
abs(self.dist_to_posel) <= geometry.to_pixel_units(self.opt.spatial_resolution,
self.profile.pixelwidth)):
return True
else:
return False
@lazy_property
def is_presynaptic_membrane_associated(self):
if (self.dist_to_prsel is not None and
abs(self.dist_to_prsel) <= geometry.to_pixel_units(self.opt.spatial_resolution,
self.profile.pixelwidth)):
return True
else:
return False
def get_nearest_neighbour(self, pointli):
# Assumes that only valid (projectable, within shell etc) points
# are in pointli
mindist = float(sys.maxsize)
for p in pointli:
if p is not self:
d = self.dist(p)
if d < mindist:
mindist = d
if not mindist < float(sys.maxsize):
return None
else:
nearest_neighbour_dist = mindist
return nearest_neighbour_dist
def get_nearest_lateral_neighbour(self, pointli):
# Assumes that only valid (projectable, within shell etc) points
# are in pointli
mindist = float(sys.maxsize)
for p in pointli:
if p is not self:
d = self.lateral_dist_to_point(p, self.profile.posel)
if d < mindist:
mindist = d
if not mindist < float(sys.maxsize):
return None
else:
nearest_lateral_neighbour_dist = mindist
return nearest_lateral_neighbour_dist
def lateral_dist_to_point(self, p2, sel):
""" Determine lateral distance to a point p2 along sel.
Overrides function in geometry.Point, which only works
with a closed path.
"""
path = geometry.SegmentedPath()
p2_project, p2_seg_project = p2.project_on_path_or_endnode(sel)
project, seg_project = self.project_on_path_or_endnode(sel)
path.extend([project, p2_project])
if p2_seg_project < seg_project:
path.reverse()
for n in range(min(p2_seg_project, seg_project) + 1,
max(p2_seg_project, seg_project)):
path.insert(len(path) - 1, sel[n])
return path.length()
def lateral_dist_syn(self, sel, sm):
""" Determine lateral distance to center of (post- or pre-)
synaptic membrane.
"""
return self.lateral_dist_to_point(sm.center_point(), sel)
| mit | 5,774,043,725,307,891,000 | 40.520635 | 96 | 0.58399 | false |
702nADOS/sumo | tools/build/status.py | 1 | 2776 | #!/usr/bin/env python
"""
@file status.py
@author Michael Behrisch
@author Laura Bieker
@date 2007-03-13
@version $Id: status.py 22608 2017-01-17 06:28:54Z behrisch $
SUMO, Simulation of Urban MObility; see http://sumo.dlr.de/
Copyright (C) 2008-2017 DLR (http://www.dlr.de/) and contributors
This file is part of SUMO.
SUMO is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
"""
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
import smtplib
import re
from os.path import basename, join, commonprefix
from datetime import datetime
def findErrors(line, warnings, errors, failed):
if re.search("[Ww]arn[ui]ng[: ]", line) or "[WARNING]" in line:
warnings += 1
if re.search("[Ee]rror[: ]", line) or re.search("[Ff]ehler:", line) or "[ERROR]" in line:
errors += 1
failed += line
return warnings, errors, failed
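# Example (hypothetical log line): starting from warnings=0, errors=0, failed="",
#   findErrors("foo.c:12: Error: bad cast\n", 0, 0, "")
# returns (0, 1, "foo.c:12: Error: bad cast\n"): the line matches the error
# pattern, so the error count is bumped and the line is appended to the report.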
def printStatus(makeLog, makeAllLog, smtpServer="localhost", out=sys.stdout, toAddr="[email protected]"):
failed = ""
build = commonprefix([basename(makeLog), basename(makeAllLog)])
print(build, end=' ', file=out)
print(datetime.now().ctime(), file=out)
print("--", file=out)
print(basename(makeLog), file=out)
warnings = 0
errors = 0
svnLocked = False
for l in open(makeLog):
if ("svn: Working copy" in l and "locked" in l) or "svn: Failed" in l:
svnLocked = True
failed += l
warnings, errors, failed = findErrors(l, warnings, errors, failed)
if svnLocked:
failed += "svn up failed\n\n"
print(warnings, "warnings", file=out)
if errors:
print(errors, "errors", file=out)
failed += "make failed\n\n"
print("--\nbatchreport\n--", file=out)
print(basename(makeAllLog), file=out)
warnings = 0
errors = 0
for l in open(makeAllLog):
warnings, errors, failed = findErrors(l, warnings, errors, failed)
print(warnings, "warnings", file=out)
if errors:
print(errors, "errors", file=out)
failed += "make debug failed\n\n"
print("--", file=out)
if failed:
fromAddr = "[email protected]"
message = """From: "%s" <%s>
To: %s
Subject: Error occurred while building
%s""" % (build, fromAddr, toAddr, failed)
try:
server = smtplib.SMTP(smtpServer)
server.sendmail(fromAddr, toAddr, message)
server.quit()
        except Exception as exc:
            print("Could not send mail:", exc)
if __name__ == "__main__":
printStatus(sys.argv[1], sys.argv[2], sys.argv[3], sys.stdout, sys.argv[4])
| gpl-3.0 | -8,624,987,886,662,435,000 | 31.27907 | 105 | 0.636167 | false |
jaffe59/vp-cnn | cnn_classifier/chatscript_file_generator.py | 1 | 3404 | import math
import torch
import scipy.stats as stats
import ast
# This script generates the per-turn results used to combine the classifier with ChatScript.
def calc_indices(args):
#calc fold indices
indices = []
numfolds = args.xfolds
len_dataset = 4330
fold_size = math.ceil(len_dataset/numfolds)
for fold in range(numfolds):
startidx = fold*fold_size
endidx = startidx+fold_size if startidx+fold_size < len_dataset else len_dataset
indices.append((startidx, endidx))
return indices
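# Illustration: with numfolds = 10 over the 4330-utterance dataset,
# fold_size = ceil(4330 / 10) = 433, so indices comes out as
# [(0, 433), (433, 866), ..., (3897, 4330)] -- half-open (start, end) pairs.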
def read_in_labels(labels_file):
labels = []
with open(labels_file) as l:
for line in l:
line = line.strip().split("\t")
labels.append('_'.join(line[1].split(' ')))
return labels
def read_in_dialogues(dialogue_file):
dialogue_indices = []
dialogue_index = -1
turn_index = -1
if dialogue_file.endswith('indices'):
with open(dialogue_file) as l:
for line in l:
dialogue_indices.append(ast.literal_eval(line.strip()))
else:
with open(dialogue_file) as l:
for line in l:
if line.startswith('#S'):
dialogue_index += 1
turn_index = 0
else:
dialogue_indices.append((dialogue_index, turn_index))
turn_index += 1
return dialogue_indices
def read_in_chat(chat_file, dialogues):
chats = {}
with open(chat_file) as c:
for line in c:
if line.startswith('dia'):
continue
else:
line = line.strip().split(',')
this_index = (int(line[0]), int(line[1]))
# print(dialogues)
chats[this_index] = (line[-2], line[-1])
return chats
def print_test_features(tensor, confidence, ave_probs, ave_logprobs, target, dialogue_indices, labels, indices, fold_id, chats, feature_file):
# dial_id, turn_id, predicted_label, correct_bool, prob, entropy, confidence, chat_prob, chat_rank
tensor = torch.exp(tensor)
probs, predicted = torch.max(tensor, 1)
predicted = predicted.view(target.size()).data
probs = probs.view(target.size()).data
corrects = predicted == target.data
confidence = confidence.squeeze().data.cpu().numpy() / 2
ave_logprobs = ave_logprobs.squeeze().data.cpu().numpy() / 2
ave_probs = ave_probs.squeeze().data.cpu().numpy() / 2
tensor = tensor.squeeze().data.cpu().numpy()
start_id, end_id = indices[fold_id]
for ind, val in enumerate(corrects):
item = []
item_id = start_id+ind
dialogue_index, turn_index = dialogue_indices[item_id]
item.append(dialogue_index)
item.append(turn_index)
item.append(labels[predicted[ind]])
item.append(str(bool(val)))
item.append(probs[ind])
if probs[ind] < 0.0:
print(tensor[ind])
print(probs[ind], predicted[ind])
raise Exception
item.append(stats.entropy(tensor[ind]))
item.append(confidence[ind, predicted[ind]])
item.append(ave_probs[ind, predicted[ind]])
item.append(ave_logprobs[ind, predicted[ind]])
item.append(chats[(dialogue_index, turn_index)][0])
item.append(chats[(dialogue_index, turn_index)][1])
print(','.join([str(x) for x in item]), file=feature_file)
| apache-2.0 | 8,866,380,884,439,430,000 | 34.458333 | 142 | 0.595476 | false |
ValyrianTech/BitcoinSpellbook-v0.3 | spellbookserver.py | 1 | 25116 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
import sys
import time
import traceback
from datetime import datetime
from functools import wraps
from logging.handlers import RotatingFileHandler
from bottle import Bottle, request, response, static_file
from authentication import initialize_api_keys_file
from data.data import get_explorers, get_explorer_config, save_explorer, delete_explorer
from data.data import latest_block, block_by_height, block_by_hash, prime_input_address, transaction
from data.data import transactions, balance, utxos
from decorators import authentication_required, use_explorer, output_json
from helpers.actionhelpers import get_actions, get_action_config, save_action, delete_action, run_action, get_reveal
from helpers.configurationhelpers import get_host, get_port, get_notification_email, get_mail_on_exception
from helpers.hotwallethelpers import get_hot_wallet
from helpers.loghelpers import LOG, REQUESTS_LOG, get_logs
from helpers.triggerhelpers import get_triggers, get_trigger_config, save_trigger, delete_trigger, activate_trigger, \
check_triggers, verify_signed_message, http_get_request, http_post_request, http_delete_request, sign_message
from helpers.mailhelpers import sendmail
from inputs.inputs import get_sil, get_profile, get_sul
from linker.linker import get_lal, get_lbl, get_lrl, get_lsl
from randomaddress.randomaddress import random_address_from_sil, random_address_from_lbl, random_address_from_lrl, \
random_address_from_lsl
from helpers.qrhelpers import generate_qr
# Make sure the current working directory is correct
PROGRAM_DIR = os.path.abspath(os.path.dirname(__file__))
os.chdir(PROGRAM_DIR)
class SpellbookRESTAPI(Bottle):
def __init__(self):
super(SpellbookRESTAPI, self).__init__()
# Initialize variables
self.host = get_host()
self.port = get_port()
# Log the requests to the REST API in a separate file by installing a custom LoggingPlugin
self.install(self.log_to_logger)
# Make sure that an api_keys.json file is present, the first time the server is started
# a new random api key and secret pair will be generated
if not os.path.isfile('json/private/api_keys.json'):
LOG.info('Generating new API keys')
initialize_api_keys_file()
LOG.info('Starting Bitcoin Spellbook')
try:
get_hot_wallet()
except Exception as ex:
LOG.error('Unable to decrypt hot wallet: %s' % ex)
sys.exit(1)
LOG.info('To make the server run in the background: use Control-Z, then use command: bg %1')
# Initialize the routes for the REST API
self.route('/', method='GET', callback=self.index) # on linux this gets requested every minute or so, but not on windows
self.route('/favicon.ico', method='GET', callback=self.get_favicon)
# Route for ping, to test if server is online
self.route('/spellbook/ping', method='GET', callback=self.ping)
# Routes for managing blockexplorers
self.route('/spellbook/explorers', method='GET', callback=self.get_explorers)
self.route('/spellbook/explorers/<explorer_id:re:[a-zA-Z0-9_\-.]+>', method='POST', callback=self.save_explorer)
self.route('/spellbook/explorers/<explorer_id:re:[a-zA-Z0-9_\-.]+>', method='GET', callback=self.get_explorer_config)
self.route('/spellbook/explorers/<explorer_id:re:[a-zA-Z0-9_\-.]+>', method='DELETE', callback=self.delete_explorer)
# Routes for retrieving data from the blockchain
self.route('/spellbook/blocks/latest', method='GET', callback=self.get_latest_block)
self.route('/spellbook/blocks/<height:int>', method='GET', callback=self.get_block_by_height)
self.route('/spellbook/blocks/<block_hash:re:[a-f0-9]+>', method='GET', callback=self.get_block_by_hash)
self.route('/spellbook/transactions/<txid:re:[a-f0-9]+>/prime_input', method='GET', callback=self.get_prime_input_address)
self.route('/spellbook/transactions/<txid:re:[a-f0-9]+>', method='GET', callback=self.get_transaction)
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/transactions', method='GET', callback=self.get_transactions)
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/balance', method='GET', callback=self.get_balance)
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/utxos', method='GET', callback=self.get_utxos)
# Routes for Simplified Inputs List (SIL)
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/SIL', method='GET', callback=self.get_sil)
# Routes for Profile
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/profile', method='GET', callback=self.get_profile)
# Routes for Simplified UTXO List (SUL)
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/SUL', method='GET', callback=self.get_sul)
# Routes for Linked Lists
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/LAL', method='GET', callback=self.get_lal)
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/LBL', method='GET', callback=self.get_lbl)
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/LRL', method='GET', callback=self.get_lrl)
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/LSL', method='GET', callback=self.get_lsl)
# Routes for Random Address
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/random/SIL', method='GET', callback=self.get_random_address_from_sil)
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/random/LBL', method='GET', callback=self.get_random_address_from_lbl)
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/random/LRL', method='GET', callback=self.get_random_address_from_lrl)
self.route('/spellbook/addresses/<address:re:[a-zA-Z1-9]+>/random/LSL', method='GET', callback=self.get_random_address_from_lsl)
# Routes for Triggers
self.route('/spellbook/triggers', method='GET', callback=self.get_triggers)
self.route('/spellbook/triggers/<trigger_id:re:[a-zA-Z0-9_\-.]+>', method='GET', callback=self.get_trigger)
self.route('/spellbook/triggers/<trigger_id:re:[a-zA-Z0-9_\-.]+>', method='POST', callback=self.save_trigger)
self.route('/spellbook/triggers/<trigger_id:re:[a-zA-Z0-9_\-.]+>', method='DELETE', callback=self.delete_trigger)
self.route('/spellbook/triggers/<trigger_id:re:[a-zA-Z0-9_\-.]+>/activate', method='GET', callback=self.activate_trigger)
self.route('/spellbook/triggers/<trigger_id:re:[a-zA-Z0-9_\-.]+>/message', method='POST', callback=self.verify_signed_message)
self.route('/spellbook/triggers/<trigger_id:re:[a-zA-Z0-9_\-.]+>/get', method='GET', callback=self.http_get_request)
self.route('/spellbook/triggers/<trigger_id:re:[a-zA-Z0-9_\-.]+>/post', method='POST', callback=self.http_post_request)
self.route('/spellbook/triggers/<trigger_id:re:[a-zA-Z0-9_\-.]+>/delete', method='DELETE', callback=self.http_delete_request)
self.route('/spellbook/triggers/<trigger_id:re:[a-zA-Z0-9_\-.]+>/check', method='GET', callback=self.check_trigger)
self.route('/spellbook/check_triggers', method='GET', callback=self.check_all_triggers)
# Additional routes for Rest API endpoints
self.route('/api/<trigger_id:re:[a-zA-Z0-9_\-.]+>', method='GET', callback=self.http_get_request)
self.route('/api/<trigger_id:re:[a-zA-Z0-9_\-.]+>', method='POST', callback=self.http_post_request)
self.route('/api/<trigger_id:re:[a-zA-Z0-9_\-.]+>', method='DELETE', callback=self.http_delete_request)
self.route('/html/<trigger_id:re:[a-zA-Z0-9_\-.]+>', method='GET', callback=self.html_request)
self.route('/api/<trigger_id:re:[a-zA-Z0-9_\-.]+>/message', method='POST', callback=self.verify_signed_message)
self.route('/api/sign_message', method='POST', callback=self.sign_message)
# Routes for QR image generation
self.route('/api/qr', method='GET', callback=self.qr)
# Routes for Actions
self.route('/spellbook/actions', method='GET', callback=self.get_actions)
self.route('/spellbook/actions/<action_id:re:[a-zA-Z0-9_\-.]+>', method='GET', callback=self.get_action)
self.route('/spellbook/actions/<action_id:re:[a-zA-Z0-9_\-.]+>', method='POST', callback=self.save_action)
self.route('/spellbook/actions/<action_id:re:[a-zA-Z0-9_\-.]+>', method='DELETE', callback=self.delete_action)
self.route('/spellbook/actions/<action_id:re:[a-zA-Z0-9_\-.]+>/run', method='GET', callback=self.run_action)
# Routes for retrieving log messages
self.route('/spellbook/logs/<filter_string>', method='GET', callback=self.get_logs)
# Routes for RevealSecret actions
self.route('/spellbook/actions/<action_id:re:[a-zA-Z0-9_\-.]+>/reveal', method='GET', callback=self.get_reveal)
# Check if there are explorers configured, this will also initialize the default explorers on first startup
if len(get_explorers()) == 0:
LOG.warning('No block explorers configured!')
try:
# start the webserver for the REST API
self.run(host=self.host, port=self.port, debug=True)
except Exception as ex:
LOG.error('An exception occurred in the main loop: %s' % ex)
error_traceback = traceback.format_exc()
for line in error_traceback.split('\n'):
LOG.error(line)
if get_mail_on_exception() is True:
variables = {'HOST': get_host(),
'TRACEBACK': error_traceback}
body_template = os.path.join('server_exception')
sendmail(recipients=get_notification_email(),
subject='Main loop Exception occurred @ %s' % get_host(),
body_template=body_template,
variables=variables)
def index(self):
return
@staticmethod
def get_favicon():
return static_file('favicon.ico', root='.')
@staticmethod
def initialize_requests_log(logs_dir):
# Create a log file for the http requests to the REST API
logger = logging.getLogger('api_requests')
file_handler = RotatingFileHandler(os.path.join(logs_dir, 'requests.txt'), maxBytes=10000000, backupCount=5)
file_handler.setFormatter(logging.Formatter('%(message)s'))
logger.addHandler(file_handler)
logger.setLevel(logging.DEBUG)
return logger
def log_to_logger(self, fn):
@wraps(fn)
def _log_to_logger(*args, **kwargs):
start_time = int(round(time.time() * 1000))
request_time = datetime.now()
# Log information about the request before it is processed for debugging purposes
REQUESTS_LOG.info('%s | %s | %s | %s' % (request_time,
request.remote_addr,
request.method,
request.url))
if request.headers is not None:
for key, value in request.headers.items():
REQUESTS_LOG.info(' HEADERS | %s: %s' % (key, value))
if request.json is not None:
for key, value in request.json.items():
REQUESTS_LOG.info(' BODY | %s: %s' % (key, value))
actual_response = response
try:
actual_response = fn(*args, **kwargs)
except Exception as ex:
response_status = '500 ' + str(ex)
LOG.error('%s caused an exception: %s' % (request.url, ex))
error_traceback = traceback.format_exc()
for line in error_traceback.split('\n'):
LOG.error(line)
if get_mail_on_exception() is True:
variables = {'HOST': get_host(),
'TRACEBACK': error_traceback}
body_template = os.path.join('server_exception')
sendmail(recipients=get_notification_email(),
subject='Exception occurred @ %s' % get_host(),
body_template=body_template,
variables=variables)
else:
response_status = response.status
end_time = int(round(time.time() * 1000))
REQUESTS_LOG.info('%s | %s | %s | %s | %s | %s ms' % (request_time,
request.remote_addr,
request.method,
request.url,
response_status,
end_time - start_time))
return actual_response
return _log_to_logger
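    # Note: this plugin is registered via self.install(self.log_to_logger) in
    # __init__, so Bottle wraps every route callback with _log_to_logger and
    # each request/response pair (plus timing) is written to REQUESTS_LOG.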
@staticmethod
@output_json
def ping():
response.content_type = 'application/json'
return {'success': True}
@staticmethod
@output_json
def get_explorers():
response.content_type = 'application/json'
explorers = get_explorers()
if explorers is not None:
return explorers
else:
return {'error': 'Unable to retrieve explorer_ids'}
@staticmethod
@authentication_required
def save_explorer(explorer_id):
save_explorer(explorer_id, request.json)
@staticmethod
@output_json
@authentication_required
def get_explorer_config(explorer_id):
response.content_type = 'application/json'
explorer_config = get_explorer_config(explorer_id)
if explorer_config is not None:
return explorer_config
else:
return {'error': 'No explorer configured with id: %s' % explorer_id}
@staticmethod
@authentication_required
def delete_explorer(explorer_id):
delete_explorer(explorer_id)
@staticmethod
@output_json
@use_explorer
def get_latest_block():
response.content_type = 'application/json'
return latest_block()
@staticmethod
@output_json
@use_explorer
def get_block_by_height(height):
response.content_type = 'application/json'
return block_by_height(height)
@staticmethod
@output_json
@use_explorer
def get_block_by_hash(block_hash):
response.content_type = 'application/json'
return block_by_hash(block_hash)
@staticmethod
@output_json
@use_explorer
def get_prime_input_address(txid):
response.content_type = 'application/json'
return prime_input_address(txid)
@staticmethod
@output_json
@use_explorer
def get_transaction(txid):
response.content_type = 'application/json'
return transaction(txid)
@staticmethod
@output_json
@use_explorer
def get_transactions(address):
response.content_type = 'application/json'
return transactions(address)
@staticmethod
@output_json
@use_explorer
def get_balance(address):
response.content_type = 'application/json'
return balance(address)
@staticmethod
@output_json
@use_explorer
def get_utxos(address):
response.content_type = 'application/json'
return utxos(address, int(request.query.confirmations))
@staticmethod
@output_json
@use_explorer
def get_sil(address):
response.content_type = 'application/json'
block_height = int(request.json['block_height'])
return get_sil(address, block_height)
@staticmethod
@output_json
@use_explorer
def get_profile(address):
response.content_type = 'application/json'
block_height = int(request.json['block_height'])
return get_profile(address, block_height)
@staticmethod
@output_json
@use_explorer
def get_sul(address):
response.content_type = 'application/json'
confirmations = int(request.json['confirmations'])
return get_sul(address, confirmations)
@staticmethod
@output_json
@use_explorer
def get_lal(address):
response.content_type = 'application/json'
block_height = int(request.json['block_height'])
xpub = request.json['xpub']
return get_lal(address, xpub, block_height)
@staticmethod
@output_json
@use_explorer
def get_lbl(address):
response.content_type = 'application/json'
block_height = int(request.json['block_height'])
xpub = request.json['xpub']
return get_lbl(address, xpub, block_height)
@staticmethod
@output_json
@use_explorer
def get_lrl(address):
response.content_type = 'application/json'
block_height = int(request.json['block_height'])
xpub = request.json['xpub']
return get_lrl(address, xpub, block_height)
@staticmethod
@output_json
@use_explorer
def get_lsl(address):
response.content_type = 'application/json'
block_height = int(request.json['block_height'])
xpub = request.json['xpub']
return get_lsl(address, xpub, block_height)
@staticmethod
@output_json
@use_explorer
def get_random_address_from_sil(address):
response.content_type = 'application/json'
rng_block_height = int(request.json['rng_block_height'])
sil_block_height = int(request.json['sil_block_height'])
return random_address_from_sil(address=address, sil_block_height=sil_block_height, rng_block_height=rng_block_height)
@staticmethod
@output_json
@use_explorer
def get_random_address_from_lbl(address):
response.content_type = 'application/json'
rng_block_height = int(request.json['rng_block_height'])
sil_block_height = int(request.json['sil_block_height'])
xpub = request.json['xpub']
return random_address_from_lbl(address=address, xpub=xpub, sil_block_height=sil_block_height, rng_block_height=rng_block_height)
@staticmethod
@output_json
@use_explorer
def get_random_address_from_lrl(address):
response.content_type = 'application/json'
rng_block_height = int(request.json['rng_block_height'])
sil_block_height = int(request.json['sil_block_height'])
xpub = request.json['xpub']
return random_address_from_lrl(address=address, xpub=xpub, sil_block_height=sil_block_height, rng_block_height=rng_block_height)
@staticmethod
@output_json
@use_explorer
def get_random_address_from_lsl(address):
response.content_type = 'application/json'
rng_block_height = int(request.json['rng_block_height'])
sil_block_height = int(request.json['sil_block_height'])
xpub = request.json['xpub']
return random_address_from_lsl(address=address, xpub=xpub, sil_block_height=sil_block_height, rng_block_height=rng_block_height)
@staticmethod
@output_json
def get_triggers():
response.content_type = 'application/json'
triggers = get_triggers()
if triggers is not None:
return triggers
else:
return {'error': 'Unable to retrieve explorer_ids'}
@staticmethod
@output_json
@authentication_required
def get_trigger(trigger_id):
response.content_type = 'application/json'
trigger_config = get_trigger_config(trigger_id)
if trigger_config is not None:
return trigger_config
else:
return {'error': 'No trigger configured with id: %s' % trigger_id}
@staticmethod
@output_json
@authentication_required
def save_trigger(trigger_id):
response.content_type = 'application/json'
return save_trigger(trigger_id, **request.json)
@staticmethod
@output_json
@authentication_required
def delete_trigger(trigger_id):
response.content_type = 'application/json'
return delete_trigger(trigger_id)
@staticmethod
@output_json
@authentication_required
def activate_trigger(trigger_id):
response.content_type = 'application/json'
return activate_trigger(trigger_id)
@staticmethod
@output_json
def verify_signed_message(trigger_id):
response.content_type = 'application/json'
return verify_signed_message(trigger_id, **request.json)
@staticmethod
@output_json
@authentication_required
def sign_message():
response.content_type = 'application/json'
return sign_message(**request.json)
@staticmethod
@output_json
def http_get_request(trigger_id):
response.content_type = 'application/json'
data = request.json if request.json is not None else {}
# Also add parameters passed via the query string to the data, if any parameters have the same name then the query string has priority
query = dict(request.query)
data.update(query)
return http_get_request(trigger_id, **data)
@staticmethod
@output_json
def http_post_request(trigger_id):
response.content_type = 'application/json'
data = request.json if request.json is not None else {}
# Also add parameters passed via the query string to the data, if any parameters have the same name then the query string has priority
query = dict(request.query)
data.update(query)
return http_post_request(trigger_id, **data)
@staticmethod
@output_json
def http_delete_request(trigger_id):
response.content_type = 'application/json'
data = request.json if request.json is not None else {}
# Also add parameters passed via the query string to the data, if any parameters have the same name then the query string has priority
query = dict(request.query)
data.update(query)
return http_delete_request(trigger_id, **data)
@staticmethod
def html_request(trigger_id):
response.content_type = 'text/html'
data = request.json if request.json is not None else {}
# Also add parameters passed via the query string to the data, if any parameters have the same name then the query string has priority
query = dict(request.query)
data.update(query)
return http_get_request(trigger_id, **data)
@staticmethod
def qr():
response.content_type = 'image/png'
data = request.json if request.json is not None else {}
# Also add parameters passed via the query string to the data, if any parameters have the same name then the query string has priority
query = dict(request.query)
data.update(query)
return generate_qr(**data)
@staticmethod
@output_json
@use_explorer
@authentication_required
def check_trigger(trigger_id):
response.content_type = 'application/json'
return check_triggers(trigger_id)
@staticmethod
@output_json
@use_explorer
@authentication_required
def check_all_triggers():
response.content_type = 'application/json'
return check_triggers()
@staticmethod
@output_json
def get_actions():
response.content_type = 'application/json'
actions = get_actions()
if actions is not None:
return actions
else:
return {'error': 'Unable to retrieve action_ids'}
@staticmethod
@output_json
@authentication_required
def get_action(action_id):
response.content_type = 'application/json'
action_config = get_action_config(action_id)
if action_config is not None:
return action_config
else:
return {'error': 'No action with id %s found' % action_id}
@staticmethod
@output_json
@authentication_required
def save_action(action_id):
response.content_type = 'application/json'
return save_action(action_id, **request.json)
@staticmethod
@output_json
@authentication_required
def delete_action(action_id):
response.content_type = 'application/json'
return delete_action(action_id)
@staticmethod
@output_json
@authentication_required
def run_action(action_id):
response.content_type = 'application/json'
return run_action(action_id)
@staticmethod
@output_json
def get_reveal(action_id):
response.content_type = 'application/json'
return get_reveal(action_id)
@staticmethod
@output_json
@authentication_required
def get_logs(filter_string):
response.content_type = 'application/json'
return get_logs(filter_string=filter_string)
if __name__ == "__main__":
SpellbookRESTAPI()
| gpl-3.0 | 8,038,402,898,788,254,000 | 39.509677 | 142 | 0.632187 | false |
mpiannucci/PiMonitor | Reporter/Templates/__init__.py | 1 | 4984 | from web.template import CompiledTemplate, ForLoop, TemplateResult
# coding: utf-8
def base (page):
__lineoffset__ = -4
loop = ForLoop()
self = TemplateResult(); extend_ = self.extend
extend_([u'\n'])
extend_([u'<html>\n'])
extend_([u'<head>\n'])
extend_([u' <meta name="viewport" content="width=device-width, initial-scale=1">\n'])
extend_([u' <title>PiMonitor</title>\n'])
extend_([u' <link rel="shortcut icon" type="image/x-icon" href="/static/favicon.ico" />\n'])
extend_([u' <link rel="stylesheet" href="http://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap.min.css">\n'])
extend_([u' <link rel="stylesheet" type="text/css" href="/static/pimonitor.css" />\n'])
extend_([u' <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>\n'])
extend_([u' <script src="http://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/js/bootstrap.min.js"></script>\n'])
extend_([u' <script src="/static/Scripts/pimonitor.js" type="text/javascript"></script>\n'])
extend_([u'</head>\n'])
extend_([u'\n'])
extend_([u'<body>\n'])
extend_([u' <!-- Navigation Bar -->\n'])
extend_([u' <nav class="navbar navbar-inverse navbar-fixed-top" role="navigation">\n'])
extend_([u' <div class="container">\n'])
extend_([u' <!-- Brand and toggle get grouped for better mobile display -->\n'])
extend_([u' <div class="navbar-header">\n'])
extend_([u' <button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1">\n'])
extend_([u' <span class="sr-only">Toggle navigation</span>\n'])
extend_([u' <span class="icon-bar"></span>\n'])
extend_([u' <span class="icon-bar"></span>\n'])
extend_([u' <span class="icon-bar"></span>\n'])
extend_([u' </button>\n'])
extend_([u' <a class="navbar-brand" href="http://blog.mpiannucci.com/">Matthew Iannucci</a>\n'])
extend_([u' </div>\n'])
extend_([u' <!-- Collect the nav links, forms, and other content for toggling -->\n'])
extend_([u' <div class="collapse navbar-collapse" id="bs-example-navbar-collapse-1">\n'])
extend_([u' <ul class="nav navbar-nav">\n'])
extend_([u' <li>\n'])
extend_([u' <a href="http://blog.mpiannucci.com/blog">Blog</a>\n'])
extend_([u' </li>\n'])
extend_([u' <li>\n'])
extend_([u' <a href="http://blog.mpiannucci.com/apps">Projects</a>\n'])
extend_([u' </li>\n'])
extend_([u' <li>\n'])
extend_([u' <a href="http://blog.mpiannucci.com/bio">About</a>\n'])
extend_([u' </li>\n'])
extend_([u' </ul>\n'])
extend_([u' </div>\n'])
extend_([u' <!-- /.navbar-collapse -->\n'])
extend_([u' </div>\n'])
extend_([u' <!-- /.container -->\n'])
extend_([u' </nav>\n'])
extend_([u'<!-- <header class="jumbotron map_jumbotron" id="mainheader">\n'])
extend_([u' <div class="container">\n'])
extend_([u' <h1>MapGetter</h1>\n'])
extend_([u' <p>Get static images of a central area with coordinates in meters</p>\n'])
extend_([u' <em>Images courtesy of Google Maps</em>\n'])
extend_([u' </div>\n'])
extend_([u' </header> -->\n'])
extend_([u' <div class="container">\n'])
extend_([u' <div class="row">\n'])
extend_([u' <div class="col-sm-12 text-center" id="mapImage">\n'])
extend_([u' ', escape_(page, False), u'\n'])
extend_([u' </div>\n'])
extend_([u' </div>\n'])
extend_([u' <div class="row">\n'])
extend_([u' <div class="col-sm-12 text-center" id="mainfooter">\n'])
extend_([u' <p>Copyright 2015, Matthew Iannucci</p>\n'])
extend_([u' </div>\n'])
extend_([u' </div>\n'])
extend_([u' </div>\n'])
extend_([u'</body>\n'])
extend_([u'</html>\n'])
return self
base = CompiledTemplate(base, 'templates/base.html')
join_ = base._join; escape_ = base._escape
# coding: utf-8
def index():
__lineoffset__ = -5
loop = ForLoop()
self = TemplateResult(); extend_ = self.extend
posts = getAllReports()
extend_([u'\n'])
for post in loop.setup(posts):
extend_([escape_(post.date, True), u'\n'])
extend_([escape_(post.temperature, True), u'\n'])
extend_([escape_(post.humidity, True), u'\n'])
extend_([u'<hr>\n'])
return self
index = CompiledTemplate(index, 'templates/index.html')
join_ = index._join; escape_ = index._escape
| mit | -4,137,701,518,279,961,600 | 49.857143 | 147 | 0.507424 | false |
evantygf/BlockFun | id_list.py | 1 | 2710 | # Copyright 2016 Evan Dunning
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# image_path, name, id, type, state, breakable, drops, illuminant
sky_id = ("images/tiles/sky.png", "sky", 0, "block", 0, 0, 0, 1)
invisible_id = ("images/tiles/invisible.png", "invisible", 1, "block", 1, 0, 1, 0)
bedrock_id = ("images/tiles/bedrock.png", "bedrock", 2, "block", 1, 0, 2, 0)
grass_id = ("images/tiles/grass.png", "grass", 3, "block", 1, 1, 4, 0)
dirt_id = ("images/tiles/dirt.png", "dirt", 4, "block", 1, 1, 4, 0)
stone_id = ("images/tiles/stone.png", "stone", 5, "block", 1, 1, 5, 0)
sand_id = ("images/tiles/sand.png", "sand", 6, "block", 1, 1, 6, 0)
wood_id = ("images/tiles/wood.png", "wood", 7, "block", 0, 1, 7, 0)
leaf_id = ("images/tiles/leaf.png", "leaf", 8, "block", 0, 1, 8, 0)
chest_id = ("images/tiles/chest.png", "chest", 9, "block", 1, 1, 9, 0)
diamond_id = ("images/tiles/diamond ore.png", "diamond ore", 10, "block", 1, 1, 10, 0)
torch_id = ("images/tiles/torch.png", "torch", 11, "block", 0, 1, 11, 1)
pistol_id = ("images/items/pistol.png", "pistol", 100, "item")
all_ids = [sky_id,
invisible_id,
bedrock_id,
grass_id,
dirt_id,
stone_id,
sand_id,
wood_id,
leaf_id,
chest_id,
diamond_id,
torch_id,
pistol_id]
empty_list = [None for i in range(256)]
for i in all_ids:
empty_list[i[2]] = i
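# empty_list is now a sparse, index-by-id lookup table: slot n holds the tuple
# whose id field is n (e.g. slot 3 -> grass_id, slot 100 -> pistol_id), and
# the unused slots stay None.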
id_list = empty_list | gpl-3.0 | -2,952,824,591,461,782,000 | 52.156863 | 124 | 0.469742 | false |
xaviergmail/clargs | clargs/parser.py | 1 | 3276 | import re
import sys
full_re = r'^--(no)?([\w|-]*)(=(.*))?$'
short_re = r'^-(\w)(\w*)?$'
upper_re = r'^[a-z]*$'
def parse(argformat, argv):
# Initialize rogue list and named dict
rogue = []
named = {}
argc = len(argv)
i = 0
while i < argc:
# Current argument value in the loop
arg = argv[i]
# Search for the abbreviated options first
short = re.match(short_re, arg, re.I)
full = None
# Search for the full option if shorthand wasn't found
if not short:
# Search for the full argument
full = re.match(full_re, arg, re.I)
# Still haven't found a match. Add to rogue list and continue
if not full:
rogue.append(arg)
i += 1
continue
# Loop through argument data to find desired type. Default to str
for arg, argd in argformat.items():
argType = argd[2] if len(argd) > 2 else str
# Shorthand match!
if short and short.group(1).lower() == argd[0]:
# Boolean requested! True if lowercase, False if UPPERCASE
if argType is bool:
named[arg] = re.search(upper_re, short.group(1))
# 'Compressed' argument, Ex: -oSomething
# Take the value from the second capture group
elif short.group(2):
named[arg] = short.group(2)
# Our value is stored in the next index.
# Error out with missing argument if we go out of range
else:
if i + 2 > argc:
sys.stderr.write(
"Error: Missing value for argument %s\n" %
short.group(1))
sys.exit(1)
i += 1
# Store the value in the index
named[arg] = argv[i]
# Successfully matched a shorthand argument! Break out of loop.
break
# Full name match!
elif full and full.group(2).lower() == arg:
# Boolean requested. Assign the inverse of capture group 1 (no)
if argType is bool:
named[arg] = not bool(full.group(1))
# Equal sign found, assign value found after it
elif full.group(4):
named[arg] = full.group(4)
break # Success, exit this inner loop
else: # Did not break out of the loop, error out.
sys.stderr.write("Error: Unknown argument %s\n" %
("-" + short.group(1) if short else
"--" + full.group(1)))
sys.exit(1)
i += 1
for arg, argd in argformat.items():
# Default argument, if specified
if not arg in named and len(argd) > 1:
named[arg] = argd[1]
# Convert to the requested type, if specified. This will also convert
# the previously assigned regex/group matches to booleans.
elif len(argd) > 2:
named[arg] = argd[2](named[arg])
ret = {}
ret["named"] = named
ret["rogue"] = rogue
return ret
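# Usage sketch (hypothetical argformat, illustrative only -- not part of the
# original module). Each key is the long option name; the value is a tuple of
# (short letter, default, type), where default and type are optional, matching
# the argd[0]/argd[1]/argd[2] lookups above.
def _parse_demo():
    argformat = {
        "output": ("o", "out.txt", str),   # -oNAME, -o NAME or --output=NAME
        "verbose": ("v", False, bool),     # -v sets True, -V sets False
    }
    result = parse(argformat, ["-oreport", "extra.txt"])
    # result["named"] -> {"output": "report", "verbose": False}
    # result["rogue"] -> ["extra.txt"]
    return result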
| gpl-2.0 | -3,713,753,290,371,247,600 | 31.117647 | 79 | 0.492979 | false |
jfindleyderegt/au_trap | test_canny.py | 1 | 1891 | import cv2
import numpy as np
def CannyThreshold(lowThreshold):
detected_edges = cv2.GaussianBlur(gray,(3,3),0)
detected_edges = cv2.Canny(detected_edges,lowThreshold,lowThreshold*ratio,apertureSize = kernel_size)
dst = cv2.bitwise_and(img,img,mask = detected_edges) # just add some colours to edges from original image.
cv2.imshow('canny demo',dst)
lowThreshold = 0
max_lowThreshold = 400
ratio = 7
kernel_size = 5
im_simple = '/home/faedrus/Documents/au_trap/20141031/1414763725.88.png'
im_complex = '/home/faedrus/Documents/au_trap/20141031/1414769658.04.png'
img = cv2.imread(im_complex)
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
cv2.namedWindow('canny demo')
cv2.createTrackbar('Min threshold','canny demo',lowThreshold, max_lowThreshold, CannyThreshold)
CannyThreshold(0) # initialization
if cv2.waitKey(0) == 27:
cv2.destroyAllWindows()
#(height, width, depth) = img.shape
#
#r_count=np.sum(img[:,width/4:3*width/4,2])
#r_background=np.sum(img[:,:width/4,2])+np.sum(img[:,-width/4:,2])
#
#avg_count = r_count - r_background
#return avg_count
#path = '/home/faedrus/Documents/au_trap/20141013/'
#listing = os.listdir(path)
#listing = sorted (listing)
#print listing
#data=np.array([file_mean(path+im_file) for im_file in listing]).reshape((47,10))
#mean=np.mean(data,axis=1)
#std=np.std(data,axis=1)
#pylab.figure(1)
#pylab.hist(mean, 40)
#pylab.xlabel('intensity (a.u.)')
#pylab.ylabel('number of particles')
#pylab.figure(2)
#pylab.errorbar(range(len(mean)),mean,yerr=std)
#pylab.axis([0,50,0,6e5])
#pylab.xlabel('particle number')
#pylab.ylabel('intensity (a.u.)')
#fig1.show()
#pylab.show()
#print "Avg: " + str(r_count)
#r_min = np.array([0, 0, 0], np.uint8)
#r_max = np.array([100, 100, 255], np.uint8)
#dst = cv2.inRange(img, r_min, r_max)
#no_blue = cv2.countNonZero(dst)
#print('The number of blue pixels is: ' + str(no_blue))
| cc0-1.0 | 4,776,103,049,954,169,000 | 26.808824 | 111 | 0.703332 | false |
udapi/udapi-python | udapi/block/util/eval.py | 1 | 3902 | """Eval is a special block for evaluating code given by parameters."""
import collections
import pprint
import re
from udapi.core.block import Block
pp = pprint.pprint # pylint: disable=invalid-name
# We need exec in this block and the variables this etc. are not unused but provided for the exec
# pylint: disable=exec-used,unused-variable
class Eval(Block):
r"""Special block for evaluating code given by parameters.
Tricks:
`pp` is a shortcut for `pprint.pprint`.
`$.` is a shortcut for `this.` which is a shortcut for `node.`, `tree.` etc.
depending on context.
`count_X` is a shortcut for `self.count[X]` where X is any string (\S+)
and `self.count` is a `collections.Counter()` instance.
Thus you can use code like
`util.Eval node='count_$.upos +=1; count_"TOTAL" +=1' end="pp(self.count)"`
"""
# So many arguments is the design of this block (consistent with Perl Udapi).
# pylint: disable=too-many-arguments,too-many-instance-attributes
def __init__(self, doc=None, bundle=None, tree=None, node=None, start=None, end=None,
before_doc=None, after_doc=None, before_bundle=None, after_bundle=None,
expand_code=True, **kwargs):
super().__init__(**kwargs)
self.doc = doc
self.bundle = bundle
self.tree = tree
self.node = node
self.start = start
self.end = end
self.before_doc = before_doc
self.after_doc = after_doc
self.before_bundle = before_bundle
self.after_bundle = after_bundle
self.expand_code = expand_code
self.count = collections.Counter()
def expand_eval_code(self, to_eval):
"""Expand '$.' to 'this.', useful for oneliners."""
if not self.expand_code:
return to_eval
to_eval = re.sub(r'count_(\S+)', r'self.count[\1]', to_eval)
return to_eval.replace('$.', 'this.')
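    # Expansion example: the oneliner  count_$.upos +=1  becomes
    # self.count[$.upos] +=1  after the count_ substitution, and then
    # self.count[this.upos] +=1  after the '$.' -> 'this.' replacement.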
def before_process_document(self, document):
if self.before_doc:
this = doc = document
exec(self.expand_eval_code(self.before_doc))
def after_process_document(self, document):
if self.after_doc:
this = doc = document
exec(self.expand_eval_code(self.after_doc))
def process_document(self, document):
this = doc = document
if self.doc:
exec(self.expand_eval_code(self.doc))
if self.bundle or self.before_bundle or self.after_bundle or self.tree or self.node:
for bundle in doc.bundles:
# TODO if self._should_process_bundle(bundle):
self.process_bundle(bundle)
def process_bundle(self, bundle):
# Extract variables, so they can be used in eval code
document = doc = bundle.document
this = bundle
if self.before_bundle:
exec(self.expand_eval_code(self.before_bundle))
if self.bundle:
exec(self.expand_eval_code(self.bundle))
if self.tree or self.node:
trees = bundle.trees
for tree in trees:
if self._should_process_tree(tree):
self.process_tree(tree)
if self.after_bundle:
exec(self.expand_eval_code(self.after_bundle))
def process_tree(self, tree):
# Extract variables so they can be used in eval code
bundle = tree.bundle
doc = document = bundle.document
this = tree
root = tree
if self.tree:
exec(self.expand_eval_code(self.tree))
if self.node:
for node in tree.descendants():
this = node
exec(self.expand_eval_code(self.node))
def process_start(self):
if self.start:
exec(self.expand_eval_code(self.start))
def process_end(self):
if self.end:
exec(self.expand_eval_code(self.end))
| gpl-3.0 | 2,267,388,180,198,973,700 | 32.930435 | 97 | 0.60123 | false |
cellnopt/cellnopt | cno/io/cna.py | 1 | 7855 | # -*- python -*-
#
# This file is part of the cinapps.tcell package
#
# Copyright (c) 2012-2013 - EMBL-EBI
#
# File author(s): Thomas Cokelaer ([email protected])
#
# Distributed under the GLPv3 License.
# See accompanying file LICENSE.txt or copy at
# http://www.gnu.org/licenses/gpl-3.0.html
#
# website: www.cellnopt.org
#
##############################################################################
""":Topic: **Module dedicated to the CNA reactions data structure**
:Status: for production but not all features implemented.
"""
from __future__ import print_function
from cno.io.reactions import Reactions
from cno.io.sif import SIF
from cno.misc import CNOError
__all__ = ["CNA"]
class CNA(Reactions):
"""Reads a reaction file (CNA format)
This class has the :class:`Interaction` class as a Base class.
It is used to read **reactions** files from the CNA format, which
is a CSV-like format where each line looks like::
mek=erk 1 mek = 1 erk | # 0 1 0 436 825 1 1 0.01
The pipe decompose the strings into a LHS and RHS.
The LHS is made of a unique identifier without blanks (mek=erk). The remaining part is
the reaction equation. The equal sign "=" denotes the reaction arrow. Identifiers,
coefficients and equal sign must be separated by at least one blank.
The ! sign to indicate not. The + sign indicates an OR relation.
.. warning:: The + sign indicates an OR as it should be. However, keep in
mind that in CellNOptR code, the + sign
indicates an AND gate. In this package we always use **+** for an OR and
**^** or **&** for an AND gate.
    .. warning:: in the CNA case, some reactions have no LHS or RHS. Such
        reactions are valid in CNA but may cause issues if converted to SIF.
    .. note:: there do not seem to be any AND gates in CNA reactions.
The RHS is made of
* a default value: # or a value.
* a set of 3 flags representing the time scale
* flag 1: whether this interaction is to be excluded in logical computations
* flag 2: whether the logical interaction is treated with incomplete truth table
* flag 3: whether the interaction is monotone
* reacBoxes (columns 5,6,7,8)
* monotony (col 9)
In this class, only the LHS are used for now, however, the RHS values are
stored in different attributes.
::
>>> from cno.io import Reactions
>>> from cno import getdata
>>> a = Reactions(getdata('test_reactions'))
>>> reacs = a.reactions
.. seealso:: CNA class inherits from :class:`cno.io.reaction.Reaction`
"""
def __init__(self, filename=None, type=2, verbose=False):
""".. rubric:: Constructor
:param str filename: an optional filename containing reactions in CNA
format. If not provided, the CNA object is empty but you can
add reactions using :meth:`~cno.io.cna.CNA.add_reaction`.
However, attributes such as :attr:`~cno.io.cna.CNA.reacBoxes`
will not be populated.
:param integer type: only type 2 for now.
:param bool verbose: False by default
.. todo:: type1 will be implemented on request.
"""
super(CNA, self).__init__()
self.strict_rules = False
#self.metabolites = metabolites # must be a class LoadMetabolites
self.filename = filename
self.verbose = verbose
self.type = type
if type != 2:
raise NotImplementedError("only type 2 implemented")
# Attributes populated while reading the data.
#: populated when reading CNA reactions file
self.reacBoxes = []
#: populated when reading CNA reactions file
self.incTruthTable = []
#: populated when reading CNA reactions file
self.timeScale = []
#: populated when reading CNA reactions file
self.excludeInLogical = []
#: populated when reading CNA reactions file
self.reacText = []
#: populated when reading CNA reactions file
self.monotony = [] #flag 3
self.reacDefault = []
if filename:
self._read_reactions()
self._get_species()
def _read_reactions(self):
"""Read a reactions file and populate readID"""
f = open(self.filename, "r")
data = [] # the data structure to populate
for line in f.readlines(): # for each line
# convert tab.to white space, remove trailing and \n character
line = line.replace('\t',' ').replace('\n','').strip()
# do not consider commented or empty lines
if line.startswith("%") or line.startswith('#'):
pass
if len(line) == 0:
print("Found an empty line. Skipped")
else:
data.append(line)
f.close()
# scan all the data
for i, x in enumerate(data):
try:
beforePipe, afterPipe = x.split('|') # there should be only one pipe per
# line, so if it fails, this is a format error
            except ValueError:
                raise ValueError("Error line %s: expected exactly one '|' "
                                 "character, found %s" % (i + 1, x.count('|')))
reacID = beforePipe.split()[0].strip()
if reacID.count('=') != 1:
raise ValueError("Error line %s: wrong format expected one " %(i+1)
+ "only one = sign, found %s" % reacID.count('='))
else:
self.add_reaction(reacID)
reacText = beforePipe.replace(reacID, "").strip()
self.reacText.append(reacText)
parameters = afterPipe.split()
if len(parameters) != 9:
raise ValueError("Error line %s: did no find expected numbers of parameters" % i+1)
if self.type == 1:
# not finished
reacDefault, reacMin, reacMax, objFunc, d, d, d, d, reacVariance = parameters
mue = []
stoichMat = []
elif self.type == 2:
# First, the reac default value.
if parameters[0].isalnum():
self.reacDefault.append(float(parameters[0]))
elif parameters[0].strip()=='#':
self.reacDefault.append(float('NaN'))
else:
raise ValueError("""Error line %s: unexpected value in the
first column after pipe character (%s)""" % (str(i+1), parameters[0]))
self.incTruthTable.append(float(parameters[1]))
self.timeScale.append(float(parameters[2]))
self.excludeInLogical.append(float(parameters[3]))
self.monotony.append(float(parameters[8]))
self.reacBoxes.append([i+1, float(parameters[4]),
float(parameters[5]), 0, float(parameters[6]),
float(parameters[7])])
# clean up the reacDefault: could be # or number
if self.verbose == True:
print(self)
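
    # Input format sketch (assumption: an illustrative line, not taken from
    # a real CNA model). Each reaction line is "<reacID> <text> | <9 params>":
    #
    #     R1 a = b | 1 1 1 0 0.5 0.5 0.5 0.5 1
    #
    # where the nine values after the pipe map to reacDefault, incTruthTable,
    # timeScale, excludeInLogical, the four reacBoxes coordinates and
    # monotony, exactly as parsed in _read_reactions above.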
def to_sif(self, filename=None):
"""Export the reactions to SIF format
::
from cno.io import CNA
r = CNA()
r.add_reaction("a=b")
r.add_reaction("a+c=e")
r.to_sif("test.sif")
Again, be aware that "+" sign in Reaction means "OR".
Looking into the save file, we have the a+c=e reactions (a=e OR c=e)
expanded into 2 reactions (a 1 e) and (c 1 e) as expected::
a 1 b
a 1 e
c 1 e
"""
s = SIF()
for reac in self.reactions:
try:
s.add_reaction(reac)
except CNOError:
print("Skipped {} reaction".format(reac))
s.save(filename)
| bsd-2-clause | 8,088,367,737,963,195,000 | 35.534884 | 99 | 0.569064 | false |
cntnboys/cmput410-project | main/basicHttpAuth.py | 1 | 5273 | import base64
import json
from django.http import HttpResponse
from django.contrib.auth import authenticate, login
from django.template import RequestContext, loader
from django.core.exceptions import ObjectDoesNotExist
from main.models import Nodes
from django.db.models import Q
# This code snippet is taken from django snippets:
# https://djangosnippets.org/snippets/243/
# and Written By Scanner. The code snippet is used to allow for basic auth
#############################################################################
def view_or_basicauth(view, request, test_func, realm = "", *args, **kwargs):
"""
This is a helper function used by both 'logged_in_or_basicauth' and
    'has_perm_or_basicauth' that does the nitty-gritty of determining if they
are already logged in or if they have provided proper http-authorization
and returning the view if all goes well, otherwise responding with a 401.
"""
if test_func(request.user):
# Already logged in, just return the view.
return view(request, *args, **kwargs)
# They are not logged in. See if they provided login credentials
#
if 'HTTP_AUTHORIZATION' in request.META:
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2:
            # NOTE: We only support basic authentication for now.
if auth[0].lower() == "basic":
# Require Username:Host:Passwd
try:
uname, host, passwd = base64.b64decode(auth[1]).decode('ascii').split(':')
except:
response = HttpResponse(content="{message: not authenticated}",content_type="text/HTML; charset=utf-8")
response.status_code = 401
response['message'] = 'not authenticated'
return response
# Node Checking
try:
node = Nodes.objects.get(node_name=host, node_status=True)
except ObjectDoesNotExist:
response = HttpResponse(content="{message: node approved, contact admin}",
content_type="text/HTML; charset=utf-8")
response.status_code = 401
response['message'] = 'node not approved, contact admin'
return response
user = authenticate(username=uname, password=passwd)
if user is not None:
if user.is_active:
login(request, user)
request.user = user
return view(request, *args, **kwargs)
# Either they did not provide an authorization header or
# something in the authorization attempt failed. Send a 401
# back to them to ask them to authenticate.
response = HttpResponse(content="{message: not authenticated}",content_type="text/HTML; charset=utf-8")
response.status_code = 401
response['message'] = 'not authenticated'
return response
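
# Client-side sketch (assumptions: the `requests` library is available and
# "user"/"node1"/"secret" are placeholder credentials). Note the non-standard
# Username:Host:Passwd triple expected by the server code above:
#
#     import base64
#     import requests
#
#     token = base64.b64encode(b"user:node1:secret").decode("ascii")
#     requests.get(url, headers={"Authorization": "Basic " + token})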
#############################################################################
def logged_in_or_basicauth(realm = ""):
"""
A simple decorator that requires a user to be logged in. If they are not
logged in the request is examined for a 'authorization' header.
If the header is present it is tested for basic authentication and
the user is logged in with the provided credentials.
If the header is not present a http 401 is sent back to the
requestor to provide credentials.
The purpose of this is that in several django projects I have needed
several specific views that need to support basic authentication, yet the
web site as a whole used django's provided authentication.
    The uses for this are for urls that are accessed programmatically such as
by rss feed readers, yet the view requires a user to be logged in. Many rss
readers support supplying the authentication credentials via http basic
auth (and they do NOT support a redirect to a form where they post a
username/password.)
Use is simple:
@logged_in_or_basicauth
def your_view:
...
You can provide the name of the realm to ask for authentication within.
"""
def view_decorator(func):
def wrapper(request, *args, **kwargs):
return view_or_basicauth(func, request,
lambda u: u.is_authenticated(),
realm, *args, **kwargs)
return wrapper
return view_decorator
#############################################################################
def has_perm_or_basicauth(perm, realm = ""):
"""
This is similar to the above decorator 'logged_in_or_basicauth'
except that it requires the logged in user to have a specific
permission.
Use:
    @has_perm_or_basicauth('asforums.view_forumcollection')
def your_view:
...
"""
def view_decorator(func):
def wrapper(request, *args, **kwargs):
return view_or_basicauth(func, request,
lambda u: u.has_perm(perm),
realm, *args, **kwargs)
return wrapper
return view_decorator
| apache-2.0 | -4,862,675,662,238,732,000 | 40.849206 | 123 | 0.591883 | false |
ereOn/azmq | azmq/engines/base.py | 1 | 1843 | """
Base engine class.
"""
import asyncio
from pyslot import Signal
from ..common import (
CompositeClosableAsyncObject,
cancel_on_closing,
)
from ..errors import ProtocolError
from ..log import logger
class BaseEngine(CompositeClosableAsyncObject):
def __init__(
self,
*,
socket_type,
identity,
mechanism,
zap_client,
**kwargs
):
super().__init__(**kwargs)
self.socket_type = socket_type
self.identity = identity
self.mechanism = mechanism
self.zap_client = zap_client
self.on_connection_ready = Signal()
self.on_connection_lost = Signal()
self.on_connection_failure = Signal()
self.max_backoff_duration = 300 # 5 minutes.
self.min_backoff_duration = 0.001
self.current_backoff_duration = self.min_backoff_duration
def on_open(self, **kwargs):
super().on_open(**kwargs)
self.run_task = asyncio.ensure_future(self.run(), loop=self.loop)
@cancel_on_closing
async def run(self):
while not self.closing:
try:
result = await self.open_connection()
if isinstance(result, ProtocolError) and result.fatal:
logger.debug("Fatal error: %s. Not restarting.", result)
break
except asyncio.CancelledError:
break
except Exception as ex:
logger.debug("Connection error: %r.", ex)
else:
self.current_backoff_duration = self.min_backoff_duration
await asyncio.sleep(self.current_backoff_duration, loop=self.loop)
self.current_backoff_duration = min(
self.max_backoff_duration,
self.current_backoff_duration * 2,
)
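            # Backoff sketch: the sleep above doubles on every pass
            # (0.001s, 0.002s, 0.004s, ...) and is clamped at
            # max_backoff_duration (300s); a successful connection first
            # resets it to min_backoff_duration.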
| gpl-3.0 | -7,066,353,448,453,532,000 | 26.507463 | 78 | 0.578947 | false |
kubeflow/kfp-tekton-backend | components/aws/sagemaker/tests/unit_tests/tests/test_batch_transform.py | 1 | 1166 | import json
import unittest
from unittest.mock import patch, Mock, MagicMock
from botocore.exceptions import ClientError
from datetime import datetime
from batch_transform.src import batch_transform
from common import _utils
from . import test_utils
# TODO : Errors out if model_name doesn't contain '-'
# fix model_name '-' bug
required_args = [
'--region', 'us-west-2',
'--model_name', 'model-test',
'--input_location', 's3://fake-bucket/data',
'--output_location', 's3://fake-bucket/output',
'--instance_type', 'ml.c5.18xlarge',
'--output_location_file', 'tmp/'
]
class BatchTransformTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
parser = batch_transform.create_parser()
cls.parser = parser
def test_sample(self):
args = self.parser.parse_args(required_args)
response = _utils.create_transform_job_request(vars(args))
self.assertEqual(response['TransformOutput']['S3OutputPath'], 's3://fake-bucket/output')
def test_empty_string(self):
args = self.parser.parse_args(required_args)
response = _utils.create_transform_job_request(vars(args))
test_utils.check_empty_string_values(response) | apache-2.0 | -7,874,638,259,791,585,000 | 28.923077 | 92 | 0.716981 | false |
Maasouza/MinVertexCover | src/heuristic.py | 1 | 1221 | import networkx as nx
from util import *
def heuristic_cover(graph, preprocess=False):
    """
    Heuristic:
        if preprocess then
            run the preprocessing step to remove vertices that have only
            one neighbor, returning the vertices already visited
        while there are vertices left in the graph
            v = highest-degree vertex of G
            marked[v] = 1
            add v to the cover
            for each neighbor u of v
                marked[u] = 1
                remove u from the graph
            remove v from the graph
        return the cover
    """
start = time.time()
g = nx.Graph()
g.add_edges_from(graph.edges())
if(preprocess):
cover,marked,visited = pre_process(g)
else:
cover = [False for x in range(len(g.nodes()))]
marked = [False for x in range(len(g.nodes()))]
visited = 0
while(visited!=len(graph.nodes())):
v = max_degree_vertex(g)
cover[v]=True
visited+=1
        for u in list(g.neighbors(v)):  # materialize: g is mutated below
visited+=1
g.remove_node(u)
g.remove_node(v)
end = time.time()
print("--- Heuristica")
print("\tExec time: "+str((end-start))+" sec")
return cover
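
# Usage sketch (assumption: an illustrative graph; `util` must provide the
# max_degree_vertex and pre_process helpers imported above):
#
#     import networkx as nx
#
#     g = nx.path_graph(5)                # 0-1-2-3-4
#     cover = heuristic_cover(g)          # boolean list indexed by node
#     print([v for v, used in enumerate(cover) if used])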
| mit | 4,187,248,736,841,971,000 | 26.75 | 82 | 0.550369 | false |
codeforamerica/rva-screening | tests/unit/screener/test_screener.py | 1 | 20571 | import datetime
from StringIO import StringIO
from werkzeug.datastructures import FileStorage
from tests.unit.test_base import BaseTestCase
from tests.unit.screener.utils import (
get_user,
get_service,
get_patient
)
from app.models import AppUser, Service, ServiceTranslation, Patient
from app.prescreening import calculate_fpl, calculate_pre_screen_results
from app.utils import translate_object
import add_data.add_service_data as add_service_data
class TestScreener(BaseTestCase):
def setUp(self):
super(TestScreener, self).setUp()
self.test_user = get_user()
def test_login_logout(self):
"""Test logging in and out."""
app_user = get_user()
self.login('[email protected]', 'password')
self.assert_template_used('index.html')
self.logout()
self.assertEquals(app_user.authenticated, False)
self.login('[email protected]', 'badpassword')
self.assert_template_used('security/login_user.html')
self.assertEquals(app_user.authenticated, False)
def test_index(self):
"""Test that the index page works as expected."""
response = self.login()
response = self.client.get('/index')
self.assert200(response)
self.assert_template_used('index.html')
def test_add_patient(self):
"""Test that adding a patient works as expected."""
# Check that the new patient page loads as expected.
self.login()
response = self.client.get('/new_patient')
self.assert200(response)
self.assert_template_used('patient_details.html')
# Check that you can't save a new patient without a name
response = self.client.post('/new_patient', data=dict(
gender='',
has_prescription_coverage_yn='N',
eligible_for_vets_benefits_yn='N'
))
self.assert200(response)
self.assertEquals(len(Patient.query.all()), 0)
# Check that a new patient saves
response = self.client.post('/new_patient', data=dict(
first_name='John',
last_name='Richmond',
dob='1950-01-01',
ssn='111-11-1111',
gender='',
has_prescription_coverage_yn='N',
eligible_for_vets_benefits_yn='N'
), follow_redirects=True)
saved_patient = Patient.query.first()
self.assertEquals(
saved_patient.first_name,
'John'
)
self.assertEquals(
saved_patient.last_name,
'Richmond'
)
self.assertEquals(
saved_patient.dob,
datetime.date(1950, 1, 1)
)
self.assertEquals(
saved_patient.ssn,
'111-11-1111'
)
# Check that user stays on patient details page after saving
self.assert_template_used('patient_details.html')
def test_update_patient(self):
"""Test that updating an existing patient works as expected."""
self.login()
patient = get_patient()
# Check that the patient details page loads for an existing patient
response = self.client.get('/patient_details/{}'.format(patient.id))
self.assert200(response)
self.assert_template_used('patient_details.html')
# Check that updates to the patient save, including many-to-one fields
post_data = dict(
first_name='James',
last_name='Richmond',
dob='1950-12-12',
ssn='222-22-2222',
medical_home="CAHN",
email='[email protected]',
has_transport_yn='N',
gender='M',
transgender='No',
race='AA',
ethnicity='NHL',
languages=['EN', 'ES'],
has_interpreter_yn='N',
education_level='High school',
marital_status='MAR',
veteran_yn='N',
housing_status='REN',
# years_living_in_area='5',
# months_living_in_area='1',
time_in_area='LESS THAN 6',
city_or_county_of_residence='Richmond',
temp_visa_yn='N',
student_status='Not a student',
employment_status='FT',
spouse_employment_status='PT',
years_unemployed='0',
months_unemployed='6',
spouse_years_unemployed='1',
spouse_months_unemployed='11',
years_at_current_employer='LESS',
spouse_years_at_current_employer='LESS',
last_healthcare='Last year at VCU ED',
insurance_status='N',
coverage_type='VCC',
has_prescription_coverage_yn='N',
has_vcc='Y',
eligible_insurance_types='NE',
applied_for_vets_benefits_yn='N',
eligible_for_vets_benefits_yn='N',
applied_for_medicaid_yn='N',
medicaid_date_effective='2015-01-01',
applied_for_ssd_yn='N',
ssd_date_effective='1999-12-12',
care_due_to_accident_yn='N',
accident_work_related_yn='N',
filed_taxes_yn='N',
claimed_as_dependent_yn='N',
how_food_and_shelter='Stay with sister',
how_other_expenses='Gets money from father'
)
post_data['phone_numbers-0-phone_number'] = '(111) 111-1111'
post_data['phone_numbers-0-number_description'] = 'CELL'
post_data['phone_numbers-1-phone_number'] = '(222) 222-2222'
post_data['phone_numbers-1-number_description'] = 'HOME'
post_data['addresses-0-address1'] = '1 Main St.'
post_data['addresses-0-address2'] = 'Apt. 1'
post_data['addresses-0-city'] = 'Richmond'
post_data['addresses-0-state'] = 'VA'
post_data['addresses-0-zip'] = '11111'
post_data['addresses-0-address_description'] = 'OWN'
post_data['addresses-1-address1'] = '1 Maple St.'
post_data['addresses-1-address2'] = ''
post_data['addresses-1-city'] = 'Richmond'
post_data['addresses-1-state'] = 'VA'
post_data['addresses-1-zip'] = '11111'
post_data['addresses-1-address_description'] = 'RELATIVE'
post_data['emergency_contacts-0-full_name'] = 'Jane Johnson'
post_data['emergency_contacts-0-relationship'] = 'mother'
post_data['emergency_contacts-0-phone_number'] = '(111) 111-1111'
post_data['emergency_contacts-1-full_name'] = 'Mary Richmond'
post_data['emergency_contacts-1-relationship'] = 'sister'
post_data['emergency_contacts-1-phone_number'] = '(222) 222-2222'
post_data['household_members-0-full_name'] = 'Michael Richmond'
post_data['household_members-0-dob'] = '2000-12-12'
post_data['household_members-0-ssn'] = '999-99-9999'
post_data['household_members-0-relationship'] = 'son'
        post_data['household_members-1-full_name'] = 'Emily Richmond'
        post_data['household_members-1-dob'] = '2006-02-28'
        post_data['household_members-1-ssn'] = '888-88-8888'
        post_data['household_members-1-relationship'] = 'daughter'
post_data['income_sources-0-source'] = 'job'
post_data['income_sources-0-monthly_amount'] = '1000'
post_data['income_sources-1-source'] = 'food stamps'
post_data['income_sources-1-monthly_amount'] = '200'
post_data['employers-0-employer_name'] = 'Target'
post_data['employers-0-phone_number'] = '(111) 111-1111'
post_data['employers-0-employee'] = 'Patient'
post_data['employers-0-start_date'] = '2014-01-01'
post_data['employers-1-employer_name'] = 'Walmart'
post_data['employers-1-phone_number'] = '(222) 222-2222'
post_data['employers-1-employee'] = 'Spouse'
post_data['employers-1-start_date'] = '1999-12-12'
response = self.client.post(
'/patient_details/{}'.format(patient.id),
data=post_data,
follow_redirects=True
)
saved_patient = Patient.query.first()
self.assertEquals(
saved_patient.first_name,
'James'
)
self.assertEquals(
saved_patient.last_name,
'Richmond'
)
self.assertEquals(
saved_patient.dob,
datetime.date(1950, 12, 12)
)
self.assertEquals(
saved_patient.ssn,
'222-22-2222'
)
self.assertEquals(saved_patient.phone_numbers.count(), 2)
self.assertEquals(saved_patient.addresses.count(), 2)
self.assertEquals(saved_patient.emergency_contacts.count(), 2)
self.assertEquals(saved_patient.household_members.count(), 2)
self.assertEquals(saved_patient.income_sources.count(), 2)
self.assertEquals(saved_patient.employers.count(), 2)
# Check that the user stays on patient details page after saving
self.assert_template_used('patient_details.html')
# Check that updated many-to-one fields save correctly
post_data['phone_numbers-0-phone_number'] = '(333) 333-3333'
post_data['phone_numbers-0-number_description'] = 'WORK'
response = self.client.post(
'/patient_details/{}'.format(patient.id),
data=post_data,
follow_redirects=True
)
self.assert200(response)
saved_patient = Patient.query.first()
self.assertEquals(saved_patient.phone_numbers[0].phone_number, '(333) 333-3333')
self.assertEquals(saved_patient.phone_numbers[0].number_description, 'WORK')
self.assert_template_used('patient_details.html')
# Check that deleting many-to-one fields works as expected
post_data['phone_numbers-0-phone_number'] = ''
post_data['phone_numbers-0-number_description'] = ''
response = self.client.post(
'/patient_details/{}'.format(patient.id),
data=post_data,
follow_redirects=True
)
self.assert200(response)
self.assertEquals(saved_patient.phone_numbers.count(), 1)
self.assertEquals(saved_patient.phone_numbers[0].phone_number, '(222) 222-2222')
self.assertEquals(saved_patient.phone_numbers[0].number_description, 'HOME')
self.assert_template_used('patient_details.html')
# def test_document_image(self):
# """Test that uploading document images works as expected."""
# self.login()
# patient = get_patient()
# # Check that multiple document image uploads save correctly
# with open('tests/unit/screener/test_image.jpg', 'rb') as test_image:
# img_string_io = StringIO(test_image.read())
# post_data = dict(
# first_name='James',
# last_name='Richmond',
# dob='1950-12-12',
# gender='',
# transgender='',
# race='',
# ethnicity='',
# coverage_type='',
# student_status='',
# employment_status='',
# marital_status='',
# housing_status='',
# veteran_yn='',
# insurance_status='',
# spouse_employment_status='',
# has_prescription_coverage_yn='N',
# eligible_for_vets_benefits_yn='N',
# eligible_insurance_types='',
# applied_for_ssd_yn='',
# accident_work_related_yn='',
# has_vcc='',
# filed_taxes_yn='',
# applied_for_medicaid_yn='',
# has_interpreter_yn='',
# applied_for_vets_benefits_yn='',
# has_transport_yn='',
# claimed_as_dependent_yn='',
# temp_visa_yn='',
# care_due_to_accident_yn=''
# )
# post_data['document_images-0-file_name'] = FileStorage(img_string_io, filename='test_image.jpg')
# post_data['document_images-0-file_description'] = 'Test'
# post_data['document_images-1-file_name'] = FileStorage(img_string_io, filename='test_image_2.jpg')
# post_data['document_images-1-file_description'] = 'Test 2'
# response = self.client.post(
# '/patient_details/{}'.format(patient.id),
# data=post_data,
# follow_redirects=True
# )
# self.assert200(response)
# saved_patient = Patient.query.first()
# self.assertEquals(saved_patient.document_images.count(), 2)
# # Check that the page that displays the images loads correctly
# for image in saved_patient.document_images:
# response = self.client.get(
# '/document_image/{}'.format(image.id)
# )
# self.assert200(response)
# self.assert_template_used('documentimage.html')
def test_delete_patient(self):
"""Test that hard-deleting a patient works as expected."""
user = get_user()
self.login()
patient = get_patient(user)
response = self.client.get('/delete/{}'.format(patient.id), follow_redirects=True)
self.assert200(response)
# Check that patient was deleted
self.assertTrue(Patient.query.get(patient.id).deleted)
# Check that user is redirected to index page
self.assert_template_used('index.html')
def test_new_prescreening(self):
"""Test that the new prescreening page works as expected."""
response = self.client.get('/new_prescreening')
self.assert200(response)
self.assert_template_used('new_prescreening.html')
def test_patient_history(self):
"""Test that the edit history page works as expected."""
self.login()
patient = get_patient()
response = self.client.get('/patient_history/{}'.format(patient.id))
self.assert200(response)
self.assert_template_used('patient_history.html')
def test_patient_share(self):
"""Test that the share patient page works as expected."""
self.login()
patient = get_patient()
response = self.client.get('/patient_share/{}'.format(patient.id))
self.assert200(response)
self.assert_template_used('patient_share.html')
def test_add_referral(self):
"""Test that adding a referral works as expected."""
self.login()
user = AppUser.query.first()
patient = get_patient()
response = self.client.post('/add_referral', data=dict(
patient_id=patient.id,
app_user_id=user.id,
service_id='1',
notes='this is a note'
), follow_redirects=True)
self.assert200(response)
referral = Patient.query.first().referrals[0]
self.assertEquals(referral.from_app_user_id, user.id)
self.assertEquals(referral.to_service_id, 1)
def test_user(self):
"""Test that the user profile page works as expected."""
user = get_user()
response = self.client.get('/user/{}'.format(user.id))
self.assert200(response)
self.assert_template_used('user_profile.html')
def test_service(self):
"""Test that the service profile page works as expected."""
service = get_service()
response = self.client.get('/service/{}'.format(service.id))
self.assert200(response)
self.assert_template_used('service_profile.html')
def test_fpl_calculation(self):
"""Test that calculating a patient's Federal Poverty Level percentage
works as expected.
"""
self.assertEquals(calculate_fpl(8, 40890), 100)
self.assertEquals(calculate_fpl(1, 0), 0)
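        # For reference (an assumption about the guideline table behind
        # calculate_fpl): the percentage is 100 * income / poverty_line for
        # the household size. The 2015 guideline for an 8-person household
        # is $11,770 + 7 * $4,160 = $40,890, so that income is exactly 100%.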
def test_prescreening_basic(self):
"""Test that the prescreening input page works as expected."""
# Make sure the prescreening input page loads
response = self.client.get('/prescreening_basic')
self.assert200(response)
self.assert_template_used('prescreening_basic.html')
# Make sure submitting the form works
response = self.client.post('/prescreening_basic', data=dict(
household_size='5',
household_income='1000',
has_health_insurance='N',
is_eligible_for_medicaid='N'
))
self.assertRedirects(response, '/prescreening_results')
def test_calculate_pre_screen_results(self):
"""Test that calculating prescreening results works as expected."""
add_service_data.main(self.app)
daily_planet = Service.query.filter(Service.name == 'Daily Planet').first()
result = calculate_pre_screen_results(
fpl=0,
has_health_insurance='no',
is_eligible_for_medicaid='no',
service_ids=[daily_planet.id]
)[0]
self.assertEquals(result['name'], daily_planet.name)
self.assertEquals(result['eligible'], True)
self.assertEquals(result['fpl_cutoff'], daily_planet.fpl_cutoff)
self.assertEquals(result['fpl_eligible'], True)
self.assertEquals(result['uninsured_only_yn'], daily_planet.uninsured_only_yn)
self.assertEquals(
result['medicaid_ineligible_only_yn'],
daily_planet.medicaid_ineligible_only_yn
)
self.assertEquals(
result['residence_requirement_yn'],
daily_planet.residence_requirement_yn
)
self.assertEquals(
result['time_in_area_requirement_yn'],
daily_planet.time_in_area_requirement_yn
)
self.assertEquals(result['sliding_scale'], 'Nominal')
self.assertEquals(result['sliding_scale_range'], 'between 0% and 100%')
self.assertEquals(result['id'], daily_planet.id)
def test_patient_screening_history(self):
"""Test that the patient referral/screening history page works as expected."""
add_service_data.main(self.app)
user = get_user()
user.service = Service.query.filter(Service.name == 'Daily Planet').first()
self.login()
patient = get_patient()
# Make sure the page loads as expected
response = self.client.get('/patient_screening_history/{}'.format(patient.id))
self.assert200(response)
self.assert_template_used('patient_screening_history.html')
def test_patient_overview(self):
"""Test that the patient overview and screening result page works as expected."""
add_service_data.main(self.app)
user = get_user()
user.service = Service.query.filter(Service.name == 'Daily Planet').first()
self.login()
patient = get_patient(user)
# Make sure the page loads as expected
response = self.client.get('/patient_overview/{}'.format(patient.id))
self.assert200(response)
self.assert_template_used('patient_overview.html')
# Make sure you can save a new screening result
response = self.client.post(
'/patient_overview/{}'.format(patient.id),
data=dict(
eligible_yn='Y',
sliding_scale_id=user.service.sliding_scales[0].id,
notes='Test'
),
follow_redirects=True
)
self.assert200(response)
# User should stay on the same page after saving
self.assert_template_used('patient_overview.html')
screening_result = Patient.query.first().screening_results[0]
self.assertEquals(screening_result.service_id, user.service_id)
self.assertEquals(screening_result.eligible_yn, 'Y')
self.assertEquals(screening_result.sliding_scale_id, user.service.sliding_scales[0].id)
self.assertEquals(screening_result.notes, 'Test')
def test_translate_object(self):
"""Test that translating text from the database works as expected."""
# Test that the object stays the same if no translation exists
service = Service(
name='Richmond Clinic',
description='English description'
)
translated_service = translate_object(service, 'es_US')
self.assertEquals(translated_service.description, 'English description')
# Test that the object is translated when a translation exists
service.translations.append(
ServiceTranslation(
language_code='es_US',
description='Spanish description'
)
)
translated_service = translate_object(service, 'es_US')
self.assertEquals(translated_service.description, 'Spanish description')
| bsd-3-clause | -2,623,195,424,517,089,000 | 40.142 | 108 | 0.596374 | false |
tsileo/dirtools | test_dirtools.py | 1 | 6852 | # -*- coding: utf-8 -*-
""" test_dirtools.py - Test the dirtools module with pyfakefs. """
import shutil
import unittest
import os
import tarfile
import time
try:
import fake_filesystem
import fake_filesystem_shutil
except ImportError:
print "You must install pyfakefs in order to run the test suite."
import dirtools
class TestDirtools(unittest.TestCase):
def setUp(self):
""" Initialize a fake filesystem and dirtools. """
# First we create a fake filesystem in order to test dirtools
fk = fake_filesystem.FakeFilesystem()
fk.CreateDirectory('/test_dirtools')
fk.CreateFile('/test_dirtools/file1', contents='contents1')
fk.CreateFile('/test_dirtools/file2', contents='contents2')
fk.CreateFile('/test_dirtools/file3.py', contents='print "ok"')
fk.CreateFile('/test_dirtools/file3.pyc', contents='')
fk.CreateFile('/test_dirtools/.exclude', contents='excluded_dir/\n*.pyc')
fk.CreateDirectory('/test_dirtools/excluded_dir')
fk.CreateFile('/test_dirtools/excluded_dir/excluded_file',
contents='excluded')
fk.CreateDirectory('/test_dirtools/dir1')
fk.CreateDirectory('/test_dirtools/dir1/subdir1')
fk.CreateFile('/test_dirtools/dir1/subdir1/file_subdir1',
contents='inside subdir1')
fk.CreateFile('/test_dirtools/dir1/subdir1/.project')
fk.CreateDirectory('/test_dirtools/dir2')
fk.CreateFile('/test_dirtools/dir2/file_dir2', contents='inside dir2')
# Sort of "monkey patch" to make dirtools use the fake filesystem
dirtools.os = fake_filesystem.FakeOsModule(fk)
dirtools.open = fake_filesystem.FakeFileOpen(fk)
# Dirtools initialization
self.dir = dirtools.Dir('/test_dirtools')
self.os = dirtools.os
self.open = dirtools.open
self.shutil = fake_filesystem_shutil.FakeShutilModule(fk)
self.fk = fk
def testFiles(self):
""" Check that Dir.files return all files, except those excluded. """
self.assertEqual(sorted(self.dir.files()),
sorted(["file1",
"file2",
"file3.py",
".exclude",
"dir1/subdir1/file_subdir1",
"dir1/subdir1/.project",
"dir2/file_dir2"]))
def testFilesWithPatterns(self):
""" Check that Dir.files return all files matching the pattern, except those excluded. """
self.assertEqual(sorted(self.dir.files("*.py")),
sorted(["file3.py"]))
self.assertEqual(sorted(self.dir.files("*_dir2")),
sorted(["dir2/file_dir2"]))
def testSubdirs(self):
""" Check that Dir.subdirs return all subdirs, except those excluded. """
self.assertEqual(sorted(self.dir.subdirs()),
sorted(["dir1",
"dir1/subdir1",
"dir2"]))
def testSubdirsWithPatterns(self):
""" Check that Dir.subdirs return all subdirs matching the pattern, except those excluded. """
self.assertEqual(sorted(self.dir.subdirs("*1")),
sorted(["dir1",
"dir1/subdir1"]))
def testHashdir(self):
""" Check that the hashdir changes when a file change in the tree. """
hashdir = self.dir.hash(dirtools.filehash)
with self.open('/test_dirtools/file2', 'w') as f:
f.write("new content")
new_hashdir = self.dir.hash(dirtools.filehash)
self.assertNotEqual(hashdir, new_hashdir)
def testDirState(self):
dir_state = dirtools.DirState(self.dir, index_cmp=dirtools.filehash)
self.shutil.copytree('/test_dirtools', 'test_dirtools2')
with self.open('/test_dirtools2/dir1/subdir1/file_subdir1', 'w') as f:
f.write("dir state")
with self.open('/test_dirtools2/new_file', 'w') as f:
f.write("dir state")
self.os.remove('/test_dirtools2/file1')
self.shutil.rmtree('/test_dirtools2/dir2')
dir_state2 = dirtools.DirState(dirtools.Dir('/test_dirtools2'), index_cmp=dirtools.filehash)
diff = dir_state2 - dir_state
self.assertEqual(diff, {'deleted': ['file1', 'dir2/file_dir2'], 'updated': ['dir1/subdir1/file_subdir1'], 'deleted_dirs': ['dir2'], 'created': ['new_file']})
self.assertEqual(diff, dirtools.compute_diff(dir_state2.state, dir_state.state))
def testExclude(self):
""" Check that Dir.is_excluded actually exclude files. """
self.assertTrue(self.dir.is_excluded("excluded_dir"))
# Only the dir is excluded, the exclude line is excluded_dir/ not excluded_dir/*
self.assertFalse(self.dir.is_excluded("excluded_dir/excluded_file"))
self.assertTrue(self.dir.is_excluded("file3.pyc"))
self.assertFalse(self.dir.is_excluded("file3.py"))
def testProjects(self):
""" Check if Dir.find_projects find all projects in the directory tree. """
self.assertEqual(self.dir.find_projects(".project"), ['dir1/subdir1'])
def testCompression(self):
""" Check the compression, withouth pyfakefs because it doesn't support tarfile. """
dirtools.os = os
dirtools.open = open
test_dir = '/tmp/test_dirtools'
if os.path.isdir(test_dir):
shutil.rmtree(test_dir)
os.mkdir(test_dir)
with open(os.path.join(test_dir, 'file1'), 'w') as f:
f.write(os.urandom(2 ** 10))
with open(os.path.join(test_dir, 'file2.pyc'), 'w') as f:
f.write('excluded')
os.mkdir(os.path.join(test_dir, 'dir1'))
with open(os.path.join(test_dir, 'dir1/file1'), 'w') as f:
f.write(os.urandom(2 ** 10))
cdir = dirtools.Dir(test_dir)
archive_path = cdir.compress_to()
tar = tarfile.open(archive_path)
test_dir_extract = '/tmp/test_dirtools_extract'
if os.path.isdir(test_dir_extract):
shutil.rmtree(test_dir_extract)
os.mkdir(test_dir_extract)
tar.extractall(test_dir_extract)
extracted_dir = dirtools.Dir(test_dir_extract)
self.assertEqual(sorted(extracted_dir.files()),
sorted(cdir.files()))
self.assertEqual(sorted(extracted_dir.subdirs()),
sorted(cdir.subdirs()))
self.assertEqual(extracted_dir.hash(dirtools.filehash),
cdir.hash(dirtools.filehash))
shutil.rmtree(test_dir)
shutil.rmtree(test_dir_extract)
os.remove(archive_path)
if __name__ == '__main__':
unittest.main()
| mit | 8,629,326,201,781,697,000 | 38.595376 | 165 | 0.594307 | false |
fyookball/electrum | lib/blockchain.py | 1 | 25888 | # Electrum - lightweight Bitcoin client
# Copyright (C) 2012 [email protected]
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import sys
import threading
from typing import Optional
from . import asert_daa
from . import networks
from . import util
from .bitcoin import *
class VerifyError(Exception):
'''Exception used for blockchain verification errors.'''
CHUNK_FORKS = -3
CHUNK_BAD = -2
CHUNK_LACKED_PROOF = -1
CHUNK_ACCEPTED = 0
def bits_to_work(bits):
return (1 << 256) // (bits_to_target(bits) + 1)
def bits_to_target(bits):
if bits == 0:
return 0
size = bits >> 24
assert size <= 0x1d
word = bits & 0x00ffffff
assert 0x8000 <= word <= 0x7fffff
if size <= 3:
return word >> (8 * (3 - size))
else:
return word << (8 * (size - 3))
def target_to_bits(target):
if target == 0:
return 0
target = min(target, MAX_TARGET)
size = (target.bit_length() + 7) // 8
mask64 = 0xffffffffffffffff
if size <= 3:
compact = (target & mask64) << (8 * (3 - size))
else:
compact = (target >> (8 * (size - 3))) & mask64
if compact & 0x00800000:
compact >>= 8
size += 1
assert compact == (compact & 0x007fffff)
assert size < 256
return compact | size << 24
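
# Sanity-check sketch for the compact encoding above (both equalities hold
# by construction of bits_to_target/target_to_bits):
#
#     assert bits_to_target(0x1d00ffff) == 0xffff << 208
#     assert target_to_bits(bits_to_target(0x1d00ffff)) == 0x1d00ffff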
HEADER_SIZE = 80 # bytes
MAX_BITS = 0x1d00ffff
MAX_TARGET = bits_to_target(MAX_BITS)
# indicates no header in data file
NULL_HEADER = bytes([0]) * HEADER_SIZE
NULL_HASH_BYTES = bytes([0]) * 32
NULL_HASH_HEX = NULL_HASH_BYTES.hex()
def serialize_header(res):
s = int_to_hex(res.get('version'), 4) \
+ rev_hex(res.get('prev_block_hash')) \
+ rev_hex(res.get('merkle_root')) \
+ int_to_hex(int(res.get('timestamp')), 4) \
+ int_to_hex(int(res.get('bits')), 4) \
+ int_to_hex(int(res.get('nonce')), 4)
return s
def deserialize_header(s, height):
h = {}
h['version'] = int.from_bytes(s[0:4], 'little')
h['prev_block_hash'] = hash_encode(s[4:36])
h['merkle_root'] = hash_encode(s[36:68])
h['timestamp'] = int.from_bytes(s[68:72], 'little')
h['bits'] = int.from_bytes(s[72:76], 'little')
h['nonce'] = int.from_bytes(s[76:80], 'little')
h['block_height'] = height
return h
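
# Round-trip sketch: for any 80-byte raw header `raw` at height `h`,
# serialize_header(deserialize_header(raw, h)) should reproduce raw.hex().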
def hash_header_hex(header_hex):
return hash_encode(Hash(bfh(header_hex)))
def hash_header(header):
if header is None:
return NULL_HASH_HEX
if header.get('prev_block_hash') is None:
header['prev_block_hash'] = '00'*32
return hash_header_hex(serialize_header(header))
blockchains = {}
def read_blockchains(config):
blockchains[0] = Blockchain(config, 0, None)
fdir = os.path.join(util.get_headers_dir(config), 'forks')
if not os.path.exists(fdir):
os.mkdir(fdir)
l = filter(lambda x: x.startswith('fork_'), os.listdir(fdir))
l = sorted(l, key = lambda x: int(x.split('_')[1]))
for filename in l:
parent_base_height = int(filename.split('_')[1])
base_height = int(filename.split('_')[2])
b = Blockchain(config, base_height, parent_base_height)
blockchains[b.base_height] = b
return blockchains
def check_header(header):
if type(header) is not dict:
return False
for b in blockchains.values():
if b.check_header(header):
return b
return False
def can_connect(header):
for b in blockchains.values():
if b.can_connect(header):
return b
return False
def verify_proven_chunk(chunk_base_height, chunk_data):
chunk = HeaderChunk(chunk_base_height, chunk_data)
header_count = len(chunk_data) // HEADER_SIZE
prev_header = None
prev_header_hash = None
for i in range(header_count):
header = chunk.get_header_at_index(i)
# Check the chain of hashes for all headers preceding the proven one.
this_header_hash = hash_header(header)
if i > 0:
if prev_header_hash != header.get('prev_block_hash'):
raise VerifyError("prev hash mismatch: %s vs %s" % (prev_header_hash, header.get('prev_block_hash')))
prev_header_hash = this_header_hash
# Copied from electrumx
def root_from_proof(hash, branch, index):
hash_func = Hash
for elt in branch:
if index & 1:
hash = hash_func(elt + hash)
else:
hash = hash_func(hash + elt)
index >>= 1
if index:
raise ValueError('index out of range for branch')
return hash
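
# Example: for a two-leaf tree with leaves L0 and L1, the proof of L0 at
# index 0 is the single-element branch [L1], and the computed root is
# Hash(L0 + L1), i.e. root_from_proof(L0, [L1], 0) == Hash(L0 + L1).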
class HeaderChunk:
def __init__(self, base_height, data):
self.base_height = base_height
self.header_count = len(data) // HEADER_SIZE
self.headers = [deserialize_header(data[i * HEADER_SIZE : (i + 1) * HEADER_SIZE],
base_height + i)
for i in range(self.header_count)]
def __repr__(self):
return "HeaderChunk(base_height={}, header_count={})".format(self.base_height, self.header_count)
def get_count(self):
return self.header_count
def contains_height(self, height):
return height >= self.base_height and height < self.base_height + self.header_count
def get_header_at_height(self, height):
assert self.contains_height(height)
return self.get_header_at_index(height - self.base_height)
def get_header_at_index(self, index):
return self.headers[index]
class Blockchain(util.PrintError):
"""
Manages blockchain headers and their verification
"""
def __init__(self, config, base_height, parent_base_height):
self.config = config
self.catch_up = None # interface catching up
self.base_height = base_height
self.parent_base_height = parent_base_height
self.lock = threading.Lock()
with self.lock:
self.update_size()
def __repr__(self):
return "<{}.{} {}>".format(__name__, type(self).__name__, self.format_base())
def format_base(self):
return "{}@{}".format(self.get_name(), self.get_base_height())
def parent(self):
return blockchains[self.parent_base_height]
def get_max_child(self):
children = list(filter(lambda y: y.parent_base_height==self.base_height, blockchains.values()))
return max([x.base_height for x in children]) if children else None
def get_base_height(self):
mc = self.get_max_child()
return mc if mc is not None else self.base_height
def get_branch_size(self):
return self.height() - self.get_base_height() + 1
def get_name(self):
return self.get_hash(self.get_base_height()).lstrip('00')[0:10]
def check_header(self, header):
header_hash = hash_header(header)
height = header.get('block_height')
return header_hash == self.get_hash(height)
def fork(parent, header):
base_height = header.get('block_height')
self = Blockchain(parent.config, base_height, parent.base_height)
open(self.path(), 'w+').close()
self.save_header(header)
return self
def height(self):
return self.base_height + self.size() - 1
def size(self):
with self.lock:
return self._size
def update_size(self):
p = self.path()
self._size = os.path.getsize(p)//HEADER_SIZE if os.path.exists(p) else 0
def verify_header(self, header, prev_header, bits=None):
prev_header_hash = hash_header(prev_header)
this_header_hash = hash_header(header)
if prev_header_hash != header.get('prev_block_hash'):
raise VerifyError("prev hash mismatch: %s vs %s" % (prev_header_hash, header.get('prev_block_hash')))
# We do not need to check the block difficulty if the chain of linked header hashes was proven correct against our checkpoint.
if bits is not None:
# checkpoint BitcoinCash fork block
if (header.get('block_height') == networks.net.BITCOIN_CASH_FORK_BLOCK_HEIGHT and hash_header(header) != networks.net.BITCOIN_CASH_FORK_BLOCK_HASH):
err_str = "block at height %i is not cash chain fork block. hash %s" % (header.get('block_height'), hash_header(header))
raise VerifyError(err_str)
if bits != header.get('bits'):
raise VerifyError("bits mismatch: %s vs %s" % (bits, header.get('bits')))
target = bits_to_target(bits)
if int('0x' + this_header_hash, 16) > target:
raise VerifyError("insufficient proof of work: %s vs target %s" % (int('0x' + this_header_hash, 16), target))
def verify_chunk(self, chunk_base_height, chunk_data):
chunk = HeaderChunk(chunk_base_height, chunk_data)
prev_header = None
if chunk_base_height != 0:
prev_header = self.read_header(chunk_base_height - 1)
header_count = len(chunk_data) // HEADER_SIZE
for i in range(header_count):
header = chunk.get_header_at_index(i)
# Check the chain of hashes and the difficulty.
bits = self.get_bits(header, chunk)
self.verify_header(header, prev_header, bits)
prev_header = header
def path(self):
d = util.get_headers_dir(self.config)
filename = 'blockchain_headers' if self.parent_base_height is None else os.path.join('forks', 'fork_%d_%d'%(self.parent_base_height, self.base_height))
return os.path.join(d, filename)
def save_chunk(self, base_height, chunk_data):
chunk_offset = (base_height - self.base_height) * HEADER_SIZE
if chunk_offset < 0:
chunk_data = chunk_data[-chunk_offset:]
chunk_offset = 0
# Headers at and before the verification checkpoint are sparsely filled.
# Those should be overwritten and should not truncate the chain.
top_height = base_height + (len(chunk_data) // HEADER_SIZE) - 1
truncate = top_height > networks.net.VERIFICATION_BLOCK_HEIGHT
self.write(chunk_data, chunk_offset, truncate)
self.swap_with_parent()
def swap_with_parent(self):
if self.parent_base_height is None:
return
parent_branch_size = self.parent().height() - self.base_height + 1
if parent_branch_size >= self.size():
return
self.print_error("swap", self.base_height, self.parent_base_height)
parent_base_height = self.parent_base_height
base_height = self.base_height
parent = self.parent()
with open(self.path(), 'rb') as f:
my_data = f.read()
with open(parent.path(), 'rb') as f:
f.seek((base_height - parent.base_height)*HEADER_SIZE)
parent_data = f.read(parent_branch_size*HEADER_SIZE)
self.write(parent_data, 0)
parent.write(my_data, (base_height - parent.base_height)*HEADER_SIZE)
# store file path
for b in blockchains.values():
b.old_path = b.path()
# swap parameters
self.parent_base_height = parent.parent_base_height; parent.parent_base_height = parent_base_height
self.base_height = parent.base_height; parent.base_height = base_height
self._size = parent._size; parent._size = parent_branch_size
# move files
for b in blockchains.values():
if b in [self, parent]: continue
if b.old_path != b.path():
self.print_error("renaming", b.old_path, b.path())
os.rename(b.old_path, b.path())
# update pointers
blockchains[self.base_height] = self
blockchains[parent.base_height] = parent
def write(self, data, offset, truncate=True):
filename = self.path()
with self.lock:
with open(filename, 'rb+') as f:
if truncate and offset != self._size*HEADER_SIZE:
f.seek(offset)
f.truncate()
f.seek(offset)
f.write(data)
f.flush()
os.fsync(f.fileno())
self.update_size()
def save_header(self, header):
delta = header.get('block_height') - self.base_height
data = bfh(serialize_header(header))
assert delta == self.size()
assert len(data) == HEADER_SIZE
self.write(data, delta*HEADER_SIZE)
self.swap_with_parent()
def read_header(self, height, chunk=None):
# If the read is done within an outer call with local unstored header data, we first look in the chunk data currently being processed.
if chunk is not None and chunk.contains_height(height):
return chunk.get_header_at_height(height)
assert self.parent_base_height != self.base_height
if height < 0:
return
if height < self.base_height:
return self.parent().read_header(height)
if height > self.height():
return
delta = height - self.base_height
name = self.path()
if os.path.exists(name):
with open(name, 'rb') as f:
f.seek(delta * HEADER_SIZE)
h = f.read(HEADER_SIZE)
# Is it a pre-checkpoint header that has never been requested?
if h == NULL_HEADER:
return None
return deserialize_header(h, height)
def get_hash(self, height):
if height == -1:
return NULL_HASH_HEX
elif height == 0:
return networks.net.GENESIS
return hash_header(self.read_header(height))
# Not used.
def BIP9(self, height, flag):
v = self.read_header(height)['version']
return ((v & 0xE0000000) == 0x20000000) and ((v & flag) == flag)
def get_median_time_past(self, height, chunk=None):
if height < 0:
return 0
times = [
self.read_header(h, chunk)['timestamp']
for h in range(max(0, height - 10), height + 1)
]
return sorted(times)[len(times) // 2]
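    # Note: the slice above covers blocks [height - 10 .. height], i.e.
    # eleven timestamps once past the first blocks, so this returns the
    # consensus "median time past" over the last 11 blocks.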
def get_suitable_block_height(self, suitableheight, chunk=None):
        # In order to avoid a block with a very skewed timestamp having too
        # much influence, we select the median of the 3 topmost blocks as a
        # starting point.
        # Reference: github.com/Bitcoin-ABC/bitcoin-abc/master/src/pow.cpp#L201
blocks2 = self.read_header(suitableheight, chunk)
blocks1 = self.read_header(suitableheight-1, chunk)
blocks = self.read_header(suitableheight-2, chunk)
if (blocks['timestamp'] > blocks2['timestamp'] ):
blocks,blocks2 = blocks2,blocks
if (blocks['timestamp'] > blocks1['timestamp'] ):
blocks,blocks1 = blocks1,blocks
if (blocks1['timestamp'] > blocks2['timestamp'] ):
blocks1,blocks2 = blocks2,blocks1
return blocks1['block_height']
_cached_asert_anchor: Optional[asert_daa.Anchor] = None # cached Anchor, per-Blockchain instance
def get_asert_anchor(self, prevheader, mtp, chunk=None):
if networks.net.asert_daa.anchor is not None:
# Checkpointed (hard-coded) value exists, just use that
return networks.net.asert_daa.anchor
if (self._cached_asert_anchor is not None
and self._cached_asert_anchor.height <= prevheader['block_height']):
return self._cached_asert_anchor
# ****
# This may be slow -- we really should be leveraging the hard-coded
# checkpointed value. TODO: add hard-coded value to networks.py after
# Nov. 15th 2020 HF to ASERT DAA
# ****
anchor = prevheader
activation_mtp = networks.net.asert_daa.MTP_ACTIVATION_TIME
while mtp >= activation_mtp:
ht = anchor['block_height']
prev = self.read_header(ht - 1, chunk)
if prev is None:
self.print_error("get_asert_anchor missing header {}".format(ht - 1))
return None
prev_mtp = self.get_median_time_past(ht - 1, chunk)
if prev_mtp < activation_mtp:
# Ok, use this as anchor -- since it is the first in the chain
# after activation.
bits = anchor['bits']
self._cached_asert_anchor = asert_daa.Anchor(ht, bits, prev['timestamp'])
return self._cached_asert_anchor
mtp = prev_mtp
anchor = prev
def get_bits(self, header, chunk=None):
'''Return bits for the given height.'''
# Difficulty adjustment interval?
height = header['block_height']
# Genesis
if height == 0:
return MAX_BITS
prior = self.read_header(height - 1, chunk)
if prior is None:
raise Exception("get_bits missing header {} with chunk {!r}".format(height - 1, chunk))
bits = prior['bits']
# NOV 13 HF DAA and/or ASERT DAA
prevheight = height - 1
daa_mtp = self.get_median_time_past(prevheight, chunk)
# ASERTi3-2d DAA activated on Nov. 15th 2020 HF
if daa_mtp >= networks.net.asert_daa.MTP_ACTIVATION_TIME:
header_ts = header['timestamp']
prev_ts = prior['timestamp']
if networks.net.TESTNET:
# testnet 20 minute rule
if header_ts - prev_ts > 20*60:
return MAX_BITS
anchor = self.get_asert_anchor(prior, daa_mtp, chunk)
assert anchor is not None, "Failed to find ASERT anchor block for chain {!r}".format(self)
return networks.net.asert_daa.next_bits_aserti3_2d(anchor.bits,
prev_ts - anchor.prev_time,
prevheight - anchor.height)
# Mon Nov 13 19:06:40 2017 DAA HF
if prevheight >= networks.net.CW144_HEIGHT:
if networks.net.TESTNET:
# testnet 20 minute rule
if header['timestamp'] - prior['timestamp'] > 20*60:
return MAX_BITS
# determine block range
daa_starting_height = self.get_suitable_block_height(prevheight-144, chunk)
daa_ending_height = self.get_suitable_block_height(prevheight, chunk)
# calculate cumulative work (EXcluding work from block daa_starting_height, INcluding work from block daa_ending_height)
daa_cumulative_work = 0
for daa_i in range (daa_starting_height+1, daa_ending_height+1):
daa_prior = self.read_header(daa_i, chunk)
daa_bits_for_a_block = daa_prior['bits']
daa_work_for_a_block = bits_to_work(daa_bits_for_a_block)
daa_cumulative_work += daa_work_for_a_block
# calculate and sanitize elapsed time
daa_starting_timestamp = self.read_header(daa_starting_height, chunk)['timestamp']
daa_ending_timestamp = self.read_header(daa_ending_height, chunk)['timestamp']
daa_elapsed_time = daa_ending_timestamp - daa_starting_timestamp
if (daa_elapsed_time>172800):
daa_elapsed_time=172800
if (daa_elapsed_time<43200):
daa_elapsed_time=43200
# calculate and return new target
daa_Wn = (daa_cumulative_work*600) // daa_elapsed_time
daa_target = (1 << 256) // daa_Wn - 1
daa_retval = target_to_bits(daa_target)
daa_retval = int(daa_retval)
return daa_retval
#END OF NOV-2017 DAA
N_BLOCKS = networks.net.LEGACY_POW_RETARGET_BLOCKS # Normally 2016
if height % N_BLOCKS == 0:
return self.get_new_bits(height, chunk)
if networks.net.TESTNET:
# testnet 20 minute rule
if header['timestamp'] - prior['timestamp'] > 20*60:
return MAX_BITS
# special case for a newly started testnet (such as testnet4)
if height < N_BLOCKS:
return MAX_BITS
return self.read_header(height // N_BLOCKS * N_BLOCKS, chunk)['bits']
# bitcoin cash EDA
# Can't go below minimum, so early bail
if bits == MAX_BITS:
return bits
mtp_6blocks = self.get_median_time_past(height - 1, chunk) - self.get_median_time_past(height - 7, chunk)
if mtp_6blocks < 12 * 3600:
return bits
# If it took over 12hrs to produce the last 6 blocks, increase the
# target by 25% (reducing difficulty by 20%).
target = bits_to_target(bits)
target += target >> 2
return target_to_bits(target)
def get_new_bits(self, height, chunk=None):
N_BLOCKS = networks.net.LEGACY_POW_RETARGET_BLOCKS
assert height % N_BLOCKS == 0
# Genesis
if height == 0:
return MAX_BITS
first = self.read_header(height - N_BLOCKS, chunk)
prior = self.read_header(height - 1, chunk)
prior_target = bits_to_target(prior['bits'])
target_span = networks.net.LEGACY_POW_TARGET_TIMESPAN # usually: 14 * 24 * 60 * 60 = 2 weeks
span = prior['timestamp'] - first['timestamp']
span = min(max(span, target_span // 4), target_span * 4)
new_target = (prior_target * span) // target_span
return target_to_bits(new_target)
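    # Example: if the last retarget window took exactly one week instead of
    # the two-week target span, span stays inside the [/4, *4] clamp at
    # target_span // 2, so the target halves and the difficulty doubles.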
def can_connect(self, header, check_height=True):
height = header['block_height']
if check_height and self.height() != height - 1:
return False
if height == 0:
return hash_header(header) == networks.net.GENESIS
previous_header = self.read_header(height -1)
if not previous_header:
return False
prev_hash = hash_header(previous_header)
if prev_hash != header.get('prev_block_hash'):
return False
bits = self.get_bits(header)
try:
self.verify_header(header, previous_header, bits)
except VerifyError as e:
self.print_error('verify header {} failed at height {:d}: {}'
.format(hash_header(header), height, e))
return False
return True
def connect_chunk(self, base_height, hexdata, proof_was_provided=False):
chunk = HeaderChunk(base_height, hexdata)
header_count = len(hexdata) // HEADER_SIZE
top_height = base_height + header_count - 1
# We know that chunks before the checkpoint height, end at the checkpoint height, and
# will be guaranteed to be covered by the checkpointing. If no proof is provided then
# this is wrong.
if top_height <= networks.net.VERIFICATION_BLOCK_HEIGHT:
if not proof_was_provided:
return CHUNK_LACKED_PROOF
# We do not truncate when writing chunks before the checkpoint, and there's no
# way at this time to know if we have this chunk, or even a consecutive subset.
# So just overwrite it.
elif base_height < networks.net.VERIFICATION_BLOCK_HEIGHT and proof_was_provided:
# This was the initial verification request which gets us enough leading headers
# that we can calculate difficulty and verify the headers that we add to this
# chain above the verification block height.
if top_height <= self.height():
return CHUNK_ACCEPTED
elif base_height != self.height() + 1:
# This chunk covers a segment of this blockchain which we already have headers
# for. We need to verify that there isn't a split within the chunk, and if
# there is, indicate the need for the server to fork.
intersection_height = min(top_height, self.height())
chunk_header = chunk.get_header_at_height(intersection_height)
our_header = self.read_header(intersection_height)
if hash_header(chunk_header) != hash_header(our_header):
return CHUNK_FORKS
if intersection_height <= self.height():
return CHUNK_ACCEPTED
else:
# This base of this chunk joins to the top of the blockchain in theory.
# We need to rule out the case where the chunk is actually a fork at the
# connecting height.
our_header = self.read_header(self.height())
chunk_header = chunk.get_header_at_height(base_height)
if hash_header(our_header) != chunk_header['prev_block_hash']:
return CHUNK_FORKS
try:
if not proof_was_provided:
self.verify_chunk(base_height, hexdata)
self.save_chunk(base_height, hexdata)
return CHUNK_ACCEPTED
except VerifyError as e:
self.print_error('verify_chunk failed: {}'.format(e))
return CHUNK_BAD
| mit | -3,008,171,902,361,770,000 | 39.323988 | 160 | 0.600587 | false |
JRcard/GPIOSeq | GPIOSeq.py | 1 | 1848 | #!/usr/bin/env python
# encoding: utf-8
##################################### PEXPECT MUST BE INSTALLED TO RUN THE SOURCE CODE.
import wx, os
from Resources.TOP_FRAME import *
class MyApp(wx.App):
def OnInit(self):
frame = SeqFrame(None, "GPIOSeq")
self.SetTopWindow(frame)
return True
if __name__ == "__main__":
app = MyApp(False)
app.MainLoop()
###### WHERE THINGS STAND:
# 3-minute grid, with working zoom;
# Next step: scrollbar!!!!!..... see Scene.py and Grid.py
# And a scrolling time bar while the sequence is playing.
# The rectangles are autonomous. They can be deleted, resized and moved.
# Small extra: the mouse cursor changes shape according to the action it will perform.
# Next step: being able to act on a group of rectangles.
# Connection preference management is functional.
# The connection to the Raspberry Pi is functional. PEXPECT MUST BE INSTALLED TO RUN THE SOURCE CODE.
# Minimal guidance has been put in place to walk the user through the connection procedure.
# E.g. if they press "play" before being connected, a message appears. If they want to connect and have not entered their preferences, the panel appears, etc...
# Much remains to be done for all the possible connection error messages, which must be made visible to the user.
# On this subject, see: Pref.py
# Once connected, the "play" button works. The Raspberry Pi responds quickly.
# Next step: managing to stop the running process...
# Temporary files are created for sending to the Raspberry Pi, but they are not created in the right place. Easy to fix.
# That is what I have managed to do so far, in a little over 3 months.
| gpl-3.0 | 4,296,455,469,886,068,000 | 42.119048 | 163 | 0.712866 | false |
jk0/pyhole | pyhole/core/plugin.py | 1 | 10821 | # Copyright 2011-2015 Chris Behrens
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pyhole Plugin Manager"""
import functools
import os
import re
import sys
import time
import logger
import utils
LOG = logger.get_logger()
_plugin_instances = []
_plugin_hooks = {}
def _reset_variables():
"""Local function to init some variables that are common between
load and reload
"""
global _plugin_instances
global _plugin_hooks
_plugin_instances = []
_plugin_hooks = {}
for x in _hook_names:
_plugin_hooks[x] = []
def hook_add(hookname, arg, poll_timer=60):
"""Generic decorator to add hooks. Generally, this is not called
directly by plugins. Decorators that plugins use are automatically
generated below with the setattrs you'll see
"""
def wrap(f):
if hookname == "poll":
@utils.spawn
def _f(self, *args, **kwargs):
while True:
f(self, *args, **kwargs)
time.sleep(poll_timer)
setattr(_f, "_is_%s_hook" % hookname, True)
_f._hook_arg = arg
return _f
else:
setattr(f, "_is_%s_hook" % hookname, True)
f._hook_arg = arg
return f
return wrap
def hook_get(hookname):
"""Function to return the list of hooks of a particular type. Genearlly
this is not called directly. Callers tend to use the dynamically
generated calls 'hook_get_*' that are created below with the setattrs
"""
return _plugin_hooks[hookname]
def active_get(hookname):
"""Function to return the list of hook arguments. Genearlly
this is not called directly. Callers tend to use the dynamically
generated calls 'active_get_*' that are created below with the
setattrs
"""
return ", ".join(sorted([x[2] for x in _plugin_hooks[hookname]]))
_hook_names = ["keyword", "command", "msg_regex", "poll"]
_reset_variables()
_this_mod = sys.modules[__name__]
for x in _hook_names:
# Dynamically create the decorators and functions for various hooks
setattr(_this_mod, "hook_add_%s" % x, functools.partial(hook_add, x))
setattr(_this_mod, "hook_get_%ss" % x, functools.partial(hook_get, x))
setattr(_this_mod, "active_%ss" % x, functools.partial(active_get, x))
class PluginMetaClass(type):
"""The metaclass that makes all of the plugin magic work. All subclassing
gets caught here, which we can use to have plugins automagically
register themselves
"""
def __init__(cls, name, bases, attrs):
"""Catch subclassing. If the class doesn't yet have _plugin_classes,
it means it's the Plugin class itself, otherwise it's a class
that's been subclassed from Plugin (ie, a real plugin class)
"""
if not hasattr(cls, "_plugin_classes"):
cls._plugin_classes = []
else:
cls._plugin_classes.append(cls)
cls.__name__ = name
class Plugin(object):
"""The class that all plugin classes should inherit from"""
__metaclass__ = PluginMetaClass
def __init__(self, session, *args, **kwargs):
"""Default constructor for Plugin. Stores the client instance, etc"""
self.session = session
self.name = self.__class__.__name__
def _init_plugins(*args, **kwargs):
"""Create instances of the plugin classes and create a cache
of their hook functions
"""
for cls in Plugin._plugin_classes:
# Create instance of 'p'
instance = cls(*args, **kwargs)
# Store the instance
_plugin_instances.append(instance)
# Setup _keyword_hooks by looking at all of the attributes
# in the class and finding the ones that have a _is_*_hook
# attribute
for attr_name in dir(instance):
attr = getattr(instance, attr_name)
for hook_key in _hook_names:
if getattr(attr, "_is_%s_hook" % hook_key, False):
hook_arg = getattr(attr, "_hook_arg", None)
# Append (module, method, arg) tuple
_plugin_hooks[hook_key].append((attr.__module__, attr,
hook_arg))
def load_user_plugin(plugin, *args, **kwargs):
"""Load a user plugin"""
sys.path.append(utils.get_home_directory() + "plugins")
user_plugins = os.listdir(utils.get_directory("plugins"))
for user_plugin in user_plugins:
if user_plugin.endswith(".py"):
user_plugin = user_plugin[:-3]
if plugin == user_plugin:
try:
__import__(plugin, globals(), locals(), [plugin])
except Exception, exc:
LOG.error(exc)
def load_plugins(*args, **kwargs):
"""Module function that loads plugins from a particular directory"""
config = utils.get_config()
plugin_names = config.get("plugins", type="list")
for plugin_name in plugin_names:
load_user_plugin(plugin_name, *args, **kwargs)
try:
__import__("pyhole.plugins", globals(), locals(), [plugin_name])
except Exception, exc:
LOG.exception(exc)
_init_plugins(*args, **kwargs)
def reload_plugins(*args, **kwargs):
"""Module function that'll reload all of the plugins"""
config = utils.get_config()
# Terminate running poll instances
for plugin in _plugin_instances:
for attr_name in dir(plugin):
attr = getattr(plugin, attr_name)
if getattr(attr, "_is_poll_hook", False):
# TODO(jk0): Doing this kills the entire process. We need to
# figure out how to kill it properly. Until this is done,
# reloading will not work with polls.
# attr().throw(KeyboardInterrupt)
pass
# When the modules are reloaded, the meta class will append
# all of the classes again, so we need to make sure this is empty
Plugin._plugin_classes = []
_reset_variables()
# Now reload all of the plugins
plugins_to_reload = []
plugindir = "pyhole.plugins"
local_plugin_dir = utils.get_home_directory() + "plugins"
# Reload existing plugins
for mod, val in sys.modules.items():
if plugindir in mod and val and mod != plugindir:
mod_file = val.__file__
if not os.path.isfile(mod_file):
continue
for p in config.get("plugins", type="list"):
if plugindir + "." + p == mod:
plugins_to_reload.append(mod)
if local_plugin_dir in str(val):
plugins_to_reload.append(mod)
for plugin in plugins_to_reload:
try:
reload(sys.modules[plugin])
except Exception, exc:
LOG.error(exc)
# Load new plugins
load_plugins(*args, **kwargs)
def active_plugins():
"""Get the loaded plugin names"""
return ", ".join(sorted([x.__name__ for x in Plugin._plugin_classes]))
def active_plugin_classes():
"""Get the loaded plugin classes"""
return Plugin._plugin_classes
def run_hook_command(session, mod_name, func, message, arg, **kwargs):
"""Make a call to a plugin hook."""
try:
if arg:
session.log.debug("Calling: %s.%s(\"%s\")" % (mod_name,
func.__name__, arg))
else:
session.log.debug("Calling: %s.%s(None)" % (mod_name,
func.__name__))
func(message, arg, **kwargs)
except Exception, exc:
session.log.exception(exc)
def run_hook_polls(session):
"""Run polls in the background."""
message = None
for mod_name, func, cmd in hook_get_polls():
run_hook_command(session, mod_name, func, message, cmd)
def run_msg_regexp_hooks(session, message, private):
"""Run regexp hooks."""
msg = message.message
for mod_name, func, msg_regex in hook_get_msg_regexs():
match = re.search(msg_regex, msg, re.I)
if match:
run_hook_command(session, mod_name, func, message, match,
private=private)
def run_keyword_hooks(session, message, private):
"""Run keyword hooks."""
msg = message.message
words = msg.split(" ")
for mod_name, func, kwarg in hook_get_keywords():
for word in words:
match = re.search("^%s(.+)" % kwarg, word, re.I)
if match:
run_hook_command(session, mod_name, func, message,
match.group(1), private=private)
def run_command_hooks(session, message, private):
"""Run command hooks."""
msg = message.message
for mod_name, func, cmd in hook_get_commands():
session.addressed = False
if private:
match = re.search("^%s$|^%s\s(.*)$" % (cmd, cmd), msg,
re.I)
if match:
run_hook_command(session, mod_name, func, message,
match.group(1), private=private,
addressed=session.addressed)
if msg.startswith(session.command_prefix):
# Strip off command prefix
msg_rest = msg[len(session.command_prefix):]
else:
# Check for command starting with nick being addressed
msg_start_upper = msg[:len(session.nick) + 1].upper()
if msg_start_upper == session.nick.upper() + ":":
# Get rest of string after "nick:" and white spaces
msg_rest = re.sub("^\s+", "",
msg[len(session.nick) + 1:])
else:
continue
session.addressed = True
match = re.search("^%s$|^%s\s(.*)$" % (cmd, cmd), msg_rest, re.I)
if match:
run_hook_command(session, mod_name, func, message, match.group(1),
private=private,
addressed=session.addressed)
def poll_messages(session, message, private=False):
"""Watch for known commands."""
session.addressed = False
run_command_hooks(session, message, private)
run_keyword_hooks(session, message, private)
run_msg_regexp_hooks(session, message, private)
| apache-2.0 | -7,470,589,184,519,020,000 | 31.990854 | 78 | 0.584696 | false |
egbertbouman/tribler-g | Tribler/Core/DecentralizedTracking/pymdht/plugins/routing_bep5.py | 1 | 13339 | # Copyright (C) 2009-2010 Raul Jimenez
# Released under GNU LGPL 2.1
# See LICENSE.txt for more information
"""
This module intends to implement the routing policy specified in BEP5:
-
-
-
-
"""
import random
import core.ptime as time
import heapq
import logging
import core.identifier as identifier
import core.message as message
from core.querier import Query
import core.node as node
from core.node import Node, RoutingNode
from core.routing_table import RoutingTable
logger = logging.getLogger('dht')
NUM_BUCKETS = identifier.ID_SIZE_BITS
"""
We need 160 sbuckets to cover all the cases. See the following table:
Index | Distance | Comment
0 | [2^0,2^1) | All bits equal but the least significant bit
1 | [2^1,2^2) | All bits equal till the second least significant bit
...
158 | [2^159,2^160) | The most significant bit is equal the second is not
159 | [2^159,2^160) | The most significant bit is different
IMPORTANT: Notice there is NO bucket for -1
-1 | 0 | The bit strings are equal
"""
DEFAULT_NUM_NODES = 8
NODES_PER_BUCKET = [] # 16, 32, 64, 128, 256]
NODES_PER_BUCKET[:0] = [DEFAULT_NUM_NODES] \
* (NUM_BUCKETS - len(NODES_PER_BUCKET))
REFRESH_PERIOD = 15 * 60 # 15 minutes
QUARANTINE_PERIOD = 3 * 60 # 3 minutes
MAX_NUM_TIMEOUTS = 2
PING_DELAY_AFTER_TIMEOUT = 30 #seconds
MIN_RNODES_BOOTSTRAP = 10
NUM_NODES_PER_BOOTSTRAP_STEP = 1
BOOTSTRAP_MODE = 'bootstrap_mode'
FIND_NODES_MODE = 'find_nodes_mode'
NORMAL_MODE = 'normal_mode'
_MAINTENANCE_DELAY = {BOOTSTRAP_MODE: .2,
FIND_NODES_MODE: 2,
NORMAL_MODE: 2}
class RoutingManager(object):
def __init__(self, my_node, bootstrap_nodes):
self.my_node = my_node
#Copy the bootstrap list
self.bootstrap_nodes = iter(bootstrap_nodes)
self.table = RoutingTable(my_node, NODES_PER_BUCKET)
self.ping_msg = message.OutgoingPingQuery(my_node.id)
self.find_closest_msg = message.OutgoingFindNodeQuery(
my_node.id,
my_node.id)
# maintenance variables
self._maintenance_mode = BOOTSTRAP_MODE
self._pinged_q_rnodes = {} # questionable nodes which have been
# recently pinged
self._maintenance_tasks = [self._refresh_stale_bucket,
#self._ping_a_staled_rnode,
# self._ping_a_query_received_node,
# self._ping_a_found_node,
]
def do_maintenance(self):
queries_to_send = []
maintenance_lookup_target = None
if self._maintenance_mode == BOOTSTRAP_MODE:
try:
node_ = self.bootstrap_nodes.next()
queries_to_send = [self._get_maintenance_query(node_)]
except (StopIteration):
maintenance_lookup_target = self.my_node.id
self._maintenance_mode = FIND_NODES_MODE
return (10, [], maintenance_lookup_target)
else:
maintenance_lookup_target = self._refresh_stale_bucket()
return (_MAINTENANCE_DELAY[self._maintenance_mode],
queries_to_send, maintenance_lookup_target)
def _refresh_stale_bucket(self):
maintenance_lookup_target = None
current_time = time.time()
for i in xrange(self.table.lowest_index, NUM_BUCKETS):
sbucket = self.table.get_sbucket(i)
m_bucket = sbucket.main
if not m_bucket:
continue
inactivity_time = current_time - m_bucket.last_changed_ts
if inactivity_time > REFRESH_PERIOD:
# print time.time(), '>>>>>>>>>>>>>>> refreshing bucket %d after %f secs' % (
# i, inactivity_time)
maintenance_lookup_target = self.my_node.id.generate_close_id(
i)
m_bucket.last_changed_ts = current_time
return maintenance_lookup_target
self._maintenance_mode = NORMAL_MODE
return None
def _get_maintenance_query(self, node_):
return Query(self.ping_msg, node_)
def on_query_received(self, node_):
'''
Return None when nothing to do
Return a list of queries when queries need to be sent (the queries
will be sent out by the caller)
'''
if self._maintenance_mode != NORMAL_MODE:
return
log_distance = self.my_node.log_distance(node_)
try:
sbucket = self.table.get_sbucket(log_distance)
except(IndexError):
return # Got a query from myself. Just ignore it.
m_bucket = sbucket.main
rnode = m_bucket.get_rnode(node_)
if rnode:
# node in routing table: inform rnode
self._update_rnode_on_query_received(rnode)
return
# node is not in the routing table
if m_bucket.there_is_room():
# There is room in the bucket. Just add the new node.
rnode = node_.get_rnode(log_distance)
m_bucket.add(rnode)
self.table.update_lowest_index(log_distance)
self.table.num_rnodes += 1
self._update_rnode_on_query_received(rnode)
return
# No room in the main routing table
# Check whether there is a bad node to be replaced.
bad_rnode = self._pop_bad_rnode(m_bucket)
if bad_rnode:
# We have a bad node in the bucket. Replace it with the new node.
rnode = node_.get_rnode(log_distance)
m_bucket.add(rnode)
self._update_rnode_on_query_received(rnode)
self.table.update_lowest_index(log_distance)
self.table.num_rnodes += 0
return
# No bad nodes. Check for questionable nodes
q_rnodes = self._get_questionable_rnodes(m_bucket)
queries_to_send = []
# if q_rnodes:
# print time.time(), '-----pinging questionable nodes in',
# print log_distance
# print q_rnodes
for q_rnode in q_rnodes:
# Ping questinable nodes to check whether they are still alive.
# (0 timeouts so far, candidate node)
c_rnode = node_.get_rnode(log_distance)
self._update_rnode_on_query_received(c_rnode)
self._pinged_q_rnodes[q_rnode] = [0, c_rnode]
queries_to_send.append(Query(self.ping_msg, q_rnode))
return queries_to_send
def on_response_received(self, node_, rtt, nodes):
log_distance = self.my_node.log_distance(node_)
try:
sbucket = self.table.get_sbucket(log_distance)
except(IndexError):
return # Got a response from myself. Just ignore it.
m_bucket = sbucket.main
rnode = m_bucket.get_rnode(node_)
if rnode:
# node in routing table: update
self._update_rnode_on_response_received(rnode, rtt)
if self._maintenance_mode == NORMAL_MODE:
m_bucket.last_changed_ts = time.time()
if node_ in self._pinged_q_rnodes:
# This node is questionable. This response proves that it is
# alive. Remove it from the questionable dict.
del self._pinged_q_rnodes[node_]
return
# The node is not in main
if m_bucket.there_is_room():
rnode = node_.get_rnode(log_distance)
m_bucket.add(rnode)
self.table.update_lowest_index(log_distance)
self.table.num_rnodes += 1
self._update_rnode_on_response_received(rnode, rtt)
if self._maintenance_mode == NORMAL_MODE:
m_bucket.last_changed_ts = time.time()
return
# The main bucket is full
# if there is a bad node inside the bucket,
# replace it with the sending node_
bad_rnode = self._pop_bad_rnode(m_bucket)
if bad_rnode:
rnode = node_.get_rnode(log_distance)
m_bucket.add(rnode)
self._update_rnode_on_response_received(rnode, rtt)
if self._maintenance_mode == NORMAL_MODE:
m_bucket.last_changed_ts = time.time()
self.table.update_lowest_index(log_distance)
self.table.num_rnodes += 0
return
# There are no bad nodes. Ping questionable nodes (if any)
q_rnodes = self._get_questionable_rnodes(m_bucket)
queries_to_send = []
for q_rnode in q_rnodes:
# (0 timeouts so far, candidate node)
c_rnode = node_.get_rnode(log_distance)
self._update_rnode_on_response_received(c_rnode, rtt)
self._pinged_q_rnodes[q_rnode] = [0, c_rnode]
queries_to_send.append(Query(self.ping_msg, q_rnode))
return queries_to_send
def _pop_bad_rnode(self, mbucket):
for rnode in mbucket.rnodes:
if rnode.timeouts_in_a_row() >= 2:
mbucket.remove(rnode)
return rnode
def _get_questionable_rnodes(self, m_bucket):
q_rnodes = []
for rnode in m_bucket.rnodes:
inactivity_time = time.time() - rnode.last_seen
if inactivity_time > REFRESH_PERIOD:
q_rnodes.append(rnode)
if rnode.num_responses == 0:
q_rnodes.append(rnode)
return q_rnodes
def on_error_received(self, node_):
pass
def on_timeout(self, node_):
if not node_.id:
return # This is a bootstrap node (just addr, no id)
log_distance = self.my_node.log_distance(node_)
try:
sbucket = self.table.get_sbucket(log_distance)
except (IndexError):
return # Got a timeout from myself, WTF? Just ignore.
m_bucket = sbucket.main
rnode = m_bucket.get_rnode(node_)
if not rnode:
# This node is not in the table. Nothing to do here
return
# The node is in the table. Update it
self._update_rnode_on_timeout(rnode)
t_strikes, c_rnode = self._pinged_q_rnodes.get(node_, (None, None))
if t_strikes is None:
# The node is not being checked by a "questinable ping".
return
elif t_strikes == 0:
# This is the first timeout
self._pinged_q_rnodes[node_] = (1, c_rnode)
# Let's give it another chance
return [Query(self.ping_msg, rnode)]
elif t_strikes == 1:
# Second timeout. You're a bad node, replace if possible
# check if the candidate node is in the routing table
log_distance = self.my_node.log_distance(c_rnode)
m_bucket = self.table.get_sbucket(log_distance).main
c_rnode_in_table = m_bucket.get_rnode(c_rnode)
if c_rnode_in_table:
print 'questionable node replaced'
# replace
m_bucket.remove(rnode)
m_bucket.add(c_rnode)
self.table.update_lowest_index(log_distance)
self.table.num_rnodes += 0
def get_closest_rnodes(self, log_distance, num_nodes, exclude_myself):
if not num_nodes:
num_nodes = NODES_PER_BUCKET[log_distance]
return self.table.get_closest_rnodes(log_distance, num_nodes,
exclude_myself)
def get_main_rnodes(self):
return self.table.get_main_rnodes()
def print_stats(self):
self.table.print_stats()
def _update_rnode_on_query_received(self, rnode):
"""Register a query from node.
You should call this method when receiving a query from this node.
"""
current_time = time.time()
rnode.last_action_ts = time.time()
rnode.msgs_since_timeout += 1
rnode.num_queries += 1
rnode.add_event(current_time, node.QUERY)
rnode.last_seen = current_time
def _update_rnode_on_response_received(self, rnode, rtt):
"""Register a reply from rnode.
You should call this method when receiving a response from this rnode.
"""
rnode.rtt = rtt
current_time = time.time()
#rnode._reset_refresh_task()
if rnode.in_quarantine:
rnode.in_quarantine = rnode.last_action_ts < (
current_time - QUARANTINE_PERIOD)
rnode.last_action_ts = current_time
rnode.num_responses += 1
rnode.add_event(time.time(), node.RESPONSE)
rnode.last_seen = current_time
def _update_rnode_on_timeout(self, rnode):
"""Register a timeout for this rnode.
You should call this method when getting a timeout for this node.
"""
rnode.last_action_ts = time.time()
rnode.msgs_since_timeout = 0
rnode.num_timeouts += 1
rnode.add_event(time.time(), node.TIMEOUT)
def _worst_rnode(self, rnodes):
max_num_timeouts = -1
worst_rnode_so_far = None
for rnode in rnodes:
num_timeouots = rnode.timeouts_in_a_row()
if num_timeouots >= max_num_timeouts:
max_num_timeouts = num_timeouots
worst_rnode_so_far = rnode
return worst_rnode_so_far
| lgpl-2.1 | 1,855,917,418,661,080,000 | 35.848066 | 92 | 0.580628 | false |
JunctionAt/JunctionWWW | blueprints/forum/views/topic_view.py | 1 | 3817 | from flask import render_template, redirect, abort, url_for
import math
from .. import blueprint
from models.forum_model import Topic, Post
from post_reply import TopicReplyForm
from blueprints.auth import current_user
from topic_edit import PostEditForm
from ..forum_util import forum_template_data
POSTS_PER_PAGE = 20
PAGINATION_VALUE_RANGE = 3
@blueprint.route('/forum/t/<int:topic_id>/<string:topic_name>/', defaults={'page': 1})
@blueprint.route('/forum/t/<int:topic_id>/<string:topic_name>/page/<int:page>/')
def view_topic(topic_id, topic_name, page):
if page == 0:
abort(404)
topic_reply_form = TopicReplyForm()
topic = Topic.objects(topic_url_id=topic_id).exclude().first()
if topic is None:
abort(404)
if not topic_name == topic.get_url_name():
return redirect(topic.get_url())
if current_user.is_authenticated():
topic.update(add_to_set__users_read_topic=current_user.to_dbref())
board = topic.board
forum = board.forum
# Get our sorted posts and the number of posts.
posts = Post.objects(topic=topic).order_by('+date')
num_posts = len(posts)
# Calculate the total number of pages and make sure the request is a valid page.
num_pages = int(math.ceil(num_posts / float(POSTS_PER_PAGE)))
if num_pages < page:
if page == 1:
return render_template('forum_topic_view.html', topic=topic, board=board, forum=forum,
posts=posts, topic_reply_form=topic_reply_form,
total_pages=num_pages, current_page=page, next=None, prev=None, links=[])
abort(404)
# Compile the list of topics we want displayed.
display_posts = posts.skip((page - 1) * POSTS_PER_PAGE).limit(POSTS_PER_PAGE)
# Find the links we want for the next/prev buttons if applicable.
next_page = url_for('forum.view_topic', page=page + 1, **topic.get_url_info()) if page < num_pages else None
prev_page = url_for('forum.view_topic', page=page - 1, **topic.get_url_info()) if page > 1 and not num_pages == 1 else None
# Mash together a list of what pages we want linked to in the pagination bar.
links = []
for page_mod in range(-min(PAGINATION_VALUE_RANGE, page - 1), min(PAGINATION_VALUE_RANGE, num_pages-page) + 1):
num = page + page_mod
links.append({'num': num, 'url': url_for('forum.view_topic', page=num, **topic.get_url_info()),
'active': (num == page)})
return render_template('forum_topic_view.html', topic=topic, board=board, forum=forum,
posts=display_posts, topic_reply_form=topic_reply_form,
total_pages=num_pages, current_page=page,
next=next_page, prev=prev_page, links=links, markdown_escape=markdown_escape,
post_edit=PostEditForm(), forum_menu_current=board.id, **forum_template_data(forum))
from markupsafe import Markup, text_type
def markdown_escape(s):
"""Convert the characters &, <, >, ' and " in string s to HTML-safe
sequences. Use this if you need to display text that might contain
such characters in HTML. Marks return value as markup string.
"""
if hasattr(s, '__html__'):
return s.__html__().replace('>', '>')
return Markup(text_type(s)
.replace('&', '&')
.replace('>', '>')
.replace('<', '<')
.replace("'", ''')
.replace('"', '"')
)
#@blueprint.route('/f/ulastpost')
#def ulastpost():
# topics = Topic.objects()
# for topic in topics:
# post = Post.objects(topic=topic).order_by('-date').first()
# topic.last_post_date = post.date
# topic.save()
# return 'ye' | agpl-3.0 | 8,476,154,457,414,899,000 | 39.617021 | 127 | 0.614357 | false |
Kinovarobotics/kinova-ros | kinova_demo/nodes/kinova_demo/gravity_compensated_mode.py | 1 | 1165 | #! /usr/bin/env python
"""A test program to test action servers for the JACO and MICO arms."""
import roslib; roslib.load_manifest('kinova_demo')
import actionlib
import kinova_msgs.msg
import geometry_msgs.msg
import tf
import std_msgs.msg
import math
import thread
from kinova_msgs.srv import *
import argparse
from robot_control_modules import *
prefix = 'j2s7s300_'
nbJoints = 7
interactive = True
duration_sec = 100
if __name__ == '__main__':
try:
prefix, nbJoints = argumentParser(None)
rospy.init_node('torque_compensated_mode')
if (interactive == True):
nb = raw_input("Moving robot to candle like position, and setting zero torques, press return to start, n to skip")
if (nb != "n" and nb != "N"):
result = joint_position_client([180]*7, prefix)
if (interactive == True):
nb = raw_input('Setting torques to zero, press return')
ZeroTorque(prefix)
if (interactive == True):
nb = raw_input('Starting gravity compensation mode')
publishTorqueCmd([0,0,0,0,0,0,0], duration_sec, prefix)
print("Done!")
except rospy.ROSInterruptException:
print "program interrupted before completion"
| bsd-3-clause | 3,772,988,054,336,513,000 | 24.888889 | 117 | 0.692704 | false |
lordgittgenstein/eGov | corect/utility.py | 1 | 4923 | from datetime import datetime
import random
import string
from corect.models import Officer, Location, Office, Complaint, History
def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
def get_group(user_id):
return Officer.objects.get(user=user_id).type
def get_higher_office(office):
officer = Officer.objects.get(id=office.head.id)
boss = Officer.objects.get(id=officer.boss.id)
return Office.objects.get(head=boss.id)
def get_offices_in(location):
queryset = Location.objects.none()
if location.type == 'state' or location.type == 'ut':
queryset = Location.objects.filter(state=location.state)
if location.type == 'division':
queryset = Location.objects.filter(division=location.division)
if location.type == 'district':
queryset = Location.objects.filter(district=location.district)
if location.type == 'subdistrict':
queryset = Location.objects.filter(subdistrict=location.subdistrict)
if location.type == 'locality' or location.type == 'village':
queryset = Location.objects.filter(locality=location.locality)
office_list = Office.objects.none()
for q in queryset:
office_list = office_list | Office.objects.filter(location=q.id)
return office_list
def get_offices_over(location):
queryset = Location.objects.none()
aqueryset = Location.objects.none()
flag = 'dont'
if location.type == 'state' or location.type == 'ut':
queryset = Location.objects.filter(state=location.state)
if location.type == 'division':
aqueryset = Location.objects.filter(state=location.state)
queryset = Location.objects.filter(division=location.division)
if location.type == 'district':
aqueryset = Location.objects.filter(division=location.division)
queryset = Location.objects.filter(district=location.district)
flag = 'do'
if location.type == 'subdistrict':
aqueryset = Location.objects.filter(district=location.district)
queryset = Location.objects.filter(subdistrict=location.subdistrict)
flag = 'do'
if location.type == 'locality' or location.type == 'village':
aqueryset = Location.objects.filter(subdistrict=location.subdistrict)
queryset = Location.objects.filter(locality=location.locality)
flag = 'do'
office_list = Office.objects.none()
if flag == 'do':
for q in aqueryset:
office_list = office_list | Office.objects.filter(location=q.id)
for q in queryset:
office_list = office_list | Office.objects.filter(location=q.id)
return office_list
def get_complaints_under(officer):
officers_under = Officer.objects.filter(boss=officer.id)
offices = Office.objects.none()
for o in officers_under:
offices = offices | Office.objects.filter(head=o.id)
complaints_under = Complaint.objects.none()
for oo in offices:
complaints_under = complaints_under | Complaint.objects.filter(office=oo.id)
complaints = Complaint.objects.filter(office=Office.objects.get(head=officer.id).id)
return complaints | complaints_under
def n_deadlines(officer):
officers_under = Officer.objects.filter(boss=officer.id)
offices = Office.objects.none()
for o in officers_under:
offices = offices | Office.objects.filter(head=o.id)
n_complaints_under = 0
for oo in offices:
n_complaints_under = n_complaints_under + Complaint.objects.filter(
office=oo.id,
wake_up__lte=datetime.now().date(),
resolved=False).count()
n_complaints = Complaint.objects.filter(office=Office.objects.get(
head=officer.id).id,
wake_up__lte=datetime.now().date(),
resolved=False).count()
return n_complaints + n_complaints_under
def n_recent(officer, last_login):
complaints, n_events = get_complaints_under(officer), 0
for c in complaints:
if c.office.id == Office.objects.get(head=officer.id).id:
n_events = n_events + History.objects.filter(complaint=c.id,
datetime__gte=datetime.combine(last_login, datetime.min.time()),
is_read_officer=False).exclude(user=officer.user).count()
else:
n_events = n_events + History.objects.filter(
complaint=c.id,
datetime__gte=datetime.combine(last_login, datetime.min.time()),
is_read_boss=False).exclude(user=officer.user).count()
return n_events | gpl-3.0 | -8,112,677,190,083,402,000 | 46.805825 | 100 | 0.628682 | false |
openbaton/ems | ems/ems.py | 1 | 4643 | # Copyright (c) 2015 Fraunhofer FOKUS. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# !/usr/bin/env python
import threading
import os
import time
import logging
import ConfigParser
import pika
from receiver import on_message
from utils import get_map
__author__ = 'ogo'
log = logging.getLogger(__name__)
def on_request(ch, method, props, body):
response = on_message(body)
ch.basic_publish(exchange='', routing_key=props.reply_to,
properties=pika.BasicProperties(correlation_id=props.correlation_id, content_type='text/plain'),
body=str(response))
ch.basic_ack(delivery_tag=method.delivery_tag)
log.info("Answer sent")
def thread_function(ch, method, properties, body):
threading.Thread(target=on_request, args=(ch, method, properties, body)).start()
def main():
sleep_time = 1
logging_dir='/var/log/openbaton/'
#logging_dir = 'log/openbaton/'
if not os.path.exists(logging_dir):
os.makedirs(logging_dir)
logging.basicConfig(filename=logging_dir + '/ems-receiver.log', level=logging.INFO, format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s', datefmt='%m-%d %H:%M')
config_file_name = "/etc/openbaton/openbaton-ems.properties"
log.debug(config_file_name)
config = ConfigParser.ConfigParser()
config.read(config_file_name) # read config file
_map = get_map(section='ems', config=config) # get the data from map
queue_type = _map.get("type") # get type of the queue
hostname = _map.get("hostname")
username = _map.get("username")
password = _map.get("password")
autodel = _map.get("autodelete")
heartbeat = _map.get("heartbeat")
broker_port = _map.get("broker_port")
exchange_name = _map.get("exchange")
virtual_host = _map.get("virtual_host")
queuedel = True
if autodel == 'false':
queuedel = False
if not heartbeat:
heartbeat = '60'
if not exchange_name:
exchange_name = 'openbaton-exchange'
if not broker_port:
broker_port = "5672"
if not virtual_host:
virtual_host = "/"
if not queue_type:
queue_type = "generic"
log.info(
"EMS configuration paramters are "
"hostname: %s, username: %s, password: *****, autodel: %s, heartbeat: %s, exchange name: %s" % (
hostname, username, autodel, heartbeat, exchange_name))
rabbit_credentials = pika.PlainCredentials(username, password)
while True:
try:
connection = pika.BlockingConnection(
pika.ConnectionParameters(host=_map.get("broker_ip"), port=int(broker_port),
virtual_host=virtual_host, credentials=rabbit_credentials, heartbeat_interval=int(heartbeat)))
channel = connection.channel()
#channel.exchange_declare(exchange=exchange_name, type="topic", durable=True)
#channel.queue_declare(queue='ems.%s.register'%queue_type, auto_delete=queuedel)
channel.queue_bind(exchange=exchange_name, queue='ems.%s.register' % queue_type)
channel.queue_declare(queue='vnfm.%s.actions' % hostname, auto_delete=queuedel)
channel.queue_bind(exchange=exchange_name, queue='ems.%s.register' % queue_type)
channel.queue_bind(exchange=exchange_name, queue='vnfm.%s.actions' % hostname)
channel.basic_publish(exchange='', routing_key='ems.%s.register' % queue_type,
properties=pika.BasicProperties(content_type='text/plain'),
body='{"hostname":"%s"}' % hostname)
channel.basic_qos(prefetch_count=1)
channel.basic_consume(thread_function, queue='vnfm.%s.actions' % hostname)
channel.start_consuming()
except Exception:
# logging.exception('')
time.sleep(sleep_time)
if (sleep_time < 10):
sleep_time = sleep_time + 1
else:
sleep_time = sleep_time + 10
#print("Trying to reconnect")
# log.info("Trying to reconnect...")
| apache-2.0 | -3,791,214,746,877,738,000 | 40.828829 | 172 | 0.639242 | false |
YuepengGuo/backtrader | docs/datafeed-develop-general/vchart-test.py | 1 | 1988 | #!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import datetime
import backtrader as bt
from vchart import VChartData
if __name__ == '__main__':
# Create a cerebro entity
cerebro = bt.Cerebro(stdstats=False)
# Add a strategy
cerebro.addstrategy(bt.Strategy)
###########################################################################
# Note:
# The goog.fd file belongs to VisualChart and cannot be distributed with
# backtrader
#
# VisualChart can be downloaded from www.visualchart.com
###########################################################################
# Create a Data Feed
datapath = '../../datas/goog.fd'
data = VChartData(
dataname=datapath,
fromdate=datetime.datetime(2006, 1, 1),
todate=datetime.datetime(2006, 12, 31),
timeframe=bt.TimeFrame.Days
)
# Add the Data Feed to Cerebro
cerebro.adddata(data)
# Run over everything
cerebro.run()
# Plot the result
cerebro.plot(style='bar')
| gpl-3.0 | -2,867,846,791,584,958,000 | 32.133333 | 79 | 0.577465 | false |
luca76/QGIS | python/plugins/MetaSearch/dialogs/newconnectiondialog.py | 1 | 3231 | # -*- coding: utf-8 -*-
###############################################################################
#
# CSW Client
# ---------------------------------------------------------
# QGIS Catalogue Service client.
#
# Copyright (C) 2010 NextGIS (http://nextgis.org),
# Alexander Bruy ([email protected]),
# Maxim Dubinin ([email protected])
#
# Copyright (C) 2014 Tom Kralidis ([email protected])
#
# This source is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This code is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# A copy of the GNU General Public License is available on the World Wide Web
# at <http://www.gnu.org/copyleft/gpl.html>. You can also obtain it by writing
# to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston,
# MA 02111-1307, USA.
#
###############################################################################
from PyQt4.QtCore import QSettings
from PyQt4.QtGui import QDialog, QMessageBox
from MetaSearch.ui.newconnectiondialog import Ui_NewConnectionDialog
class NewConnectionDialog(QDialog, Ui_NewConnectionDialog):
"""Dialogue to add a new CSW entry"""
def __init__(self, conn_name=None):
"""init"""
QDialog.__init__(self)
self.setupUi(self)
self.settings = QSettings()
self.conn_name = None
self.conn_name_orig = conn_name
def accept(self):
"""add CSW entry"""
conn_name = self.leName.text().strip()
conn_url = self.leURL.text().strip()
if any([conn_name == '', conn_url == '']):
QMessageBox.warning(self, self.tr('Save connection'),
self.tr('Both Name and URL must be provided'))
return
if conn_name is not None:
key = '/MetaSearch/%s' % conn_name
keyurl = '%s/url' % key
key_orig = '/MetaSearch/%s' % self.conn_name_orig
# warn if entry was renamed to an existing connection
if all([self.conn_name_orig != conn_name,
self.settings.contains(keyurl)]):
res = QMessageBox.warning(self, self.tr('Save connection'),
self.tr('Overwrite %s?' % conn_name),
QMessageBox.Ok | QMessageBox.Cancel)
if res == QMessageBox.Cancel:
return
# on rename delete original entry first
if all([self.conn_name_orig is not None,
self.conn_name_orig != conn_name]):
self.settings.remove(key_orig)
self.settings.setValue(keyurl, conn_url)
self.settings.setValue('/MetaSearch/selected', conn_name)
QDialog.accept(self)
def reject(self):
"""back out of dialogue"""
QDialog.reject(self)
| gpl-2.0 | -4,703,666,397,091,688,000 | 36.569767 | 79 | 0.563603 | false |
ZeitOnline/zeit.calendar | src/zeit/calendar/calendar.py | 1 | 3226 | import datetime
import BTrees.OOBTree
import zope.component
import zope.interface
import zope.lifecycleevent
import zope.proxy
import zope.app.container.btree
import zeit.calendar.interfaces
class Calendar(zope.app.container.btree.BTreeContainer):
zope.interface.implements(zeit.calendar.interfaces.ICalendar)
def __init__(self):
super(Calendar, self).__init__()
self._date_index = BTrees.OOBTree.OOBTree()
self._key_index = BTrees.OOBTree.OOBTree()
def getEvents(self, date):
"""Return the events occuring on `date`."""
for event_id in self._date_index.get(date, []):
yield self[event_id]
def haveEvents(self, date):
"""Return whether there are events occuring on `date`."""
return bool(self._date_index.get(date))
def __setitem__(self, key, value):
event = zeit.calendar.interfaces.ICalendarEvent(value)
super(Calendar, self).__setitem__(key, value)
self._index(key, event.start, event.end)
def __delitem__(self, key):
super(Calendar, self).__delitem__(key)
self._unindex(key)
def _index(self, key, start, end):
if end is None:
check = (start,)
else:
check = (start, end)
for day in check:
if not isinstance(day, datetime.date):
raise ValueError("Expected date object, got %r instead" % day)
for day in date_range(start, end):
try:
day_idx = self._date_index[day]
except KeyError:
self._date_index[day] = day_idx = BTrees.OOBTree.OOTreeSet()
day_idx.insert(key)
self._key_index[key] = (start, end)
def _unindex(self, key):
start, end = self._key_index[key]
del self._key_index[key]
for day in date_range(start, end):
self._date_index[day].remove(key)
@zope.component.adapter(
zeit.calendar.interfaces.ICalendarEvent,
zope.lifecycleevent.IObjectModifiedEvent)
def updateIndexOnEventChange(calendar_event, event):
calendar = zope.proxy.removeAllProxies(calendar_event.__parent__)
key = calendar_event.__name__
calendar._unindex(key)
calendar._index(key, calendar_event.start, calendar_event.end)
def date_range(start, end):
"""Generate all datetime.date objects from start through end.
If end is None or earlier than start, yield only start. The range is never
empty so every event is always listed for at least one day. Otherwise
faulty dates might render an event unreachable via the index.
>>> day1 = datetime.date(2008, 1, 30)
>>> day2 = datetime.date(2008, 2, 2)
>>> list(date_range(day1, day2))
[datetime.date(2008, 1, 30), datetime.date(2008, 1, 31),
datetime.date(2008, 2, 1), datetime.date(2008, 2, 2)]
>>> list(date_range(day1, None))
[datetime.date(2008, 1, 30)]
>>> list(date_range(day1, day1))
[datetime.date(2008, 1, 30)]
>>> list(date_range(day2, day1))
[datetime.date(2008, 2, 2)]
"""
if end is None or end <= start:
yield start
else:
for i in xrange(start.toordinal(), end.toordinal() + 1):
yield datetime.date.fromordinal(i)
| bsd-3-clause | 5,608,074,890,873,393,000 | 31.26 | 78 | 0.626472 | false |
skosukhin/spack | var/spack/repos/builtin/packages/r-irdisplay/package.py | 1 | 1679 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RIrdisplay(RPackage):
"""An interface to the rich display capabilities of Jupyter front-ends
(e.g. 'Jupyter Notebook') Designed to be used from a running IRkernel
session"""
homepage = "https://irkernel.github.io"
url = "https://cran.rstudio.com/src/contrib/IRdisplay_0.4.4.tar.gz"
version('0.4.4', '5be672fb82185b90f23bd99ac1e1cdb6')
depends_on('r-repr', type=('build', 'run'))
| lgpl-2.1 | 1,886,206,090,123,074,800 | 42.051282 | 78 | 0.673615 | false |
MaterialsDiscovery/PyChemia | pychemia/core/element.py | 1 | 6941 | from collections.abc import Mapping
from pychemia.utils.periodic import *
madelung_exceptions = {'Cr': ['Ar', '4s1', '3d5'],
'Cu': ['Ar', '4s1', '3d10'],
'Nb': ['Kr', '5s1', '4d4'],
'Mo': ['Kr', '5s1', '4d5'],
'Ru': ['Kr', '5s1', '4d7'],
'Rh': ['Kr', '5s1', '4d8'],
'Pd': ['Kr', '4d10'],
'Ag': ['Kr', '5s1', '4d10'],
'La': ['Xe', '6s2', '5d1'],
'Ce': ['Xe', '6s2', '4f1', '5d1'],
'Gd': ['Xe', '6s2', '4f7', '5d1'],
'Pt': ['Xe', '6s1', '4f14', '5d9'],
'Au': ['Xe', '6s1', '4f14', '5d10'],
'Ac': ['Rn', '7s2', '6d1'],
'Th': ['Rn', '7s2', '6d2'],
'Pa': ['Rn', '7s2', '5f2', '6d1'],
'U': ['Rn', '7s2', '5f3', '6d1'],
'Np': ['Rn', '7s2', '5f4', '6d1'],
'Cm': ['Rn', '7s2', '5f7', '6d1'],
'Lr': ['Rn', '7s2', '5f14', '7p1']}
class Element:
def __init__(self, value=None):
if value in atomic_symbols:
self.symbol = value
elif value.capitalize() in atomic_symbols:
self.symbol = value.capitalize()
else:
raise ValueError('Symbol %s does not appear on the periodic table' % value)
@property
def name(self):
return atomic_name(self.symbol)
@property
def atomic_number(self):
return atomic_number(self.symbol)
@property
def group(self):
return group(self.symbol)
@property
def period(self):
return period(self.symbol)
@property
def block(self):
return block(self.symbol)
@property
def valence(self):
return valence(self.symbol)
@property
def valence_nominal(self):
return valence_nominal(self.symbol)
@property
def mass(self):
return mass(self.symbol)
@property
def covalent_radius(self):
return covalent_radius(self.symbol)
@property
def electronegativity(self):
return electronegativity(self.symbol)
@property
def crystal_structure(self):
return crystal_structure(self.symbol)
@property
def phase(self):
return phase(self.symbol)
@property
def boiling_point(self):
return boiling_point(self.symbol)
@property
def melting_point(self):
return melting_point(self.symbol)
@property
def oxidation_states(self):
return oxidation_state(self.symbol)
@property
def oxidation_states_common(self):
return oxidation_state(self.symbol, common=True)
def __str__(self):
ret = """
Symbol: %s
Name : %s
Z : %d
Group : %d
Period: %d
Block : %s
Valence : %f
Valence (Nominal): %f
Mass : %f
Covalent Radius : %f
Electronegativity : %f
Crystal Structure : %s
Phase : %s
Boiling Point : %f
Melting Point : %f
""" % (self.symbol, self.name, self.atomic_number, self.group, self.period, self.block,
self.valence, self.valence_nominal,
self.mass, self.covalent_radius, self.electronegativity, self.crystal_structure, self.phase,
self.boiling_point, self.melting_point)
return ret
def previous_inert(self):
inerts = ['He', 'Ne', 'Ar', 'Kr', 'Xe', 'Rn', 'Og']
if self.period == 1:
return None
else:
# In case the element is already a noble gas the previous one look one period above in Periodic Table
return inerts[self.period - 2]
@property
def madelung_filling(self):
order = ['1s', '2s', '2p', '3s', '3p', '4s', '3d', '4p', '5s', '4d',
'5p', '6s', '4f', '5d', '6p', '7s', '5f', '6d', '7p', '8s',
'5g', '6f', '7d', '8p', '9s']
# We start with the total number of electron and get those associated to orbitals following the order
capacities = {}
# l quantum number
for lqn in range(4):
label = Element.orbital_label_from_number(lqn)
nele = Element.max_electrons_subshell(label)
capacities[label] = nele
max_electrons = {}
for ishell in order:
label = ishell[-1]
maxele = Element.max_electrons_subshell(label)
max_electrons[ishell] = maxele
ret = []
if self.previous_inert() is not None:
inert = self.__class__(self.previous_inert())
ret.append(inert.symbol)
inert_remain = inert.atomic_number
# Consume the shells up to the previous inert atom
numele = self.atomic_number - inert.atomic_number
else:
numele = self.atomic_number
inert_remain = 0
for i in order:
if inert_remain >= max_electrons[i]:
inert_remain -= max_electrons[i]
elif inert_remain == 0:
if numele >= max_electrons[i]:
numele -= max_electrons[i]
ret.append(i + str(max_electrons[i]))
elif numele == 0:
break
elif numele < max_electrons[i]:
ret.append(i + str(numele))
break
return ret
@staticmethod
def azimuthal_quantum_number(label):
aqn = {'s': 0, 'p': 1, 'd': 2, 'f': 3, 'g': 4}
if label not in ['s', 'p', 'd', 'f', 'g']:
raise ValueError('Not such label for an orbital: %s' % label)
return aqn[label]
# lqn = angular momemtum
@staticmethod
def orbital_label_from_number(lqn):
orbitals = ['s', 'p', 'd', 'f', 'g']
if lqn not in range(4):
raise ValueError('Not such azimuthal quantum number: %s' % lqn)
return orbitals[lqn]
@staticmethod
def max_electrons_subshell(subshell):
if subshell in ['s', 'p', 'd', 'f', 'g']:
ll = Element.azimuthal_quantum_number(subshell)
elif subshell in [0, 1, 2, 3, 4]:
ll = subshell
else:
raise ValueError('Not a valid subshell: %s' % subshell)
return 2 * (2 * ll + 1)
@property
def electronic_configuration(self):
"""
Return the known electronic configuration including exceptions to Madelung's rule
Based on:
https://en.wikipedia.org/wiki/Electron_configuration#Atoms:_Aufbau_principle_and_Madelung_rule
:return:
"""
if self.symbol in madelung_exceptions:
return madelung_exceptions[self.symbol]
else:
return self.madelung_filling
@property
def is_madelung_exception(self):
return self.symbol in madelung_exceptions
| mit | -169,579,305,929,761,860 | 29.310044 | 113 | 0.511742 | false |
medunigraz/outpost | src/outpost/django/video/migrations/0001_initial.py | 1 | 5601 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-18 13:23
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
from ...base.utils import Uuid4Upload
class Migration(migrations.Migration):
initial = True
dependencies = [
('geo', '0015_auto_20170809_0948'),
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Recorder',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=128)),
('hostname', models.CharField(max_length=128)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Server',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('hostname', models.CharField(blank=True, max_length=128)),
('port', models.PositiveIntegerField(default=2022)),
('key', models.BinaryField(default=b'')),
('active', models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name='Epiphan',
fields=[
('recorder_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='video.Recorder')),
('username', models.CharField(max_length=128)),
('password', models.CharField(max_length=128)),
('key', models.BinaryField(null=True)),
('active', models.BooleanField(default=True)),
],
options={
'abstract': False,
},
bases=('video.recorder',),
),
migrations.AlterUniqueTogether(
name='server',
unique_together=set([('hostname', 'port')]),
),
migrations.AddField(
model_name='recorder',
name='polymorphic_ctype',
field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_video.recorder_set+', to='contenttypes.ContentType'),
),
migrations.AddField(
model_name='epiphan',
name='server',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='video.Server'),
),
migrations.CreateModel(
name='Recording',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('data', models.FileField(upload_to=Uuid4Upload)),
('recorder', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='video.Recorder')),
('info', django.contrib.postgres.fields.jsonb.JSONField(default={})),
],
options={
'abstract': False,
'ordering': ('-modified', '-created'),
'get_latest_by': 'modified',
},
),
migrations.CreateModel(
name='Export',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
options={
'abstract': False,
},
),
migrations.RemoveField(
model_name='epiphan',
name='active',
),
migrations.AddField(
model_name='recorder',
name='active',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='recorder',
name='room',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='geo.Room'),
),
migrations.AlterField(
model_name='epiphan',
name='key',
field=models.BinaryField(default=b''),
preserve_default=False,
),
migrations.CreateModel(
name='SideBySideExport',
fields=[
('export_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='video.Export')),
('data', models.FileField(upload_to=Uuid4Upload)),
],
options={
'abstract': False,
},
bases=('video.export',),
),
migrations.AddField(
model_name='export',
name='polymorphic_ctype',
field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_video.export_set+', to='contenttypes.ContentType'),
),
migrations.AddField(
model_name='export',
name='recording',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='video.Recording'),
),
]
| bsd-2-clause | -8,162,181,520,460,886,000 | 39.883212 | 193 | 0.555972 | false |
ryansb/mediapublic | server/mediapublic/mediapublic/models.py | 1 | 17235 | from sqlalchemy import (
Column,
Index,
ForeignKey,
Integer,
Text,
UnicodeText,
DateTime,
)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
relationship,
backref,
)
import transaction
from zope.sqlalchemy import ZopeTransactionExtension
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension(), expire_on_commit=False))
Base = declarative_base()
class CreationMixin():
@classmethod
def add(cls, **kwargs):
with transaction.manager:
thing = cls(**kwargs)
DBSession.add(thing)
transaction.commit()
return thing
@classmethod
def get_all(cls):
with transaction.manager:
things = DBSession.query(
cls,
).all()
            return things
@classmethod
def get_by_id(cls, id):
with transaction.manager:
thing = DBSession.query(
cls,
).filter(
cls.id == id,
).first()
return thing
@classmethod
def delete_by_id(cls, id):
with transaction.manager:
thing = cls.get_by_id(id)
            if thing is not None:
DBSession.delete(thing)
transaction.commit()
return thing
@classmethod
def update_by_id(cls, id, **kwargs):
        with transaction.manager:
            keys = set(cls.__dict__)
            thing = cls.get_by_id(id)
            if thing is not None:
for k in kwargs:
if k in keys:
setattr(thing, k, kwargs[k])
DBSession.add(thing)
transaction.commit()
return thing
@classmethod
def reqkeys(cls):
keys = []
for key in cls.__table__.columns:
if '__required__' in type(key).__dict__:
keys.append(str(key).split('.')[1])
return keys
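
# Illustrative usage of CreationMixin (not executed at import time; the
# values below are examples only):
#
#     org = Organizations.add(short_name=u'WXYZ', long_name=u'WXYZ Radio')
#     org = Organizations.update_by_id(org.id, phone=u'555-0100')
#     orgs = Organizations.get_all()
#     Organizations.delete_by_id(org.id)
#
# Each helper runs inside its own ``transaction.manager`` block against
# DBSession.
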
class ReqColumn(Column):
__required__ = True
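
# ReqColumn marks a column as required for creation; CreationMixin.reqkeys()
# checks for the ``__required__`` attribute to report which fields a caller
# must supply.
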
class UserTypes(Base, CreationMixin):
__tablename__ = 'user_types'
id = Column(Integer, primary_key=True)
name = ReqColumn(UnicodeText)
description = ReqColumn(UnicodeText)
value = ReqColumn(Integer)
creation_datetime = Column(DateTime)
def to_dict(self):
resp = dict(
id = self.id,
name = self.name,
description = self.description,
value = self.value,
)
return resp
class Users(Base, CreationMixin):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
unique = Column(Text)
first = ReqColumn(UnicodeText)
last = ReqColumn(UnicodeText)
email = ReqColumn(UnicodeText)
twitter = ReqColumn(UnicodeText)
    creation_datetime = Column(DateTime)
    last_login_datetime = Column(DateTime)
user_type_id = ReqColumn(ForeignKey('user_types.id'))
organization_id = Column(ForeignKey('organizations.id'), nullable=True)
def to_dict(self):
resp = dict(
id = self.id,
first = self.first,
last = self.last,
email = self.email,
            user_type_id = self.user_type_id,
organization_id = self.organization_id,
)
return resp
class Comments(Base, CreationMixin):
__tablename__ = 'comments'
id = Column(Integer, primary_key=True)
subject = ReqColumn(UnicodeText)
contents = ReqColumn(UnicodeText)
creation_datetime = Column(DateTime)
parent_comment_id = ReqColumn(Integer, ForeignKey('comments.id'))
author_id = ReqColumn(Integer, ForeignKey('users.id'))
organization_id = Column(ForeignKey('organizations.id'), nullable=True)
people_id = Column(ForeignKey('people.id'), nullable=True)
recording_id = Column(ForeignKey('recordings.id'), nullable=True)
howto_id = Column(ForeignKey('howtos.id'), nullable=True)
blog_id = Column(ForeignKey('blogs.id'), nullable=True)
def to_dict(self):
resp = dict(
id = self.id,
subject = self.subject,
contents = self.contents,
creation_datetime = str(self.creation_datetime),
parent_comment_id = self.parent_comment_id,
author_id = self.author_id,
)
return resp
@classmethod
def get_by_organization_id(cls, id):
with transaction.manager:
comments = DBSession.query(
Comments,
).filter(
Comments.organization_id == id,
).all()
return comments
@classmethod
def get_by_people_id(cls, id):
with transaction.manager:
comments = DBSession.query(
Comments,
).filter(
Comments.people_id == id,
).all()
return comments
@classmethod
def get_by_recording_id(cls, id):
with transaction.manager:
comments = DBSession.query(
Comments,
).filter(
Comments.recording_id == id,
).all()
return comments
@classmethod
def get_by_howto_id(cls, id):
with transaction.manager:
comments = DBSession.query(
Comments,
).filter(
Comments.howto_id == id,
).all()
return comments
@classmethod
def get_by_blog_id(cls, id):
with transaction.manager:
comments = DBSession.query(
Comments,
).filter(
Comments.blog_id == id,
).all()
return comments
class Organizations(Base, CreationMixin):
__tablename__ = 'organizations'
id = Column(Integer, primary_key=True)
short_name = ReqColumn(UnicodeText)
long_name = ReqColumn(UnicodeText)
short_description = ReqColumn(UnicodeText)
long_description = ReqColumn(UnicodeText)
address_0 = ReqColumn(UnicodeText)
address_1 = ReqColumn(UnicodeText)
city = ReqColumn(UnicodeText)
state = ReqColumn(UnicodeText)
zipcode = ReqColumn(UnicodeText)
phone = ReqColumn(UnicodeText)
fax = ReqColumn(UnicodeText)
primary_website = ReqColumn(UnicodeText)
secondary_website = ReqColumn(UnicodeText)
creation_datetime = Column(DateTime)
def to_dict(self):
resp = dict(
id = self.id,
short_name = self.short_name,
long_name = self.long_name,
short_description = self.short_description,
long_description = self.long_description,
address_0 = self.address_0,
address_1 = self.address_1,
city = self.city,
state = self.state,
zipcode = self.zipcode,
phone = self.phone,
fax = self.fax,
primary_website = self.primary_website,
secondary_website = self.secondary_website,
creation_datetime = str(self.creation_datetime),
)
return resp
class PlaylistAssignments(Base, CreationMixin):
__tablename__ = 'playlist_assignments'
id = Column(Integer, primary_key=True)
playlist_id = Column(Integer, ForeignKey('playlists.id'))
recording_id = ReqColumn(Integer, ForeignKey('recordings.id'))
creation_datetime = Column(DateTime)
@classmethod
def delete_by_playlist_id_and_recording_id(cls, pid, rid):
success = False
with transaction.manager:
playlist = DBSession.query(
PlaylistAssignments,
).filter(
PlaylistAssignments.playlist_id == pid,
                PlaylistAssignments.recording_id == rid,
            ).first()
            if playlist is not None:
                DBSession.delete(playlist)
transaction.commit()
success = True
return success
def to_dict(self):
resp = dict(
id = self.id,
playlist_id = self.playlist_id,
recording_id = self.recording_id,
)
return resp
class Playlists(Base, CreationMixin):
__tablename__ = 'playlists'
id = Column(Integer, primary_key=True)
author_id = Column(Integer, ForeignKey('people.id'))
title = ReqColumn(UnicodeText)
description = ReqColumn(UnicodeText)
creation_datetime = Column(DateTime)
recordings = relationship(
"Recordings",
secondary=PlaylistAssignments.__table__,
backref="playlists",
)
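    # Many-to-many link through the playlist_assignments table:
    # ``playlist.recordings`` yields the assigned Recordings rows, and each
    # Recording gains a ``playlists`` backref.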
@classmethod
def get_by_owner_id(cls, id):
with transaction.manager:
playlists = DBSession.query(
Playlists,
).filter(
Playlists.author_id == id,
).all()
return playlists
@classmethod
    def remove_recording_by_id(cls, pid, rid):
with transaction.manager:
assignment = DBSession.query(
PlaylistAssignments,
).filter(
PlaylistAssignments.playlist_id == pid,
PlaylistAssignments.recording_id == rid,
).first()
DBSession.delete(assignment)
@classmethod
def get_recordings_by_playlist_id(self, id):
with transaction.manager:
recordings = DBSession.query(
Recordings,
).join(
PlaylistAssignments,
).filter(
PlaylistAssignments.playlist_id == id,
).all()
if recordings is None:
recordings = []
if not isinstance(recordings, list):
recordings = [recordings]
return recordings
def to_dict(self):
resp = dict(
id = self.id,
author_id = self.author_id,
title = self.title,
            # Fetch the recordings assigned to this playlist through the
            # many-to-many playlist_assignments table (see
            # get_recordings_by_playlist_id above).
            recordings = [r.to_dict() for r in Playlists.get_recordings_by_playlist_id(self.id)],
)
return resp
class People(Base, CreationMixin):
    __tablename__ = 'people'
id = Column(Integer, primary_key=True)
first = ReqColumn(UnicodeText)
last = ReqColumn(UnicodeText)
address_0 = ReqColumn(UnicodeText)
address_1 = ReqColumn(UnicodeText)
city = ReqColumn(UnicodeText)
state = ReqColumn(UnicodeText)
zipcode = ReqColumn(UnicodeText)
phone = ReqColumn(UnicodeText)
fax = ReqColumn(UnicodeText)
primary_website = ReqColumn(UnicodeText)
secondary_website = ReqColumn(UnicodeText)
creation_datetime = Column(DateTime)
    # these should probably be brought out into a separate table as
    # many-to-one so we don't have to keep adding columns ...
twitter = ReqColumn(UnicodeText)
facebook = ReqColumn(UnicodeText)
instagram = ReqColumn(UnicodeText)
periscope = ReqColumn(UnicodeText)
user_id = ReqColumn(ForeignKey('users.id'), nullable=True)
organization_id = Column(ForeignKey('organizations.id'), nullable=True)
def to_dict(self):
resp = dict(
id = self.id,
            first = self.first,
            last = self.last,
address_0 = self.address_0,
address_1 = self.address_1,
city = self.city,
state = self.state,
zipcode = self.zipcode,
phone = self.phone,
fax = self.fax,
primary_website = self.primary_website,
secondary_website = self.secondary_website,
creation_datetime = str(self.creation_datetime),
# see note on definitions
twitter = self.twitter,
facebook = self.facebook,
instagram = self.instagram,
periscope = self.periscope,
user_id = self.user_id,
organization_id = self.organization_id,
)
return resp
@classmethod
def get_by_organization_id(cls, id):
with transaction.manager:
people = DBSession.query(
People,
).filter(
People.organization_id == id,
).all()
return people
class Recordings(Base, CreationMixin):
    __tablename__ = 'recordings'
id = Column(Integer, primary_key=True)
title = ReqColumn(UnicodeText)
url = ReqColumn(UnicodeText)
recorded_datetime = ReqColumn(DateTime)
creation_datetime = Column(DateTime)
organization_id = Column(Integer, ForeignKey('organizations.id'))
def to_dict(self):
resp = dict(
id = self.id,
title = self.title,
url = self.url,
recorded_datetime = str(self.recorded_datetime),
creation_datetime = str(self.creation_datetime),
organization_id = self.organization_id,
)
return resp
@classmethod
def get_by_organization_id(cls, id):
with transaction.manager:
recordings = DBSession.query(
Recordings,
# RecordingCategories,
).filter(
Recordings.organization_id == id,
#).join(
# RecordingCategoryAssignments,
).all()
return recordings
class RecordingCategories(Base, CreationMixin):
__tablename__ = 'recording_categories'
id = Column(Integer, primary_key=True)
name = ReqColumn(UnicodeText)
short_description = ReqColumn(UnicodeText)
long_description = ReqColumn(UnicodeText)
creation_datetime = Column(DateTime)
def to_dict(self):
resp = dict(
id = self.id,
name = self.name,
short_description = self.short_description,
long_description = self.long_description,
creation_datetime = str(self.creation_datetime),
)
return resp
class RecordingCategoryAssignments(Base, CreationMixin):
__tablename__ = 'recording_category_assignments'
id = Column(Integer, primary_key=True)
recording_category_id = ReqColumn(Integer, ForeignKey('recording_categories.id'))
recording_id = ReqColumn(Integer, ForeignKey('recordings.id'))
creation_datetime = Column(DateTime)
def to_dict(self):
resp = dict(
id = self.id,
recording_category_id = self.recording_category_id,
recording_id = self.recording_id,
creation_datetime = str(self.creation_datetime),
)
return resp
class Howtos(Base, CreationMixin):
__tablename__ = 'howtos'
id = Column(Integer, primary_key=True)
title = ReqColumn(UnicodeText)
contents = ReqColumn(UnicodeText)
creation_datetime = Column(DateTime)
edit_datetime = Column(DateTime)
tags = ReqColumn(UnicodeText)
def to_dict(self):
resp = dict(
id = self.id,
title = self.title,
contents = self.contents,
creation_datetime = str(self.creation_datetime),
            edit_datetime = str(self.edit_datetime),
tags = self.tags,
)
return resp
class HowtoCategories(Base, CreationMixin):
__tablename__ = 'howto_categories'
id = Column(Integer, primary_key=True)
name = ReqColumn(UnicodeText)
short_description = ReqColumn(UnicodeText)
long_description = ReqColumn(UnicodeText)
creation_datetime = Column(DateTime)
def to_dict(self):
resp = dict(
id = self.id,
name = self.name,
short_description = self.short_description,
long_description = self.long_description,
creation_datetime = str(self.creation_datetime),
)
return resp
class HowtoCategoryAssignments(Base, CreationMixin):
__tablename__ = 'howto_category_assignments'
id = Column(Integer, primary_key=True)
howto_category_id = ReqColumn(Integer, ForeignKey('howto_categories.id'))
howto_id = ReqColumn(Integer, ForeignKey('howtos.id'))
creation_datetime = Column(DateTime)
def to_dict(self):
resp = dict(
id = self.id,
howto_category_id = self.howto_category_id,
howto_id = self.howto_id,
creation_datetime = str(self.creation_datetime),
)
return resp
class Blogs(Base, CreationMixin):
__tablename__ = 'blogs'
id = Column(Integer, primary_key=True)
title = ReqColumn(UnicodeText)
contents = ReqColumn(UnicodeText)
creation_datetime = Column(DateTime)
edit_datetime = Column(DateTime)
tags = ReqColumn(UnicodeText)
author_id = Column(ForeignKey('users.id'))
def to_dict(self):
resp = dict(
id = self.id,
title = self.title,
contents = self.contents,
creation_datetime = str(self.creation_datetime),
            edit_datetime = str(self.edit_datetime),
tags = self.tags,
author_id = self.author_id,
)
return resp
| gpl-3.0 | -7,340,058,238,994,979,000 | 28.361158 | 102 | 0.583406 | false |
IQSS/geoconnect | gc_apps/worldmap_connect/jointarget_formatter.py | 1 | 11627 | """
Helper class to format JSON in the JoinTargetInformation model's "target_info" field
- In terms of UI, this data is used for:
1. Creating a list of Geospatial Identifiers
- e.g. Census Tract, Zip code
2. Creating a list of Names/Years based on the chosen Geospatial Identifiers
        - e.g. If Census Tract is chosen, list might be:
"US Census 2010", "US Census 2000", "US Census 1990", etc.
3. Based on the chosen JoinTarget, prep data for WorldMap datatables API
- The Upload and Join API
        - Params: name of target layer, name of target layer column
"""
import json
from collections import OrderedDict
from gc_apps.worldmap_connect.single_join_target_info import SingleJoinTargetInfo
class JoinTargetFormatter(object):
"""
Helper class to format JSON in the JoinTargetInformation model's "target_info" field
Sample target info data:
{
"data": [
{
"layer": "geonode:massachusetts_census_nhu",
"geocode_type": "US Census Tract",
"geocode_type_slug": "us-census-tract",
"attribute": {
"attribute": "TRACTCE",
"type": "xsd:string"
},
"year": 2010,
"type": null,
"id": 3
}
],
"success": true
}
"""
def __init__(self, target_info):
"""Initialize using target_info JSON retrieved from WorldMap"""
self.err_found = False
self.err_message = None
self.target_info = target_info
self.initial_check()
def is_valid(self):
        return not self.err_found
def add_error(self, err_msg):
"""
        Error detected, store a message in the class
"""
self.err_found = True
self.err_message = err_msg
def initial_check(self):
"""
Make sure that 'target_info' has the expected data
"""
if self.target_info is None:
self.add_error("target_info should not be None")
return False
# Is this a dict? (e.g. not a list or blank, etc)
#print 'target_info', self.target_info
if not hasattr(self.target_info, 'has_key'):
# OK, Maybe it's a JSON string that can be converted to a dict
print 'type self.target_info', type(self.target_info)
try:
self.target_info = json.loads(self.target_info)
except ValueError:
self.add_error("target_info should always be a JSON string or python dict")
return False
# Is there a 'success' attribute?
        if 'success' not in self.target_info:
self.add_error("target_info does not have a 'success' attribute")
return False
# Is success True?
        if self.target_info['success'] is not True:
self.add_error("target_info does not have a 'success' marked as True")
return False
# Is there a data attribute?
        if 'data' not in self.target_info:
self.add_error("target_info does not have a 'data' attribute")
return False
# Does the data attribute contain any elements?
if len(self.target_info['data']) == 0:
self.add_error("There are no JoinTargets available.")
return False
return True
@staticmethod
def get_formatted_name(geocode_type, year=None, title=None):
if geocode_type is None:
return None
if year and title:
return "{0} ({1}) {2}".format(geocode_type, year, title)
if year:
return "{0} ({1})".format(geocode_type, year)
if title:
return "{0} - {1}".format(geocode_type, title)
return "{0}".format(geocode_type)
def get_single_join_target_info(self, target_layer_id):
"""
Given a target_layer_id, send back:
- target layer name
- target layer column
- zero pad length
- zero_pad_length is either an integer or None
return (target layer name, target layer column, zero_pad_length)
"""
if target_layer_id is None:
return (None, None, None)
for info in self.target_info['data']:
if 'id' in info and target_layer_id == info['id']:
return SingleJoinTargetInfo(info)
#return SingleJoinTargetInfo(
# info['layer'],
# info['attribute']['attribute'],
# info['attribute']['type'],
# self.get_formatting_zero_pad_length(target_layer_id)
# )
return None
def get_geocode_types(self):
"""
        Create a list of tuples for available Geospatial Identifiers
            - Tuple Format: (name, slug)
            - e.g. [("Census Tract", "census-tract"), ("Zip code", "zip-code")]
"""
if self.err_found:
return None
gtypes = []
type_dict = {}
for info in self.target_info['data']:
# Have we already added this type to the list?
            if info['geocode_type_slug'] not in type_dict:
# Nope, add it
gtypes.append((info['geocode_type'], info['geocode_type_slug']))
type_dict.update({ info['geocode_type_slug']: 1 })
return gtypes
def get_available_layers_list_by_type(self, chosen_geocode_type=None, for_json=False):
"""
Used for populating form dropdown with list of layers
Create a list of items, each item has the following attributes:
[
{
"join_target_id" : 8
"name" : "2014 - Election Precincts, Boston",
"expected_format" : "Boston Election Precinct ID (integer)"
}
]
value - join target id
text - (year) layer title
"""
if self.err_found:
return None
join_targets = []
for info in self.target_info['data']:
gtype_slug = info['geocode_type_slug']
if chosen_geocode_type == gtype_slug or\
chosen_geocode_type is None:
if 'name' not in info:
continue
join_target_id = info['id']
info_line = "{0} - {1}".format(info['year'], info['name'])
description = info.get('expected_format', {}).get('description', '')
if for_json:
info_dict = OrderedDict()
info_dict['join_target_id'] = info['id']
info_dict['name'] = info_line
info_dict['description'] = description
join_targets.append(info_dict)
else:
join_targets.append((join_target_id, info_line))
return join_targets
def get_format_info_for_target_layer(self, target_layer_id):
if target_layer_id is None:
return None
for info in self.target_info['data']:
if 'id' in info and target_layer_id == info['id']:
if 'expected_format' in info:
return info['expected_format']
return None
def get_formatting_zero_pad_length(self, target_layer_id):
"""
Used to format join columns before sending them over to WorldMap.
If this Target layer expects zero padding, return the
length of the expected field.
If no zero padding needed, return None
"""
expected_format = self.get_format_info_for_target_layer(target_layer_id)
if expected_format is None:
return None
if expected_format.get('is_zero_padded') is True\
and expected_format.get('expected_zero_padded_length', -1) > 0:
return expected_format['expected_zero_padded_length']
return None
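        # A hypothetical caller sketch (variable names are illustrative only):
        #   pad_len = formatter.get_formatting_zero_pad_length(target_layer_id)
        #   if pad_len is not None:
        #       join_value = str(join_value).zfill(pad_len)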
def get_zero_pad_note(self, info):
"""
If the format type JSON includes zero padding info,
show it
Example JSON:
"expected_format": {
"expected_zero_padded_length": 6,
"is_zero_padded": true,
"description": "Remove non integers. Check for empty string. Pad with zeros until 6 digits.",
"name": "Census Tract (6 digits, no decimal)"
},
"""
if info is None or not hasattr(info, 'get'):
return None
        if 'expected_format' not in info:
return None
expected_format = info['expected_format']
if expected_format.get('is_zero_padded') is True\
and expected_format.get('expected_zero_padded_length', -1) > 0:
return 'Zero padded to %s digits' %\
expected_format['expected_zero_padded_length']
return None
def get_format_name(self, info):
"""
If the format type JSON includes zero padding info,
show it
Example JSON:
"expected_format": {
"expected_zero_padded_length": 6,
"is_zero_padded": true,
"description": "Remove non integers. Check for empty string. Pad with zeros until 6 digits.",
"name": "Census Tract (6 digits, no decimal)"
},
"""
if info is None or not hasattr(info, 'get'):
return None
        if 'expected_format' not in info:
return None
expected_format = info['expected_format']
return expected_format.get('name', None)
def get_join_targets_by_type(self, chosen_geocode_type=None):
"""
Creating a list of tuples of Names/Years based on the chosen Geospatial Identifier
- Tuple Format:
[(join target name, join_target_id),]
join_target_name = name (year)
join_target_id = JoinTarget id on the WorldMap system
- Used in the Geoconnect form
            - e.g. If Census Tract is chosen, list might be:
[("US Census 2010", 7), ("US Census 2000", 3), etc.]
Note: if chosen_geocode_type is None, all identifiers will be retrieved
"""
join_targets = []
for info in self.target_info['data']:
gtype_slug = info['geocode_type_slug']
if chosen_geocode_type == gtype_slug or\
chosen_geocode_type is None:
info_line = JoinTargetFormatter.get_formatted_name(
info['geocode_type'])
#info['year'])
gtype_tuple = (info['geocode_type_slug'], info_line)
                if gtype_tuple not in join_targets:
join_targets.append(gtype_tuple)
# Sort list by geocode_type name
join_targets.sort(key=lambda tup: tup[1]) # sorts in place
return join_targets
"""
python manage.py shell
from gc_apps.worldmap_connect.utils import get_latest_jointarget_information
from gc_apps.worldmap_connect.jointarget_formatter import JoinTargetFormatter
jt = get_latest_jointarget_information()
formatter = JoinTargetFormatter(jt.target_info)
gtypes = formatter.get_geocode_types()
print gtypes
print '-- targets for each type --'
cnt = 0
for g in gtypes:
cnt +=1
print '({0}) {1}'.format(cnt, formatter.get_join_targets_by_type(g))
cnt = 0
print '\n-- all targets --'
for item in formatter.get_join_targets_by_type(g):
cnt +=1
print '({0}) {1}'.format(cnt, item)
"""
| apache-2.0 | 7,038,965,172,254,172,000 | 32.604046 | 105 | 0.551991 | false |
retresco/Spyder | test/test_workerprocess_processing.py | 1 | 1944 | #
# Copyright (c) 2011 Daniel Truemper [email protected]
#
# test_workerprocess_processing.py 18-Jan-2011
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from spyder.core.constants import CURI_OPTIONAL_TRUE
from spyder.core.constants import CURI_EXTRACTION_FINISHED
from spyder.core.settings import Settings
from spyder.processor import limiter
from spyder.thrift.gen.ttypes import CrawlUri
from spyder import workerprocess
class WorkerProcessingUnittest(unittest.TestCase):
def test_that_creating_processing_function_works(self):
settings = Settings()
processors = settings.SPYDER_EXTRACTOR_PIPELINE
processors.extend(settings.SPYDER_SCOPER_PIPELINE)
processors.append('test_workerprocess')
self.assertRaises(ValueError, workerprocess.create_processing_function,
settings, processors)
processors.pop()
processors.append('test_workerprocess_unspec')
self.assertRaises(ValueError, workerprocess.create_processing_function,
settings, processors)
processors.pop()
processing = workerprocess.create_processing_function(settings,
processors)
curi = CrawlUri(optional_vars=dict())
curi.effective_url = "http://127.0.0.1/robots.txt"
curi2 = processing(curi)
self.assertEqual(CURI_OPTIONAL_TRUE,
curi2.optional_vars[CURI_EXTRACTION_FINISHED])
| apache-2.0 | 9,184,573,123,651,894,000 | 35.679245 | 79 | 0.729424 | false |
SergeySatskiy/codimension | codimension/debugger/excpt.py | 1 | 2978 | # -*- coding: utf-8 -*-
#
# codimension - graphics python two-way code editor and analyzer
# Copyright (C) 2010-2012 Sergey Satskiy <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Debugger exceptions viewer"""
from ui.qt import Qt, pyqtSignal, QVBoxLayout, QWidget, QSplitter
from .clientexcptviewer import ClientExceptionsViewer
from .ignoredexcptviewer import IgnoredExceptionsViewer
class DebuggerExceptions(QWidget):
"""Implements the debugger context viewer"""
sigClientExceptionsCleared = pyqtSignal()
def __init__(self, parent=None):
QWidget.__init__(self, parent)
self.__createLayout()
self.clientExcptViewer.sigClientExceptionsCleared.connect(
self.__onClientExceptionsCleared)
def __createLayout(self):
"""Creates the widget layout"""
verticalLayout = QVBoxLayout(self)
verticalLayout.setContentsMargins(1, 1, 1, 1)
self.splitter = QSplitter(Qt.Vertical)
self.ignoredExcptViewer = IgnoredExceptionsViewer(self.splitter)
self.clientExcptViewer = ClientExceptionsViewer(
self.splitter, self.ignoredExcptViewer)
self.splitter.addWidget(self.clientExcptViewer)
self.splitter.addWidget(self.ignoredExcptViewer)
self.splitter.setCollapsible(0, False)
self.splitter.setCollapsible(1, False)
verticalLayout.addWidget(self.splitter)
def clear(self):
"""Clears everything"""
self.clientExcptViewer.clear()
def addException(self, exceptionType, exceptionMessage, stackTrace):
"""Adds the exception to the view"""
self.clientExcptViewer.addException(exceptionType, exceptionMessage,
stackTrace)
def isIgnored(self, exceptionType):
"""Returns True if this exception type should be ignored"""
return self.ignoredExcptViewer.isIgnored(exceptionType)
def setFocus(self):
"""Sets the focus to the client exception window"""
self.clientExcptViewer.setFocus()
def getTotalClientExceptionCount(self):
"""Provides the total number of the client exceptions"""
return self.clientExcptViewer.getTotalCount()
def __onClientExceptionsCleared(self):
"""Triggered when the user cleared exceptions"""
self.sigClientExceptionsCleared.emit()
| gpl-3.0 | -3,956,236,440,457,816,000 | 35.317073 | 76 | 0.708193 | false |
BartGo/bottle-cuturl | app/settings.py | 1 | 1381 | # -*- coding: utf-8 -*-
import os
APP_NAME = 'bottle-cuturl'
# disabled but also removed crashreporter==1.11 from setup.py, somehow does not like setuptools==21.0.0
CRASH_REPORT = 0
# Paths
PROJECT_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)))
TEMPLATE_PATH = os.path.join(PROJECT_PATH, 'views')
STATIC_PATH = os.path.join(PROJECT_PATH, 'assets')
# SQL Alchemy
# *** PostgreSQL
SQL_SQLITE_ONLY = 1
SQL_PG_USE_LOCAL = 0
SQL_PG_DBENGINE_LOCAL = "postgresql+psycopg2://cuturl:cuturl@localhost:5432/bottle-cuturl"
if not SQL_SQLITE_ONLY:
try:
import psycopg2
# # for windows, add to PATH: C:\Program Files\PostgreSQL\9.4\bin
# DATABASE_URL is an environment variable used by Heroku
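        # e.g. a typical value (illustrative only, not taken from this repo):
        #   DATABASE_URL=postgresql://user:password@host:5432/dbname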
if SQL_PG_USE_LOCAL == 1:
SQA_DBENGINE = SQL_PG_DBENGINE_LOCAL
else:
SQA_DBENGINE = os.environ["DATABASE_URL"]
except (OSError, ImportError, KeyError):
# *** SQLite
SQL_SQLITE_ONLY = 1
if SQL_SQLITE_ONLY:
SQA_DBENGINE = 'sqlite:///data//sqlite.db'
SQA_ECHO = True
SQA_KEYWORD = 'db'
SQA_CREATE = True
SQA_COMMIT = True
SQA_USE_KWARGS = False
# Crashreporter
if CRASH_REPORT == 1:
from crashreporter import CrashReporter
cr = CrashReporter(report_dir='crashreporter', check_interval=10, config='.crashreporter.cfg')
cr.application_name = APP_NAME
cr.application_version = '0.0.22' # bumpversion updates that
| mit | 5,902,666,382,758,505,000 | 26.078431 | 103 | 0.694424 | false |
nagyistoce/geokey | geokey/contributions/tests/observations/test_views.py | 1 | 38917 | import json
from django.test import TestCase
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.contrib.auth.models import AnonymousUser
from nose.tools import raises
from rest_framework.test import APIRequestFactory, force_authenticate
from geokey.projects.tests.model_factories import UserF, ProjectF
from geokey.projects.models import Project
from geokey.categories.tests.model_factories import (
CategoryFactory, TextFieldFactory, NumericFieldFactory
)
from geokey.users.tests.model_factories import UserGroupF
from geokey.subsets.tests.model_factories import SubsetFactory
from ..model_factories import (
ObservationFactory, CommentFactory, LocationFactory
)
from geokey.contributions.views.observations import (
SingleAllContributionAPIView, SingleContributionAPIView,
ProjectObservations
)
from geokey.contributions.models import Observation
class SingleContributionAPIViewTest(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.admin = UserF.create()
self.creator = UserF.create()
self.moderator = UserF.create()
self.viewer = UserF.create()
self.project = ProjectF(
add_admins=[self.admin],
add_contributors=[self.creator],
add_viewer=[self.viewer]
)
self.moderators = UserGroupF(add_users=[self.moderator], **{
'project': self.project,
'can_moderate': True
})
self.observation = ObservationFactory.create(**{
'project': self.project,
'creator': self.creator,
'status': 'active'
})
def test_approve_pending_with_admin(self):
self.observation.status = 'pending'
self.observation.save()
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "active"}}
request.user = self.admin
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
self.assertEqual(
Observation.objects.get(pk=self.observation.id).status,
'active'
)
def test_approve_pending_with_admin_empty_properties(self):
self.observation.properties = None
self.observation.status = 'pending'
self.observation.save()
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "active"}}
request.user = self.admin
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
self.assertEqual(
Observation.objects.get(pk=self.observation.id).status,
'active'
)
def test_suspend_pending_with_admin(self):
self.observation.status = 'active'
self.observation.save()
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "pending"}}
request.user = self.admin
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
self.assertEqual(
Observation.objects.get(pk=self.observation.id).status,
'pending'
)
def test_approve_pending_with_moderator(self):
self.observation.status = 'pending'
self.observation.save()
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "active"}}
request.user = self.moderator
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
self.assertEqual(
Observation.objects.get(pk=self.observation.id).status,
'active'
)
@raises(PermissionDenied)
def test_approve_pending_with_contributor(self):
self.observation.status = 'pending'
self.observation.save()
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "active"}}
request.user = self.creator
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
self.assertEqual(
Observation.objects.get(pk=self.observation.id).status,
'pending'
)
def test_approve_pending_with_contributor_who_is_moderator(self):
self.moderators.users.add(self.creator)
self.observation.status = 'pending'
self.observation.save()
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "active"}}
request.user = self.creator
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
self.assertEqual(
Observation.objects.get(pk=self.observation.id).status,
'active'
)
def test_flag_with_admin(self):
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "pending"}}
request.user = self.admin
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
self.assertEqual(
Observation.objects.get(pk=self.observation.id).status,
'pending'
)
def test_flag_with_moderator(self):
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "pending"}}
request.user = self.moderator
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
ref = Observation.objects.get(pk=self.observation.id)
self.assertEqual(ref.status, 'pending')
def test_flag_with_moderator_and_edit(self):
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {
'properties': {
'key': 'updated'
},
'meta': {
'status': 'pending',
}
}
request.user = self.moderator
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
ref = Observation.objects.get(pk=self.observation.id)
self.assertEqual(ref.status, 'pending')
self.assertEqual(ref.properties.get('key'), 'updated')
def test_flag_with_contributor(self):
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "pending"}}
request.user = self.creator
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
self.assertEqual(
Observation.objects.get(pk=self.observation.id).status,
'pending'
)
@raises(PermissionDenied)
def test_flag_with_anonymous(self):
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "pending"}}
request.user = AnonymousUser()
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
self.assertEqual(
Observation.objects.get(pk=self.observation.id).status,
'active'
)
@raises(PermissionDenied)
def test_update_user(self):
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
        request.DATA = {'properties': {'text': 'blah'}}
request.user = self.viewer
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
def test_update_under_review(self):
CommentFactory.create(**{
'commentto': self.observation,
'review_status': 'open'
})
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': 'active'}}
request.user = self.admin
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
ref = Observation.objects.get(pk=self.observation.id)
self.assertEqual(ref.status, 'review')
@raises(PermissionDenied)
def test_commit_from_draft_admin(self):
self.observation.status = 'draft'
self.observation.save()
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "active"}}
request.user = self.admin
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
self.assertEqual(
Observation.objects.get(pk=self.observation.id).status,
'pending'
)
@raises(PermissionDenied)
def test_commit_from_draft_with_moderator(self):
self.observation.status = 'draft'
self.observation.save()
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "active"}}
request.user = self.moderator
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
self.assertEqual(
Observation.objects.get(pk=self.observation.id).status,
'pending'
)
    def test_commit_from_draft_with_contributor_who_is_moderator(self):
self.moderators.users.add(self.creator)
self.observation.status = 'draft'
self.observation.save()
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "active"}}
request.user = self.creator
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
self.assertEqual(
Observation.objects.get(pk=self.observation.id).status,
'active'
)
    def test_commit_from_draft_with_contributor(self):
self.observation.status = 'draft'
self.observation.save()
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {'meta': {'status': "active"}}
request.user = self.creator
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
self.assertEqual(
Observation.objects.get(pk=self.observation.id).status,
'pending'
)
def test_commit_from_draft_with_contributor_with_data(self):
self.observation.status = 'draft'
self.observation.save()
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = self.factory.patch(url)
request.DATA = {
'properties': {
'key': 'updated'
},
'meta': {
'status': "active",
}
}
request.user = self.creator
view = SingleContributionAPIView()
view.update_and_respond(request, self.observation)
ref = Observation.objects.get(pk=self.observation.id)
self.assertEqual(ref.status, 'pending')
self.assertEqual(ref.properties.get('key'), 'updated')
class SingleAllContributionAPIViewTest(TestCase):
def setUp(self):
self.admin = UserF.create()
self.creator = UserF.create()
self.project = ProjectF(
add_admins=[self.admin],
add_contributors=[self.creator]
)
self.observation = ObservationFactory.create(**{
'project': self.project,
'creator': self.creator,
'status': 'active'
})
def test_get_object_with_creator(self):
view = SingleAllContributionAPIView()
view.get_object(
self.creator, self.observation.project.id, self.observation.id)
def test_get_object_with_admin(self):
view = SingleAllContributionAPIView()
observation = view.get_object(
self.admin, self.observation.project.id, self.observation.id)
self.assertEqual(observation, self.observation)
@raises(Project.DoesNotExist)
def test_get_object_with_some_dude(self):
some_dude = UserF.create()
view = SingleAllContributionAPIView()
view.get_object(
some_dude, self.observation.project.id, self.observation.id)
@raises(Observation.DoesNotExist)
def test_get_draft_object_with_admin(self):
self.observation.status = 'draft'
self.observation.save()
view = SingleAllContributionAPIView()
view.get_object(
self.admin, self.observation.project.id, self.observation.id)
def test_api_with_admin(self):
CommentFactory.create_batch(5, **{'commentto': self.observation})
factory = APIRequestFactory()
url = reverse('api:project_single_observation', kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
})
request = factory.get(url)
force_authenticate(request, user=self.admin)
theview = SingleAllContributionAPIView.as_view()
response = theview(
request,
project_id=self.project.id,
observation_id=self.observation.id).render()
self.assertEqual(response.status_code, 200)
class ProjectPublicApiTest(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.admin = UserF.create()
self.contributor = UserF.create()
self.non_member = UserF.create()
self.project = ProjectF(
add_admins=[self.admin],
add_contributors=[self.contributor]
)
self.category = CategoryFactory(**{
'status': 'active',
'project': self.project
})
TextFieldFactory.create(**{
'key': 'key_1',
'category': self.category,
'required': True,
'order': 1
})
NumericFieldFactory.create(**{
'key': 'key_2',
'category': self.category,
'minval': 0,
'maxval': 1000,
'order': 2
})
self.data = {
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [
-0.13404607772827148,
51.52439200896907
]
},
"properties": {
"key_1": "value 1",
"key_2": 12
},
"meta": {
"category": self.category.id,
},
"location": {
"name": "UCL",
"description": "UCL's main quad",
"private": True
}
}
def _post(self, data, user):
url = reverse(
'api:project_observations',
kwargs={
'project_id': self.project.id
}
)
request = self.factory.post(
url, json.dumps(data), content_type='application/json')
force_authenticate(request, user=user)
view = ProjectObservations.as_view()
return view(request, project_id=self.project.id).render()
def test_contribute_with_wrong_category(self):
self.data['meta']['category'] = 3864
response = self._post(self.data, self.admin)
self.assertEqual(response.status_code, 400)
def test_contribute_with_invalid(self):
data = {
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [
-0.13404607772827148,
51.52439200896907
]
},
"properties": {
"key_1": 12,
"key_2": "jsdbdjhsb"
},
"meta": {
"category": self.category.id,
},
"location": {
"name": "UCL",
"description": "UCL's main quad",
"private": True
}
}
response = self._post(data, self.admin)
self.assertEqual(response.status_code, 400)
def test_contribute_with_invalid_number(self):
data = {
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [
-0.13404607772827148,
51.52439200896907
]
},
"properties": {
"key_1": 12,
"key_2": 2000
},
"meta": {
"category": self.category.id,
},
"location": {
"name": "UCL",
"description": "UCL's main quad",
"private": True
}
}
response = self._post(data, self.admin)
self.assertEqual(response.status_code, 400)
def test_contribute_with_existing_location(self):
location = LocationFactory()
data = {
"type": "Feature",
"geometry": location.geometry.geojson,
"location": {
"id": location.id,
"name": location.name,
"description": location.description,
"private": location.private
},
"properties": {
"key_1": "value 1",
"key_2": 12
},
"meta": {
"category": self.category.id,
}
}
response = self._post(data, self.admin)
self.assertEqual(response.status_code, 201)
def test_contribute_with_private_for_project_location(self):
location = LocationFactory(**{
'private': True,
'private_for_project': self.project
})
data = {
"type": "Feature",
"geometry": location.geometry.geojson,
"location": {
"id": location.id,
"name": location.name,
"description": location.description,
"private": location.private
},
"properties": {
"key_1": "value 1",
"key_2": 12
},
"meta": {
"category": self.category.id,
}
}
response = self._post(data, self.admin)
self.assertEqual(response.status_code, 201)
def test_contribute_with_wrong_project_location(self):
project = ProjectF()
location = LocationFactory(**{
'private': True,
'private_for_project': project
})
data = {
"type": "Feature",
"geometry": location.geometry.geojson,
"location": {
"id": location.id,
"name": location.name,
"description": location.description,
"private": location.private
},
"properties": {
"key_1": "value 1",
"key_2": 12
},
"meta": {
"category": self.category.id,
}
}
response = self._post(data, self.admin)
self.assertEqual(response.status_code, 400)
def test_contribute_with_private_location(self):
location = LocationFactory(**{
'private': True
})
data = {
"type": "Feature",
"geometry": location.geometry.geojson,
"location": {
"id": location.id,
"name": location.name,
"description": location.description,
"private": location.private
},
"properties": {
"key_1": "value 1",
"key_2": 12
},
"meta": {
"category": self.category.id,
}
}
response = self._post(data, self.admin)
self.assertEqual(response.status_code, 400)
def test_contribute_valid_draft(self):
self.data = {
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [
-0.13404607772827148,
51.52439200896907
]
},
"location": {
"name": "UCL",
"description": "UCL's main quad",
"private": True
},
"properties": {
"key_1": "value 1",
"key_2": 12
},
"meta": {
"category": self.category.id,
"status": "draft"
}
}
response = self._post(self.data, self.admin)
self.assertEqual(response.status_code, 201)
self.assertIn('"status":"draft"', response.content)
def test_contribute_valid_draft_with_empty_required(self):
self.data = {
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [
-0.13404607772827148,
51.52439200896907
]
},
"properties": {
"key_1": None,
"key_2": 12
},
"meta": {
"category": self.category.id,
"status": "draft"
},
"location": {
"name": "UCL",
"description": "UCL's main quad",
"private": True
}
}
response = self._post(self.data, self.admin)
self.assertEqual(response.status_code, 201)
self.assertIn('"status":"draft"', response.content)
def test_contribute_invalid_draft(self):
self.data = {
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [
-0.13404607772827148,
51.52439200896907
]
},
"properties": {
"key_1": "value 1",
"key_2": 'Blah'
},
"meta": {
"category": self.category.id,
"status": "draft"
},
"location": {
"name": "UCL",
"description": "UCL's main quad",
"private": True
},
}
response = self._post(self.data, self.admin)
self.assertEqual(response.status_code, 400)
def test_contribute_to_public_everyone_with_Anonymous(self):
self.project.everyone_contributes = 'true'
self.project.isprivate = False
self.project.save()
response = self._post(self.data, AnonymousUser())
self.assertEqual(response.status_code, 201)
def test_contribute_to_public_with_admin(self):
self.project.isprivate = False
self.project.save()
response = self._post(self.data, self.admin)
self.assertEqual(response.status_code, 201)
self.assertIn('"status":"active"', response.content)
def test_contribute_to_public_with_contributor(self):
self.project.isprivate = False
self.project.save()
response = self._post(self.data, self.contributor)
self.assertEqual(response.status_code, 201)
self.assertIn('"status":"pending"', response.content)
def test_contribute_to_public_with_non_member(self):
self.project.isprivate = False
self.project.save()
response = self._post(self.data, self.non_member)
self.assertEqual(response.status_code, 403)
def test_contribute_to_public_with_anonymous(self):
self.project.isprivate = False
self.project.save()
response = self._post(self.data, AnonymousUser())
self.assertEqual(response.status_code, 403)
def test_contribute_to_private_with_admin(self):
response = self._post(self.data, self.admin)
self.assertEqual(response.status_code, 201)
self.assertEqual(len(self.project.observations.all()), 1)
def test_contribute_to_private_with_contributor(self):
response = self._post(self.data, self.contributor)
self.assertEqual(response.status_code, 201)
self.assertEqual(len(self.project.observations.all()), 1)
def test_contribute_to_private_with_non_member(self):
response = self._post(self.data, self.non_member)
self.assertEqual(response.status_code, 404)
self.assertEqual(len(self.project.observations.all()), 0)
def test_contribute_to_private_with_anonymous(self):
response = self._post(self.data, AnonymousUser())
self.assertEqual(response.status_code, 404)
self.assertEqual(len(self.project.observations.all()), 0)
def test_contribute_to_inactive_with_admin(self):
self.project.status = 'inactive'
self.project.save()
response = self._post(self.data, self.admin)
self.assertEqual(response.status_code, 403)
self.assertEqual(len(self.project.observations.all()), 0)
def test_contribute_to_inactive_with_contributor(self):
self.project.status = 'inactive'
self.project.save()
response = self._post(self.data, self.contributor)
self.assertEqual(response.status_code, 404)
self.assertEqual(len(self.project.observations.all()), 0)
def test_contribute_to_inactive_with_non_member(self):
self.project.status = 'inactive'
self.project.save()
response = self._post(self.data, self.non_member)
self.assertEqual(response.status_code, 404)
self.assertEqual(len(self.project.observations.all()), 0)
def test_contribute_to_inactive_with_Anonymous(self):
self.project.status = 'inactive'
self.project.save()
response = self._post(self.data, AnonymousUser())
self.assertEqual(response.status_code, 404)
self.assertEqual(len(self.project.observations.all()), 0)
def test_contribute_to_deleted_with_admin(self):
self.project.status = 'deleted'
self.project.save()
response = self._post(self.data, self.admin)
self.assertEqual(response.status_code, 404)
def test_contribute_to_deleted_with_contributor(self):
self.project.status = 'deleted'
self.project.save()
response = self._post(self.data, self.contributor)
self.assertEqual(response.status_code, 404)
def test_contribute_to_deleted_with_non_member(self):
self.project.status = 'deleted'
self.project.save()
response = self._post(self.data, self.non_member)
self.assertEqual(response.status_code, 404)
def test_contribute_to_deleted_with_anonymous(self):
self.project.status = 'deleted'
self.project.save()
response = self._post(self.data, AnonymousUser())
self.assertEqual(response.status_code, 404)
class GetSingleObservationInProject(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.admin = UserF.create()
self.contributor = UserF.create()
self.project = ProjectF(
add_admins=[self.admin],
add_contributors=[self.contributor]
)
self.observation = ObservationFactory(
**{'project': self.project, 'creator': self.contributor})
def _get(self, user):
url = reverse(
'api:project_single_observation',
kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
}
)
request = self.factory.get(url)
force_authenticate(request, user=user)
view = SingleAllContributionAPIView.as_view()
return view(
request, project_id=self.project.id,
observation_id=self.observation.id).render()
def test_get_with_admin(self):
response = self._get(self.admin)
self.assertEqual(response.status_code, 200)
def test_get_with_contributor(self):
response = self._get(self.contributor)
self.assertEqual(response.status_code, 200)
def test_get_with_non_member(self):
user = UserF.create()
response = self._get(user)
self.assertEqual(response.status_code, 404)
class UpdateObservationInProject(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.admin = UserF.create()
self.contributor = UserF.create()
self.non_member = UserF.create()
self.project = ProjectF(
add_admins=[self.admin],
add_contributors=[self.contributor]
)
self.category = CategoryFactory(**{
'status': 'active',
'project': self.project
})
TextFieldFactory.create(**{
'key': 'key_1',
'category': self.category,
'order': 0
})
NumericFieldFactory.create(**{
'key': 'key_2',
'category': self.category,
'order': 1
})
location = LocationFactory()
self.observation = ObservationFactory.create(**{
'properties': {
"key_1": "value 1",
"key_2": 12,
},
'category': self.category,
'project': self.project,
'location': location,
'creator': self.admin,
'status': 'active'
})
self.update_data = {
"properties": {
"version": 1,
"key_2": 15
}
}
def _patch(self, data, user):
url = reverse(
'api:project_single_observation',
kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
}
)
request = self.factory.patch(
url, json.dumps(data), content_type='application/json')
force_authenticate(request, user=user)
view = SingleAllContributionAPIView.as_view()
return view(
request, project_id=self.project.id,
observation_id=self.observation.id).render()
def _delete(self, user):
url = reverse(
'api:project_single_observation',
kwargs={
'project_id': self.project.id,
'observation_id': self.observation.id
}
)
request = self.factory.delete(url, content_type='application/json')
force_authenticate(request, user=user)
view = SingleAllContributionAPIView.as_view()
return view(
request, project_id=self.project.id,
observation_id=self.observation.id).render()
def test_update_conflict(self):
response = self._patch(
self.update_data,
self.admin
)
self.assertEqual(response.status_code, 200)
data = {"properties": {"attributes": {"version": 1, "key_2": 2}}}
response = self._patch(
data,
self.admin
)
self.assertEqual(response.status_code, 200)
def test_update_location_with_admin(self):
self.update_data['geometry'] = {
'type': 'Point',
'coordinates': [
-0.1444154977798462,
51.54671869005856
]
}
self.update_data['properties']['location'] = {
'name': 'New name'
}
response = self._patch(
self.update_data,
self.admin
)
self.assertEqual(response.status_code, 200)
observation = Observation.objects.get(pk=self.observation.id)
self.assertEqual(
observation.properties.get('key_2'), 15)
self.assertContains(response, 'New name')
self.assertContains(response, '-0.144415')
def test_update_with_admin(self):
response = self._patch(
self.update_data,
self.admin
)
self.assertEqual(response.status_code, 200)
observation = Observation.objects.get(pk=self.observation.id)
self.assertEqual(
observation.properties.get('key_2'), 15)
@raises(Observation.DoesNotExist)
def test_delete_with_admin(self):
response = self._delete(
self.admin
)
self.assertEqual(response.status_code, 204)
Observation.objects.get(pk=self.observation.id)
def test_update_with_contributor(self):
response = self._patch(
self.update_data,
self.contributor
)
self.assertEqual(response.status_code, 403)
observation = Observation.objects.get(pk=self.observation.id)
self.assertEqual(
observation.properties.get('key_2'), 12)
def test_delete_with_contributor(self):
response = self._delete(
self.contributor
)
self.assertEqual(response.status_code, 403)
def test_update_with_non_member(self):
response = self._patch(
self.update_data,
self.non_member
)
self.assertEqual(response.status_code, 404)
self.assertEqual(
self.observation.properties.get('key_2'), 12)
def test_delete_with_non_member(self):
response = self._delete(
self.non_member
)
self.assertEqual(response.status_code, 404)
self.assertNotEqual(
Observation.objects.get(pk=self.observation.id).status,
'deleted'
)
class TestProjectPublicApi(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.admin = UserF.create()
self.contributor = UserF.create()
self.project = ProjectF.create(
add_admins=[self.admin],
add_contributors=[self.contributor]
)
def get(self, user, search=None, subset=None):
url = reverse('api:project_observations', kwargs={
'project_id': self.project.id
})
if search:
url += '?search=blah'
if subset:
url += '?subset=' + str(subset)
request = self.factory.get(url)
force_authenticate(request, user=user)
theview = ProjectObservations.as_view()
return theview(
request,
project_id=self.project.id).render()
def test_get_with_subset(self):
category_1 = CategoryFactory(**{'project': self.project})
category_2 = CategoryFactory(**{'project': self.project})
subset = SubsetFactory.create(**{
'project': self.project,
'filters': {category_1.id: {}}
})
for x in range(0, 2):
ObservationFactory.create(**{
'project': self.project,
'category': category_1}
)
ObservationFactory.create(**{
'project': self.project,
'category': category_2}
)
response = self.get(self.admin, subset=subset.id)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(json.loads(response.content).get('features')), 2)
def test_get_with_search(self):
category = CategoryFactory(**{'project': self.project})
TextFieldFactory.create(**{'key': 'text', 'category': category})
for x in range(0, 2):
ObservationFactory.create(**{
'project': self.project,
'category': category,
'properties': {'text': 'blah'}}
)
ObservationFactory.create(**{
'project': self.project,
'category': category,
'properties': {'text': 'blub'}}
)
response = self.get(self.admin, search='blah')
self.assertEqual(response.status_code, 200)
self.assertEqual(len(json.loads(response.content).get('features')), 2)
def test_get_with_admin(self):
response = self.get(self.admin)
self.assertEqual(response.status_code, 200)
def test_get_with_contributor(self):
response = self.get(self.contributor)
self.assertEqual(response.status_code, 200)
def test_get_with_some_dude(self):
some_dude = UserF.create()
response = self.get(some_dude)
self.assertEqual(response.status_code, 404)
def test_get_with_anonymous(self):
response = self.get(AnonymousUser())
self.assertEqual(response.status_code, 404)
| apache-2.0 | 5,508,809,007,634,089,000 | 31.84135 | 78 | 0.558368 | false |
lizardsystem/lizard-waterbalance | lizard_wbcomputation/level_control_assignment.py | 1 | 2649 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#******************************************************************************
#
# This file is part of the lizard_waterbalance Django app.
#
# The lizard_waterbalance app is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# the lizard_waterbalance app. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2011 Nelen & Schuurmans
#
#******************************************************************************
#
# Initial programmer: Pieter Swinkels
# Initial date: 2011-01-26
#
#******************************************************************************
from timeseries.timeseriesstub import multiply_timeseries
class LevelControlAssignment:
def compute(self, level_control, pumping_stations):
"""Computes and returns the computed level control time series.
Parameters:
* level_control -- pair of (total incoming, total outgoing) time series
* pumping_stations -- list of PumpingStation(s) to handle the water flow
The total incoming and total outgoing level control volumes have to be
assigned to the intakes and pumps that can be used for level control. This
method computes that assignment and returns it as a dictionary of
PumpingStation to SparseTimeseriesStub.
The keys of the returned dictionary are the intakes and pumps that can
be used for level control. The associated value is the level control
time series.
"""
assignment = {}
(incoming_timeseries, outgoing_timeseries) = level_control
for pumping_station in pumping_stations:
timeseries = None
fraction = pumping_station.percentage / 100.0
if pumping_station.is_computed:
if pumping_station.into:
timeseries = multiply_timeseries(incoming_timeseries, fraction)
else:
timeseries = multiply_timeseries(outgoing_timeseries, fraction)
if timeseries is None:
continue
assignment[pumping_station] = timeseries
return assignment
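# A minimal usage sketch (the pumping stations and time series below are
# illustrative assumptions, not objects defined in this module):
#
#   assignment = LevelControlAssignment().compute(
#       (total_incoming_timeseries, total_outgoing_timeseries),
#       pumping_stations)
#   for station, timeseries in assignment.items():
#       ...  # each station receives its percentage share of the level control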
| gpl-3.0 | 8,728,992,048,921,368,000 | 37.391304 | 83 | 0.626274 | false |
jake-delorme/skypi | Daemons/skypi/GPS.py | 1 | 3873 | """Manages retrieving data from our GPSD daemon"""
import threading
import logging
import Queue
import time
import calendar
import re
from gps import *
from skypi.Manager import Event
class GPS(object):
"""Runs a thread for retrieving GPS status and a queue for giving location"""
def __init__(self, pimanager):
# create the object yo
logging.debug("Create the GPS object")
self.name = "GPS"
self.pimanager = pimanager
# Create the local queue
self.queue = Queue.PriorityQueue()
# GPS object
self.gpsd = gps(mode=WATCH_ENABLE)
self.gpslocation = Gpslocation()
# Register for messages
self.pimanager.register(self, "SystemTest")
self.pimanager.register(self, "GetGPS")
# Create and start the threads
self.listenerthread = threading.Thread(target=self.__listener, name=self.name+"-listener")
self.listenerthread.daemon = True
self.listenerthread.start()
self.consumerthread = threading.Thread(target=self.__queueconsumer, name=self.name+"-consumer")
self.consumerthread.daemon = True
self.consumerthread.start()
def __listener(self):
"""Continuously read the GPS data and update the gpslocation object"""
name = threading.current_thread().getName()
logging.debug("Running the "+name+" thread")
while True:
self.gpsd.next()
# match only if we got a valid date (partial fix)
            if re.match(r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.000Z', self.gpsd.utc):
# convert utc to epoch
parsedtime = time.strptime(self.gpsd.utc, "%Y-%m-%dT%H:%M:%S.000Z")
parsedepoch = calendar.timegm(parsedtime)
# 2 = 2D_FIX 3 = 3D_FIX
if self.gpsd.fix.mode > 1:
self.gpslocation.lattitude = self.gpsd.fix.latitude
self.gpslocation.longtitude = self.gpsd.fix.longitude
self.gpslocation.lastfix = parsedepoch
if self.gpsd.fix.mode == 3:
self.gpslocation.altitude = self.gpsd.fix.altitude*3.28084
self.gpslocation.lastaltitudefix = parsedepoch
# logging.debug('GPS fix mode %s', self.gpsd.fix.mode)
# logging.debug( 'latitude %s' , self.gpsd.fix.latitude )
# logging.debug( 'longitude %s' , self.gpsd.fix.longitude )
# logging.debug( 'time utc %s + %s' , self.gpsd.utc , self.gpsd.fix.time )
# logging.debug( 'altitude (f) %s' , self.gpsd.fix.altitude*3.28084 )
# logging.debug( 'eps ' , self.gpsd.fix.eps)
# logging.debug( 'epx ' , self.gpsd.fix.epx)
# logging.debug( 'epv ' , self.gpsd.fix.epv)
# logging.debug( 'ept ' , self.gpsd.fix.ept)
# logging.debug( 'speed (f/s) ' , self.gpsd.fix.speed*3.28084)
# logging.debug( 'climb ' , self.gpsd.fix.climb)
# logging.debug( 'track ' , self.gpsd.fix.track)
# logging.debug( 'mode ' , self.gpsd.fix.mode)
# logging.debug('')
# logging.debug( 'sats ' , self.gpsd.satellites)
def addToQueue(self, event, priority=99):
"""Adds an item to the GPS queue to be processed"""
self.queue.put((priority, event))
def __queueconsumer(self):
name = threading.current_thread().getName()
logging.debug("Running the %s thread", name)
        # process queue objects as they come in; run the thread forever
        while True:
item = self.queue.get(True)
task = item[1].getTask()
logging.debug("Process Queue task %s", task)
if task == "GetGPS" or task == "SystemTest":
event = Event("GPSLocation", self.gpslocation)
event.setadditionalarg("callingevent", item[1])
self.pimanager.addToQueue(event)
else:
                logging.error('Received message %s but I do not use this message', task)
self.queue.task_done()
class Gpslocation(object):
"""Holds the current GPSStatus including location"""
def __init__(self):
self.lattitude = 'Nan'
self.longtitude = 'Nan'
self.altitude = 'Nan'
self.lastfix = 0
self.lastaltitudefix = 0
def getgoogleurl(self):
return 'https://www.google.com/maps/place/%s,%s' % (self.lattitude, self.longtitude)
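# A rough consumer sketch (assumes a PiManager wired up as in skypi.Manager,
# which is not shown in this file; the Event call is illustrative):
#   gps = GPS(pimanager)
#   gps.addToQueue(Event("GetGPS", None), priority=1)
#   # ...the GPS consumer thread answers by posting a "GPSLocation" Event
#   # whose payload is this Gpslocation object.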
| mit | -7,834,503,266,576,936,000 | 35.537736 | 97 | 0.678286 | false |
opotowsky/learn-me-fuel | tests/test_learnme.py | 1 | 2599 | from learn.tools import splitXY, track_predictions, errors_and_scores, validation_curves, learning_curves, ext_test_compare
from sklearn.datasets import make_regression
from sklearn.neighbors import KNeighborsRegressor, KNeighborsClassifier
from sklearn.tree import DecisionTreeRegressor, DecisionTreeClassifier
from sklearn.svm import SVR, SVC
from sklearn.model_selection import KFold, StratifiedKFold
import pandas as pd
import numpy as np
import pytest
import os
# For now, skipping pandas df-related functions; unsure how to test this
n_obs = 500
n_feats = 50
def data_setup():
X, y = make_regression(n_samples=n_obs, n_features=n_feats, noise=.2)
# mimics output of splitXY
X = pd.DataFrame(X, index=np.arange(0, n_obs), columns=np.arange(0, n_feats))
y = pd.Series(y)
score = 'explained_variance'
kfold = KFold(n_splits=5, shuffle=True)
alg1_init = KNeighborsRegressor(weights='distance')
alg2_init = DecisionTreeRegressor()
alg3_init = SVR()
csv_name = 'test.csv'
return X, y, score, kfold, alg1_init, alg2_init, alg3_init, csv_name
def test_track_predictions(tmpdir):
X, y, _, kfold, alg1_init, alg2_init, alg3_init, csv_name = data_setup()
csv_name = tmpdir.join(csv_name)
track_predictions(X, y, alg1_init, alg2_init, alg3_init, kfold, csv_name, X)
def test_errors_and_scores(tmpdir):
X, y, score, kfold, alg1_init, alg2_init, alg3_init, csv_name = data_setup()
csv_name = tmpdir.join(csv_name)
scores = [score,]
errors_and_scores(X, y, alg1_init, alg2_init, alg3_init, scores, kfold, csv_name)
def test_validation_curves(tmpdir):
X, y, score, kfold, alg1_init, alg2_init, alg3_init, csv_name = data_setup()
csv_name = tmpdir.join(csv_name)
    validation_curves(X, y, alg1_init, alg2_init, alg3_init, kfold, score, csv_name)
def test_learning_curves(tmpdir):
X, y, score, kfold, alg1_init, alg2_init, alg3_init, csv_name = data_setup()
csv_name = tmpdir.join(csv_name)
    learning_curves(X, y, alg1_init, alg2_init, alg3_init, kfold, score, csv_name)
def test_ext_test_compare(tmpdir):
_, _, _, _, alg1_init, alg2_init, alg3_init, csv_name = data_setup()
csv_name = tmpdir.join(csv_name)
    # Get real ORIGEN data for this test
trainpath = 'learn/pkl_trainsets/2jul2018/2jul2018_trainset1_'
pkl = trainpath + 'nucs_fissact_not-scaled.pkl'
trainXY = pd.read_pickle(pkl)
trainXY.reset_index(inplace=True, drop=True)
trainXY = trainXY.sample(frac=0.1)
    X, rY, cY, eY, bY = splitXY(trainXY)  # only X and bY are used below
ext_test_compare(X, bY, alg1_init, alg2_init, alg3_init, csv_name)
| bsd-3-clause | 9,082,336,159,638,925,000 | 42.316667 | 123 | 0.701808 | false |
jsymolon/ARMSim | TestADD.py | 1 | 6295 |
import unittest
import armv6instrdecode
import globals
import utils
import logging
import ARMCPU
import pdb
# if ConditionPassed(cond) then
# Rd = Rn + shifter_operand
# if S == 1 and Rd == R15 then
# if CurrentModeHasSPSR() then
# CPSR = SPSR
# else UNPREDICTABLE
# else if S == 1 then
# N Flag = Rd[31]
# Z Flag = if Rd == 0 then 1 else 0
# C Flag = CarryFrom(Rn + shifter_operand)
# V Flag = OverflowFrom(Rn + shifter_operand)
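
# For reference, a minimal sketch of how CarryFrom/OverflowFrom can be
# computed for a 32-bit add. These helper names are illustrative only; the
# simulator's real flag logic lives in ARMCPU, not in this test module.
def _carry_from_add(rn, operand):
    # carry out == the unsigned result does not fit in 32 bits
    return (rn + operand) > 0xFFFFFFFF

def _overflow_from_add(rn, operand):
    # signed overflow: the operands share a sign and the result's sign differs
    result = (rn + operand) & 0xFFFFFFFF
    return bool(~(rn ^ operand) & (rn ^ result) & 0x80000000)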
logfile = "TestADD.log"
with open(logfile, 'w'):
pass
logging.basicConfig(filename=logfile,level=logging.DEBUG)
class TestADD(unittest.TestCase):
"""Instructions"""
# preparing to test
def setUp(self):
""" Setting up for the test """
self.addr = 0
# ending the test
def tearDown(self):
"""Cleaning up after the test"""
# E2810A01 010A81E2 ADDAL R0, R1, #4096
# E289A00F 0FA089E2 ADDAL R10, R9, #15 @ dp imm
# E0856004 046085E0 ADDAL R6, R5, R4 @ dp imm shift
# E0866415 156486E0 ADDAL R6, R5, LSL R4 @ dp imm reg shift
def testADD_Imm1(self):
logging.debug("------------------------------------------")
logging.debug("TestDecode:testADD_Imm1")
code = 0xE2810A01 # ADDAL R0, R1, #4096
instrStr = armv6instrdecode.getInstructionFromCode(self, code, 0)
logging.debug("1:" + instrStr)
self.assertEqual(instrStr, " E2810A01 ADD AL R00, R01 #01", instrStr)
logging.debug("2:" + instrStr)
        globals.regs[1] = 3  # R1 = 3, so R0 should become 3 + 0x1000 = 0x1003
globals.regs[0] = 1
reg = utils.buildRegValString(self, 1)
self.assertEqual(reg, "R01:00000003", reg)
instrStr = armv6instrdecode.getInstructionFromCode(self, code, 1)
reg = utils.buildRegValString(self, 0)
self.assertEqual(reg, "R00:00001003", reg)
def testADD_Imm2(self):
logging.debug("------------------------------------------")
logging.debug("TestDecode:testADD_Imm2")
# 33222222222211111111110000000000
# 10987654321098765432109876543210
        code = 0xE289A00F  # ADDAL R10, R9, #15
instrStr = armv6instrdecode.getInstructionFromCode(self, code, 0)
logging.debug("1:" + instrStr)
        # Rd = Rn + shifter_operand (here R10 = R09 + #0F)
self.assertEqual(instrStr, " E289A00F ADD AL R10, R09 #0F", instrStr)
logging.debug("2:" + instrStr)
        globals.regs[9] = 3  # R9 = 3, so R10 should become 3 + 0xF = 0x12
globals.regs[10] = 1
reg = utils.buildRegValString(self, 9)
self.assertEqual(reg, "R09:00000003", reg)
instrStr = armv6instrdecode.getInstructionFromCode(self, code, 1)
reg = utils.buildRegValString(self, 10)
self.assertEqual(reg, "R10:00000012", reg)
def testADD_ImmShft(self):
logging.debug("------------------------------------------")
logging.debug("TestDecode:testADD_ImmShft")
code = 0xE0856004
instrStr = armv6instrdecode.getInstructionFromCode(self, code, 0)
logging.debug("1:" + instrStr)
self.assertEqual(instrStr, " E0856004 ADD AL R06, R05 R04", instrStr)
        globals.regs[4] = 3
globals.regs[5] = 1
globals.regs[globals.CPSR] = globals.regs[globals.CPSR] | ARMCPU.CARRYBIT
instrStr = armv6instrdecode.getInstructionFromCode(self, code, 1)
reg = utils.buildRegValString(self, 6)
self.assertEqual(reg, "R06:00000009", reg)
def testADD_RegShft(self):
logging.debug("------------------------------------------")
logging.debug("TestDecode:testADD_RegShft")
code = 0xE0866415
instrStr = armv6instrdecode.getInstructionFromCode(self, code, 0)
logging.debug("1:" + instrStr)
self.assertEqual(instrStr, " E0866415 ADD AL R06, R05 LSL R04", instrStr)
globals.regs[4] = 1
globals.regs[5] = 0x40000000
globals.regs[6] = 1
globals.regs[globals.CPSR] = globals.regs[globals.CPSR] | ARMCPU.CARRYBIT
instrStr = armv6instrdecode.getInstructionFromCode(self, code, 1)
reg = utils.buildRegValString(self, 6)
self.assertEqual(reg, "R06:80000001", reg)
def testADD_setflag_c(self):
logging.debug("------------------------------------------")
logging.debug("TestDecode:testADD_setflag_c - should produce an carry")
code = 0xE2910001 # ADDALS R0, R1, #1
globals.regs[1] = 0xFFFFFFFF
globals.regs[0] = 0
globals.regs[globals.CPSR] = 0
instrStr = armv6instrdecode.getInstructionFromCode(self, code, 1)
reg = utils.buildRegValString(self, 0)
self.assertEqual(reg, "R00:00000000", reg)
# N Flag = Rd[31]
self.assertEqual(1, globals.regs[globals.CPSR] & ARMCPU.NEGATIVEBIT == 0, 1)
# Z Flag = if Rd == 0 then 1 else 0
self.assertEqual(1, globals.regs[globals.CPSR] & ARMCPU.ZEROBIT > 0, 1)
# C Flag = CarryFrom(Rn + shifter_operand)
# V Flag = OverflowFrom(Rn + shifter_operand)
#logging.debug(hex(globals.regs[globals.CPSR] & ARMCPU.OVERBIT))
self.assertEqual(1, globals.regs[globals.CPSR] & ARMCPU.CARRYBIT > 0, 1)
def testADD_setflag_o(self):
logging.debug("------------------------------------------")
logging.debug("TestDecode:testADD_setflag_o - should produce an overflow")
code = 0xE2910001 # ADDALS R0, R1, #1
globals.regs[1] = 0x7FFFFFFF
globals.regs[0] = 0
globals.regs[globals.CPSR] = 0
instrStr = armv6instrdecode.getInstructionFromCode(self, code, 1)
reg = utils.buildRegValString(self, 0)
self.assertEqual(reg, "R00:80000000", reg)
# N Flag = Rd[31]
self.assertEqual(1, globals.regs[globals.CPSR] & ARMCPU.NEGATIVEBIT > 0, 1)
# Z Flag = if Rd == 0 then 1 else 0
self.assertEqual(1, globals.regs[globals.CPSR] & ARMCPU.ZEROBIT == 0, 1)
# C Flag = CarryFrom(Rn + shifter_operand)
# V Flag = OverflowFrom(Rn + shifter_operand)
#logging.debug(hex(globals.regs[globals.CPSR] & ARMCPU.OVERBIT))
self.assertEqual(1, globals.regs[globals.CPSR] & ARMCPU.OVERBIT > 0, 1)
if __name__ == "__main__":
    unittest.main()
| gpl-2.0 | 5,673,797,915,211,875,000 | 41.255034 | 84 | 0.596823 | false |
Naereen/mazhe | phystricksCommuns.py | 1 | 1054 |
# -*- coding: utf8 -*-
# Note: if you modify this file, you must copy it by hand to the test directory.
# ~/script/modules/phystricks/tests
from phystricks import *
def CorrectionParametrique(curve,LLms,name,dilatation=1):
fig = GenericFigure("SubfiguresCDU"+name,script_filename="Communs")
ssfig1 = fig.new_subfigure(u"Quelque points de repères","SS1"+name)
pspict1 = ssfig1.new_pspicture(name+"psp1")
ssfig2 = fig.new_subfigure(u"La courbe","SS2"+name)
pspict2 = ssfig2.new_pspicture(name+"psp2")
for llam in LLms :
P=curve(llam)
tangent=curve.get_tangent_segment(llam)
second=curve.get_second_derivative_vector(llam)
normal=curve.get_normal_vector(llam)
normal.parameters.color="green"
tangent.parameters.color="brown"
pspict1.DrawGraphs(P,second,tangent,normal)
pspict2.DrawGraphs(P,tangent)
curve.parameters.style="dashed"
pspict2.DrawGraphs(curve)
pspict1.DrawDefaultAxes()
pspict1.dilatation(dilatation)
pspict2.DrawDefaultAxes()
pspict2.dilatation(dilatation)
fig.conclude()
fig.write_the_file()
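
# An illustrative call, assuming a Sage/phystricks parametric curve; the
# construction below is a sketch, not an API guarantee:
#   x = var('x')
#   curve = ParametricCurve(cos(x), sin(2 * x))
#   CorrectionParametrique(curve, [0, pi / 4, pi / 2], "Lissajous", dilatation=0.7)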
| gpl-3.0 | 3,700,023,193,729,939,500 | 29.911765 | 90 | 0.756422 | false |
ljean/coop_cms | coop_cms/forms/base.py | 1 | 1030 |
# -*- coding: utf-8 -*-
"""forms"""
import floppyforms.__future__ as floppyforms
from coop_html_editor.widgets import get_inline_html_widget
class InlineHtmlEditableModelForm(floppyforms.ModelForm):
"""Base class for form with inline-HTML editor fields"""
is_inline_editable = True # The cms_edition templatetag checks this for switching to edit mode
def __init__(self, *args, **kwargs):
super(InlineHtmlEditableModelForm, self).__init__(*args, **kwargs) # pylint: disable=E1002
        no_inline_html_widgets = getattr(self.Meta, 'no_inline_editable_widgets', ())
        for field_name in self.Meta.fields:
            if field_name not in no_inline_html_widgets:
                self.fields[field_name].widget = get_inline_html_widget()
class Media:
css = {
'all': ('css/colorbox.css', ),
}
js = (
'js/jquery.form.js',
'js/jquery.pageslide.js',
'js/jquery.colorbox-min.js',
'js/colorbox.coop.js',
)
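
# A minimal usage sketch. "Article" is a hypothetical model, not part of
# coop_cms: every field listed in Meta.fields gets the inline HTML editor
# widget unless it is also named in no_inline_editable_widgets.
#
# class ArticleForm(InlineHtmlEditableModelForm):
#     class Meta:
#         model = Article
#         fields = ('title', 'content')
#         no_inline_editable_widgets = ('title',)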
| bsd-3-clause | 9,131,538,540,414,729,000 | 34.517241 | 99 | 0.607767 | false |
bugzPDX/airmozilla | airmozilla/main/tests/test_views.py | 1 | 132507 |
import os
import datetime
import httplib
import json
import urllib2
import urllib
import re
import copy
from django.contrib.auth.models import Group, User, AnonymousUser, Permission
from django.utils import timezone
from django.utils.timezone import utc
from django.conf import settings
from django.core.cache import cache
from django.core.files import File
from funfactory.urlresolvers import reverse
from nose.tools import eq_, ok_
import mock
import pyquery
from airmozilla.main.models import (
Approval,
Event,
EventOldSlug,
Tag,
UserProfile,
Channel,
Location,
Template,
EventHitStats,
CuratedGroup,
EventRevision,
RecruitmentMessage,
Picture,
VidlySubmission,
EventLiveHits,
)
from airmozilla.staticpages.models import StaticPage
from airmozilla.base.tests.test_mozillians import (
Response,
GROUPS1,
GROUPS2,
VOUCHED_FOR
)
from airmozilla.base.tests.testbase import DjangoTestCase
class TestPages(DjangoTestCase):
fixtures = ['airmozilla/manage/tests/main_testdata.json']
main_image = 'airmozilla/manage/tests/firefox.png'
def setUp(self):
super(TestPages, self).setUp()
# Make the fixture event live as of the test.
event = Event.objects.get(title='Test event')
event.start_time = timezone.now()
event.archive_time = None
event.save()
self._upload_media(self.main_image)
self.main_channel = Channel.objects.get(
slug=settings.DEFAULT_CHANNEL_SLUG
)
    def _calendar_url(self, privacy, location=None):
        url = reverse('main:calendar_ical', args=(privacy,))
        if location:
            if isinstance(location, int):
                url += '?location=%s' % location
            else:
                # accept either a Location instance or a plain name string
                if hasattr(location, 'name'):
                    location = location.name
                url += '?location=%s' % urllib.quote_plus(location)
        return url
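
    # For orientation, _calendar_url('company', 3) builds something like
    # "/calendar/ical/company/?location=3"; the exact path depends on the
    # main:calendar_ical URLconf, so treat the example as illustrative only.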
def test_contribute_json(self):
response = self.client.get('/contribute.json')
eq_(response.status_code, 200)
        # Should be valid JSON, but the response content is streamed
        # because it comes from django.views.static.serve
ok_(json.loads(''.join(response.streaming_content)))
eq_(response['Content-Type'], 'application/json')
def test_is_contributor(self):
from airmozilla.main.views import is_contributor
anonymous = AnonymousUser()
ok_(not is_contributor(anonymous))
employee_wo_profile = User.objects.create_user(
'worker', '[email protected]', 'secret'
)
ok_(not is_contributor(employee_wo_profile))
employee_w_profile = User.objects.create_user(
'worker2', '[email protected]', 'secret'
)
assert not UserProfile.objects.filter(user=employee_wo_profile)
up = UserProfile.objects.create(
user=employee_w_profile,
contributor=False
)
ok_(not is_contributor(employee_w_profile))
up.contributor = True
up.save()
# re-fetch to avoid internal django cache on profile fetching
employee_w_profile = User.objects.get(pk=employee_w_profile.pk)
ok_(is_contributor(employee_w_profile))
contributor = User.objects.create_user(
'nigel', '[email protected]', 'secret'
)
UserProfile.objects.create(
user=contributor,
contributor=True
)
ok_(is_contributor(contributor))
def test_is_employee(self):
from airmozilla.main.views import is_employee
user = User.objects.create(username='a', email='[email protected]')
ok_(not is_employee(user))
from random import choice
user = User.objects.create(
username='b',
email='foo@' + choice(settings.ALLOWED_BID)
)
ok_(is_employee(user))
def test_can_view_event(self):
event = Event.objects.get(title='Test event')
assert event.privacy == Event.PRIVACY_PUBLIC # default
anonymous = AnonymousUser()
employee_wo_profile = User.objects.create_user(
'worker', '[email protected]', 'secret'
)
employee_w_profile = User.objects.create_user(
'worker2', '[email protected]', 'secret'
)
assert not UserProfile.objects.filter(user=employee_wo_profile)
UserProfile.objects.create(
user=employee_w_profile,
contributor=False
)
contributor = User.objects.create_user(
'nigel', '[email protected]', 'secret'
)
UserProfile.objects.create(
user=contributor,
contributor=True
)
from airmozilla.main.views import can_view_event, is_contributor
ok_(can_view_event(event, anonymous))
assert not is_contributor(anonymous)
ok_(can_view_event(event, contributor))
assert is_contributor(contributor)
ok_(can_view_event(event, employee_wo_profile))
assert not is_contributor(employee_wo_profile)
ok_(can_view_event(event, employee_w_profile))
assert not is_contributor(employee_w_profile)
event.privacy = Event.PRIVACY_COMPANY
event.save()
ok_(not can_view_event(event, anonymous))
ok_(not can_view_event(event, contributor))
ok_(can_view_event(event, employee_wo_profile))
ok_(can_view_event(event, employee_w_profile))
event.privacy = Event.PRIVACY_CONTRIBUTORS
event.save()
ok_(not can_view_event(event, anonymous))
ok_(can_view_event(event, contributor))
ok_(can_view_event(event, employee_wo_profile))
ok_(can_view_event(event, employee_w_profile))
def test_view_event_with_pin(self):
cache.clear()
event = Event.objects.get(title='Test event')
event.privacy = Event.PRIVACY_CONTRIBUTORS
event.description = "My Event Description"
event.pin = '12345'
event.save()
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
self.assertRedirects(
response,
reverse('main:login') + '?next=%s' % url
)
User.objects.create_user(
'mary', '[email protected]', 'secret'
)
assert self.client.login(username='mary', password='secret')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.description in response.content)
contributor = User.objects.create_user(
'nigel', '[email protected]', 'secret'
)
UserProfile.objects.create(
user=contributor,
contributor=True
)
assert self.client.login(username='nigel', password='secret')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.description not in response.content)
ok_('id="id_pin"' in response.content)
# attempt a pin
response = self.client.post(url, {'pin': '1'})
eq_(response.status_code, 200)
ok_(event.description not in response.content)
ok_('id="id_pin"' in response.content)
ok_('Incorrect pin' in response.content)
response = self.client.post(url, {'pin': ' 12345 '})
eq_(response.status_code, 302)
self.assertRedirects(response, url)
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.description in response.content)
ok_('id="id_pin"' not in response.content)
def test_view_public_event_with_pin(self):
event = Event.objects.get(title='Test event')
event.privacy = Event.PRIVACY_PUBLIC
event.description = "My Event Description"
event.pin = '12345'
event.save()
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
# expect the pin input to be there
ok_('id="id_pin"' in response.content)
response = self.client.post(url, {'pin': ' 12345 '})
eq_(response.status_code, 302)
self.assertRedirects(response, url)
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.description in response.content)
ok_('id="id_pin"' not in response.content)
def test_view_private_events_with_notices(self):
# for https://bugzilla.mozilla.org/show_bug.cgi?id=821458
event = Event.objects.get(title='Test event')
assert event.privacy == Event.PRIVACY_PUBLIC # default
event.privacy = Event.PRIVACY_CONTRIBUTORS
event.save()
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
self.assertRedirects(
response,
reverse('main:login') + '?next=%s' % url
)
contributor = User.objects.create_user(
'nigel', '[email protected]', 'secret'
)
UserProfile.objects.create(
user=contributor,
contributor=True
)
assert self.client.login(username='nigel', password='secret')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(
'This event is available only to Mozilla volunteers and staff'
in response.content
)
event.privacy = Event.PRIVACY_COMPANY
event.save()
response = self.client.get(url)
permission_denied_url = reverse(
'main:permission_denied',
args=(event.slug,)
)
self.assertRedirects(response, permission_denied_url)
# actually go there
response = self.client.get(permission_denied_url)
eq_(response.status_code, 200)
ok_('This event is only for Mozilla staff')
User.objects.create_user(
'worker', '[email protected]', 'secret'
)
assert self.client.login(username='worker', password='secret')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(
'This event is available only to Mozilla staff'
in response.content
)
def test_home(self):
"""Index page loads and paginates correctly."""
response = self.client.get(reverse('main:home'))
eq_(response.status_code, 200)
response_empty_page = self.client.get(reverse('main:home',
kwargs={'page': 10000}))
eq_(response_empty_page.status_code, 200)
def test_event(self):
"""Event view page loads correctly if the event is public and
scheduled and approved; request a login otherwise."""
event = Event.objects.get(title='Test event')
group = Group.objects.get()
approval = Approval(event=event, group=group)
approval.save()
event_page = reverse('main:event', kwargs={'slug': event.slug})
response_fail_approval = self.client.get(event_page)
eq_(response_fail_approval.status_code, 200)
ok_('not approved' in response_fail_approval.content)
approval.approved = True
approval.processed = True
approval.save()
response_ok = self.client.get(event_page)
eq_(response_ok.status_code, 200)
event.privacy = Event.PRIVACY_COMPANY
event.save()
response_fail = self.client.get(event_page)
self.assertRedirects(
response_fail,
reverse('main:login') + '?next=%s' % event_page
)
event.privacy = Event.PRIVACY_CONTRIBUTORS
event.save()
response_fail = self.client.get(event_page)
self.assertRedirects(
response_fail,
reverse('main:login') + '?next=%s' % event_page
)
event.privacy = Event.PRIVACY_PUBLIC
event.status = Event.STATUS_INITIATED
event.save()
response_fail = self.client.get(event_page)
eq_(response_fail.status_code, 200)
ok_('This event is no longer available.' in response_fail.content)
self.client.logout()
event.privacy = Event.PRIVACY_COMPANY
event.status = Event.STATUS_SCHEDULED
event.save()
response_fail = self.client.get(event_page)
self.assertRedirects(
response_fail,
reverse('main:login') + '?next=%s' % event_page
)
nigel = User.objects.create_user('nigel', '[email protected]', 'secret')
UserProfile.objects.create(user=nigel, contributor=True)
assert self.client.login(username='nigel', password='secret')
response_fail = self.client.get(event_page)
self.assertRedirects(
response_fail,
reverse('main:permission_denied', args=(event.slug,))
)
event.privacy = Event.PRIVACY_CONTRIBUTORS
event.save()
response_ok = self.client.get(event_page)
eq_(response_ok.status_code, 200)
def test_view_event_channels(self):
event = Event.objects.get(title='Test event')
channel1 = Channel.objects.create(
name='Test Channel1',
slug='test-channel1')
channel2 = Channel.objects.create(
name='Test Channel2',
slug='test-channel2')
event.channels.add(channel1)
event.channels.add(channel2)
event_url = reverse('main:event', kwargs={'slug': event.slug})
response = self.client.get(event_url)
eq_(response.status_code, 200)
main_channel_url = reverse(
'main:home_channels',
args=(self.main_channel.slug,))
test_channel1_url = reverse(
'main:home_channels',
args=(channel1.slug,))
test_channel2_url = reverse(
'main:home_channels',
args=(channel2.slug,))
ok_(self.main_channel.name in response.content
and main_channel_url in response.content)
ok_('Test Channel1' in response.content
and test_channel1_url in response.content)
ok_('Test Channel2' in response.content
and test_channel2_url in response.content)
def test_view_event_with_autoplay(self):
event = Event.objects.get(title='Test event')
vidly = Template.objects.create(
name="Vid.ly HD",
content=(
'<iframe src="{{ tag }}?autoplay={{ autoplay }}"></iframe>'
)
)
event.template = vidly
event.template_environment = {'tag': 'abc123'}
event.save()
url = reverse('main:event', kwargs={'slug': event.slug})
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('autoplay=false' in response.content)
response = self.client.get(url, {'autoplay': 'true'})
eq_(response.status_code, 200)
ok_('autoplay=true' in response.content)
response = self.client.get(url, {'autoplay': '1'})
eq_(response.status_code, 200)
ok_('autoplay=false' in response.content)
def test_event_with_vidly_download_links(self):
event = Event.objects.get(title='Test event')
vidly = Template.objects.create(
name="Vid.ly HD",
content='<iframe src="{{ tag }}"></iframe>'
)
event.template = vidly
event.template_environment = {'tag': 'abc123'}
event.save()
url = reverse('main:event', kwargs={'slug': event.slug})
response = self.client.get(url)
eq_(response.status_code, 200)
assert event.privacy == Event.PRIVACY_PUBLIC
ok_(
'https://vid.ly/abc123?content=video&format=webm'
in response.content
)
ok_(
'https://vid.ly/abc123?content=video&format=mp4'
in response.content
)
ok_(
'https://vid.ly/abc123?content=video&format=hd_webm'
not in response.content
)
ok_(
'https://vid.ly/abc123?content=video&format=hd_mp4'
not in response.content
)
def test_private_event_redirect(self):
event = Event.objects.get(title='Test event')
event.privacy = Event.PRIVACY_COMPANY
event.save()
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:login') + '?next=%s' % url
)
def test_event_upcoming(self):
"""View an upcoming event and it should show the local time"""
event = Event.objects.get(title='Test event')
date = datetime.datetime(2099, 1, 1, 18, 0, 0).replace(tzinfo=utc)
event.start_time = date
event.save()
group = Group.objects.get()
approval = Approval(event=event, group=group)
approval.approved = True
approval.save()
event_page = reverse('main:event', kwargs={'slug': event.slug})
response = self.client.get(event_page)
eq_(response.status_code, 200)
assert event.location
ok_(event.location.name in response.content)
# 18:00 in UTC on that 1st Jan 2099 is 10AM in Pacific time
assert event.location.timezone == 'US/Pacific'
ok_('10:00AM' in response.content)
def test_event_in_cyberspace(self):
event = Event.objects.get(title='Test event')
assert 'Cyberspace' not in event.location.name
event_page = reverse('main:event', kwargs={'slug': event.slug})
response = self.client.get(event_page)
eq_(response.status_code, 200)
ok_(event.location.name in response.content)
cyberspace, __ = Location.objects.get_or_create(
name='Cyberspace',
timezone='UTC'
)
event.location = cyberspace
event.save()
response = self.client.get(event_page)
eq_(response.status_code, 200)
ok_(event.location.name not in response.content)
cyberspace_pacific, __ = Location.objects.get_or_create(
name='Cyberspace Pacific',
timezone='US/Pacific'
)
event.location = cyberspace_pacific
event.save()
response = self.client.get(event_page)
eq_(response.status_code, 200)
ok_(event.location.name not in response.content)
def test_old_slug(self):
"""An old slug will redirect properly to the current event page."""
event = Event.objects.get(title='Test event')
old_event_slug = EventOldSlug.objects.create(
event=event,
slug='test-old-slug',
)
response = self.client.get(
reverse('main:event', kwargs={'slug': old_event_slug.slug})
)
self.assertRedirects(
response,
reverse('main:event', kwargs={'slug': old_event_slug.event.slug})
)
def test_calendar_ical(self):
url = self._calendar_url('public')
response_public = self.client.get(url)
eq_(response_public.status_code, 200)
eq_(response_public['Access-Control-Allow-Origin'], '*')
ok_('LOCATION:Mountain View' in response_public.content)
private_url = self._calendar_url('company')
response_private = self.client.get(private_url)
eq_(response_private.status_code, 200)
# Cache tests
event_change = Event.objects.get(id=22)
event_change.title = 'Hello cache clear!'
event_change.save()
response_changed = self.client.get(url)
ok_(response_changed.content != response_public.content)
ok_('cache clear!' in response_changed.content)
def test_calendar_duration(self):
"""Test the behavior of duration in the iCal feed."""
event = Event.objects.get(title='Test event')
url = self._calendar_url('public')
dtend = event.start_time + datetime.timedelta(
seconds=3600)
dtend = dtend.strftime("DTEND:%Y%m%dT%H%M%SZ")
response_public = self.client.get(url)
ok_(dtend in response_public.content)
event.duration = 1234
event.save()
dtend = event.start_time + datetime.timedelta(
seconds=1234)
dtend = dtend.strftime("DTEND:%Y%m%dT%H%M%SZ")
response_public = self.client.get(url)
ok_(dtend in response_public.content)
def test_calendar_ical_cors_cached(self):
url = self._calendar_url('public')
response_public = self.client.get(url)
eq_(response_public.status_code, 200)
eq_(response_public['Access-Control-Allow-Origin'], '*')
ok_('LOCATION:Mountain View' in response_public.content)
response_public = self.client.get(url)
eq_(response_public.status_code, 200)
eq_(response_public['Access-Control-Allow-Origin'], '*')
def test_calendar_with_location(self):
london = Location.objects.create(
name='London',
timezone='Europe/London'
)
event1 = Event.objects.get(title='Test event')
# know your fixtures
assert event1.location.name == 'Mountain View'
event2 = Event.objects.create(
title='Second test event',
description='Anything',
start_time=event1.start_time,
archive_time=event1.archive_time,
privacy=Event.PRIVACY_PUBLIC,
status=event1.status,
placeholder_img=event1.placeholder_img,
location=london
)
event2.channels.add(self.main_channel)
assert event1.location != event2.location
url = self._calendar_url('public')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Test event' in response.content)
ok_('Second test event' in response.content)
response = self.client.get(url, {'location': 'bla bla'})
eq_(response.status_code, 404)
response = self.client.get(url, {'location': event1.location.name})
eq_(response.status_code, 200)
ok_('Test event' in response.content)
ok_('Second test event' not in response.content)
response = self.client.get(url, {'location': event2.location.name})
eq_(response.status_code, 200)
ok_('Test event' not in response.content)
ok_('Second test event' in response.content)
# same can be reached by ID
response = self.client.get(url, {'location': event1.location.id})
eq_(response.status_code, 200)
ok_('Test event' in response.content)
ok_('Second test event' not in response.content)
def test_calendars_page(self):
london = Location.objects.create(
name='London',
timezone='Europe/London'
)
event1 = Event.objects.get(title='Test event')
# know your fixtures
assert event1.location.name == 'Mountain View'
event2 = Event.objects.create(
title='Second test event',
description='Anything',
start_time=event1.start_time,
archive_time=event1.archive_time,
privacy=Event.PRIVACY_PUBLIC,
status=event1.status,
placeholder_img=event1.placeholder_img,
location=london
)
event2.channels.add(self.main_channel)
assert event1.location != event2.location
url = reverse('main:calendars')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('London' in response.content)
ok_('Mountain View' in response.content)
# we can expect three URLs to calendar feeds to be in there
url_all = self._calendar_url('public')
url_lon = self._calendar_url('public', london.pk)
url_mv = self._calendar_url('public', event1.location.pk)
ok_(url_all in response.content)
ok_(url_lon in response.content)
ok_(url_mv in response.content)
# now, log in as a contributor
contributor = User.objects.create_user(
'nigel', '[email protected]', 'secret'
)
UserProfile.objects.create(
user=contributor,
contributor=True
)
assert self.client.login(username='nigel', password='secret')
url = reverse('main:calendars')
response = self.client.get(url)
eq_(response.status_code, 200)
url_all = self._calendar_url('contributors')
url_lon = self._calendar_url('contributors', london.pk)
url_mv = self._calendar_url('contributors', event1.location.pk)
ok_(url_all in response.content)
ok_(url_lon in response.content)
ok_(url_mv in response.content)
# now log in as an employee
User.objects.create_user(
'peterbe', '[email protected]', 'secret'
)
assert self.client.login(username='peterbe', password='secret')
url = reverse('main:calendars')
response = self.client.get(url)
eq_(response.status_code, 200)
url_all = self._calendar_url('company')
url_lon = self._calendar_url('company', london.pk)
url_mv = self._calendar_url('company', event1.location.pk)
ok_(url_all in response.content)
ok_(url_lon in response.content)
ok_(url_mv in response.content)
def test_calendars_page_locations_disappear(self):
london = Location.objects.create(
name='London',
timezone='Europe/London'
)
event1 = Event.objects.get(title='Test event')
# know your fixtures
assert event1.location.name == 'Mountain View'
event2 = Event.objects.create(
title='Second test event',
description='Anything',
start_time=event1.start_time,
archive_time=event1.archive_time,
privacy=Event.PRIVACY_PUBLIC,
status=event1.status,
placeholder_img=event1.placeholder_img,
location=london
)
event2.channels.add(self.main_channel)
assert event1.location != event2.location
url = reverse('main:calendars')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('London' in response.content)
ok_('Mountain View' in response.content)
# we can expect three URLs to calendar feeds to be in there
url_all = self._calendar_url('public')
url_lon = self._calendar_url('public', london.pk)
url_mv = self._calendar_url('public', event1.location.pk)
ok_(url_all in response.content)
ok_(url_lon in response.content)
ok_(url_mv in response.content)
# but, suppose the events belonging to MV is far future
now = timezone.now()
event1.start_time = now + datetime.timedelta(days=20)
event1.save()
# and, suppose the events belong to London is very very old
event2.start_time = now - datetime.timedelta(days=100)
event2.archive_time = now - datetime.timedelta(days=99)
event2.save()
assert event2 in Event.objects.archived().all()
url = reverse('main:calendars')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('London' not in response.content)
ok_('Mountain View' in response.content)
# we can expect three URLs to calendar feeds to be in there
ok_(url_all in response.content)
ok_(url_lon not in response.content)
ok_(url_mv in response.content)
def test_calendars_description(self):
event = Event.objects.get(title='Test event')
event.description = """
Check out the <a href="http://example.com">Example</a> page
and <strong>THIS PAGE</strong> here.
Lorem Ipsum is simply dummy text of the printing and typesetting
industry. Lorem Ipsum has been the industry's standard dummy text
ever since the 1500s, when an unknown printer took a galley of type
and scrambled it to make a type specimen book.
If the text is getting really long it will be truncated.
""".strip()
event.save()
response_public = self.client.get(self._calendar_url('public'))
eq_(response_public.status_code, 200)
ok_('Check out the Example page' in response_public.content)
ok_('and THIS PAGE here' in response_public.content)
ok_('will be truncated' not in response_public.content)
event.short_description = 'One-liner'
event.save()
response_public = self.client.get(self._calendar_url('public'))
eq_(response_public.status_code, 200)
ok_('Check out the' not in response_public.content)
ok_('One-liner' in response_public.content)
def test_filter_by_tags(self):
url = reverse('main:home')
delay = datetime.timedelta(days=1)
event1 = Event.objects.get(title='Test event')
event1.status = Event.STATUS_SCHEDULED
event1.start_time -= delay
event1.archive_time = event1.start_time
event1.save()
eq_(Event.objects.approved().count(), 1)
eq_(Event.objects.archived().count(), 1)
event2 = Event.objects.create(
title='Second test event',
description='Anything',
start_time=event1.start_time,
archive_time=event1.archive_time,
privacy=Event.PRIVACY_PUBLIC,
status=event1.status,
placeholder_img=event1.placeholder_img,
)
event2.channels.add(self.main_channel)
eq_(Event.objects.approved().count(), 2)
eq_(Event.objects.archived().count(), 2)
tag1 = Tag.objects.create(name='tag1')
tag2 = Tag.objects.create(name='tag2')
tag3 = Tag.objects.create(name='tag3')
event1.tags.add(tag1)
event1.tags.add(tag2)
event2.tags.add(tag2)
event2.tags.add(tag3)
# check that both events appear
response = self.client.get(url)
ok_('Test event' in response.content)
ok_('Second test event' in response.content)
response = self.client.get(url, {'tag': 'tag2'})
ok_('Test event' in response.content)
ok_('Second test event' in response.content)
response = self.client.get(url, {'tag': 'tag1'})
ok_('Test event' in response.content)
ok_('Second test event' not in response.content)
response = self.client.get(url, {'tag': 'tag3'})
ok_('Test event' not in response.content)
ok_('Second test event' in response.content)
response = self.client.get(url, {'tag': ['tag1', 'tag3']})
ok_('Test event' in response.content)
ok_('Second test event' in response.content)
response = self.client.get(url, {'tag': 'Bogus'})
eq_(response.status_code, 301)
response = self.client.get(url, {'tag': ['tag1', 'Bogus']})
eq_(response.status_code, 301)
# the good tag stays
ok_('?tag=tag1' in response['Location'])
def test_filter_by_duplicate_tags(self):
"""this is mainly a fix for a legacy situation where you might
have accidentally allowed in two equal tags that are only
different in their case"""
url = reverse('main:home')
delay = datetime.timedelta(days=1)
event1 = Event.objects.get(title='Test event')
event1.status = Event.STATUS_SCHEDULED
event1.start_time -= delay
event1.archive_time = event1.start_time
event1.save()
eq_(Event.objects.approved().count(), 1)
eq_(Event.objects.archived().count(), 1)
event2 = Event.objects.create(
title='Second test event',
description='Anything',
start_time=event1.start_time,
archive_time=event1.archive_time,
privacy=Event.PRIVACY_PUBLIC,
status=event1.status,
placeholder_img=event1.placeholder_img,
)
event2.channels.add(self.main_channel)
eq_(Event.objects.approved().count(), 2)
eq_(Event.objects.archived().count(), 2)
tag1a = Tag.objects.create(name='tag1')
tag1b = Tag.objects.create(name='TAG1')
event1.tags.add(tag1a)
event2.tags.add(tag1b)
# check that both events appear
response = self.client.get(url)
ok_('Test event' in response.content)
ok_('Second test event' in response.content)
response = self.client.get(url, {'tag': 'TaG1'})
ok_('Test event' in response.content)
ok_('Second test event' in response.content)
def test_feed(self):
delay = datetime.timedelta(days=1)
event1 = Event.objects.get(title='Test event')
event1.status = Event.STATUS_SCHEDULED
event1.start_time -= delay
event1.archive_time = event1.start_time
event1.save()
eq_(Event.objects.approved().count(), 1)
eq_(Event.objects.archived().count(), 1)
event = Event.objects.create(
title='Second test event',
description='Anything',
start_time=event1.start_time,
archive_time=event1.archive_time,
privacy=Event.PRIVACY_COMPANY, # Note!
status=event1.status,
placeholder_img=event1.placeholder_img,
)
event.channels.add(self.main_channel)
eq_(Event.objects.approved().count(), 2)
eq_(Event.objects.archived().count(), 2)
url = reverse('main:feed', args=('public',))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Test event' in response.content)
ok_('Second test event' not in response.content)
url = reverse('main:feed') # public feed
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Test event' in response.content)
ok_('Second test event' not in response.content)
url = reverse('main:feed', args=('company',))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Test event' in response.content)
ok_('Second test event' in response.content)
def test_feed_with_webm_format(self):
delay = datetime.timedelta(days=1)
event1 = Event.objects.get(title='Test event')
event1.status = Event.STATUS_SCHEDULED
event1.start_time -= delay
event1.archive_time = event1.start_time
vidly_template = Template.objects.create(
name='Vid.ly Something',
content='<script>'
)
event1.template = vidly_template
event1.template_environment = {'tag': 'abc123'}
event1.save()
eq_(Event.objects.approved().count(), 1)
eq_(Event.objects.archived().count(), 1)
url = reverse('main:feed_format_type', args=('public', 'webm'))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(
'<link>https://vid.ly/abc123?content=video&format=webm</link>'
in response.content
)
def test_feed_cache(self):
delay = datetime.timedelta(days=1)
event = Event.objects.get(title='Test event')
event.start_time -= delay
event.archive_time = event.start_time
event.save()
url = reverse('main:feed')
eq_(Event.objects.approved().count(), 1)
eq_(Event.objects.archived().count(), 1)
response = self.client.get(url)
ok_('Test event' in response.content)
event.title = 'Totally different'
event.save()
response = self.client.get(url)
ok_('Test event' in response.content)
ok_('Totally different' not in response.content)
def test_private_feeds_by_channel(self):
channel = Channel.objects.create(
name='Culture and Context',
slug='culture-and-context',
)
delay = datetime.timedelta(days=1)
event1 = Event.objects.get(title='Test event')
event1.status = Event.STATUS_SCHEDULED
event1.start_time -= delay
event1.archive_time = event1.start_time
event1.save()
event1.channels.clear()
event1.channels.add(channel)
eq_(Event.objects.approved().count(), 1)
eq_(Event.objects.archived().count(), 1)
event = Event.objects.create(
title='Second test event',
description='Anything',
start_time=event1.start_time,
archive_time=event1.archive_time,
privacy=Event.PRIVACY_COMPANY, # Note!
status=event1.status,
placeholder_img=event1.placeholder_img,
)
event.channels.add(channel)
eq_(Event.objects.approved().count(), 2)
eq_(Event.objects.archived().count(), 2)
eq_(Event.objects.filter(channels=channel).count(), 2)
url = reverse(
'main:channel_feed',
args=('culture-and-context', 'public')
)
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Test event' in response.content)
ok_('Second test event' not in response.content)
# public feed
url = reverse(
'main:channel_feed_default',
args=('culture-and-context',)
)
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Test event' in response.content)
ok_('Second test event' not in response.content)
url = reverse(
'main:channel_feed',
args=('culture-and-context', 'company')
)
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Test event' in response.content)
ok_('Second test event' in response.content)
def test_feeds_by_channel_with_webm_format(self):
channel = Channel.objects.create(
name='Culture and Context',
slug='culture-and-context',
)
delay = datetime.timedelta(days=1)
event1 = Event.objects.get(title='Test event')
event1.status = Event.STATUS_SCHEDULED
event1.start_time -= delay
event1.archive_time = event1.start_time
vidly_template = Template.objects.create(
name='Vid.ly Something',
content='<script>'
)
event1.template = vidly_template
event1.template_environment = {'tag': 'abc123'}
event1.save()
event1.channels.clear()
event1.channels.add(channel)
event = Event.objects.create(
title='Second test event',
description='Anything',
start_time=event1.start_time,
archive_time=event1.archive_time,
privacy=Event.PRIVACY_PUBLIC,
status=event1.status,
placeholder_img=event1.placeholder_img,
)
event.channels.add(channel)
eq_(Event.objects.approved().count(), 2)
eq_(Event.objects.archived().count(), 2)
eq_(Event.objects.filter(channels=channel).count(), 2)
url = reverse(
'main:channel_feed_format_type',
args=('culture-and-context', 'public', 'webm')
)
response = self.client.get(url)
eq_(response.status_code, 200)
assert 'Second test event' in response.content
ok_(
'<link>https://vid.ly/abc123?content=video&format=webm</link>'
in response.content
)
def test_rendering_additional_links(self):
event = Event.objects.get(title='Test event')
event.additional_links = 'Google'
event.save()
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Google' in response.content)
event.additional_links = """
Google http://google.com
""".strip()
event.save()
response = self.client.get(url)
ok_(
'Google <a href="http://google.com">http://google.com</a>' in
response.content
)
event.additional_links = """
Google http://google.com\nYahii http://yahii.com
""".strip()
event.save()
response = self.client.get(url)
ok_(
'Google <a href="http://google.com">http://google.com</a><br>'
'Yahii <a href="http://yahii.com">http://yahii.com</a>'
in response.content
)
@mock.patch('airmozilla.manage.vidly.urllib2.urlopen')
def test_event_with_vidly_token_urlerror(self, p_urlopen):
# based on https://bugzilla.mozilla.org/show_bug.cgi?id=811476
event = Event.objects.get(title='Test event')
# first we need a template that uses `vidly_tokenize()`
template = event.template
template.content = """
{% set token = vidly_tokenize(tag, 90) %}
<iframe src="http://s.vid.ly/embeded.html?
link={{ tag }}{% if token %}&token={{ token }}{% endif %}"></iframe>
"""
template.save()
event.template_environment = "tag=abc123"
event.save()
p_urlopen.side_effect = urllib2.URLError('ANGER!')
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Temporary network error' in response.content)
@mock.patch('airmozilla.manage.vidly.urllib2.urlopen')
def test_event_with_vidly_token_badstatusline(self, p_urlopen):
# based on https://bugzilla.mozilla.org/show_bug.cgi?id=842588
event = Event.objects.get(title='Test event')
# first we need a template that uses `vidly_tokenize()`
template = event.template
template.content = """
{% set token = vidly_tokenize(tag, 90) %}
<iframe src="http://s.vid.ly/embeded.html?
link={{ tag }}{% if token %}&token={{ token }}{% endif %}"></iframe>
"""
template.save()
event.template_environment = "tag=abc123"
event.save()
p_urlopen.side_effect = httplib.BadStatusLine('TroubleX')
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Temporary network error' in response.content)
ok_('TroubleX' not in response.content)
def test_404_page_with_side_events(self):
"""404 pages should work when there's stuff in the side bar"""
event1 = Event.objects.get(title='Test event')
now = timezone.now()
event = Event.objects.create(
title='Second test event',
description='Anything',
start_time=now + datetime.timedelta(days=10),
privacy=Event.PRIVACY_PUBLIC,
status=event1.status,
placeholder_img=event1.placeholder_img,
)
event.channels.add(self.main_channel)
event = Event.objects.create(
title='Third test event',
description='Anything',
start_time=now + datetime.timedelta(days=20),
privacy=Event.PRIVACY_COMPANY,
status=event1.status,
placeholder_img=event1.placeholder_img,
)
event.channels.add(self.main_channel)
response = self.client.get(reverse('main:home'))
eq_(response.status_code, 200)
ok_('Second test event' in response.content)
ok_('Third test event' not in response.content)
response = self.client.get('/doesnotexist/')
eq_(response.status_code, 404)
ok_('Second test event' in response.content)
ok_('Third test event' not in response.content)
User.objects.create_user(
'worker', '[email protected]', 'secret'
)
assert self.client.login(username='worker', password='secret')
response = self.client.get(reverse('main:home'))
eq_(response.status_code, 200)
ok_('Second test event' in response.content)
ok_('Third test event' in response.content)
response = self.client.get('/doesnotexist/')
eq_(response.status_code, 404)
ok_('Second test event' in response.content)
ok_('Third test event' in response.content)
def test_render_favicon(self):
# because /favicon.ico isn't necessarily set up in Apache
response = self.client.get('/favicon.ico')
eq_(response.status_code, 200)
ok_content_types = ('image/vnd.microsoft.icon', 'image/x-icon')
        # the content type can differ depending on how the server guesses
        # .ico files; on OS X, for example, it's image/x-icon
ok_(response['Content-Type'] in ok_content_types)
def test_channels_page(self):
channel = Channel.objects.create(
name='Culture & Context',
slug='culture-and-context',
)
Channel.objects.create(
name='Sub-Culture & Subtle-Context',
slug='sub-culture-and-sub-context',
parent=channel
)
# create an archived event that can belong to this channel
event1 = Event.objects.get(title='Test event')
now = timezone.now()
event = Event.objects.create(
title='Third test event',
description='Anything',
start_time=now - datetime.timedelta(days=20),
archive_time=now - datetime.timedelta(days=19),
privacy=Event.PRIVACY_COMPANY,
status=Event.STATUS_SCHEDULED,
placeholder_img=event1.placeholder_img,
)
assert event in list(Event.objects.archived().all())
event.channels.add(channel)
response = self.client.get(reverse('main:channels'))
eq_(response.status_code, 200)
ok_('Main' not in response.content)
ok_('Culture & Context' in response.content)
ok_('Sub-Culture & Subtle-Context' not in response.content)
channel_url = reverse('main:home_channels', args=(channel.slug,))
ok_(channel_url in response.content)
ok_('1 sub-channel' in response.content)
# visiting that channel, there should be a link to the sub channel
response = self.client.get(channel_url)
eq_(response.status_code, 200)
ok_('Sub-Culture & Subtle-Context' in response.content)
event.privacy = Event.PRIVACY_PUBLIC
event.save()
response = self.client.get(reverse('main:channels'))
eq_(response.status_code, 200)
ok_('1 archived event' in response.content)
# make it private again
event.privacy = Event.PRIVACY_COMPANY
event.save()
assert Event.objects.archived().all().count() == 1
# let's say you log in
User.objects.create_user(
'worker', '[email protected]', 'secret'
)
assert self.client.login(username='worker', password='secret')
response = self.client.get(reverse('main:channels'))
eq_(response.status_code, 200)
ok_('1 archived event' in response.content)
# suppose you log in as a contributor
contributor = User.objects.create_user(
'nigel', '[email protected]', 'secret'
)
UserProfile.objects.create(
user=contributor,
contributor=True
)
assert self.client.login(username='nigel', password='secret')
response = self.client.get(reverse('main:channels'))
eq_(response.status_code, 200)
ok_('1 sub-channel' in response.content)
event.privacy = Event.PRIVACY_CONTRIBUTORS
event.save()
response = self.client.get(reverse('main:channels'))
eq_(response.status_code, 200)
ok_('1 archived event' in response.content)
def test_channels_page_without_archived_events(self):
channel = Channel.objects.create(
name='Culture & Context',
slug='culture-and-context',
)
url = reverse('main:channels')
channel_url = reverse('main:home_channels', args=(channel.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(channel_url not in response.content)
# create an event in that channel
event1 = Event.objects.get(title='Test event')
now = timezone.now()
event = Event.objects.create(
title='Third test event',
description='Anything',
start_time=now + datetime.timedelta(days=1),
privacy=Event.PRIVACY_PUBLIC,
status=Event.STATUS_INITIATED,
placeholder_img=event1.placeholder_img,
)
assert event not in list(Event.objects.archived().all())
assert event not in list(Event.objects.live().all())
assert event not in list(Event.objects.upcoming().all())
event.channels.add(channel)
url = reverse('main:channels')
response = self.client.get(url)
eq_(response.status_code, 200)
# still not there because it's not scheduled
ok_(channel_url not in response.content)
# make it upcoming
event.status = Event.STATUS_SCHEDULED
event.save()
assert event in list(Event.objects.upcoming().all())
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(channel_url in response.content)
def test_channel_page(self):
event1 = Event.objects.get(title='Test event')
event1.featured = True
event1.save()
now = timezone.now()
channel = Channel.objects.create(
name='Culture & Context',
slug='culture-and-context',
description="""
<p>The description</p>
""",
image='animage.png',
)
event = Event.objects.create(
title='Second test event',
slug='second-event',
description='Anything',
start_time=now - datetime.timedelta(days=20),
archive_time=now - datetime.timedelta(days=19),
privacy=Event.PRIVACY_PUBLIC,
status=Event.STATUS_SCHEDULED,
placeholder_img=event1.placeholder_img,
)
assert event in list(Event.objects.archived().all())
event.channels.add(channel)
event = Event.objects.create(
title='Third test event',
description='Anything',
start_time=now - datetime.timedelta(days=10),
archive_time=now - datetime.timedelta(days=9),
privacy=Event.PRIVACY_PUBLIC,
status=Event.STATUS_SCHEDULED,
placeholder_img=event1.placeholder_img,
featured=True,
)
assert event in list(Event.objects.archived().all())
event.channels.add(channel)
event = Event.objects.create(
title='Fourth test event',
description='Anything',
start_time=now + datetime.timedelta(days=10),
privacy=Event.PRIVACY_PUBLIC,
status=Event.STATUS_SCHEDULED,
placeholder_img=event1.placeholder_img,
)
assert event in list(Event.objects.upcoming().all())
event.channels.add(channel)
url = reverse('main:home_channels', args=(channel.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('>Culture & Context<' in response.content)
ok_('<p>The description</p>' in response.content)
ok_(channel.description in response.content)
ok_('alt="Culture & Context"' in response.content)
ok_('Test event' not in response.content)
ok_('Second test event' in response.content)
ok_('Third test event' in response.content)
# because the third event is featured, we'll expect to see it
# also in the sidebar
eq_(response.content.count('Third test event'), 2)
# because it's Upcoming
ok_('Fourth test event' in response.content)
        # ...but because it's in the alt text too, multiply by 2
eq_(response.content.count('Fourth test event'), 1 * 2)
# view the channel page when the image is a banner
channel.image_is_banner = True
channel.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('<p>The description</p>' in response.content)
# view one of them from the channel
url = reverse('main:event', args=('second-event',))
response = self.client.get(url)
eq_(response.status_code, 200)
# featured but not this channel
ok_('Test event' not in response.content)
# featured but no event hits
ok_('Third test event' not in response.content)
# upcoming
ok_('Fourth test event' in response.content)
def test_view_channel_in_reverse_order(self):
channel = Channel.objects.create(
name='Culture & Context',
slug='culture-and-context',
description="""
<p>The description</p>
""",
image='animage.png',
)
event = Event.objects.get(title='Test event')
one = Event.objects.create(
title='First Title',
description=event.description,
start_time=event.start_time - datetime.timedelta(2),
archive_time=event.start_time - datetime.timedelta(2),
location=event.location,
placeholder_img=event.placeholder_img,
slug='one',
status=Event.STATUS_SCHEDULED,
privacy=Event.PRIVACY_PUBLIC,
)
one.channels.add(channel)
two = Event.objects.create(
title='Second Title',
description=event.description,
start_time=event.start_time - datetime.timedelta(1),
archive_time=event.start_time - datetime.timedelta(1),
location=event.location,
placeholder_img=event.placeholder_img,
slug='two',
status=Event.STATUS_SCHEDULED,
privacy=Event.PRIVACY_PUBLIC,
)
two.channels.add(channel)
assert one in Event.objects.archived()
assert two in Event.objects.archived()
url = reverse('main:home_channels', args=(channel.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(one.title in response.content)
ok_(two.title in response.content)
ok_(
response.content.find(two.title)
<
response.content.find(one.title)
)
channel.reverse_order = True
channel.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(one.title in response.content)
ok_(two.title in response.content)
ok_(
response.content.find(one.title)
<
response.content.find(two.title)
)
def test_render_home_without_channel(self):
# if there is no "main" channel it gets automatically created
self.main_channel.delete()
ok_(not Channel.objects.filter(slug=settings.DEFAULT_CHANNEL_SLUG))
response = self.client.get(reverse('main:home'))
eq_(response.status_code, 200)
ok_(Channel.objects.filter(slug=settings.DEFAULT_CHANNEL_SLUG))
def test_render_invalid_channel(self):
url = reverse('main:home_channels', args=('junk',))
response = self.client.get(url)
eq_(response.status_code, 404)
def test_channel_page_with_pagination(self):
event1 = Event.objects.get(title='Test event')
now = timezone.now()
channel = Channel.objects.create(
name='Culture & Context',
slug='culture-and-context',
description="""
<p>The description</p>
""",
image='animage.png',
)
for i in range(1, 40):
event = Event.objects.create(
title='%d test event' % i,
description='Anything',
start_time=now - datetime.timedelta(days=100 - i),
archive_time=now - datetime.timedelta(days=99 - i),
privacy=Event.PRIVACY_PUBLIC,
status=Event.STATUS_SCHEDULED,
placeholder_img=event1.placeholder_img,
)
event.channels.add(channel)
url = reverse('main:home_channels', args=(channel.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
prev_url = reverse('main:home_channels', args=(channel.slug, 2))
next_url = reverse('main:home_channels', args=(channel.slug, 1))
ok_(prev_url in response.content)
ok_(next_url not in response.content)
# go to page 2
response = self.client.get(prev_url)
eq_(response.status_code, 200)
prev_url = reverse('main:home_channels', args=(channel.slug, 3))
next_url = reverse('main:home_channels', args=(channel.slug, 1))
ok_(prev_url in response.content)
ok_(next_url in response.content)
def test_home_page_with_pagination(self):
event1 = Event.objects.get(title='Test event')
now = timezone.now()
for i in range(1, 40):
event = Event.objects.create(
title='%d test event' % i,
description='Anything',
start_time=now - datetime.timedelta(days=100 - i),
archive_time=now - datetime.timedelta(days=99 - i),
privacy=Event.PRIVACY_PUBLIC,
status=Event.STATUS_SCHEDULED,
placeholder_img=event1.placeholder_img,
)
event.channels.add(self.main_channel)
url = reverse('main:home')
response = self.client.get(url)
eq_(response.status_code, 200)
prev_url = reverse('main:home', args=(2,))
next_url = reverse('main:home', args=(1,))
ok_(prev_url in response.content)
ok_(next_url not in response.content)
# go to page 2
response = self.client.get(prev_url)
eq_(response.status_code, 200)
prev_url = reverse('main:home', args=(3,))
next_url = reverse('main:home', args=(1,))
ok_(prev_url in response.content)
ok_(next_url in response.content)
def test_sidebar_static_content(self):
# create some flat pages
StaticPage.objects.create(
url='sidebar_top_main',
content='<p>Sidebar Top Main</p>'
)
StaticPage.objects.create(
url='sidebar_bottom_main',
content='<p>Sidebar Bottom Main</p>'
)
StaticPage.objects.create(
url='sidebar_top_testing',
content='<p>Sidebar Top Testing</p>'
)
StaticPage.objects.create(
url='sidebar_bottom_testing',
content='<p>Sidebar Bottom Testing</p>'
)
response = self.client.get('/')
eq_(response.status_code, 200)
ok_('<p>Sidebar Top Main</p>' in response.content)
ok_('<p>Sidebar Bottom Main</p>' in response.content)
ok_('<p>Sidebar Top Testing</p>' not in response.content)
ok_('<p>Sidebar Bottom Testing</p>' not in response.content)
Channel.objects.create(
name='Testing',
slug='testing',
description='Anything'
)
url = reverse('main:home_channels', args=('testing',))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('<p>Sidebar Top Main</p>' not in response.content)
ok_('<p>Sidebar Bottom Main</p>' not in response.content)
ok_('<p>Sidebar Top Testing</p>' in response.content)
ok_('<p>Sidebar Bottom Testing</p>' in response.content)
def test_sidebar_static_content_all_channels(self):
# create some flat pages
StaticPage.objects.create(
url='sidebar_top_*',
content='<p>Sidebar Top All</p>'
)
response = self.client.get('/')
eq_(response.status_code, 200)
ok_('<p>Sidebar Top All</p>' in response.content)
Channel.objects.create(
name='Testing',
slug='testing',
description='Anything'
)
url = reverse('main:home_channels', args=('testing',))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('<p>Sidebar Top All</p>' in response.content)
def test_sidebar_static_content_almost_all_channels(self):
# create some flat pages
StaticPage.objects.create(
url='sidebar_top_*',
content='<p>Sidebar Top All</p>'
)
StaticPage.objects.create(
url='sidebar_top_testing',
content='<p>Sidebar Top Testing</p>'
)
response = self.client.get('/')
eq_(response.status_code, 200)
ok_('<p>Sidebar Top All</p>' in response.content)
ok_('<p>Sidebar Top Testing</p>' not in response.content)
Channel.objects.create(
name='Testing',
slug='testing',
description='Anything'
)
url = reverse('main:home_channels', args=('testing',))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('<p>Sidebar Top All</p>' not in response.content)
ok_('<p>Sidebar Top Testing</p>' in response.content)
def test_view_event_belonging_to_multiple_channels(self):
event = Event.objects.get(title='Test event')
fosdem = Channel.objects.create(
name='Fosdem',
slug='fosdem'
)
event.channels.add(fosdem)
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
def test_event_staticpage_fallback(self):
StaticPage.objects.create(
url='/test-page',
title='Flat Test page',
content='<p>Hi</p>'
)
# you can always reach the staticpage by the long URL
response = self.client.get('/pages/test-page')
eq_(response.status_code, 200)
# or from the root
response = self.client.get('/test-page')
eq_(response.status_code, 301)
self.assertRedirects(
response,
reverse('main:event', args=('test-page',)),
status_code=301
)
response = self.client.get('/test-page/')
eq_(response.status_code, 200)
ok_('Flat Test page' in response.content)
event = Event.objects.get(slug='test-event')
response = self.client.get('/test-event/')
eq_(response.status_code, 200)
# but if the event takes on a slug that clashes with the
# staticpage, the staticpage will have to step aside
event.slug = 'test-page'
event.save()
response = self.client.get('/test-page/')
eq_(response.status_code, 200)
ok_('Flat Test page' not in response.content)
ok_(event.title in response.content)
        # but you can still use the long /pages/ URL
response = self.client.get('/pages/test-page')
eq_(response.status_code, 200)
ok_('Flat Test page' in response.content)
event.slug = 'other-page'
event.save()
assert EventOldSlug.objects.get(slug='test-page')
response = self.client.get('/test-page/')
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=('other-page',))
)
def test_link_to_feed_url(self):
"""every page has a link to the feed that depends on how you're
logged in"""
self.client.logout()
url = reverse('main:home')
feed_url_anonymous = reverse('main:feed', args=('public',))
feed_url_employee = reverse('main:feed', args=('company',))
feed_url_contributor = reverse('main:feed', args=('contributors',))
        def extract_content(content):
            # isolate the <link> tag that precedes type="application/rss+xml"
            return (
                content
                .split('type="application/rss+xml"')[0]
                .split('<link')[-1]
            )
response = self.client.get(url)
eq_(response.status_code, 200)
        content = extract_content(response.content)
ok_(feed_url_anonymous in content)
ok_(feed_url_employee not in content)
ok_(feed_url_contributor not in content)
self.client.logout()
UserProfile.objects.create(
user=User.objects.create_user(
'nigel', '[email protected]', 'secret'
),
contributor=True
)
contributor = User.objects.get(username='nigel')
assert contributor.profile.contributor
assert self.client.login(username='nigel', password='secret')
response = self.client.get(url)
eq_(response.status_code, 200)
        content = extract_content(response.content)
ok_(feed_url_anonymous not in content)
ok_(feed_url_employee not in content)
ok_(feed_url_contributor in content)
User.objects.create_user(
'zandr', '[email protected]', 'secret'
)
assert self.client.login(username='zandr', password='secret')
response = self.client.get(url)
eq_(response.status_code, 200)
        content = extract_content(response.content)
ok_(feed_url_anonymous not in content)
ok_(feed_url_employee in content)
ok_(feed_url_contributor not in content)
def test_view_event_video_only(self):
event = Event.objects.get(title='Test event')
assert event.privacy == Event.PRIVACY_PUBLIC # default
url = reverse('main:event_video', kwargs={'slug': event.slug})
response = self.client.get(url)
eq_(response.status_code, 200)
eq_(response['X-Frame-Options'], 'ALLOWALL')
ok_(event.title in response.content)
def test_view_event_video_only_not_public(self):
event = Event.objects.get(title='Test event')
event.privacy = Event.PRIVACY_COMPANY
event.save()
url = reverse('main:event_video', kwargs={'slug': event.slug})
response = self.client.get(url)
eq_(response.status_code, 200)
eq_(response['X-Frame-Options'], 'ALLOWALL')
ok_("Not a public event" in response.content)
# it won't help to be signed in
User.objects.create_user(
'zandr', '[email protected]', 'secret'
)
assert self.client.login(username='zandr', password='secret')
response = self.client.get(url)
eq_(response.status_code, 200)
eq_(response['X-Frame-Options'], 'ALLOWALL')
ok_("Not a public event" in response.content)
# but that's ignored if you set the ?embedded=false
response = self.client.get(url, {'embedded': False})
eq_(response.status_code, 200)
ok_("Not a public event" not in response.content)
eq_(response['X-Frame-Options'], 'DENY') # back to the default
def test_view_event_video_not_found(self):
url = reverse('main:event_video', kwargs={'slug': 'xxxxxx'})
response = self.client.get(url)
eq_(response.status_code, 200)
eq_(response['X-Frame-Options'], 'ALLOWALL')
ok_("Event not found" in response.content)
def test_tag_cloud(self):
url = reverse('main:tag_cloud')
response = self.client.get(url)
eq_(response.status_code, 200)
# add some events
events = []
event1 = Event.objects.get(title='Test event')
now = timezone.now()
for i in range(1, 10):
event = Event.objects.create(
title='%d test event' % i,
description='Anything',
start_time=now - datetime.timedelta(days=100 - i),
archive_time=now - datetime.timedelta(days=99 - i),
privacy=Event.PRIVACY_PUBLIC,
status=Event.STATUS_SCHEDULED,
placeholder_img=event1.placeholder_img,
)
event.channels.add(self.main_channel)
events.append(event)
tag1 = Tag.objects.create(name='Tag1')
tag2 = Tag.objects.create(name='Tag2')
tag3 = Tag.objects.create(name='Tag3')
tag4 = Tag.objects.create(name='Tag4')
tag5 = Tag.objects.create(name='Tag5')
events[0].tags.add(tag1)
events[0].tags.add(tag2)
events[0].save()
events[1].tags.add(tag1)
events[1].save()
events[2].tags.add(tag2)
events[2].save()
events[3].tags.add(tag3)
events[3].save()
events[4].tags.add(tag3)
events[4].tags.add(tag4)
events[4].privacy = Event.PRIVACY_CONTRIBUTORS
events[4].save()
events[5].tags.add(tag5)
events[5].privacy = Event.PRIVACY_COMPANY
events[5].save()
events[6].tags.add(tag5)
events[6].privacy = Event.PRIVACY_COMPANY
events[6].save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(tag1.name in response.content)
ok_(tag2.name in response.content)
ok_(tag3.name not in response.content)
# view it as a contributor
UserProfile.objects.create(
user=User.objects.create_user(
'nigel', '[email protected]', 'secret'
),
contributor=True
)
assert self.client.login(username='nigel', password='secret')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(tag1.name in response.content)
ok_(tag2.name in response.content)
ok_(tag3.name in response.content)
ok_(tag5.name not in response.content)
# view it as a regular signed in person
User.objects.create_user(
'zandr', '[email protected]', 'secret'
)
assert self.client.login(username='zandr', password='secret')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(tag1.name in response.content)
ok_(tag2.name in response.content)
ok_(tag3.name in response.content)
ok_(tag5.name in response.content)
def test_all_tags(self):
url = reverse('main:all_tags')
response = self.client.get(url)
eq_(response.status_code, 200)
data = json.loads(response.content)
eq_(
sorted(data['tags']),
sorted(x.name for x in Tag.objects.all())
)
def test_calendar_page(self):
url = reverse('main:calendar')
response = self.client.get(url)
eq_(response.status_code, 200)
data_url = reverse('main:calendar_data')
ok_(data_url in response.content)
calendars_url = reverse('main:calendars')
ok_(calendars_url in response.content)
def test_calendar_data(self):
url = reverse('main:calendar_data')
response = self.client.get(url)
eq_(response.status_code, 400)
response = self.client.get(url, {
'start': '2015-02-02',
'end': 'not a number'
})
eq_(response.status_code, 400)
response = self.client.get(url, {
'start': 'not a number',
'end': '2015-02-02'
})
eq_(response.status_code, 400)
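        # build a valid [start, end) range: the first day of the current
        # month through the first day of the next month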
first = datetime.datetime.now()
while first.day != 1:
first -= datetime.timedelta(days=1)
first = first.date()
last = first
while last.month == first.month:
last += datetime.timedelta(days=1)
first_date = first.strftime('%Y-%m-%d')
last_date = last.strftime('%Y-%m-%d')
# start > end
response = self.client.get(url, {
'start': last_date,
'end': first_date
})
eq_(response.status_code, 400)
response = self.client.get(url, {
'start': first_date,
'end': last_date
})
eq_(response.status_code, 200)
structure = json.loads(response.content)
test_event = Event.objects.get(title='Test event')
assert test_event.start_time.date() >= first
assert test_event.start_time.date() < last
assert len(structure) == 1
item, = structure
eq_(item['title'], test_event.title)
eq_(item['url'], reverse('main:event', args=(test_event.slug,)))
def test_calendar_data_privacy(self):
url = reverse('main:calendar_data')
response = self.client.get(url)
first = datetime.datetime.now()
while first.day != 1:
first -= datetime.timedelta(days=1)
first = first.date()
last = first
while last.month == first.month:
last += datetime.timedelta(days=1)
first_date = first.strftime('%Y-%m-%d')
last_date = last.strftime('%Y-%m-%d')
params = {
'start': first_date,
'end': last_date
}
response = self.client.get(url, params)
eq_(response.status_code, 200)
structure = json.loads(response.content)
event = Event.objects.get(title='Test event')
assert first <= event.start_time.date() <= last
item, = structure
eq_(item['title'], event.title)
# make it only available to contributors (and staff of course)
event.privacy = Event.PRIVACY_CONTRIBUTORS
event.save()
response = self.client.get(url, params)
eq_(response.status_code, 200)
structure = json.loads(response.content)
ok_(not structure)
contributor = User.objects.create_user(
'nigel', '[email protected]', 'secret'
)
UserProfile.objects.create(
user=contributor,
contributor=True
)
assert self.client.login(username='nigel', password='secret')
response = self.client.get(url, params)
eq_(response.status_code, 200)
structure = json.loads(response.content)
ok_(structure)
event.privacy = Event.PRIVACY_COMPANY
event.save()
response = self.client.get(url, params)
eq_(response.status_code, 200)
structure = json.loads(response.content)
ok_(not structure)
User.objects.create_user(
'worker', '[email protected]', 'secret'
)
assert self.client.login(username='worker', password='secret')
response = self.client.get(url, params)
eq_(response.status_code, 200)
structure = json.loads(response.content)
ok_(structure)
def test_calendar_data_bogus_dates(self):
url = reverse('main:calendar_data')
response = self.client.get(url, {
'start': '1393196400',
'end': '4444444444444444'
})
eq_(response.status_code, 400)
def test_open_graph_details(self):
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
assert os.path.isfile(event.placeholder_img.path)
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
head = response.content.split('</head>')[0]
ok_('<meta property="og:title" content="%s">' % event.title in head)
from airmozilla.main.helpers import short_desc
ok_(
'<meta property="og:description" content="%s">' % short_desc(event)
in head
)
ok_('<meta property="og:image" content="htt' in head)
absolute_url = 'http://testserver' + url
ok_('<meta property="og:url" content="%s">' % absolute_url in head)
def test_meta_keywords(self):
event = Event.objects.get(title='Test event')
event.save()
event.tags.add(Tag.objects.create(name="One"))
event.tags.add(Tag.objects.create(name="Two"))
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
head = response.content.split('</head>')[0]
content = re.findall(
'<meta name="keywords" content="([^\"]+)">',
head
)[0]
ok_("One" in content)
ok_("Two" in content)
def test_featured_in_sidebar(self):
# use the calendar page so that we only get events that appear
# in the side bar
url = reverse('main:calendar')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Trending' not in response.content)
# set up 3 events
event0 = Event.objects.get(title='Test event')
now = timezone.now()
event1 = Event.objects.create(
title='1 Test Event',
description='Anything',
start_time=now - datetime.timedelta(days=3),
archive_time=now - datetime.timedelta(days=2),
privacy=Event.PRIVACY_PUBLIC,
status=Event.STATUS_SCHEDULED,
placeholder_img=event0.placeholder_img,
)
event1.channels.add(self.main_channel)
event2 = Event.objects.create(
title='2 Test Event',
description='Anything',
start_time=now - datetime.timedelta(days=4),
archive_time=now - datetime.timedelta(days=3),
privacy=Event.PRIVACY_PUBLIC,
status=Event.STATUS_SCHEDULED,
placeholder_img=event0.placeholder_img,
)
event2.channels.add(self.main_channel)
event3 = Event.objects.create(
title='3 Test Event',
description='Anything',
start_time=now - datetime.timedelta(days=5),
archive_time=now - datetime.timedelta(days=4),
privacy=Event.PRIVACY_PUBLIC,
status=Event.STATUS_SCHEDULED,
placeholder_img=event0.placeholder_img,
)
event3.channels.add(self.main_channel)
# now, we can expect all of these three to appear in the side bar
response = self.client.get(url)
eq_(response.status_code, 200)
        # because none of these events have any EventHitStats yet
ok_('Trending' not in response.content)
EventHitStats.objects.create(
event=event1,
total_hits=1000,
shortcode='abc123'
)
EventHitStats.objects.create(
event=event2,
total_hits=1000,
shortcode='xyz123'
)
stats3 = EventHitStats.objects.create(
event=event3,
total_hits=1000,
shortcode='xyz987'
)
# to reset the cache on the sidebar queries, some event
# needs to change
event3.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Trending' in response.content)
ok_(event1.title in response.content)
ok_(event2.title in response.content)
ok_(event3.title in response.content)
# event 1 is top-most because it's the youngest
ok_(
response.content.find(event1.title) <
response.content.find(event2.title) <
response.content.find(event3.title)
)
# boost event3 by making it featured
event3.featured = True
event3.save()
        # the trending score appears to be hits / days_old ** 1.8,
        # doubled for featured events:
        # event3 is 3 days old, has 1000 views, thus
        # score = 2 * 1000 / 3 ** 1.8 ~= 276
        # but event2 is 2 days old with the same number of views, thus
        # score = 1000 / 2 ** 1.8 ~= 287
        # so, give event3 some extra hits
stats3.total_hits += 100
stats3.save()
response = self.client.get(url)
eq_(response.status_code, 200)
# event 1 is top-most because it's the youngest
# but now event3 has gone up a bit
ok_(
response.content.find(event1.title) <
response.content.find(event3.title) <
response.content.find(event2.title)
)
# now, let's make event2 be part of a channel that is supposed to be
# excluded from the Trending sidebar
poison = Channel.objects.create(
name='Poisonous',
exclude_from_trending=True
)
event2.channels.add(poison)
all_but_event2 = Event.objects.exclude(
channels__exclude_from_trending=True
)
assert event2 not in all_but_event2
cache.clear()
response = self.client.get(url)
eq_(response.status_code, 200)
        # event2's channel is excluded from trending, so event2 should
        # no longer appear in the sidebar at all
ok_(event2.title not in response.content)
def test_featured_sidebar_for_contributors(self):
"""if you're a contributor your shouldn't be tempted to see private
events in the sidebar of featured events"""
# use the calendar page so that we only get events that appear
# in the side bar
url = reverse('main:calendar')
# set up 3 events
event0 = Event.objects.get(title='Test event')
now = timezone.now()
event1 = Event.objects.create(
title='1 Test Event',
description='Anything',
start_time=now - datetime.timedelta(days=3),
archive_time=now - datetime.timedelta(days=2),
privacy=Event.PRIVACY_PUBLIC,
status=Event.STATUS_SCHEDULED,
placeholder_img=event0.placeholder_img,
)
event1.channels.add(self.main_channel)
event2 = Event.objects.create(
title='2 Test Event',
description='Anything',
start_time=now - datetime.timedelta(days=4),
archive_time=now - datetime.timedelta(days=3),
privacy=Event.PRIVACY_CONTRIBUTORS,
status=Event.STATUS_SCHEDULED,
placeholder_img=event0.placeholder_img,
)
event2.channels.add(self.main_channel)
event3 = Event.objects.create(
title='3 Test Event',
description='Anything',
start_time=now - datetime.timedelta(days=5),
archive_time=now - datetime.timedelta(days=4),
privacy=Event.PRIVACY_COMPANY,
status=Event.STATUS_SCHEDULED,
placeholder_img=event0.placeholder_img,
)
event3.channels.add(self.main_channel)
EventHitStats.objects.create(
event=event1,
total_hits=1000,
shortcode='abc123'
)
EventHitStats.objects.create(
event=event2,
total_hits=1000,
shortcode='xyz123'
)
EventHitStats.objects.create(
event=event3,
total_hits=1000,
shortcode='xyz987'
)
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Trending' in response.content)
ok_(event1.title in response.content)
ok_(event2.title not in response.content)
ok_(event3.title not in response.content)
# sign in as a contributor
UserProfile.objects.create(
user=User.objects.create_user(
'peterbe', '[email protected]', 'secret'
),
contributor=True
)
assert self.client.login(username='peterbe', password='secret')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Trending' in response.content)
ok_(event1.title in response.content)
ok_(event2.title in response.content)
ok_(event3.title not in response.content)
# sign in as staff
User.objects.create_user(
'zandr', '[email protected]', 'secret'
)
assert self.client.login(username='zandr', password='secret')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Trending' in response.content)
ok_(event1.title in response.content)
ok_(event2.title in response.content)
ok_(event3.title in response.content)
@mock.patch('logging.error')
@mock.patch('requests.get')
def test_view_curated_group_event(self, rget, rlogging):
def mocked_get(url, **options):
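            # simulate the mozillians.org API: a user lookup response plus
            # two pages of group results (offset=0 and offset=500)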
if 'peterbe' in url:
return Response(VOUCHED_FOR)
if 'offset=0' in url:
return Response(GROUPS1)
if 'offset=500' in url:
return Response(GROUPS2)
raise NotImplementedError(url)
rget.side_effect = mocked_get
# sign in as a contributor
UserProfile.objects.create(
user=User.objects.create_user(
'peterbe', '[email protected]', 'secret'
),
contributor=True
)
assert self.client.login(username='peterbe', password='secret')
event = Event.objects.get(title='Test event')
event.privacy = Event.PRIVACY_COMPANY
event.save()
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 302)
permission_denied_url = reverse(
'main:permission_denied',
args=(event.slug,)
)
self.assertRedirects(response, permission_denied_url)
event.privacy = Event.PRIVACY_CONTRIBUTORS
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.title in response.content)
        # make it so that viewing the event requires that you're in a
        # certain group
vip_group = CuratedGroup.objects.create(
event=event,
name='vip',
url='https://mozillians.org/vip',
)
response = self.client.get(url)
eq_(response.status_code, 302)
self.assertRedirects(response, permission_denied_url)
# and view that page
response = self.client.get(permission_denied_url)
eq_(response.status_code, 200)
ok_(vip_group.url in response.content)
CuratedGroup.objects.create(
event=event,
name='ugly tuna',
url='https://mozillians.org/ugly-tuna',
)
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(re.findall(
'This event is available only to staff and Mozilla volunteers '
'who are members of the\s+ugly tuna\s+or\s+vip\s+group.',
response.content,
re.M
))
@mock.patch('logging.error')
@mock.patch('requests.get')
def test_view_curated_group_event_as_staff(self, rget, rlogging):
def mocked_get(url, **options):
if 'peterbe' in url:
return Response(VOUCHED_FOR)
if 'offset=0' in url:
return Response(GROUPS1)
if 'offset=500' in url:
return Response(GROUPS2)
raise NotImplementedError(url)
rget.side_effect = mocked_get
# sign in as a member of staff
User.objects.create_user(
'mary', '[email protected]', 'secret'
)
assert self.client.login(username='mary', password='secret')
event = Event.objects.get(title='Test event')
event.privacy = Event.PRIVACY_CONTRIBUTORS
event.save()
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.title in response.content)
        # make it so that viewing the event requires that you're in a
        # certain group
CuratedGroup.objects.create(
event=event,
name='vip',
url='https://mozillians.org/vip',
)
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.title in response.content)
def test_view_removed_event(self):
event = Event.objects.get(title='Test event')
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
event.status = Event.STATUS_REMOVED
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('This event is no longer available.' in response.content)
ok_(event.title in response.content)
# let's view it as a signed in user
# shouldn't make a difference
user = self._login()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('This event is no longer available.' in response.content)
ok_(event.title in response.content)
# but if signed in as a superuser, you can view it
user.is_superuser = True
user.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('This event is no longer available.' not in response.content)
ok_(event.title in response.content)
# but there is a flash message warning on the page that says...
ok_(
'Event is not publicly visible - not scheduled.'
in response.content
)
def test_edgecast_smil(self):
url = reverse('main:edgecast_smil')
response = self.client.get(url, {
'venue': 'Something',
'token': 'XXXX'
})
eq_(response.status_code, 200)
eq_(response['Content-Type'], 'application/smil')
eq_(response['Access-Control-Allow-Origin'], '*')
ok_('XXXX' in response.content)
ok_('/Restricted/' in response.content)
ok_('Something' in response.content)
# do it once without `token`
response = self.client.get(url, {
'venue': 'Something',
})
eq_(response.status_code, 200)
ok_('/Restricted/' not in response.content)
ok_('Something' in response.content)
def test_crossdomain_xml(self):
url = reverse('main:crossdomain_xml')
response = self.client.get(url)
eq_(response.status_code, 200)
eq_(response['Content-Type'], 'text/xml')
eq_(response['Access-Control-Allow-Origin'], '*')
ok_('<allow-access-from domain="*" />' in response.content)
def test_picture_over_placeholder(self):
event = Event.objects.get(title='Test event')
assert event in Event.objects.live()
self._attach_file(event, self.main_image)
assert os.path.isfile(event.placeholder_img.path)
response = self.client.get('/')
assert event.title in response.content
doc = pyquery.PyQuery(response.content)
img, = doc('.tag-live img')
eq_(img.attrib['width'], '160')
live_src = img.attrib['src']
with open(self.main_image) as fp:
picture = Picture.objects.create(file=File(fp))
event.picture = picture
event.save()
response = self.client.get('/')
assert event.title in response.content
doc = pyquery.PyQuery(response.content)
img, = doc('.tag-live img')
live_src_after = img.attrib['src']
ok_(live_src != live_src_after)
# make it not a live event
now = timezone.now()
yesterday = now - datetime.timedelta(days=1)
event.archive_time = yesterday
event.start_time = yesterday
event.picture = None
event.save()
assert event not in Event.objects.live()
assert event in Event.objects.archived()
response = self.client.get('/')
assert event.title in response.content
doc = pyquery.PyQuery(response.content)
img, = doc('article img')
eq_(img.attrib['width'], '160')
archived_src = img.attrib['src']
# put the picture back on
event.picture = picture
event.save()
response = self.client.get('/')
doc = pyquery.PyQuery(response.content)
img, = doc('article img')
archived_src_after = img.attrib['src']
ok_(archived_src_after != archived_src)
# now make it appear in the upcoming
event.archive_time = None
tomorrow = now + datetime.timedelta(days=1)
event.start_time = tomorrow
event.picture = None
event.save()
assert event not in Event.objects.live()
assert event not in Event.objects.archived()
assert event in Event.objects.upcoming()
response = self.client.get('/')
assert event.title in response.content
doc = pyquery.PyQuery(response.content)
img, = doc('aside img') # side event
eq_(img.attrib['width'], '160')
upcoming_src = img.attrib['src']
# put the picture back on
event.picture = picture
event.save()
response = self.client.get('/')
doc = pyquery.PyQuery(response.content)
img, = doc('aside img')
upcoming_src_after = img.attrib['src']
ok_(upcoming_src_after != upcoming_src)
def test_view_event_without_location(self):
event = Event.objects.get(title='Test event')
location = Location.objects.create(
name='London',
timezone='Europe/London'
)
event.location = location
now = timezone.now()
tomorrow = now + datetime.timedelta(days=1)
event.start_time = tomorrow
event.save()
assert event in Event.objects.upcoming()
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('London' in response.content)
location.delete()
# reload
event = Event.objects.get(id=event.id)
ok_(event.location is None)
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('London' not in response.content)
# the start time will be described in UTC
ok_(event.start_time.strftime('%H:%M %Z') in response.content)
def test_view_upcoming_event_without_placeholder_img(self):
"""This is a stupidity fix for
https://bugzilla.mozilla.org/show_bug.cgi?id=1110004
where you try to view an *upcoming* event (which doesn't have a
video) that doesn't have a placeholder_img upload.
"""
event = Event.objects.get(title='Test event')
event.archive_time = None
event.start_time = timezone.now() + datetime.timedelta(days=1)
event.placeholder_img = None
with open(self.main_image) as fp:
picture = Picture.objects.create(file=File(fp))
event.picture = picture
event.save()
assert event in Event.objects.upcoming()
url = reverse('main:event', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 200)
def test_unpicked_pictures(self):
event = Event.objects.get(title='Test event')
event.start_time -= datetime.timedelta(days=1)
event.archive_time = event.start_time
event.save()
assert event in Event.objects.archived()
assert event.privacy == Event.PRIVACY_PUBLIC
edit_url = reverse('main:event_edit', args=(event.slug,))
url = reverse('main:unpicked_pictures')
response = self.client.get(url)
# because we're not logged in
eq_(response.status_code, 302)
self._login()
response = self.client.get(url)
eq_(response.status_code, 200)
# but it doesn't appear because it has no pictures
ok_(edit_url not in response.content)
with open(self.main_image) as fp:
picture = Picture.objects.create(
file=File(fp),
notes='general picture'
)
event.picture = picture
# but also make a screencap available
picture2 = Picture.objects.create(
file=File(fp),
event=event,
notes='screencap 1'
)
response = self.client.get(url)
eq_(response.status_code, 200)
        # now it appears because the event has unpicked pictures
ok_(edit_url in response.content)
event.picture = picture2
event.save()
# now it shouldn't be offered because it already has a picture
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(edit_url not in response.content)
def test_unpicked_pictures_contributor(self):
event = Event.objects.get(title='Test event')
event.start_time -= datetime.timedelta(days=1)
event.archive_time = event.start_time
event.save()
assert event in Event.objects.archived()
assert event.privacy == Event.PRIVACY_PUBLIC
edit_url = reverse('main:event_edit', args=(event.slug,))
url = reverse('main:unpicked_pictures')
with open(self.main_image) as fp:
# but also make a screencap available
Picture.objects.create(
file=File(fp),
event=event,
notes='screencap 1'
)
user = self._login()
UserProfile.objects.create(
user=user,
contributor=True
)
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(edit_url in response.content)
# and it should continue to be offered if the event is...
event.privacy = Event.PRIVACY_CONTRIBUTORS
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(edit_url in response.content)
# but not if it's only company
event.privacy = Event.PRIVACY_COMPANY
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(edit_url not in response.content) # note the not
def test_hd_download_links(self):
event = Event.objects.get(title='Test event')
vidly = Template.objects.create(
name="Vid.ly HD",
content='<iframe src="{{ tag }}"></iframe>'
)
event.template = vidly
event.template_environment = {'tag': 'abc123'}
event.save()
vidly_submission = VidlySubmission.objects.create(
event=event,
url='https://s3.amazonaws.com/airmozilla/example.mp4',
tag='abc123',
hd=True
)
url = reverse('main:event', kwargs={'slug': event.slug})
response = self.client.get(url)
eq_(response.status_code, 200)
assert event.privacy == Event.PRIVACY_PUBLIC
ok_(
'https://vid.ly/abc123?content=video&format=webm'
in response.content
)
ok_(
'https://vid.ly/abc123?content=video&format=mp4'
in response.content
)
ok_(
'https://vid.ly/abc123?content=video&format=hd_webm'
in response.content
)
ok_(
'https://vid.ly/abc123?content=video&format=hd_mp4'
in response.content
)
vidly_submission.hd = False
vidly_submission.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(
'https://vid.ly/abc123?content=video&format=hd_webm'
not in response.content
)
ok_(
'https://vid.ly/abc123?content=video&format=hd_mp4'
not in response.content
)
@mock.patch('requests.get')
def test_contributors_page(self, rget):
def mocked_get(url, **options):
            # we need to deconstruct the VOUCHED_FOR fixture
# and put it together with some dummy data
result = json.loads(VOUCHED_FOR)
objects = result['objects']
assert len(objects) == 1
objects[0]['username'] = 'peterbe'
cp = copy.copy(objects[0])
cp['username'] = 'nophoto'
cp['photo'] = ''
objects.append(cp)
cp = copy.copy(cp)
cp['username'] = 'notvouched'
cp['photo'] = 'http://imgur.com/a.jpg'
cp['is_vouched'] = False
objects.append(cp)
result['objects'] = objects
assert len(objects) == 3
return Response(json.dumps(result))
rget.side_effect = mocked_get
url = reverse('main:contributors')
contributors = (
'peterbe',
'nophoto',
'notfound',
'notvouched'
)
with self.settings(CONTRIBUTORS=contributors):
response = self.client.get(url)
eq_(response.status_code, 200)
user = json.loads(VOUCHED_FOR)['objects'][0]
ok_(user['full_name'] in response.content)
ok_(user['url'] in response.content)
def test_event_duration(self):
event = Event.objects.get(title='Test event')
event.duration = 3840
event.save()
url = reverse('main:event', kwargs={'slug': event.slug})
response = self.client.get(url)
eq_(response.status_code, 200)
assert event.privacy == Event.PRIVACY_PUBLIC
ok_('Duration: 1 hour 4 minutes' in response.content)
event.duration = 49
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Duration: 49 seconds' in response.content)
def test_executive_summary(self):
"""Note! The Executive Summary page is a very low priority page.
For example, it's not linked to from any other page.
Hence, the test is very sparse and just makes sure it renders.
"""
url = reverse('main:executive_summary')
response = self.client.get(url)
eq_(response.status_code, 200)
response = self.client.get(url, {'start': 'xxx'})
eq_(response.status_code, 400)
response = self.client.get(url, {'start': '2015-01-05'})
eq_(response.status_code, 200)
ok_('Week of 05 - 11 January 2015' in response.content)
ok_('"?start=2014-12-29"' in response.content)
ok_('"?start=2015-01-12"' in response.content)
# make the subtitle span two different months
response = self.client.get(url, {'start': '2014-12-29'})
eq_(response.status_code, 200)
ok_('Week of 29 December - 04 January 2015' in response.content)
response = self.client.get(url, {'start': '2015-01-04'})
# a valid date but not a Monday
eq_(response.status_code, 400)
def test_event_livehits(self):
def get_hits(resp):
eq_(resp.status_code, 200)
return json.loads(resp.content)['hits']
event = Event.objects.get(title='Test event')
assert event.is_live()
url = reverse('main:event_livehits', args=(event.id,))
response = self.client.get(url)
eq_(get_hits(response), 0)
        # post to it once
response = self.client.post(url)
eq_(get_hits(response), 1)
eq_(EventLiveHits.objects.get(event=event).total_hits, 1)
# another get
response = self.client.get(url)
eq_(get_hits(response), 1)
        # another post from the same client doesn't count again
response = self.client.post(url)
eq_(get_hits(response), 1)
eq_(EventLiveHits.objects.get(event=event).total_hits, 1)
# change something about our request
response = self.client.post(url, HTTP_USER_AGENT='Mozilla/Django')
eq_(get_hits(response), 2)
eq_(EventLiveHits.objects.get(event=event).total_hits, 2)
# be signed in
self._login()
response = self.client.post(url)
eq_(get_hits(response), 3)
eq_(EventLiveHits.objects.get(event=event).total_hits, 3)
# and a second time as signed in
response = self.client.post(url)
eq_(get_hits(response), 3)
eq_(EventLiveHits.objects.get(event=event).total_hits, 3)
class TestEventEdit(DjangoTestCase):
fixtures = ['airmozilla/manage/tests/main_testdata.json']
main_image = 'airmozilla/manage/tests/firefox.png'
other_image = 'airmozilla/manage/tests/other_logo.png'
third_image = 'airmozilla/manage/tests/other_logo_reversed.png'
def _event_to_dict(self, event):
from airmozilla.main.views import EventEditView
return EventEditView.event_to_dict(event)
def test_link_to_edit(self):
event = Event.objects.get(title='Test event')
response = self.client.get(reverse('main:event', args=(event.slug,)))
eq_(response.status_code, 200)
url = reverse('main:event_edit', args=(event.slug,))
ok_(url not in response.content)
self._login()
response = self.client.get(reverse('main:event', args=(event.slug,)))
eq_(response.status_code, 200)
ok_(url in response.content)
def test_cant_view(self):
event = Event.objects.get(title='Test event')
url = reverse('main:event_edit', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=(event.slug,))
)
response = self.client.post(url, {})
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=(event.slug,))
)
def test_edit_title(self):
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
url = reverse('main:event_edit', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 302)
user = self._login()
response = self.client.get(url)
eq_(response.status_code, 200)
data = self._event_to_dict(event)
previous = json.dumps(data)
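        # 'previous' is a JSON snapshot of the event as it was when the form
        # was loaded; the edit view uses it to detect conflicting concurrent
        # edits (see test_edit_conflict below)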
data = {
'event_id': event.id,
'previous': previous,
'title': 'Different title',
'short_description': event.short_description,
'description': event.description,
'additional_links': event.additional_links,
'tags': ', '.join(x.name for x in event.tags.all()),
'channels': [x.pk for x in event.channels.all()]
}
response = self.client.post(url, data)
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=(event.slug,))
)
# this should have created 2 EventRevision objects.
initial, current = EventRevision.objects.all().order_by('created')
eq_(initial.event, event)
eq_(current.event, event)
eq_(initial.user, None)
eq_(current.user, user)
eq_(initial.title, 'Test event')
eq_(current.title, 'Different title')
# reload the event
event = Event.objects.get(pk=event.pk)
eq_(event.title, 'Different title')
def test_edit_nothing(self):
"""basically pressing save without changing anything"""
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
url = reverse('main:event_edit', args=(event.slug,))
data = self._event_to_dict(event)
previous = json.dumps(data)
data = {
'event_id': event.id,
'previous': previous,
'title': event.title,
'short_description': event.short_description,
'description': event.description,
'additional_links': event.additional_links,
'tags': ', '.join(x.name for x in event.tags.all()),
'channels': [x.pk for x in event.channels.all()]
}
response = self.client.post(url, data)
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=(event.slug,))
)
self._login()
response = self.client.post(url, data)
eq_(response.status_code, 302)
ok_(not EventRevision.objects.all())
def test_edit_no_image(self):
"""basically pressing save without changing anything"""
event = Event.objects.get(title='Test event')
event.placeholder_img = None
event.save()
url = reverse('main:event_edit', args=(event.slug,))
data = self._event_to_dict(event)
previous = json.dumps(data)
data = {
'event_id': event.id,
'previous': previous,
'title': event.title,
'short_description': event.short_description,
'description': event.description,
'additional_links': event.additional_links,
'tags': ', '.join(x.name for x in event.tags.all()),
'channels': [x.pk for x in event.channels.all()]
}
response = self.client.post(url, data)
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=(event.slug,))
)
self._login()
response = self.client.post(url, data)
eq_(response.status_code, 200)
ok_('Events needs to have a picture' in
response.context['form'].errors['__all__'])
ok_('Events needs to have a picture' in response.content)
def test_bad_edit_title(self):
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
url = reverse('main:event_edit', args=(event.slug,))
self._login()
data = self._event_to_dict(event)
previous = json.dumps(data)
data = {
'event_id': event.id,
'previous': previous,
'title': '',
'short_description': event.short_description,
'description': event.description,
'additional_links': event.additional_links,
'tags': ', '.join(x.name for x in event.tags.all()),
'channels': [x.pk for x in event.channels.all()]
}
response = self.client.post(url, data)
eq_(response.status_code, 200)
ok_('This field is required' in response.content)
def test_edit_on_bad_url(self):
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
url = reverse('main:event_edit', args=('xxx',))
response = self.client.get(url)
eq_(response.status_code, 404)
old_slug = event.slug
event.slug = 'new-slug'
event.save()
data = self._event_to_dict(event)
previous = json.dumps(data)
data = {
'previous': previous,
'title': event.title,
'short_description': event.short_description,
'description': event.description,
'additional_links': event.additional_links,
'tags': ', '.join(x.name for x in event.tags.all()),
'channels': [x.pk for x in event.channels.all()]
}
url = reverse('main:event_edit', args=(old_slug,))
response = self.client.get(url)
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=(event.slug,))
)
response = self.client.post(url, data)
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=(event.slug,))
)
url = reverse('main:event_edit', args=(event.slug,))
response = self.client.get(url)
# because you're not allowed to view it
eq_(response.status_code, 302)
url = reverse('main:event_edit', args=(event.slug,))
response = self.client.post(url, data)
# because you're not allowed to view it, still
eq_(response.status_code, 302)
def test_edit_all_simple_fields(self):
"""similar to test_edit_title() but changing all fields
other than the placeholder_img
"""
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
assert event.tags.all()
assert event.channels.all()
url = reverse('main:event_edit', args=(event.slug,))
self._login()
data = self._event_to_dict(event)
previous = json.dumps(data)
new_channel = Channel.objects.create(
name='New Stuff',
slug='new-stuff'
)
new_channel2 = Channel.objects.create(
name='New Stuff II',
slug='new-stuff-2'
)
data = {
'event_id': event.id,
'previous': previous,
'title': 'Different title',
'short_description': 'new short description',
'description': 'new description',
'additional_links': 'new additional_links',
'tags': 'newtag',
'channels': [new_channel.pk, new_channel2.pk]
}
response = self.client.post(url, data)
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=(event.slug,))
)
# this should have created 2 EventRevision objects.
initial, current = EventRevision.objects.all().order_by('created')
eq_(initial.event, event)
eq_(initial.title, 'Test event')
eq_(current.title, 'Different title')
# reload the event
event = Event.objects.get(pk=event.pk)
eq_(event.title, 'Different title')
eq_(event.description, 'new description')
eq_(event.short_description, 'new short description')
eq_(event.additional_links, 'new additional_links')
eq_(
sorted(x.name for x in event.tags.all()),
['newtag']
)
eq_(
sorted(x.name for x in event.channels.all()),
['New Stuff', 'New Stuff II']
)
def test_edit_recruitmentmessage(self):
"""Change the revision message from nothing, to something
to another one.
"""
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
assert event.tags.all()
assert event.channels.all()
url = reverse('main:event_edit', args=(event.slug,))
user = self._login()
data = self._event_to_dict(event)
previous = json.dumps(data)
msg1 = RecruitmentMessage.objects.create(
text='Web Developer',
url='http://careers.mozilla.com/123',
active=True
)
msg2 = RecruitmentMessage.objects.create(
text='C++ Developer',
url='http://careers.mozilla.com/456',
active=True
)
msg3 = RecruitmentMessage.objects.create(
text='Fortran Developer',
url='http://careers.mozilla.com/000',
active=False # Note!
)
# if you don't have the right permission, you can't see this choice
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Recruitment message' not in response.content)
# give the user the necessary permission
recruiters = Group.objects.create(name='Recruiters')
permission = Permission.objects.get(
codename='change_recruitmentmessage'
)
recruiters.permissions.add(permission)
user.groups.add(recruiters)
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Recruitment message' in response.content)
ok_(msg1.text in response.content)
ok_(msg2.text in response.content)
ok_(msg3.text not in response.content) # not active
with open('airmozilla/manage/tests/firefox.png') as fp:
picture = Picture.objects.create(file=File(fp))
data = {
'event_id': event.id,
'previous': previous,
'recruitmentmessage': msg1.pk,
'title': event.title,
'picture': picture.id,
'description': event.description,
'short_description': event.short_description,
'channels': [x.id for x in event.channels.all()],
'tags': [x.name for x in event.tags.all()],
}
response = self.client.post(url, data)
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=(event.slug,))
)
# this should have created 2 EventRevision objects.
initial, current = EventRevision.objects.all().order_by('created')
eq_(initial.event, event)
ok_(not initial.recruitmentmessage)
eq_(current.recruitmentmessage, msg1)
# reload the event
event = Event.objects.get(pk=event.pk)
eq_(event.recruitmentmessage, msg1)
# now change it to another message
data = self._event_to_dict(event)
previous = json.dumps(data)
data['recruitmentmessage'] = msg2.pk
data['previous'] = previous
response = self.client.post(url, data)
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=(event.slug,))
)
# reload the event
event = Event.objects.get(pk=event.pk)
eq_(event.recruitmentmessage, msg2)
initial, __, current = (
EventRevision.objects.all().order_by('created')
)
eq_(current.recruitmentmessage, msg2)
# lastly, change it to blank
data = self._event_to_dict(event)
previous = json.dumps(data)
data['recruitmentmessage'] = ''
data['previous'] = previous
response = self.client.post(url, data)
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=(event.slug,))
)
# reload the event
event = Event.objects.get(pk=event.pk)
eq_(event.recruitmentmessage, None)
initial, __, __, current = (
EventRevision.objects.all().order_by('created')
)
eq_(current.recruitmentmessage, None)
def test_edit_placeholder_img(self):
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
url = reverse('main:event_edit', args=(event.slug,))
self._login()
old_placeholder_img_path = event.placeholder_img.path
data = self._event_to_dict(event)
previous = json.dumps(data)
with open(self.other_image) as fp:
data = {
'event_id': event.id,
'previous': previous,
'title': event.title,
'short_description': event.short_description,
'description': event.description,
'additional_links': event.additional_links,
'tags': ', '.join(x.name for x in event.tags.all()),
'channels': [x.pk for x in event.channels.all()],
'placeholder_img': fp,
}
response = self.client.post(url, data)
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=(event.slug,))
)
# this should have created 2 EventRevision objects.
initial, current = EventRevision.objects.all().order_by('created')
ok_(initial.placeholder_img)
ok_(current.placeholder_img)
# reload the event
event = Event.objects.get(pk=event.pk)
new_placeholder_img_path = event.placeholder_img.path
ok_(old_placeholder_img_path != new_placeholder_img_path)
ok_(os.path.isfile(old_placeholder_img_path))
ok_(os.path.isfile(new_placeholder_img_path))
def test_edit_placeholder_img_to_unselect_picture(self):
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
# also, let's pretend the event has a picture already selected
with open(self.main_image) as fp:
picture = Picture.objects.create(file=File(fp))
event.picture = picture
event.save()
url = reverse('main:event_edit', args=(event.slug,))
self._login()
old_placeholder_img_path = event.placeholder_img.path
data = self._event_to_dict(event)
previous = json.dumps(data)
with open(self.other_image) as fp:
data = {
'event_id': event.id,
'previous': previous,
'title': event.title,
'short_description': event.short_description,
'description': event.description,
'additional_links': event.additional_links,
'tags': ', '.join(x.name for x in event.tags.all()),
'channels': [x.pk for x in event.channels.all()],
'placeholder_img': fp,
                # this is a hidden field that is always submitted with the form
'picture': picture.id,
}
response = self.client.post(url, data)
eq_(response.status_code, 302)
self.assertRedirects(
response,
reverse('main:event', args=(event.slug,))
)
# this should have created 2 EventRevision objects.
initial, current = EventRevision.objects.all().order_by('created')
ok_(initial.placeholder_img)
ok_(current.placeholder_img)
ok_(not current.picture)
# reload the event
event = Event.objects.get(pk=event.pk)
ok_(not event.picture)
new_placeholder_img_path = event.placeholder_img.path
ok_(old_placeholder_img_path != new_placeholder_img_path)
ok_(os.path.isfile(old_placeholder_img_path))
ok_(os.path.isfile(new_placeholder_img_path))
def test_edit_conflict(self):
"""You can't edit the title if someone else edited it since the
'previous' JSON dump was taken."""
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
url = reverse('main:event_edit', args=(event.slug,))
self._login()
data = self._event_to_dict(event)
previous = json.dumps(data)
event.title = 'Sneak Edit'
event.save()
data = {
'event_id': event.id,
'previous': previous,
'title': 'Different title',
'short_description': event.short_description,
'description': event.description,
'additional_links': event.additional_links,
'tags': ', '.join(x.name for x in event.tags.all()),
'channels': [x.pk for x in event.channels.all()]
}
response = self.client.post(url, data)
eq_(response.status_code, 200)
ok_('Conflict error!' in response.content)
def test_edit_conflict_on_placeholder_img(self):
"""You can't edit the title if someone else edited it since the
'previous' JSON dump was taken."""
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
url = reverse('main:event_edit', args=(event.slug,))
self._login()
data = self._event_to_dict(event)
previous = json.dumps(data)
self._attach_file(event, self.other_image)
with open(self.third_image) as fp:
data = {
'event_id': event.id,
'previous': previous,
'title': event.title,
'short_description': event.short_description,
'description': event.description,
'additional_links': event.additional_links,
'tags': ', '.join(x.name for x in event.tags.all()),
'channels': [x.pk for x in event.channels.all()],
'placeholder_img': fp
}
response = self.client.post(url, data)
eq_(response.status_code, 200)
ok_('Conflict error!' in response.content)
def test_edit_conflict_near_miss(self):
"""If the event changes between the time you load the edit page
and you pressing 'Save' it shouldn't be a problem as long as
you're changing something different."""
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
url = reverse('main:event_edit', args=(event.slug,))
self._login()
data = self._event_to_dict(event)
previous = json.dumps(data)
event.title = 'Sneak Edit'
event.save()
data = {
'event_id': event.id,
'previous': previous,
'title': 'Test event',
'short_description': 'new short description',
'description': event.description,
'additional_links': event.additional_links,
'tags': ', '.join(x.name for x in event.tags.all()),
'channels': [x.pk for x in event.channels.all()]
}
response = self.client.post(url, data)
eq_(response.status_code, 302)
event = Event.objects.get(pk=event.pk)
eq_(event.title, 'Sneak Edit')
eq_(event.short_description, 'new short description')
def test_view_revision_change_links(self):
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
url = reverse('main:event_edit', args=(event.slug,))
user = self._login()
data = self._event_to_dict(event)
previous = json.dumps(data)
data = {
'event_id': event.id,
'previous': previous,
'title': 'Test event',
'short_description': 'new short description',
'description': event.description,
'additional_links': event.additional_links,
'tags': ', '.join(x.name for x in event.tags.all()),
'channels': [x.pk for x in event.channels.all()]
}
response = self.client.post(url, data)
eq_(response.status_code, 302)
eq_(EventRevision.objects.filter(event=event).count(), 2)
base_revision = EventRevision.objects.get(
event=event,
user__isnull=True
)
user_revision = EventRevision.objects.get(
event=event,
user=user
)
# reload the event edit page
response = self.client.get(url)
eq_(response.status_code, 200)
# because there's no difference between this and the event now
# we should NOT have a link to see the difference for the user_revision
ok_(
reverse('main:event_difference',
args=(event.slug, user_revision.pk))
not in response.content
)
# but there should be a link to the change
ok_(
reverse('main:event_change',
args=(event.slug, user_revision.pk))
in response.content
)
# since the base revision doesn't have any changes there shouldn't
# be a link to it
ok_(
reverse('main:event_change',
args=(event.slug, base_revision.pk))
not in response.content
)
# but there should be a link to the change
ok_(
reverse('main:event_difference',
args=(event.slug, base_revision.pk))
in response.content
)
def test_cant_view_all_revision_changes(self):
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
# base revision
base_revision = EventRevision.objects.create_from_event(event)
# change the event without saving so we can make a new revision
event.title = 'Different title'
user = User.objects.create_user(
'mary', '[email protected]', 'secret'
)
user_revision = EventRevision.objects.create_from_event(
event,
user=user
)
change_url = reverse(
'main:event_change',
args=(event.slug, user_revision.pk)
)
difference_url = reverse(
'main:event_difference',
args=(event.slug, base_revision.pk)
)
# you're not allowed to view these if you're not signed in
response = self.client.get(change_url)
eq_(response.status_code, 302)
response = self.client.get(difference_url)
eq_(response.status_code, 302)
def test_view_revision_change(self):
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
# base revision
base_revision = EventRevision.objects.create_from_event(event)
        # change the event so we can make a new revision
event.title = 'Different title'
event.description = 'New description'
event.short_description = 'New short description'
event.additional_links = 'New additional links'
event.save()
user = User.objects.create_user(
'bob', '[email protected]', 'secret'
)
user_revision = EventRevision.objects.create_from_event(
event,
user=user
)
user_revision.tags.add(Tag.objects.create(name='newtag'))
user_revision.channels.remove(Channel.objects.get(name='Main'))
user_revision.channels.add(
Channel.objects.create(name='Web dev', slug='webdev')
)
with open(self.other_image, 'rb') as f:
img = File(f)
user_revision.placeholder_img.save(
os.path.basename(self.other_image),
img
)
# view the change
url = reverse('main:event_change', args=(event.slug, user_revision.pk))
self._login()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Different title' in response.content)
ok_('New description' in response.content)
ok_('New short description' in response.content)
ok_('New additional links' in response.content)
ok_('Web dev' in response.content)
ok_('newtag, testing' in response.content)
event.tags.add(Tag.objects.create(name='newtag'))
event.channels.remove(Channel.objects.get(name='Main'))
event.channels.add(
Channel.objects.get(name='Web dev')
)
# view the difference
url = reverse(
'main:event_difference',
args=(event.slug, base_revision.pk))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Different title' in response.content)
ok_('New description' in response.content)
ok_('New short description' in response.content)
ok_('New additional links' in response.content)
ok_('Web dev' in response.content)
ok_('newtag, testing' in response.content)
def test_view_revision_change_on_recruitmentmessage(self):
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
# base revision
EventRevision.objects.create_from_event(event)
user = User.objects.create_user(
'bob', '[email protected]', 'secret'
)
user_revision = EventRevision.objects.create_from_event(
event,
user=user
)
msg1 = RecruitmentMessage.objects.create(
text='Web Developer',
url='http://careers.mozilla.com/123',
active=True
)
user_revision.recruitmentmessage = msg1
user_revision.save()
# view the change
url = reverse('main:event_change', args=(event.slug, user_revision.pk))
self._login()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(msg1.text in response.content)
| bsd-3-clause | 6,115,568,737,943,818,000 | 35.293344 | 79 | 0.581969 | false |
4a616d6573205265696c6c79/tartiflette | streaming/stream.py | 1 | 15191 | __doc__ = """
tartiflette
Program to analyse real-time traceroute information for routing changes.
Usage:
    tartiflette --num_procs=<NUM> --v4_nets=<V4_FILE> --v6_nets=<V6_FILE> [--time=<SECONDS>] [-b=<bucket>]
Options:
--num_procs=<NUM> Number of worker processes to spin up to handle
load. Uses one asyncio event loop per process.
    --time=<SECONDS>    Number of seconds to run the analysis for. If
                        omitted, run forever.
--v4_nets=<V4_FILE> File with a list of v4 networks
--v6_nets=<V6_FILE> File with a list of v6 networks
    -b=<bucket>         Compute stats for this time bucket
"""
import asyncio
import docopt
import ipaddress
import json
import pprint
import multiprocessing
import redis
import time
import numpy as np
from datetime import datetime
from collections import defaultdict
from ripe.atlas.cousteau import AtlasStream
WORK_QUEUE = multiprocessing.Queue()
RESULT_QUEUE = multiprocessing.Queue()
OTHER_QUEUE = multiprocessing.Queue()
pp = pprint.PrettyPrinter(indent=4)
RD = redis.StrictRedis(host='localhost', port=6379, db=0)
ONE_HOUR = 60*60
PARAMS = {
"timeWindow": 60 * 60, # in seconds
"alpha": 0.01, # parameter for exponential smoothing
"minCorr": -0.25,
"minSeen": 3,
"af": "6",
}
def dd():
return defaultdict(int)
def all_routes():
return defaultdict(dd)
class Measure(multiprocessing.Process):
def __init__(self, work_queue, result_queue):
self.WORK_QUEUE = work_queue
self.RESULT_QUEUE = result_queue
policy = asyncio.get_event_loop_policy()
policy.set_event_loop(policy.new_event_loop())
self.LOOP = asyncio.get_event_loop()
super().__init__()
@asyncio.coroutine
def main(self):
"""Loop forever looking for work from the queue"""
        while True:
            if not self.WORK_QUEUE.empty():
                traceroute = self.WORK_QUEUE.get()
                yield from self.process(traceroute)
            else:
                # yield control so an empty queue does not busy-spin the loop
                yield from asyncio.sleep(0.01)
def run(self):
self.LOOP.run_until_complete(self.main())
@asyncio.coroutine
def process(self, traceroute):
next_hops = defaultdict(dd)
res = yield from self.isValidMeasurement(traceroute)
if not res:
return
dstIp = traceroute["dst_addr"]
srcIp = traceroute["from"]
ts = int(traceroute["timestamp"])
bucket = yield from self.make_time_bucket(ts)
prevIps = [srcIp] * 3
currIps = []
yield from self.print_measurement(traceroute, bucket)
for hop in traceroute["result"]:
if not self.isValidHop(hop):
continue
for hopid, res in enumerate(hop["result"]):
ip = res.get("from", "x")
is_private = yield from self.isPrivate(ip)
if is_private:
continue
for prevIp in prevIps:
next_hops[prevIp][ip] += 1
count = next_hops[prevIp][ip]
yield from self.save_hop(dstIp, prevIp, ip, count, bucket, 6 * ONE_HOUR)
currIps.append(ip)
prevIps = currIps
currIps = []
# Measure.print_routes(next_hops)
# self.RESULT_QUEUE.put((dstIp, next_hops))
@asyncio.coroutine
def isPrivate(self, ip):
if ip == "x":
return False
ipaddr = ipaddress.ip_address(ip)
return ipaddr.is_private
@asyncio.coroutine
def make_time_bucket(self, ts, minutes=60):
return 'time_bucket/{}'.format(ts // (60 * minutes))
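    # Example (default minutes=60): ts=1463911200 (2016-05-22 10:00 UTC)
    # falls in hour bucket 406642, giving the key 'time_bucket/406642',
    # the same form accepted by the -b command line option.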
@asyncio.coroutine
def isValidMeasurement(self, msm):
return msm and "result" in msm and "dst_addr" in msm
@asyncio.coroutine
def isValidTraceResult(self, result):
return result and not "error" in result["result"][0]
@asyncio.coroutine
def isValidHop(self, hop):
return hop and "result" in hop and not "err" in hop["result"][0]
@staticmethod
def print_routes(routes):
data_as_dict = json.loads(json.dumps(routes))
pp.pprint(data_as_dict)
@asyncio.coroutine
def print_measurement(self, msm, bucket):
srcIp = msm["from"]
print("TS: {}, SRC: {}, DST: {} ({}) - Bucket: {}, Seen: {}".format(
msm['timestamp'],
msm['src_addr'],
msm['dst_addr'],
msm['dst_name'],
bucket,
self.has_target(srcIp, bucket)))
def get_time_bucket(self, bucket):
routes = defaultdict(all_routes)
targets = self.get_targets(bucket)
for target in targets:
links = self.get_target_links(bucket, target)
for (ip0, ip1) in links:
route_count_key = "route_{}_{}_{}_{}".format(bucket, target, ip0, ip1)
count = RD.get(route_count_key)
# print("route: {} -> {} => {}".format(ip0, ip1, int(count)))
                routes[target][ip0][ip1] = int(count) if count is not None else 0  # Redis returns bytes
return routes
def get_target_routes(self, routes, target):
return routes[target]
def get_targets(self, bucket):
"""Returns all destination ips in a time bucket"""
targets_key = "targets_{}".format(bucket)
targets = RD.smembers(targets_key)
return [t.decode() for t in targets]
def get_target_links(self, bucket, target):
"""Returns a list of ip0-ip1 tuples for a particular target in a bucket"""
target_to_routes_key = "routes_{}_{}".format(bucket, target)
target_to_routes = RD.smembers(target_to_routes_key)
links = []
for route in target_to_routes:
_route = route.decode()
# todo: use a regexp for this instead of a split
# since the bucket contains an underscore
_, _, ip0, ip1 = route.decode().split("_")
links.append((ip0, ip1))
return links
def compare_buckets(self, reference, bucket, target):
"""from routeChangeDetection function"""
bucket_ts = int(bucket.split("/")[1]) # time_bucket/406642
# ts = datetime.utcfromtimestamp(bucket_ts * 3600) # todo: use a param
ts = bucket_ts * 3600 # todo: use a param
bucket_links = self.get_time_bucket(bucket)
reference_links = self.get_time_bucket(reference)
routes = self.get_target_routes(bucket_links, target)
routes_ref = self.get_target_routes(reference_links, target)
alarms = []
alpha = PARAMS["alpha"]
for ip0, nextHops in routes.items():
nextHopsRef = routes_ref[ip0]
allHops = set(["0"])
            # every hop seen in either bucket, skipping the 'stats' entry
            for key in set(nextHops.keys()).union(
                [k for k, v in nextHopsRef.items() if
                    isinstance(v, (int, float))]):
if nextHops[key] or nextHopsRef[key]:
allHops.add(key)
reported = False
            nbSamples = np.sum(list(nextHops.values()))
nbSamplesRef = np.sum([x for x in nextHopsRef.values() if isinstance(x, int)])
if len(allHops) > 2 and "stats" in nextHopsRef and nextHopsRef["stats"]["nbSeen"] >= PARAMS["minSeen"]:
count = []
countRef = []
for ip1 in allHops:
count.append(nextHops[ip1])
countRef.append(nextHopsRef[ip1])
if len(count) > 1:
if np.std(count) == 0 or np.std(countRef) == 0:
print("{}, {}, {}, {}".format(allHops, countRef, count, nextHopsRef))
corr = np.corrcoef(count, countRef)[0][1]
if corr < PARAMS["minCorr"]:
reported = True
alarm = {"ip": ip0, "corr": corr,
"dst_ip": target,
"refNextHops": list(nextHopsRef.items()),
"obsNextHops": list(nextHops.items()),
"nbSamples": nbSamples,
"nbPeers": len(count),
"nbSeen": nextHopsRef["stats"]["nbSeen"]}
print("Alarm: {}".format(alarm))
alarms.append(alarm)
# Update the reference
if not "stats" in nextHopsRef:
nextHopsRef["stats"] = {"nbSeen": 0, "firstSeen": ts, "lastSeen": ts, "nbReported": 0}
if reported:
nextHopsRef["stats"]["nbReported"] += 1
nextHopsRef["stats"]["nbSeen"] += 1
nextHopsRef["stats"]["lastSeen"] = ts
for ip1 in allHops:
newCount = int(nextHops[ip1])
# print("newCount: {}".format(newCount))
nextHopsRef[ip1] = int((1.0 - alpha) * nextHopsRef[ip1] + alpha * int(newCount))
return routes_ref
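    # Detection sketch (illustrative numbers, not real data): if the
    # reference next-hop counts after ip0 were {'b': 10, 'c': 0} and the
    # current bucket shows {'b': 0, 'c': 10}, then
    # np.corrcoef([10, 0], [0, 10]) is -1.0 < minCorr, so a route-change
    # alarm is raised for ip0.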
@asyncio.coroutine
def save_links(self, target, links, bucket="ref", ttl=30*24*60*60):
        for ip0, nextHops in links.items():
            for ip1, count in nextHops.items():
yield from self.save_hop(target, ip0, ip1, count, bucket, ttl)
@asyncio.coroutine
def save_hop(self, target, ip0, ip1, count, bucket="ref", ttl=12*3600):
expires = int(time.time()) + ttl
p = RD.pipeline()
# a list of time bucket names
p.sadd("time_buckets", bucket)
# a set of all dst addr
target_key = "targets_{}".format(bucket)
p.sadd(target_key, target)
# a set of hops for each target dst addr
target_to_hops = "hops_{}_{}".format(bucket, target)
# a set of ip0_ip1 pairs for each target
target_to_routes = "routes_{}_{}".format(bucket, target)
# holds the total counters
route_count_key = "route_{}_{}_{}_{}".format(bucket, target, ip0, ip1)
route_key = "{}_{}_{}".format(bucket, ip0, ip1)
p.sadd(target_to_hops, ip0)
p.sadd(target_to_routes, route_key)
p.incrby(route_count_key, count)
# Set the expiration for all keys
p.expireat(bucket, expires)
p.expireat(target_key, expires)
p.expireat(target_to_hops, expires)
p.expireat(target_to_routes, expires)
p.expireat(route_count_key, expires)
p.execute()
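    # Key layout written by save_hop, e.g. for
    # save_hop('2001:db8::1', 'a', 'b', 2, bucket='time_bucket/406642')
    # (illustrative values):
    #   time_buckets                               += 'time_bucket/406642'
    #   targets_time_bucket/406642                 += '2001:db8::1'
    #   hops_time_bucket/406642_2001:db8::1        += 'a'
    #   routes_time_bucket/406642_2001:db8::1      += 'time_bucket/406642_a_b'
    #   route_time_bucket/406642_2001:db8::1_a_b   incremented by 2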
@asyncio.coroutine
def get_route(self, target, ip0, ip1, bucket="ref"):
route_count_key = "route_{}_{}_{}_{}".format(bucket, target, ip0, ip1)
return RD.get(route_count_key)
def has_target(self, target, bucket="ref"):
return RD.sismember("targets_{}".format(bucket), target)
class IPMatcher(multiprocessing.Process):
def __init__(self, work_queue, result_queue, v4_nets, v6_nets):
self.WORK_QUEUE = work_queue
self.RESULT_QUEUE = result_queue
policy = asyncio.get_event_loop_policy()
policy.set_event_loop(policy.new_event_loop())
self.LOOP = asyncio.get_event_loop()
self.NETWORKS = {
4: [
ipaddress.ip_network(u'{}'.format(net.strip()), strict=False) for
net in open(v4_nets).readlines()
],
6: [
ipaddress.ip_network(u'{}'.format(net.strip()), strict=False) for
net in open(v6_nets).readlines()
],
}
super().__init__()
@asyncio.coroutine
def main(self):
"""Loop forever looking for work from the queue"""
        while True:
            if not self.WORK_QUEUE.empty():
                traceroute = self.WORK_QUEUE.get()
                yield from self.filter_hop_rtt(traceroute)
            else:
                # yield control so an empty queue does not busy-spin the loop
                yield from asyncio.sleep(0.01)
def run(self):
self.LOOP.run_until_complete(self.main())
@asyncio.coroutine
def filter_hop_rtt(self, traceroute):
"""Given a traceroute result, filter out the unnecessary data and
hand off for analysis"""
m_result = traceroute
if 'result' in m_result.keys() and m_result['result']:
for hop in m_result['result']:
if not 'result' in hop.keys():
continue
for address in hop['result']:
if 'from' in address.keys():
res = yield from self.in_monitored_network(
address['from']
)
if res:
self.RESULT_QUEUE.put(m_result)
return None
# The lovely folks at ripe added in some server side filtering for
    # prefixes, so this code isn't really needed now. Leaving it in just
# in case anyone wants to do further filtering of the data
# UPDATE: server side is a WIP, we still need this
@asyncio.coroutine
def in_monitored_network(self, ip_address):
"""Returns true if this is in one of our monitored networks"""
address = ipaddress.ip_address(ip_address)
for network in self.NETWORKS[address.version]:
if address in network:
return True
return False
def on_result_received(*args):
    """Add the traceroute result to a queue to be processed"""
WORK_QUEUE.put(args[0])
def stream_results(v4_nets, v6_nets, seconds=None, filters={}):
"""Set up the atlas stream for all traceroute results"""
atlas_stream = AtlasStream()
atlas_stream.connect()
    atlas_stream.bind_channel('result', on_result_received)
prefixes = []
prefixes.extend([net.strip() for net in open(v4_nets).readlines()])
prefixes.extend([net.strip() for net in open(v6_nets).readlines()])
# for prefix in prefixes:
# stream_parameters = {"type": "traceroute", "passThroughPrefix": prefix}
# stream_parameters.update(filters)
# atlas_stream.start_stream(stream_type="result", **stream_parameters)
stream_parameters = {"type": "traceroute"}
stream_parameters.update(filters)
atlas_stream.start_stream(stream_type="result", **stream_parameters)
print("Before streaming")
atlas_stream.timeout(seconds=seconds)
atlas_stream.disconnect()
if __name__ == '__main__':
"""Start up one worker process to deal with handling checking traceroute
results, and just use the main thread to read from atlas."""
args = docopt.docopt(__doc__)
policy = asyncio.get_event_loop_policy()
policy.set_event_loop(policy.new_event_loop())
v4_nets = args['--v4_nets']
v6_nets = args['--v6_nets']
bucket = args['-b'] # 'time_bucket/406642'
if bucket:
measure = Measure(RESULT_QUEUE, OTHER_QUEUE)
targets = measure.get_targets(bucket)
for target in targets:
ref = measure.compare_buckets('reference', bucket, target)
# Measure.print_routes(ref)
exit()
procs = []
measure = Measure(RESULT_QUEUE, OTHER_QUEUE)
measure.start()
procs.append(measure)
for i in range(int(args['--num_procs'])):
proc = IPMatcher(WORK_QUEUE, RESULT_QUEUE, v4_nets, v6_nets)
procs.append(proc)
proc.start()
if args['--time']:
seconds = int(args['--time'])
else:
seconds = None
stream_results(v4_nets, v6_nets, seconds)
for proc in procs:
proc.terminate()
exit()
| mit | 2,678,837,684,548,656,000 | 35.516827 | 115 | 0.569745 | false |
SethGreylyn/gwells | gwells/urls.py | 1 | 2002 | """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from . import views
from django.views.generic import TemplateView
urlpatterns = [
# Examples:
# url(r'^$', 'project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.HelloWorldView.as_view(), name='home'),
url(r'^search$', views.well_search, name='search'),
#url(r'^(?P<pk>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/$', views.DetailView.as_view(), name='detail'),
url(r'^submission/$', views.ActivitySubmissionListView.as_view(), name='activity_submission_list'),
url(r'^submission/create$', views.ActivitySubmissionWizardView.as_view(views.FORMS), name='activity_submission_create'),
url(r'^submission/(?P<pk>[0-9]+)$', views.ActivitySubmissionDetailView.as_view(), name='activity_submission_detail'),
url(r'^well/(?P<pk>[0-9]+)$', views.WellDetailView.as_view(), name='well_detail'),
url(r'^health$', views.health),
url(r'^admin/', admin.site.urls),
url(r'^additional-information', TemplateView.as_view(template_name='gwells/additional_information.html'), name='additional_information'),
url(r'^ajax/map_well_search/$', views.map_well_search, name='map_well_search'),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns | apache-2.0 | 85,206,001,416,177,220 | 45.581395 | 141 | 0.687313 | false |
ROGUE-JCTD/django-tilebundler | setup.py | 1 | 1166 | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
setup(
name='django-tilebundler',
version='0.1-beta3',
author='Syrus Mesdaghi',
author_email='[email protected]',
url='https://github.com/ROGUE-JCTD/django-tilebundler',
download_url='https://github.com/ROGUE-JCTD/django-tilebundler',
description='Service that creates tilesets from layer sources and serves them',
long_description=open(os.path.join(here, 'README.md')).read(),
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Topic :: Utilities',
'Natural Language :: English',
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'Environment :: Web Environment',
'Framework :: Django',
'Development Status :: 1 - Planning',
'Programming Language :: Python :: 2.7'
],
install_requires=[
'Django>=1.6.10',
'MapProxy==1.8.0',
'PyYAML>=3.10',
'django-tastypie>=0.12.1',
'Shapely>=1.5.9',
'psutil>=3.0.1'
]
)
| mit | 7,172,282,942,100,561,000 | 30.513514 | 83 | 0.614065 | false |
Apogaea/voldb | volunteer/apps/shifts/models.py | 1 | 6000 | from __future__ import unicode_literals
import datetime
from django.db import models
from django.core.validators import MaxValueValidator
from django.conf import settings
from django.utils import timezone
from django.utils import timesince
from django.utils.encoding import python_2_unicode_compatible
from volunteer.core.models import Timestamped
from volunteer.apps.shifts.utils import DENVER_TIMEZONE
class ShiftQuerySet(models.QuerySet):
use_for_related_fields = True
def filter_to_active_event(self, active_event=None):
if active_event is None:
from volunteer.apps.events.models import Event
active_event = Event.objects.get_current()
if active_event is None:
return self
else:
return self.filter(event=active_event)
def human_readable_minutes(minutes):
now = timezone.now()
return timesince.timeuntil(now + timezone.timedelta(minutes=minutes), now)
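# e.g. human_readable_minutes(90) renders as '1 hour, 30 minutes'
# (formatting comes from django.utils.timesince.timeuntil).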
@python_2_unicode_compatible
class Shift(Timestamped):
event = models.ForeignKey(
'events.Event', related_name='shifts', on_delete=models.PROTECT,
)
role = models.ForeignKey(
'departments.Role', related_name='shifts', on_delete=models.PROTECT,
)
is_closed = models.BooleanField(
blank=True, default=False,
help_text=(
"This will restrict anyone from claiming slots on this shift."
),
)
start_time = models.DateTimeField(
'shift begins',
help_text=(
"Format: `YYYY-MM-DD HH:MM` with the hours in 24-hour (military) "
"format. (eg, 2pm is 14:00)."
),
)
SHIFT_MINUTES_CHOICES = tuple((
(i * 5, human_readable_minutes(i * 5)) for i in range(1, 24 * 12 + 1)
))
shift_minutes = models.PositiveSmallIntegerField(
"shift length",
validators=[MaxValueValidator(1440)], choices=SHIFT_MINUTES_CHOICES,
help_text="The length of the shift",
)
num_slots = models.PositiveSmallIntegerField(
default=1,
help_text="How many slots does this shift have",
)
code = models.CharField(
max_length=50, blank=True,
help_text="Leave blank if this shift can be claimed by anyone.",
)
objects = ShiftQuerySet.as_manager()
def __str__(self):
return self.get_start_time_display()
@property
def open_slot_count(self):
return max(0, self.num_slots - self.slots.filter(cancelled_at__isnull=True).count())
@property
def filled_slot_count(self):
return self.slots.filter(cancelled_at__isnull=True).count()
@property
def has_open_slots(self):
return bool(self.open_slot_count)
@property
def claimed_slots(self):
return self.slots.filter(cancelled_at__isnull=True)
def get_start_time_display(self):
return self.start_time.strftime('%H:%M')
@property
def end_time(self):
return self.start_time + datetime.timedelta(minutes=self.shift_minutes)
def overlaps_with(self, other):
if self.end_time <= other.start_time:
return False
elif self.start_time >= other.end_time:
return False
return True
@property
def is_protected(self):
return bool(self.code)
@property
def is_locked(self):
return self.is_closed or not self.event.is_registration_open
@property
def is_midnight_spanning(self):
if self.shift_minutes > 24 * 60:
return True
start_hour = self.start_time.astimezone(DENVER_TIMEZONE).hour
end_hour = self.end_time.astimezone(DENVER_TIMEZONE).hour
return bool(end_hour) and start_hour > end_hour
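    # Example: a 120-minute shift starting at 23:00 Denver time has
    # start_hour == 23 and end_hour == 1, so it spans midnight; any shift
    # longer than 24 hours spans by definition.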
# Permissions Methods
def is_claimable_by_user(self, user):
"""
Not locked.
Has open slots.
User does not already have a slot.
"""
if self.is_locked:
return False
elif not self.has_open_slots:
return False
elif self.claimed_slots.filter(volunteer=user).exists():
return False
return True
@property
def duration(self):
return timezone.timedelta(minutes=self.shift_minutes)
class ShiftSlotQuerySet(models.QuerySet):
use_for_related_fields = True
def filter_to_active_event(self, active_event=None):
if active_event is None:
from volunteer.apps.events.models import Event
active_event = Event.objects.get_current()
if active_event is None:
return self
else:
return self.filter(shift__event=active_event)
@python_2_unicode_compatible
class ShiftSlot(Timestamped):
shift = models.ForeignKey('Shift', related_name='slots')
volunteer = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='shift_slots')
cancelled_at = models.DateTimeField(null=True)
objects = ShiftSlotQuerySet.as_manager()
def __str__(self):
return "{s.shift_id}:{s.volunteer_id}".format(s=self)
def cancel(self):
self.is_cancelled = True
self.save()
def _is_cancelled_getter(self):
return bool(self.cancelled_at)
def _is_cancelled_setter(self, value):
if bool(value) is bool(self.cancelled_at):
return
elif value:
self.cancelled_at = timezone.now()
else:
self.cancelled_at = None
is_cancelled = property(_is_cancelled_getter, _is_cancelled_setter)
@property
def is_locked(self):
return not self.shift.event.is_registration_open
#
# Permissions Methods
#
def is_cancelable_by_user(self, user):
"""
        Not cancelled.
        Not locked.
        User is the volunteer or an admin; otherwise not allowed.
"""
if self.is_cancelled:
return False
elif self.is_locked:
return False
elif user.pk == self.volunteer_id or user.is_admin:
return True
return False
| gpl-3.0 | -1,218,786,950,874,683,600 | 27.708134 | 92 | 0.6305 | false |
qbeenslee/Nepenthes-Server | utils/codeutil.py | 1 | 3122 | # coding:utf-8
'''
Encoding helpers: short IDs, compressed UUIDs and salted password hashing
Author : qbeenslee
Created : 2015/1/20
'''
import hashlib
import random
import re
import time
import uuid
from config import setting, configuration
SAFEHASH = [x for x in "0123456789-abcdefghijklmnopqrstuvwxyz_ABCDEFGHIJKLMNOPQRSTUVWXYZ"] # 64
SHORTHASH = [x for x in "0123456789ABCDEFGHJKMNPQRSTVWXYZ"] # 32
def shorter_UID():
'''
    Generate a short random code (hex digits, '0x' prefix stripped).
    :return: str
'''
random.seed(int(time.time()))
num = random.randint(configuration.MIN_RAND_EMAIL_CODE, configuration.MAX_RAND_EMAIL_CODE)
return hex(num)[2:]
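# e.g. bounds of 0x100000..0xffffff would yield six hex digits such as
# '1a2b3c'; the actual width depends on the configured MIN/MAX values.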
def short_UID(imei):
'''
    Generate a short, unique compressed ID from an IMEI and the current time.
    :return: string, 10-character ID code
'''
if imei is None:
return None
shortID = ''
imei = re.sub(r'\D+', '', imei, 0)
imei_token = imei[8:14] + imei[0:6]
time_token = ('%.3f' % time.time()).split(r'.')[-1]
token = time_token + imei_token
enbin = "%050d" % int(bin(int(token))[2:], 10)
for i in xrange(10):
shortID += SHORTHASH[int(enbin[i * 5:i * 5 + 5], 2)]
return shortID
def compress_UUID():
'''
    Per http://www.ietf.org/rfc/rfc1738.txt, build a string from a UUID by
    re-encoding it over a wider character set.
    Alphabet: [0-9a-zA-Z\-_], 64 characters in total
    Length: (32-2)/3*2 = 20
    Note: enough for everyone on earth to use for 1000 years without a repeat (2^120)
    :return: String
'''
row = str(uuid.uuid4()).replace('-', '')
safe_code = ''
for i in xrange(10):
enbin = "%012d" % int(bin(int(row[i * 3] + row[i * 3 + 1] + row[i * 3 + 2], 16))[2:], 10)
safe_code += (SAFEHASH[int(enbin[0:6], 2)] + SAFEHASH[int(enbin[6:12], 2)])
return safe_code
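# Each call consumes 30 of the UUID's 32 hex digits: every 3 hex digits
# (12 bits) become two 6-bit indices into SAFEHASH, giving 20 characters.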
def encode_pwd(password, loaded=True, salt_bit=None, iteration_index=None):
'''
    Salt and hash a plaintext password.
    :return: String
            format: algorithm$iterations$salt$digest
    :param: password  plaintext password (or its md5 digest when already loaded) [32]
            loaded  whether the value has already been (md5) hashed
            salt_bit  salt [20]
            iteration_index  iteration count from the database (for a string
            that has already been hashed once) [2]
    :do: the first pass hashes without salt; every remaining iteration mixes
         in the server-generated salt
'''
if password is None: raise ValueError('ValueEmpty', 'pwd')
if salt_bit is None: salt_bit = compress_UUID()
if iteration_index is None:
iteration_index = random.randint(setting.PWD_ITERATION_INTERVAL['MIN'],
setting.PWD_ITERATION_INTERVAL['MAX'])
if not loaded:
password = hashlib.md5(password).hexdigest()
for i in xrange(iteration_index):
password = hashlib.md5(password + salt_bit).hexdigest()
return "%s$%d$%s$%s" % ('md5', iteration_index, salt_bit, password)
if __name__ == '__main__':
# strTest = u'md5$35$Xm9UuCi4hap6MmNXN2SV$9e77dd1761c233b079d9f2568f905f8'
# #
# method, iteration, salt, pwd = strTest.split(r'$')
# pwd_gen = encode_pwd(password='1234', salt_bit=salt, iteration_index=int(iteration))
# print pwd_gen
shorter_UID()
| gpl-3.0 | 8,844,720,525,724,796,000 | 28.193548 | 97 | 0.587251 | false |
unomena/django-saml2-sp | saml2sp/saml2sp_settings.py | 1 | 1417 | from django.conf import settings
try:
SAML2SP_ACS_URL = settings.SAML2SP_ACS_URL
except AttributeError:
SAML2SP_ACS_URL = 'http://127.0.0.1:9000/sp/acs/'
try:
SAML2SP_ENTITY_ID = settings.SAML2SP_ENTITY_ID
except AttributeError:
# A value of None will default to the metadata URL, in the views.
SAML2SP_ENTITY_ID = None
try:
SAML2SP_IDP_SLO_URL = settings.SAML2SP_IDP_SLO_URL
except AttributeError:
SAML2SP_IDP_SLO_URL = 'http://127.0.0.1:8000/idp/logout/'
try:
SAML2SP_IDP_AUTO_LOGOUT = settings.SAML2SP_IDP_AUTO_LOGOUT
except AttributeError:
SAML2SP_IDP_AUTO_LOGOUT = False
try:
SAML2SP_IDP_REQUEST_URL = settings.SAML2SP_IDP_REQUEST_URL
except AttributeError:
SAML2SP_IDP_REQUEST_URL = 'http://127.0.0.1:8000/idp/login/'
#XXX: OK, this is an evil hack. But I can't figure out a better way to do this,
# since Django requires a local user account. I suppose I could write my
# own auth backend, but I don't really want to right now.
try:
SAML2SP_SAML_USER_PASSWORD = settings.SAML2SP_SAML_USER_PASSWORD
except AttributeError:
SAML2SP_SAML_USER_PASSWORD = settings.SECRET_KEY[::-1]
# If using relative paths, be careful!
try:
SAML2SP_CERTIFICATE_FILE = settings.SAML2SP_CERTIFICATE_FILE
except AttributeError:
SAML2SP_CERTIFICATE_FILE = 'keys/sample/certificate.pem'
# If using relative paths, be careful!
try:
SAML2SP_PRIVATE_KEY_FILE = settings.SAML2SP_PRIVATE_KEY_FILE
except AttributeError:
SAML2SP_PRIVATE_KEY_FILE = 'keys/sample/private-key.pem'
| bsd-3-clause | 2,608,328,142,906,823,700 | 29.148936 | 79 | 0.715596 | false |
boudewijnrempt/HyvesDesktop | 3rdparty/socorro/socorro/collector/initializer.py | 1 | 2614 | import os
import logging
import logging.handlers
import socorro.lib.ConfigurationManager
import socorro.lib.util as sutil
import socorro.lib.JsonDumpStorage as jds
import socorro.collector.collect as collect
#-----------------------------------------------------------------------------------------------------------------
def createPersistentInitialization(configModule):
storage = {}
storage["config"] = config = socorro.lib.ConfigurationManager.newConfiguration(configurationModule=configModule,automaticHelp=False)
storage["collectObject"] = collect.Collect(config)
storage["hostname"] = os.uname()[1]
storage["logger"] = logger = logging.getLogger("monitor")
logger.setLevel(logging.DEBUG)
rotatingFileLog = logging.handlers.RotatingFileHandler(config.logFilePathname, "a", config.logFileMaximumSize, config.logFileMaximumBackupHistory)
rotatingFileLog.setLevel(config.logFileErrorLoggingLevel)
rotatingFileLogFormatter = logging.Formatter(config.logFileLineFormatString)
rotatingFileLog.setFormatter(rotatingFileLogFormatter)
logger.addHandler(rotatingFileLog)
logger.info("current configuration\n%s", str(config))
standardFileSystemStorage = jds.JsonDumpStorage(root = config.storageRoot,
maxDirectoryEntries = config.dumpDirCount,
jsonSuffix = config.jsonFileSuffix,
dumpSuffix = config.dumpFileSuffix,
dumpGID = config.dumpGID,
dumpPermissions = config.dumpPermissions,
dirPermissions = config.dirPermissions,
)
storage["standardFileSystemStorage"] = standardFileSystemStorage
deferredFileSystemStorage = jds.JsonDumpStorage(root = config.deferredStorageRoot,
maxDirectoryEntries = config.dumpDirCount,
jsonSuffix = config.jsonFileSuffix,
dumpSuffix = config.dumpFileSuffix,
dumpGID = config.dumpGID,
dumpPermissions = config.dumpPermissions,
dirPermissions = config.dirPermissions,
)
storage["deferredFileSystemStorage"] = deferredFileSystemStorage
return storage | gpl-2.0 | -4,890,446,938,437,536,000 | 52.367347 | 148 | 0.568477 | false |
kalebdavis/calenbro | outlook/outlookservice.py | 1 | 1868 | import requests
import uuid
import json
outlook_api_endpoint = 'https://outlook.office.com/api/v2.0{0}'
def make_api_call(method, url, token, user_email, payload=None, parameters=None):
headers = {
'User-Agent': 'brickhack/1.0',
'Authorization': 'Bearer {0}'.format(token),
'Accept': 'application/json',
        'X-AnchorMailbox': user_email  # the address itself, not the literal string
}
request_id = str(uuid.uuid4())
instrumentation = {
'client-request-id': request_id,
'return-client-request-id': 'true'
}
headers.update(instrumentation)
response = None
if(method.upper() == 'GET'):
response = requests.get(url, headers=headers, params=parameters)
elif(method.upper() == 'DELETE'):
response = requests.delete(url, headers=headers, params=parameters)
elif(method.upper() == 'PATCH'):
headers.update({ 'Content-Type': 'application/json' })
response = requests.patch(url, headers=headers, data=json.dumps(payload), params=parameters)
elif(method.upper() == 'POST'):
headers.update({ 'Content-Type': 'application/json' })
response = requests.post(url, headers=headers, data=json.dumps(payload), params=parameters)
return response
def get_my_events(access_token, user_email):
get_events_url = outlook_api_endpoint.format('/Me/Events')
query_parameters = {
'$top': '10',
'$select': 'Subject,Start,End',
'$orderby': 'Start/DateTime ASC'
}
r = make_api_call('GET', get_events_url, access_token, user_email, parameters=query_parameters)
if(r.status_code == requests.codes.ok):
return r.json()
else:
return "{0}: {1}".format(r.status_code, r.text)
| gpl-2.0 | 1,723,155,915,210,495,700 | 37.122449 | 100 | 0.577088 | false |
Etxea/gestioneide | profesores/urls.py | 1 | 1195 | from django.conf.urls import include, url
from django.views.generic import ListView, DetailView
from django.views.generic.edit import UpdateView
from django.contrib.auth.decorators import login_required, permission_required
from profesores.views import *
urlpatterns = [
url(r'^$', login_required(ProfesorDashboardView.as_view()),name="profesores_dashboard"),
url(r'lista/$', login_required(ProfesorListView.as_view()),name="profesores_lista"),
url(r'nuevo/$',ProfesorCreateView.as_view(), name="profesor_nuevo"),
url(r'editar/(?P<pk>\d+)/$',ProfesorUpdateView.as_view(), name="profesor_editar"),
url(r'borrar/(?P<pk>\d+)/$',ProfesorDeleteView.as_view(), name="profesor_borrar"),
url(r'passwordreset/(?P<pk>\d+)/$',ProfesorPasswordResetView.as_view(), name="profesor_passwordreset"),
url(r'createuser/(?P<pk>\d+)/$',ProfesorCreateUserView.as_view(), name="profesor_createuser"),
url(r'disableuser/(?P<pk>\d+)/$',ProfesorDisableUserView.as_view(), name="profesor_disableuser"),
url(r'enableuser/(?P<pk>\d+)/$',ProfesorEnableUserView.as_view(), name="profesor_enableuser"),
url(r'(?P<pk>\d+)/$',ProfesorDetailView.as_view(), name="profesor_detalle"),
]
| gpl-3.0 | 5,310,999,755,454,793,000 | 55.904762 | 107 | 0.713808 | false |
lina9527/easybi | migrations/versions/822389978719_.py | 1 | 21400 | """empty message
Revision ID: 822389978719
Revises: None
Create Date: 2017-10-17 15:49:01.970182
"""
# revision identifiers, used by Alembic.
revision = '822389978719'
down_revision = None

from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('keyvalue',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('value', sa.Text(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table('access_request',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('datasource_id', sa.Integer(), nullable=True),
sa.Column('datasource_type', sa.String(length=200), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('clusters',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('verbose_name', sa.String(length=250), nullable=True),
sa.Column('cluster_name', sa.String(length=250), nullable=True),
sa.Column('coordinator_host', sa.String(length=255), nullable=True),
sa.Column('coordinator_port', sa.Integer(), nullable=True),
sa.Column('coordinator_endpoint', sa.String(length=255), nullable=True),
sa.Column('broker_host', sa.String(length=255), nullable=True),
sa.Column('broker_port', sa.Integer(), nullable=True),
sa.Column('broker_endpoint', sa.String(length=255), nullable=True),
sa.Column('metadata_last_refreshed', sa.DateTime(), nullable=True),
sa.Column('cache_timeout', sa.Integer(), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('cluster_name'),
sa.UniqueConstraint('verbose_name')
)
op.create_table('css_templates',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('template_name', sa.String(length=250), nullable=True),
sa.Column('css', sa.Text(), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('dashboards',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('dashboard_title', sa.String(length=500), nullable=True),
sa.Column('position_json', sa.Text(), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('css', sa.Text(), nullable=True),
sa.Column('json_metadata', sa.Text(), nullable=True),
sa.Column('slug', sa.String(length=255), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('slug')
)
op.create_table('dbs',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('verbose_name', sa.String(length=250), nullable=True),
sa.Column('database_name', sa.String(length=250), nullable=True),
sa.Column('sqlalchemy_uri', sa.String(length=1024), nullable=True),
sa.Column('password', sqlalchemy_utils.types.encrypted.EncryptedType(), nullable=True),
sa.Column('cache_timeout', sa.Integer(), nullable=True),
sa.Column('select_as_create_table_as', sa.Boolean(), nullable=True),
sa.Column('expose_in_sqllab', sa.Boolean(), nullable=True),
sa.Column('allow_run_sync', sa.Boolean(), nullable=True),
sa.Column('allow_run_async', sa.Boolean(), nullable=True),
sa.Column('allow_ctas', sa.Boolean(), nullable=True),
sa.Column('allow_dml', sa.Boolean(), nullable=True),
sa.Column('force_ctas_schema', sa.String(length=250), nullable=True),
sa.Column('extra', sa.Text(), nullable=True),
sa.Column('perm', sa.String(length=1000), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('database_name'),
sa.UniqueConstraint('verbose_name')
)
op.create_table('favstar',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('class_name', sa.String(length=50), nullable=True),
sa.Column('obj_id', sa.Integer(), nullable=True),
sa.Column('dttm', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('logs',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('action', sa.String(length=512), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('dashboard_id', sa.Integer(), nullable=True),
sa.Column('slice_id', sa.Integer(), nullable=True),
sa.Column('json', sa.Text(), nullable=True),
sa.Column('dttm', sa.DateTime(), nullable=True),
sa.Column('dt', sa.Date(), nullable=True),
sa.Column('duration_ms', sa.Integer(), nullable=True),
sa.Column('referrer', sa.String(length=1024), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('slices',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('slice_name', sa.String(length=250), nullable=True),
sa.Column('datasource_id', sa.Integer(), nullable=True),
sa.Column('datasource_type', sa.String(length=200), nullable=True),
sa.Column('datasource_name', sa.String(length=2000), nullable=True),
sa.Column('viz_type', sa.String(length=250), nullable=True),
sa.Column('params', sa.Text(), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('cache_timeout', sa.Integer(), nullable=True),
sa.Column('perm', sa.String(length=1000), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('url',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('url', sa.Text(), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('dashboard_slices',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('dashboard_id', sa.Integer(), nullable=True),
sa.Column('slice_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['dashboard_id'], ['dashboards.id'], ),
sa.ForeignKeyConstraint(['slice_id'], ['slices.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('dashboard_user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('dashboard_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['dashboard_id'], ['dashboards.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('datasources',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('default_endpoint', sa.Text(), nullable=True),
sa.Column('is_featured', sa.Boolean(), nullable=True),
sa.Column('filter_select_enabled', sa.Boolean(), nullable=True),
sa.Column('offset', sa.Integer(), nullable=True),
sa.Column('cache_timeout', sa.Integer(), nullable=True),
sa.Column('params', sa.String(length=1000), nullable=True),
sa.Column('perm', sa.String(length=1000), nullable=True),
sa.Column('datasource_name', sa.String(length=255), nullable=True),
sa.Column('is_hidden', sa.Boolean(), nullable=True),
sa.Column('fetch_values_from', sa.String(length=100), nullable=True),
sa.Column('cluster_name', sa.String(length=250), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['cluster_name'], ['clusters.cluster_name'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('datasource_name')
)
op.create_table('query',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('client_id', sa.String(length=11), nullable=False),
sa.Column('database_id', sa.Integer(), nullable=False),
sa.Column('tmp_table_name', sa.String(length=256), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('status', sa.String(length=16), nullable=True),
sa.Column('tab_name', sa.String(length=256), nullable=True),
sa.Column('sql_editor_id', sa.String(length=256), nullable=True),
sa.Column('schema', sa.String(length=256), nullable=True),
sa.Column('sql', sa.Text(), nullable=True),
sa.Column('select_sql', sa.Text(), nullable=True),
sa.Column('executed_sql', sa.Text(), nullable=True),
sa.Column('limit', sa.Integer(), nullable=True),
sa.Column('limit_used', sa.Boolean(), nullable=True),
sa.Column('select_as_cta', sa.Boolean(), nullable=True),
sa.Column('select_as_cta_used', sa.Boolean(), nullable=True),
sa.Column('progress', sa.Integer(), nullable=True),
sa.Column('rows', sa.Integer(), nullable=True),
sa.Column('error_message', sa.Text(), nullable=True),
sa.Column('results_key', sa.String(length=64), nullable=True),
sa.Column('start_time', sa.Numeric(precision=20, scale=6), nullable=True),
sa.Column('start_running_time', sa.Numeric(precision=20, scale=6), nullable=True),
sa.Column('end_time', sa.Numeric(precision=20, scale=6), nullable=True),
sa.Column('end_result_backend_time', sa.Numeric(precision=20, scale=6), nullable=True),
sa.Column('tracking_url', sa.Text(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['database_id'], ['dbs.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('client_id')
)
op.create_index(op.f('ix_query_results_key'), 'query', ['results_key'], unique=False)
op.create_index('ti_user_id_changed_on', 'query', ['user_id', 'changed_on'], unique=False)
op.create_table('saved_query',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('db_id', sa.Integer(), nullable=True),
sa.Column('schema', sa.String(length=128), nullable=True),
sa.Column('label', sa.String(length=256), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('sql', sa.Text(), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['db_id'], ['dbs.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('slice_user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('slice_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['slice_id'], ['slices.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('tables',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('default_endpoint', sa.Text(), nullable=True),
sa.Column('is_featured', sa.Boolean(), nullable=True),
sa.Column('filter_select_enabled', sa.Boolean(), nullable=True),
sa.Column('offset', sa.Integer(), nullable=True),
sa.Column('cache_timeout', sa.Integer(), nullable=True),
sa.Column('params', sa.String(length=1000), nullable=True),
sa.Column('perm', sa.String(length=1000), nullable=True),
sa.Column('table_name', sa.String(length=250), nullable=True),
sa.Column('main_dttm_col', sa.String(length=250), nullable=True),
sa.Column('database_id', sa.Integer(), nullable=False),
sa.Column('fetch_values_predicate', sa.String(length=1000), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('schema', sa.String(length=255), nullable=True),
sa.Column('sql', sa.Text(), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['database_id'], ['dbs.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('database_id', 'schema', 'table_name', name='_customer_location_uc')
)
op.create_table('columns',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('column_name', sa.String(length=255), nullable=True),
sa.Column('verbose_name', sa.String(length=1024), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.Column('type', sa.String(length=32), nullable=True),
sa.Column('groupby', sa.Boolean(), nullable=True),
sa.Column('count_distinct', sa.Boolean(), nullable=True),
sa.Column('sum', sa.Boolean(), nullable=True),
sa.Column('avg', sa.Boolean(), nullable=True),
sa.Column('max', sa.Boolean(), nullable=True),
sa.Column('min', sa.Boolean(), nullable=True),
sa.Column('filterable', sa.Boolean(), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('datasource_name', sa.String(length=255), nullable=True),
sa.Column('dimension_spec_json', sa.Text(), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['datasource_name'], ['datasources.datasource_name'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('metrics',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('metric_name', sa.String(length=512), nullable=True),
sa.Column('verbose_name', sa.String(length=1024), nullable=True),
sa.Column('metric_type', sa.String(length=32), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('is_restricted', sa.Boolean(), nullable=True),
sa.Column('d3format', sa.String(length=128), nullable=True),
sa.Column('datasource_name', sa.String(length=255), nullable=True),
sa.Column('json', sa.Text(), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['datasource_name'], ['datasources.datasource_name'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('sql_metrics',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('metric_name', sa.String(length=512), nullable=True),
sa.Column('verbose_name', sa.String(length=1024), nullable=True),
sa.Column('metric_type', sa.String(length=32), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('is_restricted', sa.Boolean(), nullable=True),
sa.Column('d3format', sa.String(length=128), nullable=True),
sa.Column('table_id', sa.Integer(), nullable=True),
sa.Column('expression', sa.Text(), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['table_id'], ['tables.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('table_columns',
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('changed_on', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('column_name', sa.String(length=255), nullable=True),
sa.Column('verbose_name', sa.String(length=1024), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.Column('type', sa.String(length=32), nullable=True),
sa.Column('groupby', sa.Boolean(), nullable=True),
sa.Column('count_distinct', sa.Boolean(), nullable=True),
sa.Column('sum', sa.Boolean(), nullable=True),
sa.Column('avg', sa.Boolean(), nullable=True),
sa.Column('max', sa.Boolean(), nullable=True),
sa.Column('min', sa.Boolean(), nullable=True),
sa.Column('filterable', sa.Boolean(), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('table_id', sa.Integer(), nullable=True),
sa.Column('is_dttm', sa.Boolean(), nullable=True),
sa.Column('expression', sa.Text(), nullable=True),
sa.Column('python_date_format', sa.String(length=255), nullable=True),
sa.Column('database_expression', sa.String(length=255), nullable=True),
sa.Column('created_by_fk', sa.Integer(), nullable=True),
sa.Column('changed_by_fk', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ),
sa.ForeignKeyConstraint(['table_id'], ['tables.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('table_columns')
op.drop_table('sql_metrics')
op.drop_table('metrics')
op.drop_table('columns')
op.drop_table('tables')
op.drop_table('slice_user')
op.drop_table('saved_query')
op.drop_index('ti_user_id_changed_on', table_name='query')
op.drop_index(op.f('ix_query_results_key'), table_name='query')
op.drop_table('query')
op.drop_table('datasources')
op.drop_table('dashboard_user')
op.drop_table('dashboard_slices')
op.drop_table('url')
op.drop_table('slices')
op.drop_table('logs')
op.drop_table('favstar')
op.drop_table('dbs')
op.drop_table('dashboards')
op.drop_table('css_templates')
op.drop_table('clusters')
op.drop_table('access_request')
op.drop_table('keyvalue')
# ### end Alembic commands ###
| mit | -4,426,077,723,326,835,000 | 50.318945 | 94 | 0.656402 | false |
rndusr/stig | stig/main.py | 1 | 3550 | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# http://www.gnu.org/licenses/gpl-3.0.txt
import asyncio
import os
import sys
from . import cliopts, logging, objects, settings
from .objects import cmdmgr, log, srvapi
# Remove python from process name when running inside tmux
if 'TMUX' in os.environ:
try:
from setproctitle import setproctitle
except ImportError:
pass
else:
from . import __appname__
setproctitle(__appname__)
cliargs, clicmds = cliopts.parse()
objects.main_rcfile = cliargs['rcfile'] or settings.defaults.DEFAULT_RCFILE
logging.setup(debugmods=cliargs['debug'], filepath=cliargs['debug_file'])
logging.redirect_level('INFO', sys.stdout)
def run():
cmdmgr.load_cmds_from_module('stig.commands.cli', 'stig.commands.tui')
from .commands.guess_ui import guess_ui, UIGuessError
from .commands import CmdError
from . import hooks # noqa: F401
# Read commands from rc file
rclines = ()
if not cliargs['norcfile']:
from .settings import rcfile
try:
rclines = rcfile.read(objects.main_rcfile)
except rcfile.RcFileError as e:
log.error('Loading rc file failed: {}'.format(e))
sys.exit(1)
# Decide if we run as a TUI or CLI
if cliargs['tui']:
cmdmgr.active_interface = 'tui'
elif cliargs['notui']:
cmdmgr.active_interface = 'cli'
else:
try:
cmdmgr.active_interface = guess_ui(clicmds, cmdmgr)
except UIGuessError:
log.error('Unable to guess user interface')
log.error('Provide one of these options: --tui/-t or --no-tui/-T')
sys.exit(1)
except CmdError as e:
log.error(e)
sys.exit(1)
def run_commands():
for cmdline in rclines:
success = cmdmgr.run_sync(cmdline)
# Ignored commands return None, which we consider a success here
# because TUI commands like 'tab' in the rc file should have no
# effect at all when in CLI mode.
if success is False:
return False
# Exit if CLI commands fail
if clicmds:
success = cmdmgr.run_sync(clicmds)
if not success:
return False
return True
exit_code = 0
# Run commands either in CLI or TUI mode
if cmdmgr.active_interface == 'cli':
# Exit when pipe is closed (e.g. `stig help | head -1`)
import signal
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
try:
if not run_commands():
exit_code = 1
except KeyboardInterrupt:
log.debug('Caught SIGINT')
elif cmdmgr.active_interface == 'tui':
from .tui import main as tui
if not tui.run(run_commands):
exit_code = 1
asyncio.get_event_loop().run_until_complete(srvapi.rpc.disconnect('Quit'))
# We're not closing the AsyncIO event loop here because it sometimes
# complains about unfinished tasks and not calling it seems to work fine.
sys.exit(exit_code)
| gpl-3.0 | 2,774,713,514,736,834,600 | 31.568807 | 78 | 0.63662 | false |
praekelt/sideloader2 | sideloader.web/sideloader/web/views.py | 1 | 27153 | from datetime import timedelta, datetime
import uuid
import urlparse
import json
import hashlib, hmac, base64
import time
import yaml
from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.views.decorators.csrf import csrf_exempt
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.conf import settings
from sideloader import forms, tasks, models
def verifyHMAC(request, data=None):
clientauth = request.META['HTTP_AUTHORIZATION']
sig = request.META['HTTP_SIG']
if clientauth != settings.SPECTER_AUTHCODE:
return False
sign = [settings.SPECTER_AUTHCODE, request.method, request.path]
if data:
sign.append(
hashlib.sha1(data).hexdigest()
)
mysig = hmac.new(
key = settings.SPECTER_SECRET,
msg = '\n'.join(sign),
digestmod = hashlib.sha1
).digest()
return base64.b64encode(mysig) == sig
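# A client produces a matching signature roughly like this (sketch; assumes
# the shared SPECTER_AUTHCODE/SPECTER_SECRET pair and a 'data' request body):
#   sign = [SPECTER_AUTHCODE, 'POST', '/api/endpoint',
#           hashlib.sha1(data).hexdigest()]
#   sig = base64.b64encode(hmac.new(key=SPECTER_SECRET,
#                                   msg='\n'.join(sign),
#                                   digestmod=hashlib.sha1).digest())
#   request headers: 'Authorization: <SPECTER_AUTHCODE>', 'Sig: <sig>'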
def getProjects(request):
if request.user.is_superuser:
return models.Project.objects.all().order_by('name')
else:
return request.user.project_set.all().order_by('name')
@login_required
def index(request):
projects = getProjects(request)
if request.user.is_superuser:
builds = models.Build.objects.filter(state=0).order_by('-build_time')
last_builds = models.Build.objects.filter(state__gt=0).order_by('-build_time')[:10]
else:
all_builds = models.Build.objects.filter(state=0).order_by('-build_time')
last_builds = models.Build.objects.filter(state__gt=0, project__in=projects).order_by('-build_time')[:10]
builds = []
for build in all_builds:
if build.project in projects:
builds.append(build)
else:
builds.append({'build_time': build.build_time, 'project': {'name': 'Private'}})
return render(request, "index.html", {
'builds': builds,
'last_builds': last_builds,
'projects': projects
})
@login_required
def accounts_profile(request):
if request.method == "POST":
form = forms.UserForm(request.POST, instance=request.user)
if form.is_valid():
user = form.save(commit=False)
user.set_password(form.cleaned_data['password'])
user.save()
return redirect('home')
else:
form = forms.UserForm(instance=request.user)
return render(request, "accounts_profile.html", {
'form': form,
'projects': getProjects(request)
})
@login_required
def manage_index(request):
if not request.user.is_superuser:
return redirect('home')
users = User.objects.all().order_by('username')
repos = models.PackageRepo.objects.all().order_by('name')
hives = []
for k, v in tasks.getClusterStatus().items():
v['hostname'] = k
hives.append({
'hostname': k,
'lastseen': time.ctime(v['lastseen']),
'status': v['status']
})
return render(request, "manage/index.html", {
'projects': getProjects(request),
'users': users,
'repos': repos,
'hives': hives
})
@login_required
def manage_create_repo(request):
if not request.user.is_superuser:
return redirect('home')
if request.method == "POST":
form = forms.PackageRepoForm(request.POST)
if form.is_valid():
release = form.save(commit=False)
release.save()
return redirect('manage_index')
else:
form = forms.PackageRepoForm()
return render(request, "manage/create_repo.html", {
'form': form,
'projects': getProjects(request),
})
@login_required
def manage_delete_repo(request, id):
repo = models.PackageRepo.objects.get(id=id)
repo.delete()
return redirect('manage_index')
@login_required
def server_index(request):
servers = models.Server.objects.all().order_by('last_checkin')
return render(request, "servers/index.html", {
'servers': servers,
'projects': getProjects(request)
})
@login_required
def server_log(request, id):
# Accepts stream target ID
target = models.Target.objects.get(id=id)
projects = getProjects(request)
    # expose the target only after the permission check below succeeds
    d = {
        'project': target.release.project,
        'projects': projects
    }
if (request.user.is_superuser) or (
target.release.project in request.user.project_set.all()):
d['target'] = target
return render(request, "servers/server_log.html", d)
@login_required
def release_index(request):
releases = models.ReleaseStream.objects.all()
return render(request, "releases/index.html", {
'releases': releases,
'projects': getProjects(request)
})
@login_required
def release_create(request):
if not request.user.is_superuser:
return redirect('home')
if request.method == "POST":
form = forms.ReleaseForm(request.POST)
if form.is_valid():
release = form.save(commit=False)
release.save()
return redirect('release_index')
else:
form = forms.ReleaseForm()
return render(request, 'releases/create_edit.html', {
'form': form,
'projects': getProjects(request)
})
@login_required
def release_edit(request, id):
if not request.user.is_superuser:
return redirect('home')
release = models.ReleaseStream.objects.get(id=id)
if request.method == "POST":
form = forms.ReleaseForm(request.POST, instance=release)
if form.is_valid():
release = form.save(commit=False)
release.save()
return redirect('release_index')
else:
form = forms.ReleaseForm(instance=release)
return render(request, 'releases/create_edit.html', {
'form': form,
'release': release,
'projects': getProjects(request)
})
@login_required
def module_index(request):
if not request.user.is_superuser:
return redirect('home')
modules = models.ModuleManifest.objects.all()
return render(request, 'modules/index.html', {
'modules': modules,
'projects': getProjects(request)
})
@login_required
def module_create(request):
if not request.user.is_superuser:
return redirect('home')
if request.method == "POST":
form = forms.ModuleForm(request.POST)
if form.is_valid():
module = form.save(commit=False)
module.save()
return redirect('module_index')
else:
form = forms.ModuleForm()
return render(request, 'modules/create_edit.html', {
'form': form,
'projects': getProjects(request)
})
@login_required
def module_edit(request, id):
if not request.user.is_superuser:
return redirect('home')
module = models.ModuleManifest.objects.get(id=id)
if request.method == "POST":
form = forms.ModuleForm(request.POST, instance=module)
if form.is_valid():
module = form.save(commit=False)
module.save()
return redirect('module_index')
else:
form = forms.ModuleForm(instance=module)
return render(request, 'modules/create_edit.html', {
'form': form,
'projects': getProjects(request)
})
@login_required
def module_scheme(request, id):
module = models.ModuleManifest.objects.get(id=id)
return HttpResponse(module.structure,
content_type='application/json')
@login_required
def manifest_view(request, id):
release = models.ReleaseStream.objects.get(id=id)
project = release.project
if not((request.user.is_superuser) or (
project in request.user.project_set.all())):
return redirect('home')
manifests = release.servermanifest_set.all()
return render(request, 'modules/manifest_view.html', {
'projects': getProjects(request),
'manifests': manifests,
'project': release.project,
'release': release
})
@login_required
def manifest_delete(request, id):
manifest = models.ServerManifest.objects.get(id=id)
release = manifest.release
project = release.project
if not((request.user.is_superuser) or (
project in request.user.project_set.all())):
return redirect('home')
manifest.delete()
return redirect('manifest_view', id=release.id)
@login_required
def manifest_add(request, id):
release = models.ReleaseStream.objects.get(id=id)
project = release.project
if not((request.user.is_superuser) or (
project in request.user.project_set.all())):
return redirect('home')
if request.method == "POST":
form = forms.ManifestForm(request.POST)
if form.is_valid():
manifest = form.save(commit=False)
manifest.release = release
manifest.save()
return redirect('manifest_view', id=release.id)
else:
form = forms.ManifestForm()
return render(request, 'modules/manifest_edit.html', {
'form': form,
'release': release,
'projects': getProjects(request),
'project': release.project
})
@login_required
def manifest_edit(request, id):
manifest = models.ServerManifest.objects.get(id=id)
project = manifest.release.project
if not((request.user.is_superuser) or (
project in request.user.project_set.all())):
return redirect('home')
if request.method == "POST":
form = forms.ManifestForm(request.POST, instance=manifest)
if form.is_valid():
manifest = form.save(commit=False)
manifest.save()
return redirect('manifest_view', id=manifest.release.id)
else:
form = forms.ManifestForm(instance=manifest)
return render(request, 'modules/manifest_edit.html', {
'form': form,
'projects': getProjects(request),
'project': project
})
@login_required
def stream_create(request, project):
p = models.Project.objects.get(id=project)
if request.method == "POST":
form = forms.StreamForm(request.POST)
if form.is_valid():
s = form.save(commit=False)
s.project = p
s.save()
form.save_m2m()
return redirect('projects_view', id=project)
else:
form = forms.StreamForm()
form.fields['targets'].queryset = p.target_set.all().order_by('description')
form.fields['repo'].queryset = p.repo_set.all().order_by('github_url')
return render(request, 'stream/create_edit.html', {
'form': form,
'project': p,
'projects': getProjects(request)
})
@login_required
def stream_edit(request, id):
stream = models.Stream.objects.get(id=id)
if request.method == "POST":
form = forms.StreamForm(request.POST, instance=stream)
if form.is_valid():
stream = form.save(commit=False)
stream.save()
form.save_m2m()
return redirect('projects_view', id=stream.repo.project.id)
else:
form = forms.StreamForm(instance=stream)
form.fields['targets'].queryset = stream.project.target_set.all().order_by('description')
return render(request, 'stream/create_edit.html', {
'form': form,
'stream': stream,
'project': stream.repo.project,
'projects': getProjects(request)
})
@login_required
def stream_delete(request, id):
stream = models.Stream.objects.get(id=id)
project = stream.project
if (request.user.is_superuser) or (
project in request.user.project_set.all()):
stream.delete()
return redirect('projects_view', id=project.id)
@login_required
def stream_push(request, flow, build):
flow = models.ReleaseStream.objects.get(id=flow)
project = flow.project
build = models.Build.objects.get(id=build)
if (request.user.is_superuser) or (
project in request.user.project_set.all()):
tasks.doRelease.delay(build, flow)
return redirect('projects_view', id=project.id)
@login_required
def stream_schedule(request, flow, build):
flow = models.ReleaseStream.objects.get(id=flow)
build = models.Build.objects.get(id=build)
if request.method == "POST":
form = forms.ReleasePushForm(request.POST)
if form.is_valid():
release = form.cleaned_data
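            # Shift the requested time by the submitted timezone offset
            # (hours) before handing it to the release task.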
schedule = release['scheduled'] + timedelta(hours=int(release['tz']))
tasks.doRelease.delay(build, flow, scheduled=schedule)
return redirect('projects_view', id=flow.project.id)
else:
form = forms.ReleasePushForm()
return render(request, 'stream/schedule.html', {
'projects': getProjects(request),
'project': flow.project,
'form': form,
'flow': flow,
'build': build
})
@login_required
def target_create(request, project):
project = models.Project.objects.get(id=project)
if request.method == "POST":
form = forms.TargetForm(request.POST)
if form.is_valid():
target = form.save(commit=False)
target.project = project
target.save()
return redirect('projects_view', id=project.id)
else:
form = forms.TargetForm()
return render(request, 'target/create_edit.html', {
'form': form,
'project': project,
'projects': getProjects(request)
})
@login_required
def target_edit(request, id):
target = models.Target.objects.get(id=id)
if request.method == "POST":
form = forms.TargetForm(request.POST, instance=target)
if form.is_valid():
target = form.save(commit=False)
target.save()
return redirect('projects_view', id=target.project.id)
else:
form = forms.TargetForm(instance=target)
return render(request, 'target/create_edit.html', {
'form': form,
'target': target,
'project': target.project,
'projects': getProjects(request)
})
@login_required
def target_delete(request, id):
target = models.Target.objects.get(id=id)
project = target.project
if (request.user.is_superuser) or (
project in request.user.project_set.all()):
target.delete()
return redirect('projects_view', id=project.id)
@login_required
def release_delete(request, id):
release = models.Release.objects.get(id=id)
project = release.flow.project
if (request.user.is_superuser) or (
project in request.user.project_set.all()):
release.delete()
return redirect('projects_view', id=project.id)
@login_required
def build_view(request, id):
build = models.Build.objects.get(id=id)
d = {
'projects': getProjects(request),
'project': build.project
}
if (request.user.is_superuser) or (
build.project in request.user.project_set.all()):
d['build'] = build
return render(request, 'projects/build_view.html', d)
@login_required
def projects_view(request, id):
project = models.Project.objects.get(id=id)
if (request.user.is_superuser) or (project in request.user.project_set.all()):
repos = project.repo_set.all().order_by('github_url')
builds = []
streams = []
releases = []
for repo in repos:
builds.extend(repo.build_set.all().order_by('-build_time'))
streams.extend(repo.stream_set.all().order_by('name'))
for stream in streams:
releases.extend(stream.release_set.all().order_by(
'-release_date'))
releases.sort(key=lambda r: r.release_date)
builds.sort(key=lambda r: r.build_time)
streams.sort(key=lambda r: r.name)
requests = project.serverrequest_set.filter(approval=0).order_by('request_date')
d = {
'project': project,
'repos': repos,
'targets': project.target_set.all().order_by('description'),
'builds': reversed(builds),
'streams': streams,
'releases': reversed(releases[-5:]),
'projects': getProjects(request),
'requests': requests
}
else:
d = {}
return render(request, 'projects/view.html', d)
@login_required
def project_graph(request, id):
    # Build the project's repo/target/stream relationships as JSON for the graph view
project = models.Project.objects.get(id=id)
data = {
'project': project.name,
'repos': [],
'targets': [],
'streams': []
}
for repo in project.repo_set.all():
data['repos'].append({
'name': repo.github_url,
'id': 'R%s' % repo.id
})
for target in project.target_set.all():
data['targets'].append({
'name': target.description,
'id': 'T%s' % target.id
})
for stream in project.stream_set.all():
data['streams'].append({
'id': 'S%s' % stream.id,
'name': stream.name,
'branch': stream.branch,
'repo_link': 'R%s' % stream.repo.id,
'target_link': ['T%s' % t.id for t in stream.targets.all()]
})
return HttpResponse(json.dumps(data),
content_type='application/json')
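# Illustrative response shape for project_graph (IDs and names are made up;
# the structure follows the loops above):
#   {"project": "web", "repos": [{"name": "git@host:repo", "id": "R1"}],
#    "targets": [{"name": "production", "id": "T2"}],
#    "streams": [{"id": "S3", "name": "stable", "branch": "master",
#                 "repo_link": "R1", "target_link": ["T2"]}]}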
@login_required
def projects_delete(request, id):
if not request.user.is_superuser:
return redirect('home')
models.Project.objects.get(id=id).delete()
return redirect('home')
@login_required
def projects_create(request):
if not request.user.is_superuser:
return redirect('home')
if request.method == "POST":
form = forms.ProjectForm(request.POST)
if form.is_valid():
project = form.save(commit=False)
project.save()
return redirect('projects_view', id=project.id)
else:
form = forms.ProjectForm()
return render(request, 'projects/create_edit.html', {
'projects': getProjects(request),
'form': form
})
@login_required
def projects_edit(request, id):
if not request.user.is_superuser:
return redirect('home')
project = models.Project.objects.get(id=id)
if request.method == "POST":
form = forms.ProjectForm(request.POST, instance=project)
if form.is_valid():
project = form.save(commit=False)
project.save()
form.save_m2m()
return redirect('projects_view', id=id)
else:
form = forms.ProjectForm(instance=project)
d = {
'form': form,
'project': project,
'projects': getProjects(request)
}
return render(request, 'projects/create_edit.html', d)
@login_required
def server_request(request, project):
project = models.Project.objects.get(id=project)
if request.method == "POST":
form = forms.ServerRequestForm(request.POST)
if form.is_valid():
server = form.save(commit=False)
server.requested_by = request.user
server.project = project
server.save()
return redirect('projects_view', id=project.id)
else:
form = forms.ServerRequestForm()
return render(request, 'projects/server_request.html', {
'form': form,
'project': project,
'projects': getProjects(request),
})
@login_required
def repo_edit(request, id):
repo = models.Repo.objects.get(id=id)
project = repo.project
if request.method == "POST":
form = forms.RepoForm(request.POST, instance=repo)
if form.is_valid():
repo = form.save(commit=False)
repo.project = project
repo.save()
return redirect('projects_view', id=id)
else:
form = forms.RepoForm(instance=repo)
d = {
'projects': getProjects(request),
'repo': repo,
'form': form
}
return render(request, 'repo/create_edit.html', d)
@login_required
def repo_delete(request, id):
repo = models.Repo.objects.get(id=id)
projectid = repo.project.id
repo.delete()
return redirect('projects_view', id=projectid)
@login_required
def repo_create(request, project):
project = models.Project.objects.get(id=project)
if request.method == "POST":
form = forms.RepoForm(request.POST)
if form.is_valid():
repo = form.save(commit=False)
repo.created_by_user = request.user
repo.idhash = uuid.uuid1().get_hex()
repo.project = project
repo.save()
return redirect('projects_view', id=project.id)
else:
form = forms.RepoForm()
return render(request, 'repo/create_edit.html', {
'projects': getProjects(request),
'project': project,
'form': form
})
@login_required
def help_index(request):
return render(request, 'help/index.html')
@login_required
def build_cancel(request, id):
build = models.Build.objects.get(id=id)
if build.project in request.user.project_set.all():
build.state = 3
build.save()
return redirect('home')
@login_required
def projects_build(request, id):
project = models.Project.objects.get(id=id)
if project and (request.user.is_superuser or (
project in request.user.project_set.all())):
current_builds = models.Build.objects.filter(project=project, state=0)
if current_builds:
return redirect('build_view', id=current_builds[0].id)
else:
bcount = project.build_counter + 1
build = models.Build.objects.create(project=project, state=0, build_num=bcount)
            task = tasks.build(build)
            build.task_id = task.task_id
build.save()
project.build_counter = bcount
project.save()
return redirect('build_view', id=build.id)
return redirect('home')
@login_required
def build_output(request, id):
build = models.Build.objects.get(id=id)
if (request.user.is_superuser) or (
build.project in request.user.project_set.all()):
d = {'state': build.state, 'log': build.log}
else:
d = {}
return HttpResponse(json.dumps(d), content_type='application/json')
@login_required
def get_servers(request):
d = [s.name for s in models.Server.objects.all()]
return HttpResponse(json.dumps(d), content_type='application/json')
@login_required
def get_stream_servers(request, id):
stream = models.ReleaseStream.objects.get(id=id)
d = [s.server.name for s in stream.target_set.all()]
return HttpResponse(json.dumps(d), content_type='application/json')
#############
# API methods
@csrf_exempt
def api_build(request, hash):
project = models.Project.objects.get(idhash=hash)
if project:
if request.method == 'POST':
if request.POST.get('payload'):
r = json.loads(request.POST['payload'])
else:
r = json.loads(request.body)
ref = r.get('ref', '')
branch = ref.split('/',2)[-1]
if branch != project.branch:
return HttpResponse('{"result": "Request ignored"}',
content_type='application/json')
current_builds = models.Build.objects.filter(project=project, state=0)
if not current_builds:
build = models.Build.objects.create(project=project, state=0)
task = tasks.build(build)
build.task_id = task.task_id
build.save()
return HttpResponse('{"result": "Building"}',
content_type='application/json')
return HttpResponse('{"result": "Already building"}',
content_type='application/json')
return redirect('home')
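# api_build accepts either a form-encoded 'payload' field (as older
# GitHub-style hooks send) or a raw JSON body; only 'ref' is inspected.
# An illustrative payload {"ref": "refs/heads/master"} resolves to branch
# "master" via the split above.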
@csrf_exempt
def api_sign(request, hash):
signoff = models.ReleaseSignoff.objects.get(idhash=hash)
signoff.signed = True
signoff.save()
if signoff.release.waiting:
if signoff.release.check_signoff():
tasks.runRelease.delay(signoff.release)
return render(request, "sign.html", {
'signoff': signoff
})
@csrf_exempt
def api_checkin(request):
# Server checkin endpoint
if request.method == 'POST':
if verifyHMAC(request, request.body):
data = json.loads(request.body)
try:
server = models.Server.objects.get(name=data['hostname'])
except models.Server.DoesNotExist:
server = models.Server.objects.create(name=data['hostname'])
server.last_checkin = datetime.now()
server.save()
return HttpResponse(json.dumps({}),
content_type='application/json')
return HttpResponse(
json.dumps({"error": "Not authorized"}),
content_type='application/json'
)
@csrf_exempt
def api_enc(request, server):
# Puppet ENC
if verifyHMAC(request):
# Build our ENC dict
try:
server = models.Server.objects.get(name=server)
        except models.Server.DoesNotExist:
server = None
if server:
releases = [target.release for target in server.target_set.all()]
server.last_checkin = datetime.now()
server.last_puppet_run = datetime.now()
server.change = False
server.status = "Success"
cdict = {}
for release in releases:
for manifest in release.servermanifest_set.all():
key = manifest.module.key
try:
value = json.loads(manifest.value)
                    except Exception as e:
server.status = "Validation error in manifest "
server.status += "%s -> %s -> %s: %s" % (
release.project.name,
release.name,
manifest.module.name,
e
)
continue
if isinstance(value, list):
if key in cdict:
cdict[key].extend(value)
else:
cdict[key] = value
if isinstance(value, dict):
for k, v in value.items():
if key in cdict:
cdict[key][k] = v
else:
cdict[key] = {k: v}
server.save()
node = {
'parameters': cdict
}
else:
node = {}
return HttpResponse(yaml.safe_dump(node),
content_type='application/yaml')
return HttpResponse(
json.dumps({"error": "Not authorized"}),
content_type='application/json'
)
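# Manifest values are merged per module key: list values are concatenated and
# dict values are merged key-by-key.  Illustrative example (made-up keys):
#   {"packages": ["a"]} then {"packages": ["b"]}  ->  {"packages": ["a", "b"]}
#   {"users": {"x": 1}} then {"users": {"y": 2}}  ->  {"users": {"x": 1, "y": 2}}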
| mit | 105,687,536,872,928,980 | 27.108696 | 113 | 0.593599 | false |
ianmiell/shutit-distro | libmng/libmng.py | 1 | 1137 | """ShutIt module. See http://shutit.tk
"""
from shutit_module import ShutItModule
class libmng(ShutItModule):
def build(self, shutit):
shutit.send('mkdir -p /tmp/build/libmng')
shutit.send('cd /tmp/build/libmng')
shutit.send('wget -qO- http://downloads.sourceforge.net/libmng/libmng-2.0.2.tar.xz | xz -d | tar -xf -')
shutit.send('cd libmng*')
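        # The sed below prepends '#include <stdio.h>' ahead of the jpeg include
        # in libmng_types.h; newer libjpeg headers expect stdio.h to come first.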
shutit.send(r'''sed -i "s:#include <jpeg:#include <stdio.h>\n&:" libmng_types.h''')
shutit.send('./configure --prefix=/usr --disable-static')
shutit.send('make')
shutit.send('make install')
shutit.send('install -v -m755 -d /usr/share/doc/libmng-2.0.2')
shutit.send('install -v -m644 doc/*.txt /usr/share/doc/libmng-2.0.2')
return True
#def get_config(self, shutit):
# shutit.get_config(self.module_id,'item','default')
# return True
def finalize(self, shutit):
shutit.send('rm -rf /tmp/build/libmng')
return True
#def remove(self, shutit):
# return True
#def test(self, shutit):
# return True
def module():
return libmng(
'shutit.tk.sd.libmng.libmng', 158844782.012101,
description='',
maintainer='',
depends=['shutit.tk.sd.lcms.lcms']
)
| gpl-2.0 | -1,712,007,182,109,809,700 | 25.44186 | 106 | 0.673703 | false |
efinst0rm/B3DiscordPlugin | discordban.py | 1 | 6918 | #
# DiscordB3 (www.namelessnoobs.com)
# Copyright (C) 2016 st0rm
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# Credits
# Fenix, the original author of the IRC B3 bot plugin on which this is based.
# Mordecaii from iG for his lovely discordPush function <3.
# ItsDizzy from aD for the embedded message and some cleanups.
__author__ = "Fenix, st0rm, Mordecaii, ItsDizzy"
__version__ = "1.2"
import b3
import b3.plugin
import b3.events
import datetime
import urllib2
import json
from b3.functions import minutesStr
class DiscordbanPlugin(b3.plugin.Plugin):
####################################################################################################################
# #
# PLUGIN INIT #
# #
####################################################################################################################
def __init__(self, console, config=None):
"""
Build the plugin object.
:param console: The parser instance.
:param config: The plugin configuration object instance.
"""
b3.plugin.Plugin.__init__(self, console, config)
self.adminPlugin = self.console.getPlugin("admin")
if not self.adminPlugin:
raise AttributeError("could not start without admin plugin")
def onLoadConfig(self):
"""
Load plugin configuration.
"""
self._discordWebhookUrl = self.config.get("authentication","webhookUrl")
self._serverName = self.config.get("authentication","hostname")
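        # A minimal config sketch (values are illustrative; B3 plugin configs
        # are XML files with <settings> sections):
        #
        #   <configuration plugin="discordban">
        #       <settings name="authentication">
        #           <set name="webhookUrl">https://discordapp.com/api/webhooks/id/token</set>
        #           <set name="hostname">My Game Server</set>
        #       </settings>
        #   </configuration>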
def onStartup(self):
"""
Initialize plugin settings.
"""
# register necessary events
self.registerEvent(self.console.getEventID("EVT_CLIENT_BAN"), self.onBan)
self.registerEvent(self.console.getEventID("EVT_CLIENT_BAN_TEMP"), self.onBan)
self.registerEvent(self.console.getEventID("EVT_CLIENT_KICK"), self.onKick)
# notice plugin started
self.debug("plugin started")
####################################################################################################################
# #
# EVENTS #
# #
####################################################################################################################
def onBan(self, event):
"""
Perform operations when EVT_CLIENT_BAN or EVT_CLIENT_BAN_TEMP is received.
:param event: An EVT_CLIENT_BAN or and EVT_CLIENT_BAN_TEMP event.
"""
admin = event.data["admin"]
client = event.client
reason = event.data["reason"]
admin_name = ""
if admin == None:
admin_name = "B3"
else:
admin_name = admin.name
embed = {
"title": "B3 Ban",
"description": "**%s** Banned **%s**" % (admin_name, client.name),
"timestamp": datetime.datetime.now().isoformat(),
"color": 15466496,
"fields": [
{
"name": "Server",
"value": self._serverName,
"inline": False
}
]
}
if reason:
# if there is a reason attached to the ban, append it to the notice
embed["fields"].append({
"name": "Reason",
"value": self.console.stripColors(reason),
"inline": True
})
duration = 'permanent'
if 'duration' in event.data:
# if there is a duration convert it
duration = minutesStr(event.data['duration'])
# append the duration to the ban notice
embed["fields"].append({"name": "Duration", "value": duration, "inline": True})
self.discordEmbeddedPush(embed)
def onKick(self, event):
"""
Perform operations when EVT_CLIENT_KICK is received.
:param event: An EVT_CLIENT_KICK event.
"""
admin = event.data["admin"]
client = event.client
reason = event.data["reason"]
admin_name = ""
if admin == None:
admin_name = "B3"
else:
admin_name = admin.name
embed = {
"title": "B3 Kick",
"description": "**%s** Kicked **%s**" % (admin_name, client.name),
"timestamp": datetime.datetime.now().isoformat(),
"color": 15466496,
"fields": [
{
"name": "Server",
"value": self._serverName,
"inline": False
}
]
}
if reason:
# if there is a reason attached to the ban, append it to the notice
embed["fields"].append({
"name": "Reason",
"value": self.console.stripColors(reason),
"inline": True
})
self.discordEmbeddedPush(embed)
def discordEmbeddedPush(self, embed):
"""
Send embedded message to discord bot huehue
"""
data = json.dumps({"embeds": [embed]})
req = urllib2.Request(self._discordWebhookUrl, data, {
"Content-Type": "application/json",
"User-Agent": "B3DiscordbanPlugin/1.1" #Is that a real User-Agent? Nope but who cares.
})
        # Final magic happens here; we will never get an error, of course ;)
try:
urllib2.urlopen(req)
except urllib2.HTTPError as ex:
self.debug("Cannot push data to Discord. is your webhook url right?")
self.debug("Data: %s\nCode: %s\nRead: %s" % (data, ex.code, ex.read()))
| gpl-3.0 | 1,292,984,793,108,103,700 | 36.803279 | 120 | 0.477016 | false |
ThomasZh/legend-league-portal | foo/portal/newsup.py | 1 | 53200 | #!/usr/bin/env python
# _*_ coding: utf-8_*_
#
# Copyright 2016 planc2c.com
# [email protected]
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tornado.web
import logging
import time
import sys
import os
import uuid
import smtplib
import json as JSON # alias the module so it does not clash with local variables named "json"
from bson import json_util
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../"))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../dao"))
from tornado.escape import json_encode, json_decode
from tornado.httpclient import *
from tornado.httputil import url_concat
from comm import *
from global_const import *
class WxMpVerifyHandler(tornado.web.RequestHandler):
def get(self):
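        # Respond with the static token WeChat MP uses for domain verification.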
self.finish('qdkkOWgyqqLTrijx')
return
class NewsupLoginNextHandler(tornado.web.RequestHandler):
def get(self):
login_next = self.get_secure_cookie("login_next")
logging.info("got login_next %r",login_next)
if login_next:
self.redirect(login_next)
else:
self.redirect("/portal/newsup/index")
class NewsupIndexHandler(BaseHandler):
def get(self):
logging.info(self.request)
        # league info
league_info = self.get_league_info()
        # franchises (scenic areas)
params = {"filter":"league", "franchise_type":"景区", "page":1, "limit":5}
url = url_concat(API_DOMAIN+"/api/leagues/"+LEAGUE_ID+"/clubs", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
rs = data['rs']
franchises = rs['data']
for franchise in franchises:
franchise['create_time'] = timestamp_friendly_date(franchise['create_time'])
        # suppliers
params = {"filter":"league", "franchise_type":"供应商", "page":1, "limit":5}
url = url_concat(API_DOMAIN+"/api/leagues/"+LEAGUE_ID+"/clubs", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
rs = data['rs']
suppliers = rs['data']
for supplier in suppliers:
supplier['create_time'] = timestamp_friendly_date(supplier['create_time'])
        # sceneries (scenic spots)
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"41c057a6f73411e69a3c00163e023e51", "idx":0, "limit":5}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
sceneries = data['rs']
for article in sceneries:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # journeys (travel notes)
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"01d6120cf73411e69a3c00163e023e51", "idx":0, "limit":12}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
journeies = data['rs']
for article in journeies:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # activities
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"0bbf89e2f73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
activities = data['rs']
        # recent articles (news)
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
news = data['rs']
for article in news:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # popular articles
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"3801d62cf73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
populars = data['rs']
for article in populars:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # hot news
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"1b86ad38f73411e69a3c00163e023e51", "idx":0, "limit":12}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
hots = data['rs']
for article in hots:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # latest comments
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":5}
url = url_concat(API_DOMAIN+"/api/last-comments", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
lastest_comments = data['rs']
for comment in lastest_comments:
comment['create_time'] = timestamp_friendly_date(comment['create_time'])
# multimedia
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":8}
url = url_concat(API_DOMAIN+"/api/multimedias", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
multimedias = data['rs']
for multimedia in multimedias:
multimedia['publish_time'] = timestamp_friendly_date(multimedia['publish_time'])
# notices
params = {"filter":"league", "league_id":LEAGUE_ID, "page":1, "limit":3}
url = url_concat(API_DOMAIN+"/api/notice-board", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
notices = data['rs']
is_login = False
access_token = self.get_secure_cookie("access_token")
logging.info("got access_token>>>>> %r",access_token)
if access_token:
is_login = True
self.render('newsup/index.html',
is_login=is_login,
franchises=franchises,
suppliers=suppliers,
sceneries=sceneries,
journeies=journeies,
news=news,
populars=populars,
hots=hots,
league_info=league_info,
activities=activities,
lastest_comments=lastest_comments,
multimedias=multimedias,
api_domain=API_DOMAIN,
notices=notices['data'])
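# The handlers in this module repeat the same url_concat/HTTPClient/json_decode
# sequence.  A minimal helper sketch that could factor it out; the helper name
# and the absence of error handling are illustrative, not part of the original
# code:
def fetch_api_json(path, params=None, headers=None):
    """GET an API endpoint and return the decoded JSON body."""
    url = url_concat(API_DOMAIN + path, params) if params else API_DOMAIN + path
    http_client = HTTPClient()
    response = http_client.fetch(url, method="GET", headers=headers)
    logging.info("got response %r", response.body)
    return json_decode(response.body)
# e.g. news = fetch_api_json("/api/articles", params)['rs']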
class NewsupAccountHandler(AuthorizationHandler):
@tornado.web.authenticated # if no session, redirect to login page
def get(self):
logging.info(self.request)
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
        # league info
league_info = self.get_league_info()
headers = {"Authorization":"Bearer "+access_token}
url = API_DOMAIN+"/api/myinfo?filter=login"
http_client = HTTPClient()
response = http_client.fetch(url, method="GET", headers=headers)
logging.info("got response %r", response.body)
data = json_decode(response.body)
user = data['rs']
self.render('newsup/account.html',
is_login=is_login,
league_info=league_info,
user = user,
access_token=access_token,
api_domain=API_DOMAIN,
upyun_domain=UPYUN_DOMAIN,
upyun_notify_url=UPYUN_NOTIFY_URL,
upyun_form_api_secret=UPYUN_FORM_API_SECRET,
upyun_bucket=UPYUN_BUCKET)
class NewsupAuthorHandler(BaseHandler):
def get(self):
logging.info(self.request)
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
        # league info
league_info = self.get_league_info()
        # news
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"30a56cb8f73411e69a3c00163e023e51", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
news = data['rs']
for article in news:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # popular articles
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"3801d62cf73411e69a3c00163e023e51", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
populars = data['rs']
for article in populars:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # activities
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"0bbf89e2f73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
activities = data['rs']
        # latest comments
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":5}
url = url_concat(API_DOMAIN+"/api/last-comments", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
lastest_comments = data['rs']
for comment in lastest_comments:
comment['create_time'] = timestamp_friendly_date(comment['create_time'])
self.render('newsup/author.html',
is_login=is_login,
league_info=league_info,
news=news,
populars=populars,
activities=activities,
api_domain=API_DOMAIN,
lastest_comments=lastest_comments)
class NewsupMediaHandler(BaseHandler):
def get(self):
logging.info(self.request)
        # league info
league_info = self.get_league_info()
# multimedia
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":14}
url = url_concat(API_DOMAIN+"/api/multimedias", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
multimedias = data['rs']
        # news
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"30a56cb8f73411e69a3c00163e023e51", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
news = data['rs']
for article in news:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # hot news
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"1b86ad38f73411e69a3c00163e023e51", "idx":0, "limit":12}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
hots = data['rs']
for article in hots:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # popular articles
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"3801d62cf73411e69a3c00163e023e51", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
populars = data['rs']
for article in populars:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # activities
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"0bbf89e2f73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
activities = data['rs']
        # latest comments
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":5}
url = url_concat(API_DOMAIN+"/api/last-comments", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
lastest_comments = data['rs']
for comment in lastest_comments:
comment['create_time'] = timestamp_friendly_date(comment['create_time'])
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
self.render('newsup/media.html',
is_login=is_login,
league_info=league_info,
news=news,
populars=populars,
activities=activities,
hots=hots,
lastest_comments=lastest_comments,
league_id=LEAGUE_ID,
api_domain=API_DOMAIN,
multimedias=multimedias)
class NewsupShortcodesHandler(BaseHandler):
def get(self):
logging.info(self.request)
        # league info
league_info = self.get_league_info()
        # news
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"30a56cb8f73411e69a3c00163e023e51", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
news = data['rs']
for article in news:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # popular articles
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"3801d62cf73411e69a3c00163e023e51", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
populars = data['rs']
for article in populars:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # activities
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"0bbf89e2f73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
activities = data['rs']
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
self.render('newsup/shortcodes.html',
is_login=is_login,
league_info=league_info,
news=news,
activities=activities,
api_domain=API_DOMAIN,
populars=populars)
class NewsupContactHandler(BaseHandler):
def get(self):
logging.info(self.request)
        # league info
league_info = self.get_league_info()
        # latest comments
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":5}
url = url_concat(API_DOMAIN+"/api/last-comments", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
lastest_comments = data['rs']
for comment in lastest_comments:
comment['create_time'] = timestamp_friendly_date(comment['create_time'])
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
self.render('newsup/contact.html',
is_login=is_login,
league_info=league_info,
lastest_comments=lastest_comments,
api_domain=API_DOMAIN,
league_id=LEAGUE_ID)
class NewsupItemDetailHandler(BaseHandler):
def get(self):
logging.info(self.request)
article_id = self.get_argument("id", "")
        # league info
league_info = self.get_league_info()
        # recent articles (news)
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
news = data['rs']
for article in news:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # popular articles
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"3801d62cf73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
populars = data['rs']
for article in populars:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # activities
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"0bbf89e2f73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
activities = data['rs']
# article
url = API_DOMAIN+"/api/articles/"+article_id
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got article response %r", response.body)
data = json_decode(response.body)
article_info = data['rs']
article_info['publish_time'] = timestamp_friendly_date(article_info['publish_time'])
        # hot news
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"1b86ad38f73411e69a3c00163e023e51", "idx":0, "limit":12}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
hots = data['rs']
for article in hots:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
# update read_num
read_num = article_info['read_num']
url = API_DOMAIN+"/api/articles/"+article_id+"/read"
http_client = HTTPClient()
_body = {"read_num": read_num+1}
_json = json_encode(_body)
response = http_client.fetch(url, method="POST", body=_json)
logging.info("got update read_num response %r", response.body)
        # latest comments
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":5}
url = url_concat(API_DOMAIN+"/api/last-comments", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
lastest_comments = data['rs']
for comment in lastest_comments:
comment['create_time'] = timestamp_friendly_date(comment['create_time'])
# multimedia
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/multimedias", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
multimedias = data['rs']
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
self.render('newsup/item-detail.html',
is_login=is_login,
access_token=access_token,
league_info=league_info,
article_info=article_info,
news=news,
populars=populars,
hots=hots,
activities=activities,
api_domain=API_DOMAIN,
multimedias=multimedias,
lastest_comments=lastest_comments)
class NewsupNewHandler(BaseHandler):
def get(self):
logging.info(self.request)
        # league info
league_info = self.get_league_info()
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
self.render('newsup/new.html',
league_info=league_info,
api_domain=API_DOMAIN,
is_login=is_login)
class NewsupCategoryTileHandler(BaseHandler):
def get(self):
logging.info(self.request)
        # league info
league_info = self.get_league_info()
        # recent articles (news)
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
news = data['rs']
for article in news:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # popular articles
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"3801d62cf73411e69a3c00163e023e51", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
populars = data['rs']
for article in populars:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # activities
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"0bbf89e2f73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
activities = data['rs']
        # latest comments
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":5}
url = url_concat(API_DOMAIN+"/api/last-comments", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
lastest_comments = data['rs']
for comment in lastest_comments:
comment['create_time'] = timestamp_friendly_date(comment['create_time'])
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
self.render('newsup/category-tile.html',
is_login=is_login,
league_info=league_info,
lastest_comments=lastest_comments,
news=news,
activities=activities,
api_domain=API_DOMAIN,
populars=populars)
class NewsupCategoryHandler(BaseHandler):
def get(self):
logging.info(self.request)
category_id = self.get_argument("id", "")
        # league info
league_info = self.get_league_info()
# query category_name by category_id
url = API_DOMAIN+"/api/categories/" + category_id
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
category = data['rs']
# query by category_id
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":category_id, "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
sceneries = data['rs']
for article in sceneries:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
# multimedia
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/multimedias", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
multimedias = data['rs']
        # recent articles (news)
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
news = data['rs']
for article in news:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # popular articles
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"3801d62cf73411e69a3c00163e023e51", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
populars = data['rs']
for article in populars:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # activities
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"0bbf89e2f73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
activities = data['rs']
        # hot news
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"1b86ad38f73411e69a3c00163e023e51", "idx":0, "limit":12}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
hots = data['rs']
for article in hots:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # latest comments
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":5}
url = url_concat(API_DOMAIN+"/api/last-comments", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
lastest_comments = data['rs']
for comment in lastest_comments:
comment['create_time'] = timestamp_friendly_date(comment['create_time'])
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
self.render('newsup/category.html',
is_login=is_login,
league_info=league_info,
sceneries=sceneries,
news=news,
hots=hots,
populars=populars,
activities=activities,
lastest_comments=lastest_comments,
multimedias=multimedias,
league_id=LEAGUE_ID,
category_id=category_id,
api_domain=API_DOMAIN,
category=category)
class NewsupCategorySearchHandler(BaseHandler):
def get(self):
logging.info(self.request)
category_id = self.get_argument("id", "")
        # league info
league_info = self.get_league_info()
# query category_name by category_id
url = API_DOMAIN+"/api/categories/" + category_id
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
category = data['rs']
# query by category_id
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":category_id, "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
sceneries = data['rs']
for article in sceneries:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
# multimedia
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/multimedias", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
multimedias = data['rs']
        # recent articles (news)
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
news = data['rs']
for article in news:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # popular articles
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"3801d62cf73411e69a3c00163e023e51", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
populars = data['rs']
for article in populars:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # activities
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"0bbf89e2f73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
activities = data['rs']
        # hot news
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"1b86ad38f73411e69a3c00163e023e51", "idx":0, "limit":12}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
hots = data['rs']
for article in hots:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # latest comments
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":5}
url = url_concat(API_DOMAIN+"/api/last-comments", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
lastest_comments = data['rs']
for comment in lastest_comments:
comment['create_time'] = timestamp_friendly_date(comment['create_time'])
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
self.render('newsup/category-search.html',
is_login=is_login,
league_info=league_info,
sceneries=sceneries,
news=news,
hots=hots,
populars=populars,
activities=activities,
lastest_comments=lastest_comments,
multimedias=multimedias,
league_id=LEAGUE_ID,
category_id=category_id,
api_domain=API_DOMAIN,
category=category)
class NewsupFranchisesHandler(BaseHandler):
def get(self):
logging.info(self.request)
franchise_type = self.get_argument("franchise_type", "")
franchise_type = franchise_type.encode('utf-8')
logging.info("got franchise_type %r from argument", franchise_type)
        # league info
league_info = self.get_league_info()
        # franchises (scenic areas)
params = {"franchise_type":franchise_type, "page":1, "limit":1}
url = url_concat(API_DOMAIN+"/api/leagues/"+LEAGUE_ID+"/clubs", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
franchises = data['rs']['data']
for franchise in franchises:
franchise['create_time'] = timestamp_friendly_date(franchise['create_time'])
# multimedia
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/multimedias", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
multimedias = data['rs']
        # recent articles (news)
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
news = data['rs']
for article in news:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # popular articles
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"3801d62cf73411e69a3c00163e023e51", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
populars = data['rs']
for article in populars:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # activities
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"0bbf89e2f73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
activities = data['rs']
        # hot news
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"1b86ad38f73411e69a3c00163e023e51", "idx":0, "limit":12}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
hots = data['rs']
for article in hots:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # latest comments
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":5}
url = url_concat(API_DOMAIN+"/api/last-comments", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
lastest_comments = data['rs']
for comment in lastest_comments:
comment['create_time'] = timestamp_friendly_date(comment['create_time'])
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
self.render('newsup/franchises.html',
is_login=is_login,
league_info=league_info,
franchises=franchises,
multimedias=multimedias,
news=news,
hots= hots,
populars=populars,
activities=activities,
lastest_comments=lastest_comments,
league_id=LEAGUE_ID,
api_domain=API_DOMAIN,
franchise_type=franchise_type)
class NewsupFranchiseDetailHandler(BaseHandler):
def get(self):
logging.info(self.request)
franchise_id = self.get_argument("id", "")
access_token = self.get_secure_cookie("access_token")
        # league info
league_info = self.get_league_info()
        # recent articles (news)
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
news = data['rs']
for article in news:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # popular articles
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"3801d62cf73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
populars = data['rs']
for article in populars:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # activities
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"0bbf89e2f73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
activities = data['rs']
# article
url = API_DOMAIN+"/api/clubs/"+franchise_id
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got article response %r", response.body)
data = json_decode(response.body)
franchise = data['rs']
if not franchise.has_key('paragraphs'):
franchise['paragraphs'] = ''
if not franchise.has_key('franchise_type'):
franchise['franchise_type'] = 'franchise'
if franchise.has_key('create_time'):
franchise['create_time'] = timestamp_friendly_date(franchise['create_time'])
else:
franchise['create_time'] = timestamp_friendly_date(0)
# franchise['create_time'] = timestamp_friendly_date(franchise['create_time'])
        # hot news
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"1b86ad38f73411e69a3c00163e023e51", "idx":0, "limit":12}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
hots = data['rs']
for article in hots:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
# update read_num
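        # Increment the franchise read counter via a synchronous POST; the
        # response is only logged, so a failure here does not block rendering.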
read_num = franchise['read_num']
url = API_DOMAIN+"/api/articles/"+franchise_id+"/read"
http_client = HTTPClient()
_body = {"read_num": read_num+1}
_json = json_encode(_body)
response = http_client.fetch(url, method="POST", body=_json)
logging.info("got update read_num response %r", response.body)
        # latest comments
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":5}
url = url_concat(API_DOMAIN+"/api/last-comments", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
lastest_comments = data['rs']
for comment in lastest_comments:
comment['create_time'] = timestamp_friendly_date(comment['create_time'])
# multimedia
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/multimedias", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
multimedias = data['rs']
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
self.render('newsup/franchise-detail.html',
is_login=is_login,
access_token=access_token,
league_info=league_info,
franchise=franchise,
news=news,
populars=populars,
hots=hots,
activities=activities,
multimedias=multimedias,
api_domain=API_DOMAIN,
lastest_comments=lastest_comments)
class NewsupApplyFranchiseHandler(AuthorizationHandler):
@tornado.web.authenticated # if no session, redirect to login page
def get(self):
logging.info(self.request)
        # league info
league_info = self.get_league_info()
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
franchise = None
try:
params = {"filter":"franchise"}
url = url_concat(API_DOMAIN+"/api/myinfo", params)
http_client = HTTPClient()
headers={"Authorization":"Bearer "+access_token}
response = http_client.fetch(url, method="GET", headers=headers)
logging.info("got response %r", response.body)
data = json_decode(response.body)
franchise = data['rs']
if franchise:
if not franchise['club'].has_key("province"):
franchise['club']['province'] = ''
franchise['club']['city'] = ''
if not franchise['club'].has_key("city"):
franchise['club']['city'] = ''
if not franchise['club'].has_key("franchise_type"):
franchise['club']['franchise_type'] = ''
franchise['create_time'] = timestamp_datetime(franchise['create_time'])
except:
logging.info("got franchise=[None]")
        # latest comments
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":5}
url = url_concat(API_DOMAIN+"/api/last-comments", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
lastest_comments = data['rs']
for comment in lastest_comments:
comment['create_time'] = timestamp_friendly_date(comment['create_time'])
self.render('newsup/apply-franchise.html',
is_login=is_login,
league_info=league_info,
access_token=access_token,
league_id=LEAGUE_ID,
franchise=franchise,
api_domain=API_DOMAIN,
upyun_domain=UPYUN_DOMAIN,
upyun_notify_url=UPYUN_NOTIFY_URL,
upyun_form_api_secret=UPYUN_FORM_API_SECRET,
upyun_bucket=UPYUN_BUCKET,
lastest_comments=lastest_comments)
class NewsupSearchResultHandler(BaseHandler):
def get(self):
logging.info(self.request)
# category_id = self.get_argument("id", "")
        # league info
league_info = self.get_league_info()
# query by category_id
# params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":category_id, "idx":0, "limit":6}
# url = url_concat(API_DOMAIN+"/api/articles", params)
# http_client = HTTPClient()
# response = http_client.fetch(url, method="GET")
# logging.info("got sceneries response %r", response.body)
# data = json_decode(response.body)
# sceneries = data['rs']
# for article in sceneries:
# article['publish_time'] = timestamp_friendly_date(article['publish_time'])
# multimedia
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/multimedias", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
multimedias = data['rs']
        # news
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"0e9a3c68e94511e6b40600163e023e51", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
news = data['rs']
for article in news:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # popular articles
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"3801d62cf73411e69a3c00163e023e51", "idx":0, "limit":6}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
populars = data['rs']
for article in populars:
article['publish_time'] = timestamp_friendly_date(article['publish_time'])
        # activities
params = {"filter":"league", "league_id":LEAGUE_ID, "status":"publish", "category":"0bbf89e2f73411e69a3c00163e023e51", "idx":0, "limit":4}
url = url_concat(API_DOMAIN+"/api/articles", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
activities = data['rs']
        # latest comments
params = {"filter":"league", "league_id":LEAGUE_ID, "idx":0, "limit":5}
url = url_concat(API_DOMAIN+"/api/last-comments", params)
http_client = HTTPClient()
response = http_client.fetch(url, method="GET")
logging.info("got response %r", response.body)
data = json_decode(response.body)
lastest_comments = data['rs']
for comment in lastest_comments:
comment['create_time'] = timestamp_friendly_date(comment['create_time'])
is_login = False
access_token = self.get_secure_cookie("access_token")
if access_token:
is_login = True
self.render('newsup/search-result.html',
is_login=is_login,
league_info=league_info,
news=news,
populars=populars,
activities=activities,
lastest_comments=lastest_comments,
multimedias=multimedias,
league_id=LEAGUE_ID,
api_domain=API_DOMAIN)
| apache-2.0 | -2,263,610,190,328,251,000 | 41.378117 | 147 | 0.594806 | false |
MarkLark/dstore | tests/__init__.py | 1 | 1435 |
from dstore import MemoryStore, Model, var, mod
from unittest import TestCase
__all__ = [ "BaseTest", "Car", "AllVars" ]
class Car( Model ):
_namespace = "cars.make"
_vars = [
var.RowID,
var.String( "manufacturer", 32, mods = [ mod.NotNull() ] ),
var.String( "make", 32, mods = [ mod.NotNull() ] ),
var.Number( "year", mods = [ mod.NotNull(), mod.Min( 1950 ), mod.Max( 2017 ) ] ),
]
class AllVars( Model ):
_namespace = "all.vars"
_vars = [
var.RowID,
var.Number( "number", mods = [ mod.Min( 0 ), mod.Max( 100 ) ] ),
var.Boolean( "boolean" ),
var.String( "string", 32, mods = [ mod.NotNull() ] ),
var.Character( "character", 4 ),
var.Binary( "binary", 25 ),
var.Text( "text" ),
var.Float( "float" ),
var.Enum( "enum", [ "one", "two", "three" ] ),
var.ForeignKey( "cars.make" )
]
class BaseTest( TestCase ):
models = [ Car, AllVars ]
auto_create = True
auto_init = True
def setUp( self ):
if self.auto_init:
self.store = MemoryStore( self.models )
self.store.init_app()
self.store.connect()
if self.auto_create: self.store.create_all()
def tearDown( self ):
if self.auto_create: self.store.destroy_all()
if self.auto_init:
self.store.disconnect()
self.store.destroy_app()
| mit | 3,888,849,410,973,275,600 | 28.285714 | 89 | 0.530314 | false |
keialk/TRAP | OneTime.py | 1 | 6151 |
"""
TRAP - Time-series RNA-seq Analysis Package
Created by Kyuri Jo on 2014-02-05.
Copyright (c) 2014 Kyuri Jo. All rights reserved.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import math
import random
import copy
import numpy as np
import scipy.stats as stats
import TRAP
colorCode = ["#FFAAAA", "#FF5555", "#FF0000", "#AAAAFF", "#5555FF", "#0000FF"]
def new_hypergeom_sf(k, *args, **kwds):
(M, n, N) = args[0:3]
try:
return stats.hypergeom.sf(k, *args, **kwds)
except Exception as inst:
if k >= n and type(inst) == IndexError:
return 0 ## or conversely 1 - hypergeom.cdf(k, *args, **kwds)
else:
raise inst
def calPF_one(g, wgene, redic, PFdic, recur) :
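	# Note: computes the perturbation factor PF(g) recursively (SPIA-style):
	# PF(g) = FC(g) + sum over upstream genes u of weight * PF(u) / n_downstream(u),
	# where redic[g] lists (upstream gene, its downstream count, edge weight).
	# Results are cached in PFdic; `recur` marks genes already on the call path
	# so cycles in the pathway graph terminate.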
if (g in redic) :
PFsum = 0
for alist in redic[g] :
if (alist[0] not in PFdic) :
if (alist[0] in recur) :
PFdic[alist[0]]=wgene[alist[0]]
else :
recur.add(g)
calPF_one(alist[0], wgene, redic, PFdic, recur)
PFsum = PFsum + alist[2]*(PFdic[alist[0]]/alist[1])
PFdic[g]=PFsum+wgene[g]
else :
PFdic[g]=wgene[g]
def pickColor(fc, cut) :
if (fc==0) :
return "#FFFFFF"
elif (fc>0) :
index = int(fc/(cut/2))
if (index >2) :
index =2
return colorCode[index]
else :
index = int(abs(fc)/(cut/2))+3
if (index >5) :
index =5
return colorCode[index]
def pathwayAnalysis(outPath, fileN, wgene, wredic, DEG, DEGCut, idDic, pnameDic, ind) :
tA = []
status = []
pORA = []
pOFDR = []
pPERT = []
pG = []
pFDR = []
pMIX = []
for i in range(0, fileN) :
tA.append(0)
status.append("")
pORA.append(0)
pOFDR.append(0)
pPERT.append(0)
pG.append(0)
pFDR.append(0)
pMIX.append(0)
if wredic[i]=={} :
continue
# pPERT
# Calculation of PF
tempPF = {}
recur = set()
PFsum = 0
for gene in wgene[i] :
calPF_one(gene, wgene[i], wredic[i], tempPF, recur)
status[i] = sum(tempPF.values())
currtA = sum(tempPF.values())-sum(wgene[i].values())
tA[i] = currtA
# Calculation of tA from H0
nulltA = []
repeat = 2000
tempFC = copy.copy(wgene[i])
sh = tempFC.values()
recur = set()
for j in range(0, repeat) :
randPF = {}
random.shuffle(sh)
for key, value in tempFC.iteritems() :
tempFC[key]=sh[random.randint(0, len(tempFC)-1)]
for g in tempFC :
calPF_one(g, tempFC, wredic[i], randPF, recur)
nulltA.append(sum(randPF.values())-sum(tempFC.values()))
def above(x):
return round(x, 5)>=round(currtA, 5)
def below(x):
return round(x, 5)<=round(currtA, 5)
avgtA = np.median(nulltA)
if (currtA >=avgtA) :
pPERT[i]=float(len(filter(above, nulltA)))/float(repeat)
else :
pPERT[i]=float(len(filter(below, nulltA)))/float(repeat)
if status[i]>=0 :
status[i]="Activated"
else :
status[i]="Inhibited"
# pORA
genesum = {}
DEGsum = set()
for i in range(0, fileN) :
genesum.update(wgene[i])
DEGsum = DEGsum.union(DEG[i])
totG = len(genesum)
totD = len(DEGsum)
for i in range(0, fileN) :
pORA[i]=new_hypergeom_sf(len(DEG[i]), totG, totD, len(wgene[i]), loc=0)
# pG
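	# Combine the two evidence types (as in SPIA): with c = pORA*pPERT,
	# pG = c - c*ln(c) is the probability that the product of two independent
	# uniform(0,1) p-values falls at or below c.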
for i in range(0, fileN) :
c = pORA[i]*pPERT[i]
if (c==0) :
			pG[i] = 0
else :
pG[i] = c-c*math.log(c)
pFDR = TRAP.cal_FDR(pG)
pOFDR = TRAP.cal_FDR(pORA)
for i in range(0, fileN) :
if (wredic[i]=={}) :
pMIX[i]=pOFDR[i]
else :
pMIX[i]=pFDR[i]
# Text result
outDEG = open(outPath+"_DEG.txt", "w")
for gene in DEGsum :
if (gene in idDic) :
outDEG.write(idDic[gene][0]+"\n")
else :
outDEG.write(gene+"\n")
outDEG.close()
outColor = open(outPath+"_color.txt", "w")
for g,fc in genesum.iteritems() :
outColor.write(g+"\t"+pickColor(fc, DEGCut)+"\n")
outColor.close()
outPathway = open(outPath+"_pathway.txt", "w")
outPathway.write("PathwayID\tPathwayName \tGeneNum\tDEGNum\tpORA\tpORAfdr\ttA\tpPERT\tpG\tpG_FDR\tStatus\n")
sortedkey = sorted(ind, key = lambda x : pMIX[ind[x]])
for sk in sortedkey :
i = ind[sk]
pathwayName = ""
if (sk in pnameDic) :
pathwayName = pnameDic[sk]
nameLen = len(pathwayName)
if (nameLen<15) :
pathwayName = pathwayName+TRAP.addStr(18-nameLen)
else :
pathwayName = pathwayName[0:15]+"..."
if (wredic[i]=={}) :
outPathway.write(sk+"\t"+pathwayName+"\t"+str(len(wgene[i]))+"\t"+str(len(DEG[i]))+"\t"+str(round(pORA[i],3))+"\t"+str(round(pOFDR[i], 3))+"\t.\t.\t.\t.\t.\n")
else :
outPathway.write(sk+"\t"+pathwayName+"\t"+str(len(wgene[i]))+"\t"+str(len(DEG[i]))+"\t"+str(round(pORA[i],3))+"\t"+str(round(pOFDR[i], 3))+"\t"+str(round(tA[i],3))+"\t"+str(round(pPERT[i],3))+"\t"+str(round(pG[i],3))+"\t"+str(round(pFDR[i],3))+"\t"+status[i]+"\n")
outPathway.close()
| gpl-3.0 | 2,745,135,926,260,084,000 | 30.54359 | 281 | 0.52902 | false |
session-id/poker-predictor | preflop/dummy_model.py | 1 | 1311 |
import numpy as np
import model
def evaluate(inp, probs):
num_players = list(inp[0,:4]).index(1) + 4
most_probable_action = list(probs).index(max(list(probs)))
num_correct = 0
log_prob_sum = 0
num_total = 0
for action in inp:
if max(list(action[4:])) == 0:
continue
action = list(action[4:]).index(1)
if action == most_probable_action:
num_correct += 1
log_prob_sum -= np.log(probs[action])
num_total += 1
return (num_total, num_correct, log_prob_sum)
if __name__ == '__main__':
X_train, y_train, X_test, y_test = model.load_training_data()
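    # Baseline: predict the empirical action distribution of the whole training
    # set (mean of the one-hot labels, renormalized to sum to 1) for every hand.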
probs = np.mean(y_train, (0,1))
probs = np.multiply(probs, 1.0 / np.sum(probs))
print probs
'''
num_total = 0;
num_correct = 0;
log_prob_sum = 0;
for hand in y_test:
num_total_incr, num_correct_incr, log_prob_sum_incr = evaluate(hand, probs)
num_total += num_total_incr
num_correct += num_correct_incr
log_prob_sum += log_prob_sum_incr
print float(num_correct) / num_total
print log_prob_sum / num_total
'''
total_log_loss = -np.sum(np.multiply(y_test, np.log(probs)), (0,1,2))
num_total = np.sum(y_test, (0,1,2))
print y_test.shape, num_total
print total_log_loss / num_total
| apache-2.0 | 8,038,306,455,472,240,000 | 29.488372 | 83 | 0.581998 | false |
PhilippMundhenk/Kindle-Alarm-Clock | mnt/us/alarm/alarmControl.py | 1 | 4394 |
from datetime import datetime, timedelta
from threading import Thread, Timer
import time
import os
import pickle
import subprocess
from subprocess import call
from settings import secondsToAutoOff
from alarm import Alarm
from settings import wificontrol
from audioControl import AudioControl
from settings import backupSound
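# AlarmControl is a simple singleton wrapper: the first instantiation creates
# the shared __AlarmControl instance, later instantiations reuse it, and
# attribute lookups fall through to it via __getattr__.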
class AlarmControl():
alarms = []
activeAlarm = None
class __AlarmControl:
#def __init__(self):
def __str__(self):
return repr(self)
instance = None
def __init__(self):
if not AlarmControl.instance:
AlarmControl.instance = AlarmControl.__AlarmControl()
	def __getattr__(self, name):
		return getattr(self.instance, name)
def getAlarms(self):
return self.alarms
def deleteAllAlarms(self):
for a in self.alarms:
a.setActive(False)
del self.alarms[:]
self.saveAlarms()
def setAlarms(self, alarmsList):
del self.alarms[:]
		self.alarms.extend(alarmsList)
self.saveAlarms()
def addAlarm(self, alarm):
print "addAlarm(): "+str(alarm.weekdays)+", "+str(alarm.hour)+":"+str(alarm.minute)
self.alarms.append(alarm)
self.saveAlarms()
def stopAlarm(self):
print "stopping alarm..."
		# iterate over a copy so removal during iteration is safe
		for x in self.alarms[:]:
			print "id: "+str(id(x))
			if id(x)==self.activeAlarm and len(x.weekdays)==0:
				print "deleting..."
				self.alarms.remove(x)
				self.saveAlarms()
call(["killall", "mplayer"])
def createAlarm(self, hour, minute, weekdays):
print "createAlarm(): "+str(weekdays)+", "+str(hour)+":"+str(minute)
alarmHour=int(hour)
alarmMinute=int(minute)
format="%H:%M"
alarmString=str(alarmHour)+":"+str(alarmMinute)
now = datetime.now()
diff=datetime.strptime(alarmString, format)-now
seconds=diff.seconds
nextRing=datetime.now()+timedelta(seconds=seconds)
if len(weekdays) == 0:
newAlarm = Alarm([], alarmHour, alarmMinute)
else:
newAlarm = Alarm([int(i) for i in weekdays], alarmHour, alarmMinute)
self.alarms.append(newAlarm)
self.saveAlarms()
t=Thread(target=AlarmControl().ringIn, args=[seconds, newAlarm])
t.start()
return newAlarm
def WifiOn(self):
global wificontrol
if wificontrol:
#Need to turn off WiFi via Kindle Framework first, so that it auto connects when turning on
call(["lipc-set-prop", "com.lab126.cmd", "wirelessEnable", "0"])
time.sleep(30)
call(["lipc-set-prop", "com.lab126.cmd", "wirelessEnable", "1"])
call(["ifup", "wlan0"])
time.sleep(10)
def WifiOff(self):
global wificontrol
if wificontrol:
time.sleep(5)
call(["ifdown", "wlan0"])
		#Better not to use a proper WiFi off here, as it will trigger UI elements:
# call(["lipc-set-prop", "com.lab126.cmd", "wirelessEnable", "0"])
def saveAlarms(self):
if os.path.exists('/mnt/us/alarm/alarms.bak'):
os.remove('/mnt/us/alarm/alarms.bak')
afile = open(r'/mnt/us/alarm/alarms.bak', 'wb')
pickle.dump(self.alarms, afile)
afile.close()
def stopRingIn(self, i):
time.sleep(i)
self.stopAlarm()
def ringIn(self, i, alarm):
global stream
global secondsToAutoOff
time.sleep(i-20)
#print "today: "+str(datetime.today().weekday())
#print "days: "+str(alarm.weekdays)
if not alarm.getActive():
print "alarm deactivated, exiting..."
return
if len(alarm.weekdays) > 0:
if not datetime.today().weekday() in alarm.weekdays:
seconds = 24*60*60;
t=Thread(target=AlarmControl().ringIn, args=[seconds, alarm])
t.start()
print "seconds: "+str(seconds)
print "alarm for: days: "+str(alarm.weekdays)+" "+str(alarm.hour)+":"+str(alarm.minute)+" ("+str(seconds)+"seconds)"
return
print "preparing alarm..."
self.activeAlarm=id(alarm)
self.WifiOn()
AudioControl.phaseIn(1)
Thread(target=AlarmControl().stopRingIn, args=[secondsToAutoOff]).start()
print "waiting for check..."
time.sleep(10)
#ToDo: move this to thread? What if mplayer/wget/pipe cache hangs and there is no sound output? How to detect?
if(AudioControl.isMplayerRunning()==""):
command = "/mnt/us/mplayer/mplayer -loop 0 "+backupSound+" &"
os.system(command)
#self.alarms.remove(alarm)
#self.saveAlarms()
if len(alarm.weekdays) > 0:
#check in 24h if ring is required
seconds = 24*60*60;
t=Thread(target=AlarmControl().ringIn, args=[seconds, alarm])
t.start()
print "seconds: "+str(seconds)
print "alarm for: days "+str(alarm.weekdays)+" "+str(alarm.hour)+":"+str(alarm.minute)
else:
			self.alarms.remove(alarm)
| mit | 715,075,976,977,626,500 | 26.12963 | 120 | 0.684342 | false |
MishtuBanerjee/xaya | xaya/xayabee.py | 1 | 3044 |
#!/usr/bin/env python
"""
xayabee: a little dose of bee genetics ...
BeginDate:2012
CurrentRevisionDate:20150324
Development Version : core 001
Release Version: pre-release
Author(s): Mishtu Banerjee, Robin Owens
Contact: [email protected]
Copyright: 2012-2015, The Authors
License: Distributed under MIT License
[http://opensource.org/licenses/mit-license.html]
Original Environment: Programmed and tested under Python 2.7.X
Dependencies:
Python Interpreter and base libraries.
xaya: xayacore, xaystats
"""
import xayastats
def genHaploid(numberofalleles= 0):
"""
Given a number of sex alleles, randomly generate a haploid genotype.
"""
#Set internal variable from parameters
alleles = numberofalleles
# Randomly generate haploid
haploid = xayastats.diceroll(1, alleles)
return (haploid) # return haploid as a 1 item tuple
def genDiploid(numberofalleles=0):
"""
"""
alleles = numberofalleles
diploid = (xayastats.diceroll(1,alleles), xayastats.diceroll(1,alleles))
return diploid # return dipoid as a two item tuple
def createPop(numberalleles= 0, dippopsize=0, happopsize=0):
"""
Build haploid and diploic population given alleles, number of diploids, number haploids
"""
# Set internal variables from parameters
alleles = numberalleles
diploids = dippopsize
haploids = happopsize
# Build a list of haploids
haploidcounter = range(haploids)
haploidslist = []
for bee in haploidcounter:
haploidslist.append(genHaploid(alleles))
# Build a list of diploids
diploidcounter = range(diploids)
diploidlist = []
for beecouple in diploidcounter:
diploidlist.append(genDiploid(alleles))
return [haploidslist, diploidlist]
# Next we must build up a dictonary where keys are tuples and
# the values are counts.
# Later can make the values more complicated as lists, dicts.
# Give Robin the choice -- and ask him how he likes it.
# if lists -- can have multiple properties
# if values -- can have named properties:"COUNT"; MUTATION RATE
def summarizePop(poplist=[]):
"""
Creates a summary table of the bee population
"""
mypop=poplist
myhaploids=poplist[0]
mydiploids=poplist[1]
myhaptable = xayastats.histograph(myhaploids)
mydiptable=xayastats.histograph(mydiploids)
return [myhaptable, mydiptable]
def findHomozygotes(diptable={}):
"""
Given a summary table of diploids, finds those
which are homozygous
"""
mydiptable=diptable
homozygouslist=[]
mydipkeys=mydiptable.keys()
for key in mydipkeys:
if key[0]==key[1]:
homozygouslist.append(key)
homozygtable = {}
for key in homozygouslist:
homozygtable[key] = mydiptable[key]
return homozygtable
def countPopulation(poptable):
"""
	Counts all individuals in a population; can be applied to
	a diploid, haploid, or homozygotes table
"""
mypoptable = poptable
vals = mypoptable.values()
vals2 = []
for item in vals:
vals2.append(item[0])
popsum = sum(vals2)
return popsum
# Create a function checkforHomozygotes
# Get population as a dictionary where keys are alleles and values are counts
| mit | 2,082,932,308,635,575,300 | 25.946903 | 88 | 0.752957 | false |
hunter-cameron/Bioinformatics | python/gbk_get_entry_by_locusid.py | 1 | 2464 |
from Bio import SeqIO
import sys
import re
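# Usage: python gbk_get_entry_by_locusid.py <hits_table> <genbank_file>
# The first (tab-separated) column of <hits_table> holds query strings whose
# first word is a locus tag and third word a contig name; matching CDS entries
# are written to final_matrix.txt.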
def get_entries_by_locid(gbk_file, id_list):
print(id_list)
results = dict()
for record in SeqIO.parse(open(gbk_file,"r"), "genbank"): # record == contig
for feature in record.features: # all annotations per contig
if feature.type == "CDS": # get only CDS
#sys.exit()
if "locus_tag" in feature.qualifiers: # check if CDS has a locus tag (it should)
if feature.qualifiers['locus_tag'][0] in id_list: # check if locus tag is on the list
results[feature.qualifiers['locus_tag'][0]] = {
"location": feature.location,
"product": feature.qualifiers['product'][0],
}
#sys.exit()
return results
def read_locid_list(id_file):
""" Returns a list of sorted ids from a file """
with open(id_file, 'r') as IN:
return sorted([line[:-1] for line in IN])
if __name__ == "__main__":
id_file = sys.argv[1]
gbk_file = sys.argv[2]
id_list = []
with open(id_file, 'r') as IN:
for line in IN:
qry = line.split("\t")[0]
loctag = qry.split(" ")[0]
id_list.append(loctag)
id_info = get_entries_by_locid(gbk_file, id_list)
for tag in id_list:
if tag not in id_info:
print("Locus tag '{}' not found.".format(tag))
with open(id_file, 'r') as IN, open("final_matrix.txt", 'w') as OUT:
for line in IN:
if line.startswith("qry"):
OUT.write("\t".join(["locus_tag", "contig", "contig_length", "start", "end", "strand", "product", "closest_match", "perc_id", "aln_length", "query_cov", "closest_match_cov", "bitscore"]) + "\n")
else:
elements = line[:-1].split("\t")
qry_info = elements[0].split(" ")
locid = qry_info[0]
contig = qry_info[2]
m = re.search("len_(?P<length>\d+)_", contig)
contig_length = m.group("length")
OUT.write("\t".join([locid, contig, contig_length, str(id_info[locid]['location'].start), str(id_info[locid]['location'].end), str(id_info[locid]['location'].strand), id_info[locid]['product'], "\t".join(elements[1:])]) + "\n")
| mit | -884,123,956,013,676,300 | 33.222222 | 243 | 0.500812 | false |
jstapleton/Maynard-2015 | mutationCounter.py | 1 | 13150 |
#!/usr/bin/env python
#############################################################################
# mutationCounter.py
# 2015 James A. Stapleton, Justin R. Klesmith
#
# This program takes short reads from shotgun sequencing of mutant
# libraries and creates FASTQ files compatible with ENRICH.
#
# The program fills in wild-type sequence around the short reads
# to create full-length sequences, and puts in a made-up
# quality score.
#
# Overlapping read pairs are merged by FLASH.
#
#############################################################################
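# Typical invocation (trimmed paired/unpaired FASTQ files; assumes wt.fasta is
# in the working directory and the `flash` and EMBOSS `water` binaries are on
# the PATH):
#   python mutationCounter.py fwd_paired.fq fwd_unpaired.fq rev_paired.fq rev_unpaired.fq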
import argparse
import subprocess
import os
import itertools
from Bio.SeqIO.QualityIO import FastqGeneralIterator
from Bio.Emboss.Applications import WaterCommandline
def main(forward_paired, forward_unpaired, reverse_paired, reverse_unpaired):
fakeFASTQ = ''
notAligned = 0
wrongLength = 0
# open file containing wild-type sequence and pull it out as a string
wt = wtParser()
# take trimmed paired-end FASTQ files as input
# run FLASH to combine overlapping read pairs
# or remove this line and add to shell script
subprocess.call(["flash", "-M", "140", "-t", "1", forward_paired, reverse_paired])
# merged read pairs
notAligned = align_and_index('out.extendedFrags.fastq', notAligned, '')
with open("fakeFASTQ.fastq", "w") as fakeFASTQ:
with open('indexes.txt', 'rU') as indexes:
with open('out.extendedFrags.fastq', 'rU') as merged:
f_iter = FastqGeneralIterator(merged)
for (title, seq, qual), indexline in itertools.izip(f_iter, indexes):
index1, index2, rc_flag = indexline.split()
# print title, seq, qual, index1, index2, rc_flag
if int(index1) and int(index2):
if int(rc_flag):
seq = revcomp(seq)
fakeSeq = buildFakeSeq(seq, 0, wt, index1, index2, 0, 0)
if len(fakeSeq) != len(wt):
wrongLength += 1
# print fakeSeq
# print rc_flag, seq, index1, index2
continue
fakeFASTQwriter(fakeSeq, title, fakeFASTQ)
notAligned = align_and_index(forward_unpaired, notAligned, '')
with open("fakeFASTQ.fastq", "a") as fakeFASTQ:
with open('indexes.txt', 'rU') as indexes:
with open(forward_unpaired, "rU") as merged:
f_iter = FastqGeneralIterator(merged)
for (title, seq, qual), indexline in itertools.izip(f_iter, indexes):
index1, index2, rc_flag = indexline.split()
if int(index1) and int(index2):
if int(rc_flag):
seq = revcomp(seq)
fakeSeq = buildFakeSeq(seq, 0, wt, index1, index2, 0, 0)
if len(fakeSeq) != len(wt):
wrongLength += 1
continue
fakeFASTQwriter(fakeSeq, title, fakeFASTQ)
notAligned = align_and_index(reverse_unpaired, notAligned, '')
with open("fakeFASTQ.fastq", "a") as fakeFASTQ:
with open('indexes.txt', 'rU') as indexes:
with open(reverse_unpaired, "rU") as merged:
f_iter = FastqGeneralIterator(merged)
for (title, seq, qual), indexline in itertools.izip(f_iter, indexes):
index1, index2, rc_flag = indexline.split()
if int(index1) and int(index2):
if int(rc_flag):
seq = revcomp(seq)
fakeSeq = buildFakeSeq(seq, 0, wt, index1, index2, 0, 0)
if len(fakeSeq) != len(wt):
wrongLength += 1
continue
fakeFASTQwriter(fakeSeq, title, fakeFASTQ)
notAligned = align_and_index('out.notCombined_1.fastq', notAligned, '_F')
notAligned = align_and_index('out.notCombined_2.fastq', notAligned, '_R')
# unmerged (non-overlapping) read pairs
with open("fakeFASTQ.fastq", "a") as fakeFASTQ:
with open("indexes_F.txt", 'rU') as indexes_F:
with open("indexes_R.txt", 'rU') as indexes_R:
with open("out.notCombined_1.fastq", 'rU') as unmerged_F:
with open("out.notCombined_2.fastq", 'rU') as unmerged_R:
f_iter = FastqGeneralIterator(unmerged_F)
r_iter = FastqGeneralIterator(unmerged_R)
for (title, seq, qual), (title_R, seq_R, qual_R), indexline_F, indexline_R in itertools.izip(f_iter, r_iter, indexes_F, indexes_R):
index1, index2, rc_flag_F = indexline_F.split()
index3, index4, rc_flag_R = indexline_R.split()
if int(index1) and int(index2) and int(index3) and int(index4):
if int(rc_flag_F):
seq = revcomp(seq)
if int(rc_flag_R):
seq_R = revcomp(seq_R)
fakeSeq = buildFakeSeq(seq, seq_R, wt, index1, index2, index3, index4)
if len(fakeSeq) != len(wt):
wrongLength += 1
# print fakeSeq
# print index1, index2, index3, index4
# print seq, seq_R
continue
fakeFASTQwriter(fakeSeq, title, fakeFASTQ)
print notAligned, wrongLength
return 0
######## Function definitions ##############
def revcomp(seq):
"""Returns the reverse complement of a DNA sequence."""
COMPLEMENT_DICT = {'A': 'T', 'G': 'C', 'T': 'A', 'C': 'G', 'N': 'N'}
rc = ''.join([COMPLEMENT_DICT[base] for base in seq])[::-1]
return rc
def buildFakeSeq(seq_F, seq_R, wt, index1, index2, index3, index4):
"""Builds a fake full-length DNA sequence line consisting of one
merged read or two short reads filled in with wild-type sequence.
"""
index1 = int(index1)
index2 = int(index2)
index3 = int(index3)
index4 = int(index4)
if seq_R:
diff = 0
if index1 < index3:
if index2 > index3 - 1:
diff = index2 - index3 + 1
index2 = index3 - 1
fakeRead = wt[:index1 - 1] + seq_F + wt[index2:index3 - 1] + seq_R[diff:] + wt[index4:]
else:
if index4 > index1 - 1:
diff = index4 - index1 + 1
index4 = index1 -1
fakeRead = wt[:index3 - 1] + seq_R + wt[index4:index1 - 1] + seq_F[diff:] + wt[index2:]
else:
fakeRead = wt[:index1-1] + seq_F + wt[index2:]
return fakeRead.upper()
def index_finder(line):
"""Searches the water output line
for alignment position indexes.
"""
index = 0
if len(line.split()) > 1:
if line.split()[1] == 'al_start:':
index = int(line.split()[2])
elif line.split()[1] == 'al_stop:':
index = int(line.split()[2])
return index
def Ntest(seq):
"""Trims sequences with N's.
Removes N from the first position,
truncates the sequence at subsequent N's.
"""
if seq[0] == 'N':
seq = seq[1:]
Ntest = 0
for i, ch in enumerate(seq):
if ch == 'N':
Ntest = 1
break
if Ntest == 1:
		seq = seq[:i]
return seq
def runWater(fastq, out):
"""Removes existing water.txt file,
generates a water command line, and runs water.
"""
if os.path.isfile(out):
os.remove(out)
water_cline = WaterCommandline(asequence="wt.fasta", bsequence=fastq, gapopen=10, gapextend=0.5, outfile=out, aformat='markx10')
stdout, stderr = water_cline()
return 0
def wtParser():
"""Takes a wild-type DNA sequence in FASTA format
and reads it into a string.
"""
with open('wt.fasta', 'rU') as wildtype:
wildtype = wildtype.read()
if wildtype[0] == ">":
wildtype = wildtype.split('\n', 1)[1]
wt = ''.join([line.strip() for line in wildtype.split('\n')])
return wt
def identity_finder(line):
identity = 0
if len(line.split()) > 1:
if line.split()[1] == 'Identity:':
identity = line.split()[3]
identity = identity[1:4]
return identity
def align_and_index(fastq, notAligned, suffix):
"""Runs a pipeline to align a sequence (merged or unmerged
sequencing reads) to a wild-type reference with the EMBOSS
water local alignment program, align the reverse complement
if the first alignment was poor, and parse and return the
wt positions where the alignment begins and ends.
"""
# generate water command line and call it
runWater(fastq, 'water_fwd.txt')
# reverse-complement the reads in fastq
with open('fastq_rc.fastq', 'w') as reverse:
with open(fastq, 'rU') as forward:
next_line_is_seq = 0
for line in forward:
if next_line_is_seq:
line_rc = revcomp(line.strip())
reverse.write(line_rc + '\n')
next_line_is_seq = 0
elif line[0] == '@':
next_line_is_seq = 1
reverse.write(line)
else:
reverse.write(line)
# run water on the reverse complements
runWater('fastq_rc.fastq', 'water_rc.txt')
# Write only the index
# and identity lines to new files
with open('water_fwd.txt', 'rU') as forward:
with open('water_fwd_indexes.txt', 'w') as forward_index_lines:
for line in forward:
if identity_finder(line) or index_finder(line):
forward_index_lines.write(line)
with open('water_rc.txt', 'rU') as forward:
with open('water_rc_indexes.txt', 'w') as forward_index_lines:
for line in forward:
if identity_finder(line) or index_finder(line):
forward_index_lines.write(line)
# Check whether the read was in the right orientation:
# Iterate over the water outfiles and pick the best match
# Write the alignment start and stop of the best matches
with open('water_fwd_indexes.txt', 'rU') as forward:
with open('water_rc_indexes.txt', 'rU') as reverse:
with open('indexes' + suffix + '.txt', 'w') as outfile:
find_index_F = 0
find_index_R = 0
index1 = 0
index2 = 0
for line_F, line_R in itertools.izip(forward, reverse):
if not find_index_F and not find_index_R:
identity_F = identity_finder(line_F)
identity_R = identity_finder(line_R)
if float(identity_F) > 90:
find_index_F = 1
rev_flag = 0
elif float(identity_R) > 90:
find_index_R = 1
rev_flag = 1
elif identity_F and identity_R:
outfile.write('0 0 0\n')
notAligned += 1
elif find_index_F:
if not index1 and not index2:
index1 = index_finder(line_F)
elif index1:
index2 = index_finder(line_F)
outfile.write(str(index1) + ' ' + str(index2) + ' ' + str(rev_flag) + '\n')
find_index_F = 0
index1 = 0
index2 = 0
elif find_index_R:
if not index1 and not index2:
index1 = index_finder(line_R)
elif index1:
index2 = index_finder(line_R)
outfile.write(str(index1) + ' ' + str(index2) + ' ' + str(rev_flag) + '\n')
find_index_R = 0
index1 = 0
index2 = 0
return notAligned
def fakeFASTQwriter(fakeSeq, title, handle):
"""Writes the four lines of a fake FASTQ."""
handle.write('@' + title + '\n')
handle.write(fakeSeq + '\n')
handle.write('+\n')
fakeQual = ''.join(['A' for ch in fakeSeq])
handle.write(fakeQual + '\n')
return 0
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('forward_paired')
parser.add_argument('forward_unpaired')
parser.add_argument('reverse_paired')
parser.add_argument('reverse_unpaired')
args = parser.parse_args()
main(args.forward_paired, args.forward_unpaired, args.reverse_paired, args.reverse_unpaired)
| mit | -2,058,295,108,188,137,700 | 40.09375 | 155 | 0.516502 | false |
raphaeldussin/EZmovie | EZmovie/ezmovie_plots.py | 1 | 3423 |
import matplotlib.pylab as plt
import matplotlib.cm as cm
import matplotlib.colors as mc
from mpl_toolkits.basemap import Basemap
import numpy as np
def setup_contour(vmin,vmax,ncontours):
''' set the contours and norm '''
plotcmin = float(vmin)
plotcmax = float(vmax)
stepticks = (plotcmax - plotcmin) / 10.
ticks = np.arange(plotcmin,plotcmax+stepticks,stepticks)
step = (plotcmax - plotcmin) / ncontours
contours = np.arange(plotcmin,plotcmax+step,step)
norm = mc.Normalize(vmin=plotcmin, vmax=plotcmax)
return contours, norm, ticks
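# Example (hypothetical values): setup_contour(0., 10., 40) yields 41 contour
# levels spaced 0.25 apart, with colorbar ticks every 1.0 from 0 to 10.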
def setup_colorbar_fmt(data):
''' setup the format for colorbar '''
if data.max() < 0.1:
cbarfmt = '%.1e'
elif data.max() > 10.:
cbarfmt = '%.0f'
else:
cbarfmt = '%.2f'
return cbarfmt
def plot_map(ax,diag,coord1,coord2,data,current_date=None,current_step=None):
''' single plot '''
contours, norm, ticks = setup_contour(diag['vmin'],diag['vmax'],40)
cbarfmt = setup_colorbar_fmt(data)
# background
if diag['run']['grid'] == 'CCS1':
bmap = Basemap(projection='cyl',llcrnrlat=18,urcrnrlat=51,\
llcrnrlon=219,urcrnrlon=251,resolution='l')
parallels = np.arange(20.,60.,10.)
bmap.drawparallels(parallels,labels=[True,False,False,True])
meridians = np.arange(220.,260.,10.)
bmap.drawmeridians(meridians,labels=[True,False,False,True])
elif diag['run']['grid'] == 'NWA':
bmap = Basemap(projection='cyl',llcrnrlat=5,urcrnrlat=55, llcrnrlon=250, \
urcrnrlon=320,resolution='l')
parallels = np.arange(0.,70.,10.)
bmap.drawparallels(parallels,labels=[1,0,0,0],fontsize=10)
meridians = np.arange(240.,340.,10.)
bmap.drawmeridians(meridians,labels=[0,0,0,1],fontsize=10)
else:
# default : autoguess domain
bmap = Basemap(projection='cyl',llcrnrlat=coord2.min(),urcrnrlat=coord2.max(), \
llcrnrlon=coord1.min(), urcrnrlon=coord1.max(),resolution='c')
bmap.drawcoastlines()
bmap.fillcontinents(color='grey',lake_color='white')
C = ax.contourf(coord1,coord2,data,contours,cmap=diag['pal'],norm=norm,extend='both')
if diag.has_key('cbar_shrink'):
cbar_shrink = diag['cbar_shrink']
else:
cbar_shrink = 1.0
cbar = plt.colorbar(C,format=cbarfmt,shrink=cbar_shrink,ticks=ticks)
fmt = "%Y %m %d"
if current_date is not None:
plt.title(diag['label'] + ' ' + current_date.strftime(fmt))
if current_step is not None:
plt.title(diag['label'] + ' ' + str(current_step))
return ax
def plot_section(ax,diag,coord1,coord2,data,current_date):
''' single plot '''
contours, norm, ticks = setup_contour(diag['vmin'],diag['vmax'],40)
cbarfmt = setup_colorbar_fmt(data)
ax.set_axis_bgcolor('grey')
C = ax.contourf(coord1,coord2,data,contours,cmap=diag['pal'],norm=norm,extend='both')
if diag.has_key('max_depth'):
ax.set_ylim(-diag['max_depth'],coord2[-1].min())
cbar = plt.colorbar(C,format=cbarfmt,shrink=0.8,ticks=ticks)
fmt = "%Y %m %d"
plt.title(diag['label'] + ' ' + current_date.strftime(fmt))
return ax
| gpl-3.0 | 2,947,154,667,678,862,300 | 40.743902 | 96 | 0.595384 | false |