code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
---|---|---|---|---|---|
# -*- coding: utf8 -*-
from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_POST
from .models import Comment
from lesson.models import Lesson
import json
@login_required
@require_POST
def create_comment(request):
    """Create a comment on a lesson, then redirect back to the lesson page.

    Expected POST parameters:
        lesson  -- id of the lesson being commented on
        content -- the comment body

    The JSON response below is kept (commented out) in case an AJAX API
    is wanted later; `result`/`message` are maintained for that purpose.
    """
    result = False
    message = ''
    # Bug fix: the lesson id comes from the client, so int() may raise
    # TypeError (missing) or ValueError (non-numeric); don't 500 on that.
    try:
        lessonId = int(request.POST.get('lesson'))
    except (TypeError, ValueError):
        return redirect('/lesson/')
    # Bug fix: catch the specific Lesson.DoesNotExist instead of a bare
    # except, which also swallowed programming errors.
    try:
        lesson = Lesson.objects.get(id=lessonId)
    except Lesson.DoesNotExist:
        lesson = None
    if lesson:
        content = request.POST.get('content', None)
        if content:
            comment = Comment.objects.create(lesson=lesson,
                    content=content, user=request.user,
                    name=request.user.username)
            result = True
        else:
            message = '表单内容不全'  # form content incomplete
    else:
        message = '课程不存在'  # lesson does not exist
    # return HttpResponse(json.dumps({
    #     'result': result,
    #     'message': message,
    # }))
    return redirect('/lesson/' + str(lessonId) + '/')
| chenzeyuczy/keba | src/comment/views.py | Python | gpl-2.0 | 1,049 |
"""Miscellaneous report classes.
"""
__author__ = "Martin Blais <[email protected]>"
import datetime
import re
from beancount.reports import report
from beancount.reports import table
from beancount.reports import gviz
from beancount.parser import printer
from beancount.core import data
from beancount.core import amount
from beancount.core import account
from beancount.core import getters
from beancount.ops import prices
from beancount.ops import lifetimes
class CommoditiesReport(report.TableReport):
    """Produce a table of all commodity pairs seen in the price database."""

    names = ['commodities']
    default_format = 'text'

    def generate_table(self, entries, errors, options_map):
        # One row per (base, quote) pair, in sorted order.
        price_map = prices.build_price_map(entries)
        rows = [(pair,) for pair in sorted(price_map.forward_pairs)]
        return table.create_table(
            rows, [(0, "Base/Quote", self.formatter.render_commodity)])
class CommodityLifetimes(report.TableReport):
    """Render the date intervals during which each commodity was in use."""

    names = ['lifetimes']
    default_format = 'text'

    @classmethod
    def add_args(cls, parser):
        parser.add_argument('-c', '--compress-days', type=int,
                            action='store', default=None,
                            help="The number of unused days to allow for continuous usage.")

    def render_text(self, entries, errors, options_map, file):
        lifetimes_map = lifetimes.get_commodity_lifetimes(entries)
        if self.args.compress_days:
            lifetimes_map = lifetimes.compress_lifetimes_days(
                lifetimes_map, self.args.compress_days)

        # Human-readable label for each (base, quote) pair; a bare currency
        # when there is no quote.
        labels = {}
        for pair in lifetimes_map.keys():
            base, quote = pair
            labels[pair] = '{}/{}'.format(base, quote) if quote else base
        label_width = max(len(label) for label in labels.values())

        # Order by the start of the first interval, then by pair.
        ordering = sorted(lifetimes_map.items(),
                          key=lambda item: (item[1][0][0], item[0]))
        for pair, intervals in ordering:
            spans = ' / '.join('{} - {}'.format(begin, end or '')
                               for begin, end in intervals)
            file.write('{:{width}}: {}\n'.format(
                labels[pair], spans, width=label_width))
class CommodityPricesReport(report.TableReport):
    """Print all the prices for a particular commodity."""

    names = ['prices']
    default_format = 'text'

    @classmethod
    def add_args(cls, parser):
        # The pair to report on, e.g. "USD/CAD".
        parser.add_argument('-c', '--commodity', '--currency',
                            action='store', default=None,
                            help="The commodity pair to display.")

    def get_date_rates(self, entries):
        """Validate the requested pair and return its price history.

        Returns a list of (date, rate) pairs for the requested BASE/QUOTE
        pair. Exits via parser.error() if the pair is missing, malformed,
        or not present in the price database.
        """
        if not self.args.commodity:
            self.parser.error("Commodity pair must be specified (in BASE/QUOTE format)")
        # re.match() anchors at the start; the trailing '$' anchors the end,
        # so the whole argument must be a BASE/QUOTE pair.
        if not re.match('{ccy}/{ccy}$'.format(ccy=amount.CURRENCY_RE),
                        self.args.commodity):
            self.parser.error(('Invalid commodity pair "{}"; '
                               'must be in BASE/QUOTE format').format(self.args.commodity))
        price_map = prices.build_price_map(entries)
        try:
            date_rates = prices.get_all_prices(price_map, self.args.commodity)
        except KeyError:
            self.parser.error(
                "Commodity not present in database: {}".format(self.args.commodity))
        return date_rates

    def generate_table(self, entries, errors, options_map):
        date_rates = self.get_date_rates(entries)
        return table.create_table(date_rates,
                                  [(0, "Date", datetime.date.isoformat),
                                   (1, "Price", '{:.5f}'.format)])

    def render_htmldiv(self, entries, errors, options_map, file):
        # Render the regular table, then append a gviz timeline chart of
        # the same rates.
        date_rates = self.get_date_rates(entries)
        dates, rates = zip(*date_rates)
        scripts = gviz.gviz_timeline(dates,
                                     {'rates': rates, 'rates2': rates},
                                     css_id='chart')

        file.write('<div id="prices">\n')
        super().render_htmldiv(entries, errors, options_map, file)
        file.write('<scripts>\n')
        file.write(scripts)
        file.write('</scripts>\n')
        file.write('<div id="chart" style="height: 512px"></div>\n')
        file.write('</div>\n')  # prices
class PricesReport(report.Report):
    """Print out the unnormalized price entries that we input.
    Unnormalized means that we may render both (base,quote) and (quote,base).
    This can be used to rebuild a prices database without having to share the
    entire ledger file.

    Note: this type of report should be removed once we have filtering on
    directive type, this is simply the 'print' report with type:price. Maybe
    rename the 'pricedb' report to just 'prices' for simplicity's sake.
    """

    names = ['all_prices']
    default_format = 'beancount'

    def render_beancount(self, entries, errors, options_map, file):
        # Keep only the Price directives and print them verbatim.
        dcontext = options_map['dcontext']
        price_entries = [directive for directive in entries
                         if isinstance(directive, data.Price)]
        printer.print_entries(price_entries, dcontext, file=file)
class PriceDBReport(report.Report):
    """Print out the normalized price entries from the price db.
    Normalized means that we print prices in the most common (base, quote) order.
    This can be used to rebuild a prices database without having to share the
    entire ledger file.

    Only the forward prices are printed; which (base, quote) pair is selected is
    selected based on the most common occurrence between (base, quote) and
    (quote, base). This is done in the price map.
    """

    names = ['pricedb', 'pricesdb', 'prices_db']
    default_format = 'beancount'

    def render_beancount(self, entries, errors, options_map, file):
        dcontext = options_map['dcontext']
        price_map = prices.build_price_map(entries)
        # All synthesized Price directives share one dummy metadata record.
        meta = data.new_metadata('<report_prices_db>', 0)
        for base_quote in price_map.forward_pairs:
            price_list = price_map[base_quote]
            base, quote = base_quote
            for date, price in price_list:
                # Re-create a Price directive in the normalized order.
                entry = data.Price(meta, date, base, amount.Amount(price, quote))
                file.write(printer.format_entry(entry, dcontext))
            # Blank line between commodity pairs.
            file.write('\n')
class TickerReport(report.TableReport):
    """Print a parseable mapping of (base, quote, ticker, name) for all commodities."""

    names = ['tickers', 'symbols']

    def generate_table(self, entries, errors, options_map):
        commodity_map = getters.get_commodity_map(entries, options_map)
        ticker_info = getters.get_values_meta(commodity_map, 'name', 'ticker', 'quote')

        # Keep only commodities that actually declare a ticker symbol.
        rows = []
        for currency, (name, ticker, cost_currency) in sorted(ticker_info.items()):
            if not ticker:
                continue
            rows.append((currency, cost_currency, ticker, name))

        return table.create_table(
            rows,
            [(0, "Currency"),
             (1, "Cost-Currency"),
             (2, "Symbol"),
             (3, "Name")])
# The report classes this module contributes to the report registry.
__reports__ = [
    CommoditiesReport,
    CommodityLifetimes,
    CommodityPricesReport,
    PricesReport,
    PriceDBReport,
    TickerReport,
]
| iocoop/beancount | src/python/beancount/reports/price_reports.py | Python | gpl-2.0 | 7,389 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
"""Endpoints for the web blog."""
from flask import Blueprint, g, request, redirect, url_for, flash, make_response
import datetime
import time
import re
from xml.dom.minidom import Document
import rophako.model.user as User
import rophako.model.blog as Blog
import rophako.model.comment as Comment
import rophako.model.emoticons as Emoticons
from rophako.utils import (template, render_markdown, pretty_time,
login_required, remote_addr)
from rophako.plugin import load_plugin
from rophako.settings import Config
from rophako.log import logger
import sys
# Python 3 removed the ``unicode`` builtin; define a shim so the calls to
# unicode() below work on both major versions.
if sys.version_info[0] > 2:
    def unicode(s):
        return str(s)
# All routes in this module hang off the /blog URL prefix.
mod = Blueprint("blog", __name__, url_prefix="/blog")

# Blog posts use the comment plugin for their comment sections.
load_plugin("rophako.modules.comment")
@mod.route("/")
def index():
return template("blog/index.html")
@mod.route("/archive")
def archive():
"""List all blog posts over time on one page."""
index = Blog.get_index()
# Group by calendar month, and keep track of friendly versions of months.
groups = dict()
friendly_months = dict()
for post_id, data in index.items():
ts = datetime.datetime.fromtimestamp(data["time"])
date = ts.strftime("%Y-%m")
if not date in groups:
groups[date] = dict()
friendly = ts.strftime("%B %Y")
friendly_months[date] = friendly
# Get author's profile && Pretty-print the time.
data["profile"] = User.get_user(uid=data["author"])
data["pretty_time"] = pretty_time(Config.blog.time_format, data["time"])
groups[date][post_id] = data
# Sort by calendar month.
sort_months = sorted(groups.keys(), reverse=True)
# Prepare the results.
result = list()
for month in sort_months:
data = dict(
month=month,
month_friendly=friendly_months[month],
posts=list()
)
# Sort the posts by time created, descending.
for post_id in sorted(groups[month].keys(), key=lambda x: groups[month][x]["time"], reverse=True):
data["posts"].append(groups[month][post_id])
result.append(data)
g.info["archive"] = result
return template("blog/archive.html")
@mod.route("/category/<category>")
def category(category):
g.info["url_category"] = category
return template("blog/index.html")
@mod.route("/drafts")
@login_required
def drafts():
"""View all of the draft blog posts."""
return template("blog/drafts.html")
@mod.route("/private")
@login_required
def private():
"""View all of the blog posts marked as private."""
return template("blog/private.html")
@mod.route("/entry/<fid>")
def entry(fid):
"""Endpoint to view a specific blog entry."""
# Resolve the friendly ID to a real ID.
post_id = Blog.resolve_id(fid, drafts=True)
if not post_id:
# See if the friendly ID contains any extraneous dashes at the front
# or end, and remove them and see if we have a match. This allows for
# fixing blog fid's that allowed leading/trailing dashes and having the
# old URL just redirect to the new one.
fid = fid.strip("-")
post_id = Blog.resolve_id(fid, drafts=True)
# If still nothing, try consolidating extra dashes into one.
if not post_id:
fid = re.sub(r'-+', '-', fid)
post_id = Blog.resolve_id(fid, drafts=True)
# Did we find one now?
if post_id:
return redirect(url_for(".entry", fid=fid))
flash("That blog post wasn't found.")
return redirect(url_for(".index"))
# Look up the post.
post = Blog.get_entry(post_id)
post["post_id"] = post_id
# Body has a snipped section?
if "<snip>" in post["body"]:
post["body"] = re.sub(r'\s*<snip>\s*', '\n\n', post["body"])
# Render the body.
if post["format"] == "markdown":
post["rendered_body"] = render_markdown(post["body"])
else:
post["rendered_body"] = post["body"]
# Render emoticons.
if post["emoticons"]:
post["rendered_body"] = Emoticons.render(post["rendered_body"])
# Get the author's information.
post["profile"] = User.get_user(uid=post["author"])
post["photo"] = User.get_picture(uid=post["author"])
post["photo_url"] = Config.photo.root_public
# Pretty-print the time.
post["pretty_time"] = pretty_time(Config.blog.time_format, post["time"])
# Count the comments for this post
post["comment_count"] = Comment.count_comments("blog-{}".format(post_id))
# Inject information about this post's siblings.
index = Blog.get_index()
siblings = [None, None] # previous, next
sorted_ids = list(map(lambda y: int(y), sorted(index.keys(), key=lambda x: index[x]["time"], reverse=True)))
for i in range(0, len(sorted_ids)):
if sorted_ids[i] == post_id:
# Found us!
if i > 0:
# We have an older post.
siblings[0] = index[ str(sorted_ids[i-1]) ]
if i < len(sorted_ids) - 1:
# We have a newer post.
siblings[1] = index[ str(sorted_ids[i+1]) ]
post["siblings"] = siblings
g.info["post"] = post
return template("blog/entry.html")
@mod.route("/entry")
@mod.route("/index")
def dummy():
return redirect(url_for(".index"))
@mod.route("/update", methods=["GET", "POST"])
@login_required
def update():
"""Post/edit a blog entry."""
# Get our available avatars.
g.info["avatars"] = Blog.list_avatars()
g.info["userpic"] = User.get_picture(uid=g.info["session"]["uid"])
# Default vars.
g.info.update(dict(
post_id="",
fid="",
author=g.info["session"]["uid"],
subject="",
body="",
format="markdown",
avatar="",
categories="",
privacy=Config.blog.default_privacy,
sticky=False,
emoticons=True,
comments=Config.blog.allow_comments,
preview=False,
))
# Editing an existing post?
post_id = request.args.get("id", request.form.get("id", None))
if post_id:
post_id = Blog.resolve_id(post_id, drafts=True)
if post_id:
logger.info("Editing existing blog post {}".format(post_id))
post = Blog.get_entry(post_id)
g.info["post_id"] = post_id
g.info["post"] = post
# Copy fields.
for field in ["author", "fid", "subject", "time", "format",
"body", "avatar", "categories", "privacy",
"sticky", "emoticons", "comments"]:
g.info[field] = post[field]
# Are we SUBMITTING the form?
if request.method == "POST":
action = request.form.get("action")
# Get all the fields from the posted params.
g.info["post_id"] = request.form.get("id")
for field in ["fid", "subject", "format", "body", "avatar", "categories", "privacy"]:
g.info[field] = request.form.get(field)
for boolean in ["sticky", "emoticons", "comments"]:
g.info[boolean] = True if request.form.get(boolean, None) == "true" else False
g.info["author"] = int(g.info["author"])
# What action are they doing?
if action == "preview":
g.info["preview"] = True
# Render markdown?
if g.info["format"] == "markdown":
g.info["rendered_body"] = render_markdown(g.info["body"])
else:
g.info["rendered_body"] = g.info["body"]
# Render emoticons.
if g.info["emoticons"]:
g.info["rendered_body"] = Emoticons.render(g.info["rendered_body"])
elif action == "publish":
# Publishing! Validate inputs first.
invalid = False
if len(g.info["body"]) == 0:
invalid = True
flash("You must enter a body for your blog post.")
if len(g.info["subject"]) == 0:
invalid = True
flash("You must enter a subject for your blog post.")
# Resetting the post's time stamp?
if not request.form.get("id") or request.form.get("reset-time"):
g.info["time"] = float(time.time())
else:
g.info["time"] = float(request.form.get("time", time.time()))
# Format the categories.
tags = []
for tag in g.info["categories"].split(","):
tags.append(tag.strip())
# Okay to update?
if invalid is False:
new_id, new_fid = Blog.post_entry(
post_id = g.info["post_id"],
epoch = g.info["time"],
author = g.info["author"],
subject = g.info["subject"],
fid = g.info["fid"],
avatar = g.info["avatar"],
categories = tags,
privacy = g.info["privacy"],
ip = remote_addr(),
emoticons = g.info["emoticons"],
sticky = g.info["sticky"],
comments = g.info["comments"],
format = g.info["format"],
body = g.info["body"],
)
return redirect(url_for(".entry", fid=new_fid))
if type(g.info["categories"]) is list:
g.info["categories"] = ", ".join(g.info["categories"])
return template("blog/update.html")
@mod.route("/delete", methods=["GET", "POST"])
@login_required
def delete():
"""Delete a blog post."""
post_id = request.args.get("id")
# Resolve the post ID.
post_id = Blog.resolve_id(post_id, drafts=True)
if not post_id:
flash("That blog post wasn't found.")
return redirect(url_for(".index"))
if request.method == "POST":
confirm = request.form.get("confirm")
if confirm == "true":
Blog.delete_entry(post_id)
flash("The blog entry has been deleted.")
return redirect(url_for(".index"))
# Get the entry's subject.
post = Blog.get_entry(post_id)
g.info["subject"] = post["subject"]
g.info["post_id"] = post_id
return template("blog/delete.html")
@mod.route("/rss")
def rss():
"""RSS feed for the blog."""
doc = Document()
rss = doc.createElement("rss")
rss.setAttribute("version", "2.0")
rss.setAttribute("xmlns:blogChannel", "http://backend.userland.com/blogChannelModule")
doc.appendChild(rss)
channel = doc.createElement("channel")
rss.appendChild(channel)
rss_time = "%a, %d %b %Y %H:%M:%S GMT"
######
## Channel Information
######
today = time.strftime(rss_time, time.gmtime())
xml_add_text_tags(doc, channel, [
["title", Config.blog.title],
["link", Config.blog.link],
["description", Config.blog.description],
["language", Config.blog.language],
["copyright", Config.blog.copyright],
["pubDate", today],
["lastBuildDate", today],
["webmaster", Config.blog.webmaster],
])
######
## Image Information
######
image = doc.createElement("image")
channel.appendChild(image)
xml_add_text_tags(doc, image, [
["title", Config.blog.image_title],
["url", Config.blog.image_url],
["link", Config.blog.link],
["width", Config.blog.image_width],
["height", Config.blog.image_height],
["description", Config.blog.image_description],
])
######
## Add the blog posts
######
index = Blog.get_index()
posts = get_index_posts(index)
for post_id in posts[:int(Config.blog.entries_per_feed)]:
post = Blog.get_entry(post_id)
item = doc.createElement("item")
channel.appendChild(item)
# Render the body.
if post["format"] == "markdown":
post["rendered_body"] = render_markdown(post["body"])
else:
post["rendered_body"] = post["body"]
# Render emoticons.
if post["emoticons"]:
post["rendered_body"] = Emoticons.render(post["rendered_body"])
xml_add_text_tags(doc, item, [
["title", post["subject"]],
["link", url_for("blog.entry", fid=post["fid"], _external=True)],
["description", post["rendered_body"]],
["pubDate", time.strftime(rss_time, time.gmtime(post["time"]))],
])
resp = make_response(doc.toprettyxml(encoding="utf-8"))
resp.headers["Content-Type"] = "application/rss+xml; charset=utf-8"
return resp
def xml_add_text_tags(doc, root_node, tags):
    """RSS feed helper function.

    Append a series of simple ``<name>text</name>`` children to *root_node*,
    one per (name, value) pair in *tags*."""
    for name, value in tags:
        node = doc.createElement(name)
        node.appendChild(doc.createTextNode(unicode(value)))
        root_node.appendChild(node)
def partial_index(template_name="blog/index.inc.html", mode="normal"):
"""Partial template for including the index view of the blog.
Args:
template_name (str): The name of the template to be rendered.
mode (str): The view mode of the posts, one of:
- normal: Only list public entries, or private posts for users
who are logged in.
- drafts: Only list draft entries for logged-in users.
"""
# Get the blog index.
if mode == "normal":
index = Blog.get_index()
elif mode == "drafts":
index = Blog.get_drafts()
elif mode == "private":
index = Blog.get_private()
else:
return "Invalid partial_index mode."
# Let the pages know what mode they're in.
g.info["mode"] = mode
pool = {} # The set of blog posts to show.
category = g.info.get("url_category", None)
if category == Config.blog.default_category:
category = ""
# Are we narrowing by category?
if category is not None:
# Narrow down the index to just those that match the category.
for post_id, data in index.items():
if not category in data["categories"]:
continue
pool[post_id] = data
# No such category?
if len(pool) == 0:
flash("There are no posts with that category.")
return redirect(url_for(".index"))
else:
pool = index
# Get the posts we want.
posts = get_index_posts(pool)
# Handle pagination.
offset = request.args.get("skip", 0)
try: offset = int(offset)
except: offset = 0
# Handle the offsets, and get those for the "older" and "earlier" posts.
# "earlier" posts count down (towards index 0), "older" counts up.
g.info["offset"] = offset
g.info["earlier"] = offset - int(Config.blog.entries_per_page) if offset > 0 else 0
g.info["older"] = offset + int(Config.blog.entries_per_page)
if g.info["earlier"] < 0:
g.info["earlier"] = 0
if g.info["older"] < 0 or g.info["older"] > len(posts) - 1:
g.info["older"] = 0
g.info["count"] = 0
# Can we go to other pages?
g.info["can_earlier"] = True if offset > 0 else False
g.info["can_older"] = False if g.info["older"] == 0 else True
# Load the selected posts.
selected = []
stop = offset + int(Config.blog.entries_per_page)
if stop > len(posts): stop = len(posts)
index = 1 # Let each post know its position on-page.
for i in range(offset, stop):
post_id = posts[i]
post = Blog.get_entry(post_id)
post["post_id"] = post_id
# Body has a snipped section?
if "<snip>" in post["body"]:
post["body"] = post["body"].split("<snip>")[0]
post["snipped"] = True
# Render the body.
if post["format"] == "markdown":
post["rendered_body"] = render_markdown(post["body"])
else:
post["rendered_body"] = post["body"]
# Render emoticons.
if post["emoticons"]:
post["rendered_body"] = Emoticons.render(post["rendered_body"])
# Get the author's information.
post["profile"] = User.get_user(uid=post["author"])
post["photo"] = User.get_picture(uid=post["author"])
post["photo_url"] = Config.photo.root_public
post["pretty_time"] = pretty_time(Config.blog.time_format, post["time"])
# Count the comments for this post
post["comment_count"] = Comment.count_comments("blog-{}".format(post_id))
post["position_index"] = index
index += 1
selected.append(post)
g.info["count"] += 1
g.info["category"] = category
g.info["posts"] = selected
return template(template_name)
def get_index_posts(index):
    """Helper function to get data for the blog index page.

    Returns the post IDs from *index*, sticky posts first, each group
    ordered by publication time with the newest first."""
    # Partition the post IDs into sticky and non-sticky buckets.
    sticky_ids = set()
    normal_ids = set()
    for post_id, data in index.items():
        (sticky_ids if data["sticky"] else normal_ids).add(post_id)

    by_newest = lambda pid: index[pid]["time"]
    ordered = sorted(sticky_ids, key=by_newest, reverse=True)
    ordered += sorted(normal_ids, key=by_newest, reverse=True)
    return ordered
def partial_tags():
    """Get a listing of tags and their quantities for the nav bar."""
    tags = Blog.get_categories()

    # Most popular categories come first.
    ordered = sorted(tags.keys(), key=lambda name: tags[name], reverse=True)

    result = []
    has_small = False
    for tag in ordered:
        count = tags[tag]
        small = count < 10  # TODO: make this configurable
        result.append(dict(
            category=tag if len(tag) else Config.blog.default_category,
            count=count,
            small=small,
        ))
        if small:
            has_small = True

    g.info["tags"] = result
    g.info["has_small"] = has_small
    return template("blog/categories.inc.html")
| kirsle/rophako | rophako/modules/blog/__init__.py | Python | gpl-2.0 | 18,180 |
# -*- coding: utf-8 -*-
"""
AllDb
Eksportowanie danych do pliku pdf
"""
from __future__ import with_statement
__author__ = "Karol Będkowski"
__copyright__ = "Copyright (c) Karol Będkowski, 2009-2010"
__version__ = "2010-06-11"
import logging
from cStringIO import StringIO
from alldb.model import objects
from alldb.libs.appconfig import AppConfig
_LOG = logging.getLogger(__name__)
# Attempt to load reportlab; PDF export is only available when installed.
try:
    from reportlab.platypus import (SimpleDocTemplate, Table, Paragraph,
            TableStyle, Spacer, Image)
    from reportlab.rl_config import defaultPageSize
    from reportlab.lib import colors
    from reportlab.lib.units import cm, inch
    from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
    from reportlab.lib.enums import TA_CENTER, TA_LEFT
    from reportlab.pdfbase import pdfmetrics
    from reportlab.pdfbase.ttfonts import TTFont
except ImportError:
    _LOG.warn('reportlab not available')
    PDF_AVAILABLE = False
else:
    _LOG.info('reportlab loaded')
    PDF_AVAILABLE = True

# Page margins for the generated PDF documents.
_MARGIN_TOP = 0.5 * cm
_MARGIN_BOTTOM = 1 * cm
_MARGIN_LEFT = _MARGIN_RIGHT = 0.5 * cm
def _my_page(canvas, doc):
    """Page callback: draw the page number centered at the bottom margin."""
    canvas.saveState()
    canvas.setFont('FreeSans', 6)
    page_label = "%d" % doc.page
    canvas.drawString(defaultPageSize[0] / 2, _MARGIN_BOTTOM, page_label)
    canvas.restoreState()
def export_pdf_list(filename, cls, items):
    """Export the data as a tabular listing (one row per item).

    @filename: path to the output PDF file
    @cls: class (schema) describing the exported items
    @items: list of items to export
    """
    _create_document(filename, cls, items, _create_pdf_list)
def export_pdf_all(filename, cls, items):
    """Export the data in full detail (one field-by-field table per item).

    @filename: path to the output PDF file
    @cls: class (schema) describing the exported items
    @items: list of items to export
    """
    _create_document(filename, cls, items, _create_pdf_all)
def _prepare_styles():
    """Create the paragraph styles used by the PDF export.

    Returns a dict with the keys 'Normal', 'ItemTitle', 'Heading' and
    'FieldHead', each mapped to a configured ParagraphStyle.
    """
    styles = {}
    stylesheet = getSampleStyleSheet()

    # Body text.
    style = ParagraphStyle("Normal", stylesheet['Normal'])
    style.alignment = TA_LEFT
    style.fontSize = 6
    style.fontName = 'FreeSans'
    style.leading = 8
    styles['Normal'] = style

    # Item title: bold and larger. (Cleanup: fontSize was assigned twice,
    # 8 then 10; only the final value ever took effect, so the dead
    # assignment has been removed.)
    style = ParagraphStyle("ItemTitle", stylesheet['Heading1'])
    style.alignment = TA_LEFT
    style.fontName = 'FreeSansBold'
    style.fontSize = 10
    styles['ItemTitle'] = style

    # Column headers for the tabular listing.
    style = ParagraphStyle("Heading", stylesheet['Heading2'])
    style.alignment = TA_CENTER
    style.fontSize = 6
    style.fontName = 'FreeSansBold'
    styles['Heading'] = style

    # Field-name cells in the detailed export.
    style = ParagraphStyle("FieldHead", stylesheet['Heading2'])
    style.alignment = TA_LEFT
    style.fontSize = 6
    style.fontName = 'FreeSansBold'
    style.leading = 8
    styles['FieldHead'] = style
    return styles
def _create_document(filename, cls, items, method):
    """Build and save a PDF document.

    @filename: destination path
    @cls: class (schema) describing the exported items
    @items: items to export
    @method: generator producing the platypus flowables for the document
    """
    try:
        appconfig = AppConfig()
        # Register the TrueType fonts used throughout the document.
        pdfmetrics.registerFont(TTFont('FreeSans',
                appconfig.get_data_file('fonts/freesans.ttf')))
        pdfmetrics.registerFont(TTFont('FreeSansBold',
                appconfig.get_data_file('fonts/freesansbold.ttf')))
        doc = SimpleDocTemplate(filename, leftMargin=_MARGIN_LEFT,
                rightMargin=_MARGIN_RIGHT, topMargin=_MARGIN_TOP,
                bottomMargin=_MARGIN_BOTTOM, pageCompression=9)
        pages = list(method(cls, items))
        # _my_page draws the page-number footer on every page.
        doc.build(pages, onLaterPages=_my_page, onFirstPage=_my_page)
    except RuntimeError:
        _LOG.exception('create_pdf error. file=%s', filename)
        raise
def _create_pdf_list(cls, items):
    """Yield flowables for a tabular listing: one header row plus one row
    per item, using the fields configured in ``cls.fields_in_list``."""
    styles = _prepare_styles()
    style_header = styles['Heading']
    style_normal = styles['Normal']
    fields = list(cls.fields_in_list)
    # Header row with the human-readable field names.
    data = [[Paragraph(objects.get_field_name_human(field), style_header)
            for field in fields]]
    for item in items:
        row = [Paragraph(objects.get_field_value_human(item.get_value(
                field)), style_normal) for field in fields]
        data.append(row)
    table_style = [('ALIGN', (0, 0), (-1, 0), 'CENTER'),
            ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
            ('GRID', (0, 0), (-1, -1), 0.25, colors.black)]
    table = Table(data)
    table.setStyle(TableStyle(table_style))
    yield table
def _create_pdf_all(cls, items):
    """Yield flowables for a detailed export: one field-by-field table per
    item, optionally preceded by the item title, with embedded images."""
    styles = _prepare_styles()
    style_header = styles['FieldHead']
    style_normal = styles['Normal']
    style_title = styles['ItemTitle']
    fields = cls.fields
    table_style = TableStyle([('GRID', (0, 0), (-1, -1), 0.25, colors.black)])
    for item in items:
        if cls.title_show:
            yield Paragraph(item.title, style_title)
        rows = []
        for field_name, field_type, dummy, dummy in fields:
            # NOTE(review): _create_pdf_list uses get_field_name_human for
            # header cells; get_field_value_human(field_name) here may be a
            # copy/paste slip — confirm against intended output.
            row = [Paragraph(objects.get_field_value_human(field_name),
                    style_header)]
            if field_type == 'image':
                blob = item.get_blob(field_name)
                if blob:
                    img = Image(StringIO(blob), lazy=2)
                    # Scale assuming a 150 dpi source down to points.
                    img.drawWidth = img.drawWidth / 150. * inch
                    img.drawHeight = img.drawHeight / 150. * inch
                    row.append(img)
            else:
                row.append(Paragraph(objects.get_field_value_human(
                        item.get_value(field_name)), style_normal))
            rows.append(row)
        yield Table(rows, [5 * cm, None], style=table_style)
        yield Spacer(0.5 * cm, 0.5 * cm)
# vim: encoding=utf8: ff=unix:
| KarolBedkowski/alldb | alldb/filetypes/pdf_support.py | Python | gpl-2.0 | 4,841 |
#convert nexus tree format to newick
import sys
import os
import dendropy
# Convert each Nexus tree file named on the command line to Newick format,
# writing the result next to the input with a .nwk extension.
for n in sys.argv[1:]:
    basename = os.path.splitext(n)
    outfile = basename[0] + ".nwk"
    nexusfile = dendropy.TreeList.get_from_path(n, "nexus")
    # Write to a temporary file first, then rename onto the final name.
    nexusfile.write_to_path('temp.nwk', "newick")
    os.rename('temp.nwk', outfile)
| Wendellab/phylogenetics | nexus2newick.py | Python | gpl-2.0 | 314 |
#
# Copyright (C) 2010 B. Malengier
# Copyright (C) 2010 P.Li
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
upscaling to fabric1d domain
Here a room of LxWxH is modeled with a net spanning the room at L/2
This can be reduced to a 1D model.
The yarn in the net is modeled as 1D cilindrical over 2 x Ry. The mass
outside the yarn (Ry, 2Ry) (the overlap zone) is put in the room model
which runs from Ry to L/2. Note that this is a cube domain!
The room allows for some ventilation L/s which is modeled as removing
the corresponding volume of AI from everywhere
"""
#-------------------------------------------------------------------------
#
# Global Imports
#
#-------------------------------------------------------------------------
from __future__ import division, print_function
import os.path
import numpy as np
import matplotlib.pyplot as plt
import math
from numpy import pi
# NOTE(review): presumably caps on stored/plotted history length — confirm
# against their usage further down in this module.
MAX_STORE_LENGTH = 1000
MAX_PLOT_LENGTH = 500000
INSPECT_MEM = False

# Optional dependency: the scikits.odes ODE solvers.
HAVE_ODES = False
try:
    from scikits.odes import ode as sc_ode
    HAVE_ODES = True
except:  # NOTE(review): bare except — probably should be ImportError
    print ('Could not load scikits.odes, odes solver not available')

# consider absorption problem possible or not?
ABSORPTION = True
#
# Local Imports
#
#-------------------------------------------------------------------------
import stick.const as const
import stick.lib.utils.utils as utils
from stick.yarn.config import YarnConfigManager
from stick.yarn1d.yarn1dmodel import Yarn1DModel
#-------------------------------------------------------------------------
#
# DiffusionModel-fabric1d class
#
#-------------------------------------------------------------------------
class Room1DModel(object):
"""
upscaling to fabric1d domain
Here a room of LxWxH is modeled with a net spanning the room at L/2
This can be reduced to a 1D model.
The yarn in the net is modeled as 1D cilindrical over 2 x Ry. The mass
outside the yarn (Ry, 2Ry) (the overlap zone) is put in the room model
which runs from Ry to L/2. Note that this is a cube domain!
The room allows for some ventilation L/s which is modeled as removing
the corresponding volume of AI from everywhere
"""
def __init__(self, config):
#indicate if only one side of bednet is free, or both are free
self.singleside = False
#other settings from config
self.cfg = config
self.verbose = self.cfg.get('general.verbose')
self.time_period = self.cfg.get('time.time_period')
self.delta_t = self.cfg.get('time.dt')
self.timesteps = int((self.time_period*(1.+self.delta_t*1e-6)) // self.delta_t)
#set correct delta_t
self.delta_t = self.time_period / self.timesteps
if self.verbose:
print ("Timestep used in bednet model:", self.delta_t)
self.initconc = eval(self.cfg.get('initial.init_conc'))
self.dx = self.cfg.get('domain.dx')
self.dy = self.cfg.get('domain.dy')
#size room in mm
self.room_L = self.cfg.get("domain.room_L") * 1000
self.room_W = self.cfg.get("domain.room_W") * 1000
self.room_H = self.cfg.get("domain.room_H") * 1000
self.nvertyarns = self.room_W/self.dx
self.nhoryarns = self.room_H/self.dy
self.diff_coef = self.cfg.get('diffusion.diff_coef')
self.saturation_conc = self.cfg.get('active_component.saturation_conc')
self.treshold = self.cfg.get('active_component.treshold_effect')
self.x0 = self.cfg.get('observer.x0')
#define whether there is the ventilation existing
self.ventilation = self.cfg.get('domain.ventilation')
self.vel_ventilation = self.cfg.get('domain.vel_ventilation')
#we set a distance for the yarn bc
EXTFRAC = 1.
self.cfg_yarn = []
self.radius_yarn = []
for filename in self.cfg.get('sample.yarn_config'):
if not os.path.isabs(filename):
filename = os.path.normpath(os.path.join(
os.path.dirname(self.cfg.filename), filename))
self.cfg_yarn.append(YarnConfigManager.get_instance(filename))
#set values from the yarn on this inifile
print ('time', self.time_period)
self.cfg_yarn[-1].set("time.time_period", self.time_period)
self.cfg_yarn[-1].set("boundary.dist_conc_out", float(self.x0[0]))
self.cfg_yarn[-1].set("boundary.D_out", self.diff_coef)
self.cfg_yarn[-1].set("boundary.conc_out",
float(self.initconc(self.cfg_yarn[-1].get("domain.yarnradius"))))
self.cfg_yarn[-1].set("domain.useextension", True)
## How much overlap region? Take one yarn radius for now
self.cfg_yarn[-1].set("domain.extensionfraction", EXTFRAC)
self.radius_yarn.append(self.cfg_yarn[-1].get("domain.yarnradius"))
assert self.radius_yarn[-1] == self.radius_yarn[0], 'ERROR, yarns'\
' must have equal radius for now, as massperyarn is equally '\
'distributed over the yarns'
#we want the overlap zone of the yarns to end at 2* maximum radius:
self.maxyarnrad = max(self.radius_yarn)
self.minyarnrad = min(self.radius_yarn)
voloverlapyarn = (np.pi*((self.maxyarnrad * (1 + EXTFRAC))**2 -
(self.maxyarnrad**2)) * (self.nhoryarns
* self.room_W + self.nvertyarns
* self.room_H)
)
#self.endoverlap = self.maxyarnrad * (1 + EXTFRAC)
if self.singleside:
self.endoverlap = self.minyarnrad + voloverlapyarn / self.room_W / self.room_H
else:
self.endoverlap = self.minyarnrad + voloverlapyarn / self.room_W / self.room_H / 2
for config, rad in zip(self.cfg_yarn, self.radius_yarn):
config.set("domain.extensionfraction", EXTFRAC)
#create yarn models
self.yarn_models = []
for cfg in self.cfg_yarn:
self.yarn_models.append(Yarn1DModel(cfg))
self.nr_models = len(self.yarn_models)
#some memory
self.source_mass = np.empty(self.nr_models, float)
#self.mass_build = [0.,]
#plot the result every few seconds so outcome becomes visible during calculations
self.plotevery = self.cfg.get("plot.plotevery")
self.viewerwritecount = 0
self.writeevery = self.cfg.get("plot.writeevery")
#now some output on density
self.volbednet = 0.
self.surfbednet = self.room_H * self.room_W
for rad in self.radius_yarn:
print ('vert vol yarns', self.nvertyarns * pi * rad**2 * self.room_H, 'mm3')
print ('horz vol yarns', self.nhoryarns * pi * rad**2 * self.room_W, 'mm3')
self.volbednet += self.nvertyarns * pi * rad**2 * self.room_H
self.volbednet += self.nhoryarns * pi * rad**2 * self.room_W
print ('volume_bednet space =', (2 * self.maxyarnrad * self.room_H
* self.room_W))
# The total volume of the bednet incl void space is the area of the
# net * the tickness of the net.
# This thickness is taken to be twice a yarndiameter.
self.totalvolume_net = self.room_H * self.room_W * 4 * self.maxyarnrad
self.voidvolume = self.totalvolume_net - self.volbednet
self.densitybednet = self.volbednet / (2 * self.maxyarnrad * self.room_H
* self.room_W)
self.fabporosity = self.voidvolume / self.totalvolume_net
self.initialized = False
self.yarnconc_center = np.empty((self.timesteps, 2),float)
self.yarnconc_surface = np.empty((self.timesteps, 2),float)
def times(self, timestep, end=None):
""" Compute the time at one of our steps
If end is given, all times between step timestep and step end are
returned as a list, with end included
"""
if end is None:
return timestep * self.delta_t
else:
begin = timestep * self.delta_t
end = end * self.delta_t
return np.linspace(begin, end, end-begin + 1)
    def create_mesh(self):
        """ Create a 1D mesh for the room model.

        The domain runs from the smallest yarn radius to half the room
        length.  The first cell spans the yarn/room overlap zone
        [begin_point, endoverlap]; the remaining edges are spaced uniformly
        up to end_point.  Also precomputes, for each observer position in
        self.x0, the cell index and interpolation weight used for plotting.
        """
        self.begin_point = self.minyarnrad
        self.end_point = self.room_L / 2
        self.nr_edge = self.cfg.get('domain.n_edge')
        self.nr_cell = self.nr_edge - 1
        self.grid_edge = np.empty(self.nr_edge, float)
        self.grid_edge[0] = self.begin_point
        self.grid_edge[1:] = np.linspace(self.endoverlap, self.end_point,
                                         self.nr_edge-1)
        # volume of the overlap zone; doubled when both sides of the net
        # are modelled by the same (mirrored) half-room domain
        if self.singleside:
            self.overlapvolume = (self.endoverlap - self.begin_point) \
                                    * self.room_H * self.room_W
        else:
            self.overlapvolume = (self.endoverlap - self.begin_point) \
                                    * self.room_H * self.room_W * 2
        #construct cell centers from this
        self.grid = (self.grid_edge[:-1] + self.grid_edge[1:])/2.
        #obtain cell sizes
        self.delta_x = self.grid_edge[1:] - self.grid_edge[:-1]
        # (cell index, interpolation weight) per observer position x0
        self.plotdata = []
        for xplot in self.x0:
            assert self.grid[0] < xplot < self.grid[-1], "%f < %f < %f "\
                    "Not satisfied, observer out of domain" % (self.grid[0],
                                                    xplot, self.grid[-1])
            for ind, xcell in enumerate(self.grid):
                if xcell >= xplot:
                    # linear interpolation weight between cells ind-1 and ind
                    interpol_start = (xcell-xplot)/(self.grid[ind]-self.grid[ind-1])
                    self.plotdata.append((ind-1, interpol_start))
                    break
def upscale_yarnmass(self, mass):
"""
Upscale the mass in one yarn, to the mass in the entire bednet,
returns the mass
"""
return (self.nhoryarns * self.room_W + self.nvertyarns * self.room_H) * mass
    def calc_mass(self):
        """
        Calculate the mass in the room and the bednet of Active Component
        at this specific state the model is in.

        Returns a tuple
            (yarnmass, yarnmassoverlap, totyarnmass, totyarnmassoverlap,
             roommass, roomoverlapmass, fiberconc_sta, fiberconc_mid,
             fiberconc_end)
        where the list entries are indexed by yarn type; tot* values are
        upscaled from one yarn to the whole net.
        """
        #First, the mass in the yarns
        yarnmass = [None] * len(self.yarn_models)
        fiberconc_sta = [None] * len(self.yarn_models)
        fiberconc_mid = [None] * len(self.yarn_models)
        fiberconc_end = [None] * len(self.yarn_models)
        yarnmassoverlap = [None] * len(self.yarn_models)
        for ttype, model in enumerate(self.yarn_models):
            yarnmass[ttype] = model.calc_mass(model.step_old_sol)
            yarnmassoverlap[ttype] = model.calc_mass_overlap(model.step_old_sol)
            # fiber concentration at start/middle/end of the fiber
            fiberconc_sta[ttype], fiberconc_mid[ttype], fiberconc_end[ttype] = model.get_fiber_conc()
        #Next, the upscaled mass in the yarns
        totyarnmass = [None] * len(self.yarn_models)
        totyarnmassoverlap = [None] * len(self.yarn_models)
        for ttype, (massy, massyo) in enumerate(zip(yarnmass, yarnmassoverlap)):
            totyarnmass[ttype] = self.upscale_yarnmass(massy)
            totyarnmassoverlap[ttype] = self.upscale_yarnmass(massyo)
        #Next, the mass in the room, divided in overlapzone and rest.
        if self.singleside:
            roomoverlapmass = self.overlapvolume * self.step_old_sol[0]
            roommass = np.sum(self.delta_x[1:] * self.room_H * self.room_W * self.step_old_sol[1:])
        else:
            # factor 2 because we only model half of the room
            roomoverlapmass = self.overlapvolume * self.step_old_sol[0]
            roommass = 2*np.sum(self.delta_x[1:] * self.room_H * self.room_W * self.step_old_sol[1:])
        return (yarnmass, yarnmassoverlap, totyarnmass, totyarnmassoverlap,
                roommass, roomoverlapmass, fiberconc_sta, fiberconc_mid, fiberconc_end)
def initial_room(self):
""" initial concentration in the room domain
"""
self.init_conc = np.empty(self.nr_cell, float)
self.init_conc[:] = self.initconc(self.grid[:])
    def init_yarn(self):
        """
        Initialize every yarn submodel and the per-yarn mass bookkeeping
        (mass in yarn, mass in the overlap zone, released source mass).

        Prints a warning when a yarn model's boundary condition is not a
        diffusion flux (bound_type != 0), since then the yarn is decoupled
        from the fabric.
        """
        self.yarn_mass = [0] * len(self.yarn_models)
        self.yarn_mass_overlap = [0] * len(self.yarn_models)
        self.yarn_mass_overlap_old = [0] * len(self.yarn_models)
        self.tstep = 0
        for ind, model in enumerate(self.yarn_models):
            model.do_yarn_init()
            if model.bound_type != 0 :
                print (' ***********************************************')
                print (' ****** WARNING: Boundary condition not diffusion flux,'\
                        '\n so yarn does not consider the fabric !!')
                print (' ***********************************************')
            self.yarn_mass[ind] = model.calc_mass(model.init_conc)
            self.yarn_mass_overlap[ind] = model.calc_mass_overlap(model.init_conc)
            # no mass released at start time
            self.source_mass[ind] = 0.
            #self.fibermass = model.get_fiber_mass()
            #print(self.fibermass)
    def f_conc_ode(self, t, conc_x, diff_u_t):
        """
        Right-hand side of the room 1D diffusion equation, in the form the
        ODE solver expects (diff_u_t is filled in place with d_t C):

        \partial_t (C) = \partial_x (D \partial_x C) + Source

        with Source the concentration amount per time unit added/removed at x.
        Solution is obtained by integration over a cell, so

        \delta x d_t (C) = flux_right - flux_left + Source (\delta x)

        so

        d_t C = 1 / (\delta x) * (flux_right - flux_left) + Source

        We have homogeneous Neumann BC (zero flux on both outer edges),
        unless a ventilation mode overrides the right edge.
        """
        grid = self.grid
        n_cellcenters = len(grid)
        #Initialize the flux rate on the edges
        flux_edge = self.__tmp_flux_edge
        #set flux on edge 0, self.nr_edge-1
        flux_edge[0] = 0.
        flux_edge[-1] = 0.
        #calculate flux rate in each edge of the domain
        # central difference over the two adjacent (possibly unequal) cells
        flux_edge[1:self.nr_edge-1] = (2 * self.diff_coef *
            (conc_x[1:]-conc_x[:-1]) / (self.delta_x[:-1]+self.delta_x[1:])
            )
        if self.ventilation == 'advection':
            # advection branch is unreachable by design until tested
            raise NotImplementedError, 'This piece needs testing before use!' # needs testing before activating this!
            flux_edge[-1] = - self.vel_ventilation * conc_x[-1] * self.delta_t
            flux_edge[1:self.nr_edge-1] += - 2 * self.vel_ventilation \
                    * (conc_x[1:] + conc_x[:-1]) / 2.
        elif self.ventilation == "zero_on_edge":
            #we assume always 0 outside the edge, this means outside is refreshed
            flux_edge[self.nr_edge-1] = (self.diff_coef *
                    (0-conc_x[-1]) / self.delta_x[-1] )
            ##print ('flux edge room', flux_edge[self.nr_edge-1])
        # finite-volume update: divergence of the edge fluxes per cell
        diff_u_t[:] = ((flux_edge[1:]-flux_edge[:-1])
                       / self.delta_x[:]
                      )
        ## we add a source term in the first cell where the overlap is
        diff_u_t[0] += self.source_room_from_yarn
## def f_conc_ode_vel(self, t, conc_x, diff_u_t, vel_ventilation):
## """
## Solving the room 1D diffusion equation:
##
## \partial_t (C) = \partial_x (D \partial_x C) - v\partial_x C + Source
##
## with Source the concentration amount per time unit added/removed at x.
## Solution is obtained by integration over a cell, so
##
## \delta x d_t (C) = flux_right - flux_left + Source (\delta x)
##
## so
##
## d_t C = 1 / (\delta x) * (flux_right - flux_left) + Source
##
## We have homogeneous Neumann BC
## """
## grid = self.grid
## n_cellcenter = len(grid)
## flux_edge = self.__tmp_flux_edge
## #set flux on edge 0, self.nr_edge-1
## flux_edge[0] = 0.
## flux_edge[-1] = -vel_ventilation * conc_x[-1]
## #flux_edge[-1] = 0.
## #calculate flux rate in each edge of the domain
## flux_edge[1:self.nr_edge-1] = (2 * self.diff_coef *
## (conc_x[1:]-conc_x[:-1]) / (self.delta_x[:-1]+self.delta_x[1:])
## ) - vel_ventilation * (conc_x[1:] + conc_x[:-1]) / (self.delta_x[:-1]
## +self.delta_x[1:])
## diff_u_t[:] = ((flux_edge[1:]-flux_edge[:-1])
## / self.delta_x[:]
## )
## ## we add a source term in the first cell where the overlap is
## diff_u_t[0] += self.source_room_from_yarn
    def solve_ode_init(self):
        """
        Initialize the ode solver (CVODE) for the room diffusion problem
        and allocate the solution storage.

        When the run has more than MAX_STORE_LENGTH steps, only a rolling
        buffer self.solpart is kept in memory (periodically dumped to file);
        otherwise self.sol holds the full history and solpart aliases it.
        """
        self.initial_t = self.times(0)
        self.step_old_time = self.initial_t
        #storage for solution
        self.solstoreind = 0
        if self.timesteps+1 > MAX_STORE_LENGTH:
            self.solpart = np.empty((MAX_STORE_LENGTH, self.nr_cell), float)
        else:
            self.sol = np.empty((self.timesteps+1, self.nr_cell), float)
            self.solpart = self.sol
        self.solpart[0, :] = self.init_conc[:]
        # work arrays reused every RHS evaluation / solver step
        self.ret_y = np.empty(self.nr_cell, float)
        self.__tmp_flux_edge = np.zeros(self.nr_cell+1, float)
        self.tstep = 0
        self.step_old_sol = self.solpart[0]
        # banded Jacobian (lband=uband=1) fits the tridiagonal 1D stencil
        self.solver = sc_ode('cvode', self.f_conc_ode,
                             min_step_size=1e-8, rtol=1e-6, atol=1e-6,
                             max_steps=50000, lband=1, uband=1)
        print (self.step_old_time)
        self.solver.init_step(self.step_old_time, self.init_conc)
        self.initialized = True
def do_ode_step(self, stoptime):
"""Solve the roommodel up to stoptime, continuing from the present
state, return the time, concentration after step
"""
self.solver.init_step(self.step_old_time, self.step_old_sol)
self.solver.set_options(tstop=stoptime)
if not self.initialized:
raise Exception, 'Solver ode not initialized'
flag, realtime = self.solver.step(stoptime, self.ret_y)
if flag < 0:
raise Exception, 'could not find solution, flag %d' % flag
assert np.allclose(realtime, stoptime), "%f %f" % (realtime, stoptime)
return stoptime, self.ret_y
    def solve_timestep(self, t):
        """
        Advance the coupled yarn/room model one timestep up to time t:
        1. step every yarn model and compute the mass it released into
           the overlap zone,
        2. convert that mass to a room source term and step the room ODE,
        3. downscale the new overlap concentration back to a per-yarn
           source term so mass balance is kept for the next step.
        """
        print ("solve up to time", t, "s")
        self.tstep += 1
        # 1. step one, solve the yarn model, calculate the mass coming out of one yarn and calculate
        # the corresponding concentration by dividing by the volume of a yarn pi Ry^2
        for ttype, model in enumerate(self.yarn_models):
            rt, rety = model.do_yarn_step(t)
            # track yarn center/surface concentration history for output
            self.yarnconc_center[self.tstep-1,0] = t
            self.yarnconc_center[self.tstep-1,1] = rety[0]
            self.yarnconc_surface[self.tstep-1,0] = t
            self.yarnconc_surface[self.tstep-1,1] = rety[-1]
            #filedata= open(utils.OUTPUTDIR + os.sep + "yarnconc_%05d" %t + ".txt",'w')
            #filedata.write("conc on %.10f is %s" % (t,rety))
            #filedata.close()
            #filedata= open(utils.OUTPUTDIR + os.sep + "yarnconc_center%05d" %t + ".txt",'w')
            #filedata.write("conc on %.10f is %s" % (t,rety[0]))
            #filedata.close()
            #filedata= open(utils.OUTPUTDIR + os.sep + "yarnconc_surface_%05d" %t + ".txt",'w')
            #filedata.write("conc on %.10f is %s" % (t,rety[-1]))
            #filedata.close()
            tmp = model.calc_mass(rety)
            tmp_overlap = model.calc_mass_overlap(rety)
            # mass that goes into overlap is the mass that disappeared.
            self.source_mass[ttype] = tmp_overlap \
                    - (self.yarn_mass_overlap[ttype] + model.source_overlap
                                            * self.delta_t * model.areaextend)
            self.yarn_mass[ttype] = tmp
            self.yarn_mass_overlap_old[ttype] = self.yarn_mass_overlap[ttype]
            self.yarn_mass_overlap[ttype] = tmp_overlap
            if (ABSORPTION != True):
                # we check on absorption, and give error if too big
                if self.source_mass[ttype] < 0.:
                    print ("source mass", self.source_mass[ttype])
                    if abs(self.source_mass[ttype]) < 100:
                        #self.source_mass[ttype, self.tstep] = 0.
                        print ('WARNING: small negative release, reduce timestep fiber/yarn if needed')
                    else:
                        raise NotImplementedError, 'source must be positive, negative not supported'
        # 2. step two, solve the room model
        # to obtain new concentration value near yarn.
        # We know that self.source_mass[ttype] has been released in the
        # overlap region since last step
        # 2.a upscale source_mass (mass in ring zone area) to a source per second per mm^3
        # concentration is a consequence of all yarn types, so sum
        # over yarn types, and compute contribution of that moment.
        concreleased = (self.nhoryarns * self.room_W + self.nvertyarns * self.room_H) \
                        * np.sum(self.source_mass[:]) / self.overlapvolume
        self.source_room_from_yarn = concreleased / self.delta_t
        # 2.b solve the room model
        self.step_old_time, self.step_old_sol = self.do_ode_step(t)
        if self.tstep % MAX_STORE_LENGTH == 0 :
            #dump to file, and restart
            self.dump_sol(self.solstoreind)
            self.solstoreind += 1
        self.solpart[self.tstep % MAX_STORE_LENGTH, :] = self.step_old_sol[:]
        # 3. for next timestep, we need to set correct boundary condition
        # on the yarn level, so downscale the mass to keep mass balance
        ##massdiff = massoverlapnew - massoverlapold
        massperyarn = (self.step_old_sol[0] * self.overlapvolume
                        / (self.nhoryarns * self.room_W + self.nvertyarns * self.room_H)
                        / len(self.cfg_yarn)
                      )
        for ind, model in enumerate(self.yarn_models):
            #the source mass is what was present in the overlap before doing room model
            massyarnoverlapold = self.yarn_mass_overlap[ind]
            #the new mass there we approximate from concentration
            massyarnoverlapnew = massperyarn
            massyarndiff = massyarnoverlapnew - massyarnoverlapold
            #based on removed, we set a source term in the overlap zone of
            # of the yarn
            model.source_overlap = massyarndiff / self.delta_t / model.areaextend
        #store masses
        self.__store_masses(self.tstep)
    def view_sol(self):
        """
        Plot the concentration history at every observer position self.x0,
        save each figure, and return the index of the last figure used.

        Optionally overlays extra (measured) data configured via
        plot.extra_time_room as "style|times|values".
        """
        #maxv = np.max(self.sol)
        #minv = np.min(self.sol)
        #print 'max', maxv, minv
        #self.plottimes = np.arange(self.times[0],self.times[-1]+1,self.plotevery)
        plotextra = False
        times = self.times(self.timesteps+1-((self.tstep - 1) % MAX_STORE_LENGTH+1),
                           self.timesteps)
        sol = self.solpart[:(self.tstep - 1) % MAX_STORE_LENGTH+1]
        extravals = self.cfg.get("plot.extra_time_room")
        if extravals:
            extravals = extravals.split("|")
            if len(extravals) == 3 and not eval(extravals[1]) == []:
                plotextra = True
        plt.ion()
        ind = 0
        for ind, interpdat in enumerate(self.plotdata):
            xval = self.x0[ind]
            cellstart, interpval = interpdat
            # NOTE(review): interpolation indexes sol with the plotdata
            # index `ind`, not the stored cell index `cellstart` - these
            # only coincide for specific observer layouts; confirm intended.
            conc_in_point = interpval * sol[:, ind] + (1-interpval) * sol[:, ind+1]
            print ('conc in end point', conc_in_point[-1])
            plt.rc("font", family="serif")
            plt.rc("font", size=10)
            width = 4.5  #width in inches
            height = 1.4 #height in inches
            plt.rc("figure.subplot", left=(50/72.27)/width)
            plt.rc("figure.subplot", right=(width-10/72.27)/width)
            plt.rc("figure.subplot", bottom=(14/72.27)/height)
            plt.rc("figure.subplot", top=(height-7/72.27)/height)
            plt.figure(ind)
            plt.gca().set_xlabel('Time [s]')
            plt.gca().set_ylabel('Concentration [$\mu$g/mm$^3$]')
            #plt.gca().yaxis.set_major_formatter(pylab.FormatStrFormatter('%e'))
            plt.title('Concentration at position %g mm' % xval)
            if plotextra:
                plt.plot(eval(extravals[1]), eval(extravals[2]), extravals[0])
            plt.plot(times, conc_in_point)
            #plt.ylim(0, maxv*1.1)
            # reference lines: saturation concentration and effect treshold
            plt.plot(times, np.ones(len(times)) * self.saturation_conc, 'k--')
            plt.plot(times, np.ones(len(times)) * self.treshold, 'b--')
            plt.show()
            plt.savefig(utils.OUTPUTDIR + os.sep
                        + 'AIconc_%03.1f_mm' % xval + const.FIGFILEEXT)
        return ind
    def plot_room_sol(self, ind):
        """
        Plot the concentration profile over the room domain and save the
        figures.

        If self.plotevery is set, one figure is written every `plotevery`
        stored timesteps; otherwise only the last timestep is plotted.
        `ind` is the matplotlib figure number to draw into.
        """
        print ('Generating fig of solution over the room domain')
        self.viewerplotcount = 0
        times = self.times(self.timesteps+1-((self.tstep - 1) % MAX_STORE_LENGTH+1),
                           self.timesteps)
        sol = self.solpart[:(self.tstep - 1) % MAX_STORE_LENGTH+1]
        minval = np.min(sol)
        maxv = np.max(sol)
        # round the axis top up to the next power of 10
        try:
            maxval = np.power(10., int(math.log10(maxv))+1)
        except ValueError:
            # maxv <= 0: log10 undefined, fall back to a fixed range
            maxval = minval + 10
        plt.ion()
        if self.plotevery:
            for time, ssol in zip(times, sol):
                if self.viewerplotcount == 0:
                    print ('plotting for time', time)
                    plt.rc("font", family="serif")
                    plt.rc("font", size=10)
                    width = 4.5  #width in inches
                    height = 1.4 #height in inches
                    plt.rc("figure.subplot", left=(50/72.27)/width)
                    plt.rc("figure.subplot", right=(width-10/72.27)/width)
                    plt.rc("figure.subplot", bottom=(14/72.27)/height)
                    plt.rc("figure.subplot", top=(height-7/72.27)/height)
                    fig = plt.figure(ind)
                    plt.gca().set_xlabel('Position [mm]')
                    plt.gca().set_ylabel('Concentration [$\mu$g/mm$^3$]')
                    plt.gca().set_ylim(minval, maxval)
                    #plt.gca().yaxis.set_major_formatter(pylab.FormatStrFormatter('%e'))
                    plt.title('Concentration in the room at t = %g s' % time)
                    plt.ioff()
                    lines = plt.plot(self.grid, ssol, 'r')
                    plt.draw()
                    try:
                        fig.canvas.flush_events()
                    except NotImplementedError:
                        pass
                    plt.ion()
                    plt.savefig(utils.OUTPUTDIR + os.sep
                                + 'AIconc_%08.1f_sec' % time + const.FIGFILEEXT)
                    #remove the line again
                    lines.pop(0).remove()
                self.viewerplotcount += 1
                self.viewerplotcount = self.viewerplotcount % self.plotevery
        else:
            #plot last
            time = times[-1]
            ssol = sol[-1]
            print ('plotting for time', time)
            plt.rc("font", family="serif")
            plt.rc("font", size=10)
            width = 4.5  #width in inches
            height = 1.4 #height in inches
            plt.rc("figure.subplot", left=(50/72.27)/width)
            plt.rc("figure.subplot", right=(width-10/72.27)/width)
            plt.rc("figure.subplot", bottom=(14/72.27)/height)
            plt.rc("figure.subplot", top=(height-7/72.27)/height)
            fig = plt.figure(ind)
            plt.gca().set_xlabel('Position [mm]')
            plt.gca().set_ylabel('Concentration [$\mu$g/mm$^3$]')
            plt.gca().set_ylim(minval, maxval)
            #plt.gca().yaxis.set_major_formatter(pylab.FormatStrFormatter('%e'))
            plt.title('Concentration in the room at t = %g s' % time)
            plt.ioff()
            lines = plt.plot(self.grid, ssol, 'r')
            plt.draw()
            try:
                fig.canvas.flush_events()
            except NotImplementedError:
                pass
            plt.ion()
            plt.savefig(utils.OUTPUTDIR + os.sep
                        + 'AIconc_%08.1f_sec' % time + const.FIGFILEEXT)
    def view_sol_mass(self, ind):
        """
        Plot the evolution of the mass at current state of the solution:
        per yarn type the AC mass and the fiber concentration at fiber
        start/middle/surface, then the upscaled bednet mass, the room mass,
        and their sum (mass-conservation check).  Figures start at number
        `ind`; returns the last figure number used.
        """
        times = self.times(self.timesteps+1-((self.tstep - 1) % MAX_STORE_LENGTH+1),
                           self.timesteps)
        fignr = ind
        plt.ion()
        for ind, ymass in enumerate(self.yarnmass):
            plt.rc("font", family="serif")
            plt.rc("font", size=10)
            width = 4.5  #width in inches
            height = 1.4 #height in inches
            plt.rc("figure.subplot", left=(50/72.27)/width)
            plt.rc("figure.subplot", right=(width-10/72.27)/width)
            plt.rc("figure.subplot", bottom=(14/72.27)/height)
            plt.rc("figure.subplot", top=(height-7/72.27)/height)
            plt.figure(fignr)
            plt.gca().set_xlabel('Time [s]')
            plt.gca().set_ylabel('Mass [$\mu$g]')
            plt.title('Mass AC in yarn type %d' % ind)
            plt.plot(times, ymass[:(self.tstep-1) % MAX_STORE_LENGTH+1])
            plt.savefig(utils.OUTPUTDIR + os.sep
                        + 'AImass_yarn_%d' % ind + const.FIGFILEEXT)
            fignr += 1
            #now show evolution of fiber conc in center
            plt.figure(fignr)
            plt.gca().set_xlabel('Time [s]')
            plt.gca().set_ylabel('Conc [$\mu$g/mm$^3$]')
            plt.title('Conc center fiber in yarn type %d' % ind)
            plt.plot(times, self.fconc_sta[ind][:(self.tstep-1) % MAX_STORE_LENGTH+1])
            plt.savefig(utils.OUTPUTDIR + os.sep
                        + 'AIconc_fibstart_%d' % ind + const.FIGFILEEXT)
            fignr += 1
            plt.figure(fignr)
            plt.gca().set_xlabel('Time [s]')
            plt.gca().set_ylabel('Conc [$\mu$g/mm$^3$]')
            plt.title('Conc middle fiber in yarn type %d' % ind)
            plt.plot(times, self.fconc_mid[ind][:(self.tstep-1) % MAX_STORE_LENGTH+1])
            plt.savefig(utils.OUTPUTDIR + os.sep
                        + 'AIconc_fibmid_%d' % ind + const.FIGFILEEXT)
            fignr += 1
            plt.figure(fignr)
            plt.gca().set_xlabel('Time [s]')
            plt.gca().set_ylabel('Conc [$\mu$g/mm$^3$]')
            plt.title('Conc surface fiber in yarn type %d' % ind)
            plt.plot(times, self.fconc_end[ind][:(self.tstep-1) % MAX_STORE_LENGTH+1])
            plt.savefig(utils.OUTPUTDIR + os.sep
                        + 'AIconc_fibend_%d' % ind + const.FIGFILEEXT)
            fignr += 1
        plt.figure(fignr)
        plt.gca().set_xlabel('Time [s]')
        plt.gca().set_ylabel('Mass [$\mu$g]')
        plt.title('Mass AC in the bednet')
        #plt.plot([0,],[28.935,], 'r*')
        plt.plot(times, self.totyarnmass[:(self.tstep-1) % MAX_STORE_LENGTH+1])
        plt.savefig(utils.OUTPUTDIR + os.sep
                    + 'AImass_bednet' + const.FIGFILEEXT)
        fignr += 1
        plt.figure(fignr)
        plt.gca().set_xlabel('Time [s]')
        plt.gca().set_ylabel('Mass [$\mu$g]')
        plt.title('Mass AC in the room')
        plt.plot(times, self.totroommass[:(self.tstep-1) % MAX_STORE_LENGTH+1])
        plt.savefig(utils.OUTPUTDIR + os.sep
                    + 'AImass_in_room' + const.FIGFILEEXT)
        fignr += 1
        #plot to check mass conservation
        plt.figure(fignr)
        plt.gca().set_xlabel('Time [s]')
        plt.gca().set_ylabel('Mass [$\mu$g]')
        plt.title('Total Mass AC')
        plt.plot(times, self.totroommass[:(self.tstep-1) % MAX_STORE_LENGTH+1] \
                        + self.totyarnmass[:(self.tstep-1) % MAX_STORE_LENGTH+1])
        plt.savefig(utils.OUTPUTDIR + os.sep
                    + 'AImass_total' + const.FIGFILEEXT)
        return fignr
    def init_room(self):
        """
        Set up the full model: room mesh, initial concentration, yarn
        submodels, the ODE solver, and the rolling arrays that track
        masses and fiber concentrations over time.
        """
        #first we create the room mesh
        self.create_mesh()
        self.initial_room()
        self.init_yarn()
        if not self.initialized:
            self.solve_ode_init()
        #storage for mass values
        self.yarnmass = [None] * len(self.yarn_models)
        self.fconc_sta = [None] * len(self.yarn_models)
        self.fconc_mid = [None] * len(self.yarn_models)
        self.fconc_end = [None] * len(self.yarn_models)
        for ind in range(len(self.yarn_models)):
            self.yarnmass[ind] = np.empty(MAX_STORE_LENGTH, float)
            self.fconc_sta[ind] = np.empty(MAX_STORE_LENGTH, float)
            self.fconc_mid[ind] = np.empty(MAX_STORE_LENGTH, float)
            self.fconc_end[ind] = np.empty(MAX_STORE_LENGTH, float)
        self.totyarnmass = np.empty(MAX_STORE_LENGTH, float)
        self.totroommass = np.empty(MAX_STORE_LENGTH, float)
        #compute the initial masses
        self.__store_masses(0)
    def __store_masses(self, ind):
        """
        Compute the current masses/concentrations via calc_mass() and
        store them at slot ind % MAX_STORE_LENGTH of the rolling history
        arrays (per-yarn mass, fiber concentrations, total bednet mass,
        total room mass).
        """
        (yarnmass, yarnmassoverlap, totyarnmass, totyarnmassoverlap,
            roommass, roomoverlapmass, fconc_sta, fconc_mid, fconc_end) \
            = self.calc_mass()
        #compute initial values
        for mind, val in enumerate(yarnmass):
            self.yarnmass[mind][ind%MAX_STORE_LENGTH] = val
            self.fconc_sta[mind][ind%MAX_STORE_LENGTH] = fconc_sta[mind]
            self.fconc_mid[mind][ind%MAX_STORE_LENGTH] = fconc_mid[mind]
            self.fconc_end[mind][ind%MAX_STORE_LENGTH] = fconc_end[mind]
        # overlap mass is excluded from the bednet total by design
        self.totyarnmass[ind%MAX_STORE_LENGTH] = np.sum(totyarnmass) #+ np.sum(totyarnmassoverlap)
        self.totroommass[ind%MAX_STORE_LENGTH] = roommass + roomoverlapmass
    def write_info(self):
        """
        Write generic info on the bednet to stdout: volume/porosity,
        surface mass, initial masses in net and room, yarn counts, and
        the per-yarn / per-fiber initial masses.
        """
        print ("\n\nINFO ON BEDNET")
        print ("**************")
        print ("volume bednet = %g m^3, which means calculated porosity"\
                " %f mm^3 fabric/mm^3" \
                % (self.volbednet/1e9, self.fabporosity))
        print ("surface bednet = %g m^2, which means calculated surface"\
                "mass %f gram/m^2" \
                % (self.surfbednet/1e6, (self.totyarnmass[0]/1e6)/(self.surfbednet/1e6)))
        print (" initial mass in bednet", self.totyarnmass[0]/1e6, "gram, room",\
                self.totroommass[0]/1e6, "gram")
        print (" number of yarns in fabric", "vertical", self.nvertyarns, \
                "horizontal", self.nhoryarns)
        print (" masses in the yarns ")
        for mind, val in enumerate(self.yarn_mass):
            print ("Yarn %d has initial mass AC %f" % (mind, val))
            for ind, models in enumerate(self.yarn_models[mind].fiber_models):
                print ('yarn cell', ind)
                for type, model in enumerate(models):
                    print ("fibertype %d: fibermass %f ; " % (type,
                            self.yarn_models[mind].fiber_mass[ind, type]))
                print (' ')
            print ("Blend in yarn is", self.yarn_models[mind].blend)
        print ("**************\n\n")
        #raw_input("Press key to start")
    def dump_sol(self, index):
        """ Dump solpart to file with extension index.

        Writes an .npz with the stored solution window plus mass/conc
        histories, and several plain-text traces of the room concentration
        (full field, leftmost cell, middle cell, rightmost cell).
        """
        times = self.times(index*MAX_STORE_LENGTH, self.tstep-1)
        # NOTE(review): txt output uses MAX_PLOT_LENGTH here while the npz
        # uses MAX_STORE_LENGTH above - looks inconsistent, confirm intended
        timestxt = self.times(index*MAX_PLOT_LENGTH,self.tstep-1)
        newyarnmass = [0] * len(self.yarnmass)
        newfconc_sta = [0] * len(self.fconc_sta)
        newfconc_mid = [0] * len(self.fconc_mid)
        newfconc_end = [0] * len(self.fconc_end)
        nr = len(times)
        # truncate the rolling buffers to the part actually filled
        for ind in range(len(self.yarnmass)):
            newyarnmass[ind] = self.yarnmass[ind][:nr]
            newfconc_sta[ind] = self.fconc_sta[ind][:nr]
            newfconc_mid[ind] = self.fconc_mid[ind][:nr]
            newfconc_end[ind] = self.fconc_end[ind][:nr]
        np.savez(utils.OUTPUTDIR + os.sep + 'bednetroom1d_solpart_%05d.npz' % index,
                 times=times,
                 sol = self.solpart[:len(times)],
                 tresh_sat = [self.saturation_conc, self.treshold],
                 grid_cellcenters = self.grid,
                 yarnmass = newyarnmass,
                 totyarnmass = self.totyarnmass[:len(times)],
                 totroommass = self.totroommass[:len(times)],
                 fconc_sta = newfconc_sta,
                 fconc_mid = newfconc_mid,
                 fconc_end = newfconc_end
                 )
        #roomconc over time to textfile
        filedata= open(utils.OUTPUTDIR + os.sep + "roomconc" + ".txt",'w')
        filedata.write("conc in the room is %s" %(self.solpart[:len(timestxt)]) )
        filedata.close()
        #roomconc at the outermost left position over time to textfile
        self.roomconcleft = np.empty((len(timestxt),2),float)
        self.roomconcleft[:,0] = timestxt
        self.roomconcleft[:,1] = self.solpart[:len(timestxt),0]
        self.roomconcmiddle = np.empty((len(timestxt),2),float)
        self.roomconcmiddle[:,0] = timestxt
        self.roomconcmiddle[:,1] = self.solpart[:len(timestxt),int(self.nr_cell/2)]
        self.roomconcright = np.empty((len(timestxt),2),float)
        self.roomconcright[:,0] = timestxt
        self.roomconcright[:,1] = self.solpart[:len(timestxt),-1]
        filedata= open(utils.OUTPUTDIR + os.sep + "roomconcLEFT" + ".txt",'w')
        for i in range(0,len(self.roomconcleft)):
            filedata.write("%.5f %.5f\n" % (self.roomconcleft[i,0],self.roomconcleft[i,1]))
        #filedata.write("conc at outermost LEFT in the room is %s" %(self.solpart[:len(times),0]) )
        filedata.close()
        #roomconc at the middle of the room over time to textfile
        filedata= open(utils.OUTPUTDIR + os.sep + "roomconcMIDDLE" + ".txt",'w')
        for i in range(0,len(self.roomconcmiddle)):
            filedata.write("%.5f %.5f\n" % (self.roomconcmiddle[i,0],self.roomconcmiddle[i,1]))
        #filedata.write("conc at outermost LEFT in the room is %s" %(self.solpart[:len(times),0]) )
        filedata.close()
        #roomconc at the outermost right position over time to textfile
        filedata= open(utils.OUTPUTDIR + os.sep + "roomconcRIGHT" + ".txt",'w')
        for i in range(0,len(self.roomconcright)):
            filedata.write("%.5f %.5f\n" % (self.roomconcright[i,0],self.roomconcright[i,1]))
        filedata.close()
    def run(self, wait=False):
        """
        Run the full bednet simulation: initialize, march all timesteps,
        dump the solution and write the figures/text outputs.

        If wait is True, block on user input at the end so the interactive
        figures stay visible.
        """
        self.init_room()
        self.write_info()
        t = self.times(self.tstep+1)
        # delta_t/10 slack avoids missing the last step to float roundoff
        while t <= self.time_period+self.delta_t/10:
            self.solve_timestep(t)
            t = self.times(self.tstep+1)
        #we set tstep one after last value
        self.tstep += 1
        if INSPECT_MEM:
            # interactive memory/garbage inspection for debugging
            import gc
            notr =gc.collect()
            print ('unreachable objects:')
            print (notr)
            notr =gc.collect()
            print ('unreachable objects:')
            print (notr)
            raw_input('press key')
            print ("Remaining Garbage")
            print (gc.garbage)
            raw_input('press key')
        #save solution to output file
        self.dump_sol(self.solstoreind)
        filedata= open(utils.OUTPUTDIR + os.sep + "yarnconccenter" + ".txt",'w')
        for i in range(len(self.yarnconc_center)):
            filedata.write("%.8f %.8f\n" % (self.yarnconc_center[i,0],self.yarnconc_center[i,1]))
        filedata.close()
        filedata= open(utils.OUTPUTDIR + os.sep + "yarnconcsurface" + ".txt",'w')
        for i in range(len(self.yarnconc_surface)):
            filedata.write("%.8f %.8f\n" % (self.yarnconc_surface[i,0],self.yarnconc_surface[i,1]))
        filedata.close()
        fignr = self.view_sol()
        fignr = self.view_sol_mass(fignr+1)
        self.plot_room_sol(fignr+1)
        # only the first yarn model's fiber solutions are plotted (break)
        for ymod in self.yarn_models:
            ymod.view_sol([ymod.step_old_time], [ymod.step_old_sol])
            for ind_cell, models in enumerate(ymod.fiber_models):
                for ftype, model in enumerate(models):
                    model.view_last_sol(" cell %d, type %d" % (ind_cell, ftype))
                break
        if wait:
            raw_input("Finished bednet run")
# -*- coding: utf-8 -*-
'''
Copyright (C) 2015-2020 enen92,Zag
This file is part of script.screensaver.cocktail
SPDX-License-Identifier: GPL-2.0-only
See LICENSE for more information.
'''
import xbmc
import xbmcaddon
import xbmcvfs
import thecocktaildb
import os
# handle to this screensaver add-on
addon = xbmcaddon.Addon(id='script.screensaver.cocktail')
# install path of the add-on
addon_path = addon.getAddonInfo('path')
# per-user writable profile folder of the add-on
addon_userdata = xbmc.translatePath(addon.getAddonInfo('profile')).decode('utf-8')
addon_name = addon.getAddonInfo('name')
# client for thecocktaildb.com web API
cocktailsdb_api = thecocktaildb.Api('1352')
# folder inside the profile where favourite drinks are stored
favourite_drinks_folder = os.path.join(addon_userdata,'favourites')
# make sure both profile folders exist before first use
if not os.path.exists(addon_userdata): xbmcvfs.mkdir(addon_userdata)
if not os.path.exists(favourite_drinks_folder): xbmcvfs.mkdir(favourite_drinks_folder)
# action ids handled by the add-on's windows
# NOTE(review): assumed to match Kodi's built-in key action codes - confirm
ACTION_CONTEXT_MENU = 117
ACTION_LEFT = 1
ACTION_RIGHT = 2
ACTION_ESCAPE = 10
ACTION_RETURN = 92
ACTION_ENTER = 7
def removeNonAscii(s):
    """Strip every non-ASCII character (code point >= 128) from *s*."""
    return "".join(ch for ch in s if ord(ch) < 128)
def translate(text):
    """Return the UTF-8 encoded localized string for the given string id."""
    localized = addon.getLocalizedString(text)
    return localized.encode('utf-8')
| enen92/script.screensaver.cocktail | resources/lib/common_cocktail.py | Python | gpl-2.0 | 1,041 |
__author__ = 'tonycastronova'
import cPickle as pickle
import uuid
import stdlib
import utilities.spatial
from emitLogging import elog
from sprint import *
from utilities import io
def create_variable(variable_name_cv):
    """
    Create a stdlib.Variable for the given controlled-vocabulary name.

    The variable definition is looked up in the pickled controlled
    vocabulary table (dat/var_cv.dat); if the name is not present the
    definition is set to 'unknown'.
    """
    sPrint('Loading variable: '+variable_name_cv, MessageType.DEBUG)
    var_path = io.getRelativeToAppData('dat/var_cv.dat')
    # context manager closes the file handle (previously leaked)
    with open(var_path, 'rb') as f:
        var = pickle.load(f)
    sPrint('Loaded var_cv', MessageType.DEBUG)
    V = stdlib.Variable()
    V.VariableNameCV(value=variable_name_cv)
    if variable_name_cv in var:
        sPrint('var name in var', MessageType.DEBUG)
        V.VariableDefinition(value=var[variable_name_cv].strip())
    else:
        sPrint('var name not in var', MessageType.DEBUG)
        # name not in the controlled vocabulary
        V.VariableDefinition(value='unknown')
    return V
def create_unit(unit_name):
    """
    Create a stdlib.Unit for the given unit name.

    Type and abbreviation are looked up in the pickled controlled
    vocabulary table (dat/units_cv.dat); unknown names get 'unknown'
    for both fields.
    """
    unit_path = io.getRelativeToAppData('dat/units_cv.dat')
    # context manager closes the file handle (previously leaked)
    with open(unit_path, 'rb') as f:
        var = pickle.load(f)
    U = stdlib.Unit()
    U.UnitName(value=unit_name)
    if unit_name in var:
        # table rows are (unit type, abbreviation)
        U.UnitTypeCV(value=var[unit_name][0].strip())
        U.UnitAbbreviation(value=var[unit_name][1].strip())
    else:
        # name not in the controlled vocabulary
        U.UnitTypeCV(value='unknown')
        U.UnitAbbreviation(value='unknown')
    return U
def build_exchange_items_from_config(params):
    """
    Build input and output ExchangeItem objects from a parsed *.mdl
    configuration dictionary.

    Returns a dict {ExchangeItemType.INPUT: [...], ExchangeItemType.OUTPUT: [...]}.
    Geometries come either from a shapefile path or from an inline WKT
    string in the 'elementset' entry.
    """
    # get all inputs and outputs
    iitems = params['input'] if 'input' in params else []
    oitems = params['output'] if 'output' in params else []
    eitems = iitems + oitems
    items = {stdlib.ExchangeItemType.INPUT:[],stdlib.ExchangeItemType.OUTPUT:[]}
    # loop through each input/output and create an exchange item
    for io in eitems:
        variable = None
        unit = None
        srs = None
        geoms = []
        # get all input and output exchange items as a list
        iotype = stdlib.ExchangeItemType.OUTPUT if io['type'].upper() == stdlib.ExchangeItemType.OUTPUT else stdlib.ExchangeItemType.INPUT
        for key, value in io.iteritems():
            sPrint(key, MessageType.DEBUG)
            if key == 'variable_name_cv':
                sPrint('Creating Variable', MessageType.DEBUG)
                variable = create_variable(value)
                sPrint('Done Creating Variable', MessageType.DEBUG)
                if 'variable_definition' in io.keys():
                    variable.VariableDefinition(io['variable_definition'])
            elif key == 'unit_type_cv': unit = create_unit(value)
            elif key == 'elementset' :
                # check if the value is a path
                if os.path.dirname(value ) != '':
                    gen_path = os.path.abspath(os.path.join(params['basedir'],value))
                    if not os.path.isfile(gen_path):
                        # get filepath relative to *.mdl
                        elog.critical('Could not find file at path %s, generated from relative path %s'%(gen_path, value))
                        raise Exception('Could not find file at path %s, generated from relative path %s'%(gen_path, value))
                    # parse the geometry from the shapefile
                    geoms, srs = utilities.spatial.read_shapefile(gen_path)
                    # NOTE(review): AutoIdentifyEPSG() returns an OGRERR code,
                    # not a spatial reference - confirm this assignment is intended
                    srs = srs.AutoIdentifyEPSG()
                # otherwise it must be a wkt
                else:
                    try:
                        # get the wkt text string
                        value = value.strip('\'').strip('"')
                        # parse the wkt string into a stdlib.Geometry object
                        geom = utilities.geometry.fromWKT(value)
                        for g in geom:
                            geoms.append(g)
                    except:
                        elog.warning('Could not load component geometry from *.mdl file')
                        # this is OK. Just set the geoms to [] and assume that they will be populated during initialize.
                        geom = []
        # BUGFIX: the key was previously checked as the misspelled
        # 'espg_code' while the value was read from 'epsg_code', so the
        # EPSG setting was either ignored or raised a KeyError.
        if 'epsg_code' in io:
            srs = utilities.spatial.get_srs_from_epsg(io['epsg_code'])
        # generate a unique uuid for this exchange item
        id = uuid.uuid4().hex
        # create exchange item
        ei = stdlib.ExchangeItem(id,
                                name=variable.VariableNameCV(),
                                desc=variable.VariableDefinition(),
                                unit= unit,
                                variable=variable,
                                # srs_epsg=srs, #todo: this is causing problems
                                type=iotype)
        # add geometry to exchange item (NEW)
        ei.addGeometries2(geoms)
        # save exchange items based on type
        items[ei.type()].append(ei)
    return items
| Castronova/EMIT | utilities/mdl.py | Python | gpl-2.0 | 5,317 |
################################################################################
################################### Class ######################################
################################################################################
from brain import BrainException,Brain
# Raised for lookup-table specific failures: empty training set or unknown input.
class LookUpTableBrainException(BrainException): pass
class LookUpTableBrain(Brain):
    """
    Brain implementation backed by a plain lookup table.

    Attributes:
        _table - mapping from input data (requests) to stored answers
    """
    def __init__(self):
        self._table = {}

    def configure(self, **kargs):
        """
        No-op: a lookup table needs no configuration.

        Input:
            Nothing
        Returns:
            Nothing
        """
        pass

    def learn(self, dataset):
        """
        Memorize the whole dataset as the lookup table.

        Input:
            dataset - Dataset to train
        Returns:
            Nothing
        """
        if dataset == {}:
            raise LookUpTableBrainException("Dataset for learning is empty.")
        self._table = dataset

    def think(self, data):
        """
        Look up the stored answer for the given request.

        Input:
            data - Input data (request)
        Returns:
            output data (answer)
        """
        if data not in self._table:
            raise LookUpTableBrainException("Don't know.")
        return self._table[data]
from __future__ import division, print_function, absolute_import

import csv
import getopt
import os
import sys

import numpy as np
from sklearn import metrics, cross_validation
# import pandas
import tensorflow as tf
import tflearn
from tflearn.layers.core import input_data, dropout, fully_connected
from tflearn.layers.conv import conv_1d
from tflearn.layers.conv import global_max_pool
from tflearn.layers.merge_ops import merge
from tflearn.layers.estimator import regression
from tflearn.data_utils import to_categorical, pad_sequences
# Default hyper-parameters; all of them can be overridden on the command line
# (see the getopt loop below).
data_dir = "text" # directory contains text documents
model_size = 2000 # length of output vectors
nb_epochs = 10 # number of training epochs
embedding_size = 300 # word-embedding dimensionality (overridden with --es)
label_file = "enwikilabel" # one quality label per line, aligned with text/<id>
MAX_FILE_ID = 50000 # highest document id to probe in data_dir
cnn_size = 128 # number of convolution filters per branch
dropout_ratio = 0.5 # keep/drop ratio passed to the dropout layer
dynamic = True # NOTE(review): parsed from --dynamic but never used below
activation_function = "relu" # activation for the conv branches
# Command-line parsing.
# NOTE(review): the short-options string "hd:model_size:epoch:lb:es:" is not a
# valid multi-character spec (getopt treats it as individual characters); the
# long options actually in effect come from the list argument, and the
# "-model_size"-style single-dash variants tested below can never be produced
# by getopt — confirm and clean up.
try:
    opts, args = getopt.getopt(sys.argv[1:],"hd:model_size:epoch:lb:es:",["model_size=","epoch=","es=","cnn_size=","dropout=","dynamic=","activation="])
except getopt.GetoptError as e:
    print ("Error of parameters")
    print (e)
    print (sys.argv[0] + " -h for help")
    sys.exit(2)
for opt, arg in opts:
    if opt == '-h':
        # print usage and stop
        print ('LSTM for Wikipedia classification')
        print (sys.argv[0] + " -h for help")
        sys.exit ()
    elif opt in ("-model_size","--model_size"):
        model_size = int (arg)
    elif opt in ("-epoch","--epoch"):
        nb_epochs = int (arg)
    elif opt in ["-es","--es"]:
        embedding_size = int (arg)
    elif opt in ["--cnn_size"]:
        cnn_size = int (arg)
    elif opt in ["--dropout"]:
        dropout_ratio = float (arg)
    elif opt in ["--dynamic"]:
        # NOTE(review): bool("false") is True — any non-empty value enables this
        dynamic = bool (arg)
    elif opt in ["--activation"]:
        activation_function = arg
### Training data
# Wikipedia article quality classes, ordered from worst to best; the list
# index is used as the integer class label.
qualities = ["stub","start","c","b","ga","fa"]
print('Read labels')
def load_label (label_file):
    """Read one label per line from *label_file* and return them as a list."""
    with open(label_file) as handle:
        return handle.read().splitlines()
Y = load_label(label_file)
# Convert label names to integer class indices in place.
for i in range(len(Y)):
    Y[i] = qualities.index(Y[i])
print('Read content')
def load_content (file_name):
    """Return the entire text of *file_name* as a single string."""
    with open(file_name) as handle:
        return handle.read()
# Collect the documents that actually exist on disk (ids are 1-based).
# NOTE(review): this assumes the lines of label_file align with the existing
# files taken in ascending id order — verify, otherwise X and Y are misaligned.
X = []
for i in range (MAX_FILE_ID):
    file_name = data_dir + '/' + str(i + 1)
    if os.path.isfile (file_name):
        X.append (load_content(file_name))
# Fixed random_state keeps the 80/20 split reproducible between runs.
X_train, X_test, Y_train, Y_test = cross_validation.train_test_split(X, Y,
    test_size=0.2, random_state=2017)
# One-hot encode the integer class labels for the softmax output layer.
Y_train = to_categorical (Y_train, nb_classes = len (qualities))
Y_test = to_categorical (Y_test, nb_classes = len (qualities))
### Process vocabulary
print('Process vocabulary')

# Build the word-id vocabulary on the TRAINING data only.
vocab_processor = tflearn.data_utils.VocabularyProcessor(max_document_length = model_size, min_frequency = 0)
X_train = np.array(list(vocab_processor.fit_transform(X_train)))
# BUG FIX: the test set must be mapped with the vocabulary learned from the
# training set. The original called fit_transform() here, which rebuilds the
# vocabulary from the test documents, so train and test word ids did not
# match (and test information leaked into the vocabulary).
X_test = np.array(list(vocab_processor.transform(X_test)))

# Pad/truncate every document to exactly model_size ids (0 = padding).
X_train = pad_sequences(X_train, maxlen=model_size, value=0.)
X_test = pad_sequences(X_test, maxlen=model_size, value=0.)

n_words = len(vocab_processor.vocabulary_)
print('Total words: %d' % n_words)

# pickle.dump (X_train, open ("xtrain.p", b))
# pickle.dump (X_test, open ("xtest.p", b))
# X_train = pickle.load (open ("xtrain.p", rb))
# X_test = pickle.load (open ("xtest.p", rb))
### Models
# Building convolutional network (Kim-style text CNN: three parallel 1-D
# convolutions with window sizes 3/4/5, concatenated, max-pooled over time,
# then a dropout + softmax classifier over the quality classes).
print ('Build CNN')
network = input_data(shape=[None, model_size], name='input')
network = tflearn.embedding(network, input_dim=n_words, output_dim=cnn_size)
branch1 = conv_1d(network, cnn_size, 3, padding='valid', activation=activation_function, regularizer="L2")
branch2 = conv_1d(network, cnn_size, 4, padding='valid', activation=activation_function, regularizer="L2")
branch3 = conv_1d(network, cnn_size, 5, padding='valid', activation=activation_function, regularizer="L2")
network = merge([branch1, branch2, branch3], mode='concat', axis=1)
network = tf.expand_dims(network, 2)
# global_max_pool is imported at the top of the file (it was previously used
# without an import, which raised NameError at this point).
network = global_max_pool(network)
network = dropout(network, dropout_ratio)
network = fully_connected(network, len(qualities), activation='softmax')
network = regression(network, optimizer='adam', learning_rate=0.001,
                     loss='categorical_crossentropy', name='target')
# Training
print ('Training')
model = tflearn.DNN(network, tensorboard_verbose=0)
print ('Testing')
# BUG FIX: the original called model.fit(trainX, trainY, ..., (testX, testY))
# but those names were never defined anywhere in this script; the prepared
# arrays are X_train/Y_train and X_test/Y_test.
model.fit(X_train, Y_train, n_epoch = nb_epochs, shuffle=True, validation_set=(X_test, Y_test), show_metric=True, batch_size=32)
'''
Created on Jul 9, 2018
@author: lqp
'''
import json
import os
import re
from util import TrackUtil
from util.TrackUtil import current_milli_time
from util.TrackUtil import mongoUri
from util.TrackUtil import todayMillis
from pymongo.mongo_client import MongoClient
import pymongo
from xlrd.book import colname
class MongoBatch():
    """Copies collections from the storm mongo instance into the batch instance.

    Holds handles to the three databases named in the configuration; only
    storm_db and batch_db are used by storm2Batch.
    """
    def __init__(self, trackMongoConf, stormMongoConf, batchMongoConf):
        # One client per configured instance; keep the same attribute order
        # and database names as before.
        self.stormDb = MongoClient(mongoUri(stormMongoConf)).storm_db
        self.trackDb = MongoClient(mongoUri(trackMongoConf)).track_1v1fudao
        self.batchDb = MongoClient(mongoUri(batchMongoConf)).batch_db

    def storm2Batch(self, colName):
        """Upsert every document of *colName* from storm_db into batch_db.

        Documents are keyed on their 'ts' field, so re-running is idempotent
        per timestamp (the True flag makes update() an upsert).
        """
        for doc in self.stormDb[colName].find():
            print('mv: ' + str(doc))
            self.batchDb[colName].update({'ts': doc['ts']}, doc, True)
if __name__ == '__main__':
    # Change to the project root so the relative conf path below resolves.
    os.chdir('../../')
    with open('conf/server.json') as f:
        content = f.read()
    conf = json.loads(content)
    # Wire up the three mongo instances from the config and copy the
    # 'alert_batch' collection from storm_db into batch_db.
    batch = MongoBatch(conf['mongoTrack'], conf['mongoStorm'], conf['mongoBatch'])
    batch.storm2Batch('alert_batch')
    pass
#! python
import sys
# argv[1]: input alignment file, argv[2]: output file for identity scores.
reader = open(sys.argv[1], 'r')
writer = open(sys.argv[2], 'w')
def calcIdentity(stringa, stringb):
    """Return the fraction of aligned positions at which the two strings match.

    Returns 0 when the lengths differ (the sequences cannot be compared
    position-by-position) and 0.0 for two empty strings — the previous
    version raised ZeroDivisionError in that case and also kept an unused
    duplicate counter.
    """
    if len(stringa) != len(stringb):
        return 0
    if not stringa:
        # Guard the division below: define the identity of two empty strings as 0.0.
        return 0.0
    matches = sum(1 for a, b in zip(stringa, stringb) if a == b)
    return float(matches) / len(stringa)
# Stream the input file and compare each sequence against a reference.
# Format (presumably — TODO confirm against the data): blocks are separated by
# ">" header lines; every other line looks like "name: sequence". The first
# such line after a header becomes the reference `b`; each following line is
# scored against it and "score name" is written to the output.
b = ""
for line in reader:
    if not line[:1] == ">":
        if not b == "":
            if len(line.split(': ')) < 2:
                # malformed line — echo it for inspection
                print line
            s = calcIdentity(b,line.split(': ')[1])
            # [:-1] strips the trailing colon/char from the name field
            writer.write(str(s) + " " + line.split(': ')[0][:-1] + "\n")
        else:
            b = line.split(': ')[1]
    else:
        # new block header: forget the previous reference
        b = ""
reader.close()
writer.close()
import logging
from ConfigParser import ConfigParser, NoOptionError
import settings
from common.functional import LazyObject
# Module-level logger used for preference load/cast warnings.
logger = logging.getLogger('user_prefs')
class types(object):
    """Names of the ConfigParser typed getter methods, used as type tags
    in UserPrefs.defaults (e.g. 'getboolean' parses the stored string)."""
    str = 'get'
    bool = 'getboolean'
    int = 'getint'
    float = 'getfloat'
class UserPrefs(object):
    """Typed user preferences persisted through a ConfigParser file.

    Each preference is declared in ``defaults`` as
    ``name: (default_value, getter_name)``, where the getter name is one of
    the ConfigParser typed accessors (see the ``types`` helper class).
    Attribute reads come from the config file, falling back to the default
    when the option is missing or cannot be cast; attribute writes go into
    the in-memory config (call ``save()`` to persist them).
    """
    defaults = dict(
        logDateTimeFormat = ("at %H:%M", types.str),
        showWorkTill = (True, types.bool),
        logEditCommand = ('open -a TextEdit "%s"', types.str),
        showHelpMessageOnStart = (True, types.bool),
        dateFormat = ('%m-%d-%Y %H:%M', types.str),
        projectSeparator = ('::', types.str),
        timeFormat = ('%H:%M', types.str),
        workEndTime = ('06:00', types.str),
        workDayLength = (3600 * 8, types.int),
        timerInterval = (1, types.int),
        showDateTime = (False, types.bool),
        selectedProject = ('Default', types.str),
        soundOnNotification = (False, types.bool),
        notificationRepeatTime = (10, types.int),
        notificationTime = (40, types.int),
        showNotification = (False, types.bool),
        startPlaceholder = ("__start__", types.str),
    )
    root_section = 'root'

    def __init__(self):
        # Load the existing preferences file (if any) and make sure the
        # single section we use exists.
        self.config = ConfigParser()
        self.config.read(settings.USER_PREFS_PATH)
        if not self.config.has_section(self.root_section):
            self.config.add_section(self.root_section)

    def __getattr__(self, name):
        """Return the typed value of preference *name*.

        Raises ValueError for undeclared names; falls back to the declared
        default when the option is absent or fails to cast.
        """
        if name not in self.defaults:
            raise ValueError("No such preference '{0}'".format(name))
        default, type_name = self.defaults[name]
        try:
            try:
                value = getattr(self.config, type_name)(
                    self.root_section, name)
            except (TypeError, ValueError):
                # BUG FIX: the original formatted `value` into this message,
                # but `value` is unbound when the getter itself raises, so a
                # simple cast failure escalated into a NameError.
                logger.warn("Unable to cast type for '{0}', using default value".
                            format(name))
                value = default
        except NoOptionError:
            value = default
        return value

    def __setattr__(self, name, value):
        # Declared preferences are written into the config object; anything
        # else (e.g. self.config in __init__) is stored normally.
        if name in self.defaults:
            self.config.set(self.root_section, name, unicode(value))
        else:
            super(UserPrefs, self).__setattr__(name, value)

    def save(self):
        """Persist the current preferences to settings.USER_PREFS_PATH."""
        with open(settings.USER_PREFS_PATH, 'wb') as configfile:
            self.config.write(configfile)
# Module-level singleton; LazyObject defers construction (and the config file
# read in __init__) until the first attribute access.
userPrefs = LazyObject(UserPrefs)
#! /usr/bin/python
# -*- coding: utf-8 -*-
"""
Copyright (C) 2013 Sebastien GALLET <[email protected]>
This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
Install telldus-core : http://developer.telldus.com/wiki/TellStickInstallationUbuntu
"""
import sys,os
# Make the agocontrol client libraries importable whether the add-on runs from
# the installed location or from a source checkout; failures are ignored so a
# system-wide install of agoclient still works.
try :
    sys.path.insert(0, os.path.abspath('/opt/agocontrol/bin/'))
    sys.path.insert(0, os.path.abspath('../../../agocontrol/shared'))
except:
    pass
import syslog
import time
from datetime import datetime
import threading
from threading import Timer
import agoclient
from ctypes import c_int, c_ubyte, c_void_p, POINTER, string_at, c_char_p
from ctypes.util import find_library
from ctypes import cdll, CFUNCTYPE
import traceback
def log_exception(exc):
    """Write each non-empty line of a (multi-line) traceback string to syslog."""
    for line in exc.split('\n'):
        if line:
            syslog.syslog(syslog.LOG_ERR, line)
# Constants mirrored from telldus-core's telldus-core.h: method bitmask flags,
# sensor value types, error codes, device types and change notifications.
# Device methods
TELLSTICK_TURNON = 1
TELLSTICK_TURNOFF = 2
TELLSTICK_BELL = 4
TELLSTICK_TOGGLE = 8
TELLSTICK_DIM = 16
TELLSTICK_LEARN = 32
TELLSTICK_EXECUTE = 64
TELLSTICK_UP = 128
TELLSTICK_DOWN = 256
TELLSTICK_STOP = 512
#Sensor value types
TELLSTICK_TEMPERATURE = 1
TELLSTICK_HUMIDITY = 2
#Error codes
TELLSTICK_SUCCESS = 0
TELLSTICK_ERROR_NOT_FOUND = -1
TELLSTICK_ERROR_PERMISSION_DENIED = -2
TELLSTICK_ERROR_DEVICE_NOT_FOUND = -3
TELLSTICK_ERROR_METHOD_NOT_SUPPORTED = -4
TELLSTICK_ERROR_COMMUNICATION = -5
TELLSTICK_ERROR_CONNECTING_SERVICE = -6
TELLSTICK_ERROR_UNKNOWN_RESPONSE = -7
TELLSTICK_ERROR_UNKNOWN = -99
#Device typedef
TELLSTICK_TYPE_DEVICE = 1
TELLSTICK_TYPE_GROUP = 2
TELLSTICK_TYPE_SCENE = 3
#Device changes
TELLSTICK_DEVICE_ADDED = 1
TELLSTICK_DEVICE_CHANGED = 2
TELLSTICK_DEVICE_REMOVED = 3
TELLSTICK_DEVICE_STATE_CHANGED = 4
#Change types
TELLSTICK_CHANGE_NAME = 1
TELLSTICK_CHANGE_PROTOCOL = 2
TELLSTICK_CHANGE_MODEL = 3
TELLSTICK_CHANGE_METHOD = 4
# deviceId -> pending debounce Timer (see device_callback)
timers = {} #timerlist
def sensor_callback(protocol, model, id, dataType, value, timestamp, callbackId, context):
    """telldus-core sensor event callback: print the received reading.

    value arrives as a C string; dataType selects temperature vs humidity.
    """
    print "Sensor:", string_at(protocol), string_at(model), "id:", id
    if(dataType == TELLSTICK_TEMPERATURE):
        print "Temperature:", string_at(value), "C,", datetime.fromtimestamp(timestamp)
    elif(dataType == TELLSTICK_HUMIDITY):
        print "Humidity:", string_at(value), "%,", datetime.fromtimestamp(timestamp)
    print ""
# NOTE(review): appears unused anywhere in this module — candidate for removal.
def nothing() :
    print "nothing called"
def device_callback(deviceId, method, value, callbackId, context):
global timers
print "callback!"
print method
if (deviceId == 1):
# is turning on deviceId 1 here, so just return if events for that device are picked up
return
t = 0
print "Received event for device %d" % (deviceId,)
if (deviceId in timers):
# a timer already exists for this device, it might be running so interrupt it
# Many devices (for example motion detectors) resends their messages many times to ensure that they
# are received correctly. In this example, we don't want to run the turnOn/turnOff methods every time, instead we
# start a timer, and run the method when the timer is finished. For every incoming event on this device, the timer
# is restarted.
t = timers[deviceId]
t.cancel()
if (method == TELLSTICK_DIM):
print int(float(string_at(value))/2.55)+1
t = Timer(delay_rf/1000.0, client.emitEvent,[lib.make_device_id(deviceId), "event.device.statechanged", int(float(string_at(value))/2.55)+1, ""])
elif (method == TELLSTICK_TURNON):
t = Timer(delay_rf/1000.0, client.emitEvent,[lib.make_device_id(deviceId), "event.device.statechanged", "255", ""])
elif (method == TELLSTICK_TURNOFF):
t = Timer(delay_rf/1000.0, client.emitEvent,[lib.make_device_id(deviceId), "event.device.statechanged", "0", ""])
else :
syslog.syslog(syslog.LOG_ERR, 'Unknown command received for %s:' % deviceId)
syslog.syslog(syslog.LOG_ERR, 'method = %s' % method)
syslog.syslog(syslog.LOG_ERR, 'value = %s' % value)
syslog.syslog(syslog.LOG_ERR, 'callbackId = %s' % callbackId)
syslog.syslog(syslog.LOG_ERR, 'context = %s' % context)
t.start()
timers[deviceId] = t #put timer in list, to allow later cancellation
#function to be called when device event occurs, even for unregistered devices
def raw_callback(data, controllerId, callbackId, context):
    """Print the raw protocol data of any incoming telldus frame."""
    print string_at(data)
    print "callback!"
# ctypes prototypes matching the telldus-core callback signatures; instances
# of these must stay referenced for as long as the callbacks are registered.
SENSORFUNC = CFUNCTYPE(None, POINTER(c_ubyte), POINTER(c_ubyte), c_int, c_int, POINTER(c_ubyte), c_int, c_int, c_void_p)
DEVICEFUNC = CFUNCTYPE(None, c_int, c_int, POINTER(c_ubyte), c_int, c_void_p)
RAWFUNC = CFUNCTYPE(None, POINTER(c_ubyte), c_int, c_int, c_void_p)
class TelldusException(Exception):
    """
    Exception raised for failures of the telldus-core library.
    """

    def __init__(self, value):
        '''
        Remember the error description.
        '''
        Exception.__init__(self)
        self.value = value

    def __str__(self):
        '''
        Render the stored description.
        '''
        return repr(self.value)
class Telldusd:
    """
    Interface to the telldusd daemon. It encapsulates ALL the calls to
    the telldus daemon.
    """
    def __init__(self):
        '''
        Load and initialize the telldus-core shared library.
        Raises TelldusException when the library is missing or fails to init.
        '''
        self._tdlib = None
        self._device_event_cb = None
        self._device_event_cb_id = None
        self._sensor_event_cb = None
        self._sensor_event_cb_id = None
        self._device_change_event_cb = None
        self._device_change_event_cb_id = None
        ret = find_library("telldus-core")
        if ret != None:
            try:
                self._tdlib = cdll.LoadLibrary(ret)
            except:
                raise TelldusException("Could not load the telldus-core library : %s" % (traceback.format_exc()))
        else:
            raise TelldusException("Could not find the telldus-core library. Check if it is installed properly : %s" % (traceback.format_exc()))
        try:
            self._tdlib.tdInit()
        except:
            raise TelldusException("Could not initialize telldus-core library : %s" % (traceback.format_exc()))

    def register_device_event(self, callback):
        '''
        Register the device event callback to telldusd
        '''
        try:
            self._device_event_cb_id = \
                self._tdlib.tdRegisterDeviceEvent(callback, 0)
            return self._device_event_cb_id
        except:
            raise TelldusException("Could not register the device event callback : %s" % (traceback.format_exc()))

    def unregister_device_event(self):
        '''
        Unregister the device event callback to telldusd
        '''
        try:
            self._tdlib.tdUnregisterCallback(self._device_event_cb_id)
        except:
            raise TelldusException("Could not unregister the device event callback : %s" % (traceback.format_exc()))

    def register_device_change_event(self, callback):
        '''
        Register the device change event callback to telldusd
        '''
        try:
            self._device_change_event_cb_id = \
                self._tdlib.tdRegisterDeviceChangeEvent(callback,0)
            return self._device_change_event_cb_id
        except:
            raise TelldusException("Could not register the device change event callback : %s" % (traceback.format_exc()))

    def unregister_device_change_event(self):
        '''
        Unregister the device change event callback to telldusd
        '''
        try:
            self._tdlib.tdUnregisterCallback(self._device_change_event_cb_id)
        except:
            raise TelldusException("Could not unregister the device event change callback : %s" % (traceback.format_exc()))

    def register_sensor_event(self, callback):
        '''
        Register the sensor event callback to telldusd
        '''
        try:
            self._sensor_event_cb_id = \
                self._tdlib.tdRegisterSensorEvent(callback, 0)
            return self._sensor_event_cb_id
        except:
            raise TelldusException("Could not register the sensor event callback : %s" % (traceback.format_exc()))

    def unregister_sensor_event(self):
        '''
        Unregister the sensor event callback to telldusd
        '''
        try:
            self._tdlib.tdUnregisterCallback(self._sensor_event_cb_id)
        except:
            raise TelldusException("Could not unregister the sensor event callback : %s" % (traceback.format_exc()))

    def get_devices(self):
        '''
        Return a dict {deviceid: info} of devices registered in telldus daemon
        '''
        ret = {}
        for i in range(self._tdlib.tdGetNumberOfDevices()):
            iid = self._tdlib.tdGetDeviceId(c_int(i))
            ret[iid] = { "name" : c_char_p(self._tdlib.tdGetName(c_int(iid))).value,
                "house" : c_char_p(self._tdlib.tdGetDeviceParameter(c_int(iid), c_char_p("house"), "")).value,
                "unit" : c_char_p(self._tdlib.tdGetDeviceParameter(c_int(iid), c_char_p("unit"), "")).value,
                "model" : "%s" % c_char_p(self._tdlib.tdGetModel(c_int(iid))).value,
                "protocol" : c_char_p(self._tdlib.tdGetProtocol(c_int(iid))).value
                }
        return ret

    def is_dimmer(self, deviceid):
        '''
        True when the device supports the DIM method
        @param deviceid : id of the module
        '''
        if self.methods(deviceid, TELLSTICK_DIM) == TELLSTICK_DIM:
            return True
        return False

    def is_switch(self, deviceid):
        '''
        True when the device supports ON and OFF but not DIM
        @param deviceid : id of the module
        '''
        if self.methods(deviceid, TELLSTICK_TURNON) == TELLSTICK_TURNON and \
           self.methods(deviceid, TELLSTICK_TURNOFF) == TELLSTICK_TURNOFF and \
           self.methods(deviceid, TELLSTICK_DIM) != TELLSTICK_DIM:
            return True
        return False

    def get_info(self, deviceid):
        '''
        Return a list of human-readable lines describing the device and the
        methods it supports
        @param deviceid : id of the module
        '''
        sst = []
        sst.append("%s : %s" % \
            (deviceid, c_char_p(self._tdlib.tdGetName(c_int(deviceid))).value))
        sst.append("model : %s" % \
            (c_char_p(self._tdlib.tdGetModel(c_int(deviceid))).value))
        sst.append("protocol : %s" % \
            (c_char_p(self._tdlib.tdGetProtocol(c_int(deviceid))).value))
        sst.append("house : %s / unit: %s" % (c_char_p(self._tdlib.tdGetDeviceParameter(c_int(deviceid), c_char_p("house"), "")).value, \
            c_char_p(self._tdlib.tdGetDeviceParameter(c_int(deviceid), c_char_p("unit"), "")).value))
        sst.append("Methods :")
        ss1, ss2, ss3 = "No", "No", "No"
        if self.methods(deviceid, TELLSTICK_TURNON) \
            == TELLSTICK_TURNON:
            ss1 = "Yes"
        if self.methods(deviceid, TELLSTICK_TURNOFF) \
            == TELLSTICK_TURNOFF:
            ss2 = "Yes"
        if self.methods(deviceid, TELLSTICK_DIM) \
            == TELLSTICK_DIM:
            ss3 = "Yes"
        sst.append("ON : %s / OFF: %s / DIM: %s" % (ss1, ss2, ss3))
        ss1, ss2, ss3, ss4 = "No", "No", "No", "No"
        if self.methods(deviceid, TELLSTICK_BELL) \
            == TELLSTICK_BELL:
            ss1 = "Yes"
        if self.methods(deviceid, TELLSTICK_TOGGLE) \
            == TELLSTICK_TOGGLE:
            ss2 = "Yes"
        if self.methods(deviceid, TELLSTICK_LEARN) \
            == TELLSTICK_LEARN:
            ss3 = "Yes"
        if self.methods(deviceid, TELLSTICK_EXECUTE) \
            == TELLSTICK_EXECUTE:
            ss4 = "Yes"
        sst.append("BELL : %s / TOGGLE: %s / LEARN: %s / EXECUTE: %s" % \
            (ss1, ss2, ss3, ss4))
        ss1, ss2, ss3 = "No", "No", "No"
        if self.methods(deviceid, TELLSTICK_UP) \
            == TELLSTICK_UP:
            ss1 = "Yes"
        if self.methods(deviceid, TELLSTICK_DOWN) \
            == TELLSTICK_DOWN:
            ss2 = "Yes"
        if self.methods(deviceid, TELLSTICK_STOP) \
            == TELLSTICK_STOP:
            ss3 = "Yes"
        sst.append("UP : %s / DOWN: %s / STOP: %s" % (ss1, ss2, ss3))
        return sst

    def check_device(self, device):
        '''
        Check that the device exist in telldusd
        @param device : address of the device. Maybe malformed.
        '''
        try:
            deviceid = int(device[2:])
            name = c_char_p(self._tdlib.tdGetName(c_int(deviceid))).value
            #print "found name = %s" % name
            if name == None or name == "" :
                #print "bad device %s" % device
                return False
            else:
                #print "good device %s" % device
                return True
        except :
            #print "bad device %s" % device
            return False

    def get_device_id(self, devicestr):
        '''
        Retrieve an id from HU address
        @param device : address of the module (ie TS14)
        @return : Id of the device (14)
        '''
        return int(devicestr[2:])

    def make_device_id(self, deviceint):
        '''
        Build the agocontrol address from a numeric telldus device id
        @param deviceint : id of the device (ie 14)
        @return : address of the device (ie TS14)
        '''
        return "TS%s"%deviceint

    def get_device(self, deviceid):
        '''
        Retrieve an address device from deviceid
        @param deviceid : id of the device (ie 14)
        @return : address of the device (ie TS14)
        '''
        return 'TS'+str(deviceid)

    def turn_on(self, deviceid):
        '''
        Turns the internal device On
        @param deviceid : id of the module
        '''
        self._tdlib.tdTurnOn(c_int(deviceid))

    def turn_off(self, deviceid):
        '''
        Turns the internal device Off
        @param deviceid : id of the module
        '''
        self._tdlib.tdTurnOff(c_int(deviceid))

    def bell(self, deviceid):
        '''
        Bells the device
        @param deviceid : id of the module
        '''
        self._tdlib.tdBell(c_int(deviceid))

    def learn(self, deviceid):
        '''
        Sends a special Learn command to the device
        @param deviceid : id of the module
        '''
        self._tdlib.tdLearn(c_int(deviceid))

    def dim(self, deviceid, level):
        '''
        Dims the device level should be between 0 and 100
        tdlib use a level from 0 to 255. So we translate it.
        @param deviceid : id of the module
        @param level : level of light
        '''
        self._tdlib.tdDim(c_int(deviceid), c_ubyte(int(int(level)*2.55)))

    def up(self, deviceid):
        '''
        Send the UP command to the device (shutters)
        @param deviceid : id of the module
        '''
        self._tdlib.tdUp(c_int(deviceid))

    def down(self, deviceid):
        '''
        Send the DOWN command to the device (shutters)
        @param deviceid : id of the module
        '''
        self._tdlib.tdDown(c_int(deviceid))

    def stop(self, deviceid):
        '''
        Send the STOP command to the device (shutters)
        @param deviceid : id of the module
        '''
        self._tdlib.tdStop(c_int(deviceid))

    def methods(self, deviceid, methods):
        '''
        Query the methods supported by the device, masked by *methods*
        (bitwise AND of the TELLSTICK_* method flags).
        @param deviceid : id of the module
        @param methods : bitmask of methods to test for
        '''
        #int methods = tdMethods(id, TELLSTICK_TURNON | \
        #    TELLSTICK_TURNOFF | TELLSTICK_BELL);
        return self._tdlib.tdMethods(c_int(deviceid), methods)
# Connect to agocontrol, read the stick configuration, load telldus-core and
# register the three event callbacks (the *_func objects must stay referenced
# so the ctypes trampolines are not garbage collected).
client = agoclient.AgoConnection("tellstick")
device = agoclient.getConfigOption("tellstick", "device", "/dev/tellstick")
delay_rf = float(agoclient.getConfigOption("tellstick", "delay_rf", "400"))
lib = Telldusd()
sensor_func = SENSORFUNC(sensor_callback)
device_func = DEVICEFUNC(device_callback)
raw_func = RAWFUNC(raw_callback)
lib.register_sensor_event(sensor_func)
lib.register_device_event(device_func)
lib.register_device_change_event(raw_func)
# Announce every telldusd device to agocontrol as a dimmer or a switch.
devices=lib.get_devices()
for dev in devices.keys() :
    if lib.is_dimmer(dev) :
        client.addDevice(lib.make_device_id(dev), "dimmer")
    elif lib.is_switch(dev) :
        client.addDevice(lib.make_device_id(dev), "switch")
    else :
        syslog.syslog(syslog.LOG_ERR, 'Unknown device type for %s' % dev)
        # NOTE(review): get_info() returns a list while log_exception() expects
        # a string (it calls .split) — this branch would raise if hit; verify.
        log_exception(lib.get_info(dev))
print devices
# Serializes access to the tellstick across command_send worker threads.
tellsticklock = threading.Lock()
class command_send(threading.Thread):
    """Worker thread that executes one on/off/setlevel command on the stick.

    Runs off the agoclient handler thread so slow RF traffic never blocks
    message handling; tellsticklock serializes access to telldus-core.
    """
    def __init__(self, id, command, level):
        # id: agocontrol internal id (e.g. "TS14"); command: "on"/"off"/"setlevel";
        # level: dim level, only meaningful for "setlevel".
        threading.Thread.__init__(self)
        self.id = id
        self.command = command
        self.level = level

    def run(self):
        try :
            tellsticklock.acquire()
            if self.command == "on":
                # Dimmers are switched on by dimming to full scale.
                if lib.is_dimmer(lib.get_device_id(self.id)) :
                    lib.dim(lib.get_device_id(self.id), 255)
                else :
                    lib.turn_on(lib.get_device_id(self.id))
            elif self.command == "off":
                if lib.is_dimmer(lib.get_device_id(self.id)) :
                    lib.dim(lib.get_device_id(self.id), 0)
                else :
                    lib.turn_off(lib.get_device_id(self.id))
            elif self.command == "setlevel":
                lib.dim(lib.get_device_id(self.id), self.level)
        except :
            # Best-effort: log the failure and mark the thread as errored.
            error = traceback.format_exc()
            syslog.syslog(syslog.LOG_ERR, 'Error when calling telldus command %s for device %s' % (self.command, self.id))
            log_exception(error)
            # NOTE(review): self.error is set here but never initialized or read.
            self.error=1
        finally :
            tellsticklock.release()
def messageHandler(internalid, content):
    """agoclient command handler: run each device command on a daemon thread."""
    print content
    if "command" in content:
        if "level" in content:
            background = command_send(internalid, content["command"], content["level"])
        else:
            background = command_send(internalid, content["command"], "")
        background.setDaemon(True)
        background.start()
# specify our message handler method
client.addHandler(messageHandler)
# enter the agoclient main loop (blocks forever)
client.run()
| bibi21000/agocontrol | debian/agocontrol-tellstick/opt/agocontrol/bin/agotellstick.py | Python | gpl-2.0 | 18,493 |
# templater.py - template expansion for output
#
# Copyright 2005, 2006 Matt Mackall <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.
from i18n import _
import re, sys, os
import util, config, templatefilters
path = ['templates', '../templates']
stringify = templatefilters.stringify
def parsestring(s, quoted=True):
    '''parse a string using simple c-like syntax.
    string must be in quotes if quoted is True.'''
    if quoted:
        if len(s) < 2 or s[0] != s[-1]:
            raise SyntaxError(_('unmatched quotes'))
        # strip the surrounding quotes, then expand \n, \t, ... escapes
        # (Python 2 only: 'string_escape' is a bytes codec removed in py3)
        return s[1:-1].decode('string_escape')
    return s.decode('string_escape')
class engine(object):
    '''template expansion engine.
    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.
    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.
    expansion also allows formatting and filtering.
    format uses key to expand each item in list. syntax is
    {key%format}.
    filter uses function to transform value. syntax is
    {key|filter1|filter2|...}.'''

    # matches both {expr} and legacy #expr# placeholders
    template_re = re.compile(r'{([\w\|%]+)}|#([\w\|%]+)#')

    def __init__(self, loader, filters={}, defaults={}):
        # loader: callable mapping a template name to its text
        self.loader = loader
        self.filters = filters
        self.defaults = defaults
        self.cache = {}

    def process(self, t, map):
        '''Perform expansion. t is name of map element to expand. map contains
        added elements for use during expansion. Is a generator.'''
        tmpl = self.loader(t)
        iters = [self._process(tmpl, map)]
        # Flatten the tree of nested generators produced by _process without
        # recursion: new sub-iterators are pushed onto the front of the list.
        while iters:
            try:
                item = iters[0].next()
            except StopIteration:
                iters.pop(0)
                continue
            if isinstance(item, str):
                yield item
            elif item is None:
                yield ''
            elif hasattr(item, '__iter__'):
                iters.insert(0, iter(item))
            else:
                yield str(item)

    def _format(self, expr, get, map):
        # Expand {key%format}: apply the named format template to every
        # mapping in the list bound to key.
        key, format = expr.split('%')
        v = get(key)
        if not hasattr(v, '__iter__'):
            raise SyntaxError(_("error expanding '%s%%%s'") % (key, format))
        lm = map.copy()
        for i in v:
            lm.update(i)
            yield self.process(format, lm)

    def _filter(self, expr, get, map):
        # Expand {key|f1|f2}: compile (and cache) a closure applying the
        # filter chain to the value of key.
        if expr not in self.cache:
            parts = expr.split('|')
            val = parts[0]
            try:
                filters = [self.filters[f] for f in parts[1:]]
            except KeyError, i:
                raise SyntaxError(_("unknown filter '%s'") % i[0])
            def apply(get):
                x = get(val)
                for f in filters:
                    x = f(x)
                return x
            self.cache[expr] = apply
        return self.cache[expr](get)

    def _process(self, tmpl, map):
        '''Render a template. Returns a generator.'''
        def get(key):
            # lookup order: map, then engine defaults; callables are invoked
            # with the whole map as keyword arguments
            v = map.get(key)
            if v is None:
                v = self.defaults.get(key, '')
            if hasattr(v, '__call__'):
                v = v(**map)
            return v
        while tmpl:
            m = self.template_re.search(tmpl)
            if not m:
                yield tmpl
                break
            start, end = m.span(0)
            variants = m.groups()
            expr = variants[0] or variants[1]
            if start:
                yield tmpl[:start]
            tmpl = tmpl[end:]
            if '%' in expr:
                yield self._format(expr, get, map)
            elif '|' in expr:
                yield self._filter(expr, get, map)
            else:
                yield get(expr)
# Registry of template engine implementations, keyed by engine name.
engines = {'default': engine}
class templater(object):
    """Loads a style map file and dispatches template expansion to the
    appropriate engine, with chunked streaming output."""

    def __init__(self, mapfile, filters={}, defaults={}, cache={},
                 minchunk=1024, maxchunk=65536):
        '''set up template engine.
        mapfile is name of file to read map definitions from.
        filters is dict of functions. each transforms a value into another.
        defaults is dict of default map definitions.'''
        self.mapfile = mapfile or 'template'
        self.cache = cache.copy()
        self.map = {}
        self.base = (mapfile and os.path.dirname(mapfile)) or ''
        self.filters = templatefilters.filters.copy()
        self.filters.update(filters)
        self.defaults = defaults
        self.minchunk, self.maxchunk = minchunk, maxchunk
        self.engines = {}
        if not mapfile:
            return
        if not os.path.exists(mapfile):
            raise util.Abort(_('style not found: %s') % mapfile)
        conf = config.config()
        conf.read(mapfile)
        for key, val in conf[''].items():
            if val[0] in "'\"":
                # quoted value: a literal template string, cached directly
                try:
                    self.cache[key] = parsestring(val)
                except SyntaxError, inst:
                    raise SyntaxError('%s: %s' %
                                      (conf.source('', key), inst.args[0]))
            else:
                # unquoted value: "enginename:filename" or just "filename"
                val = 'default', val
                if ':' in val[1]:
                    val = val[1].split(':', 1)
                self.map[key] = val[0], os.path.join(self.base, val[1])

    def __contains__(self, key):
        return key in self.cache or key in self.map

    def load(self, t):
        '''Get the template for the given template name. Use a local cache.'''
        if not t in self.cache:
            try:
                self.cache[t] = open(self.map[t][1]).read()
            except IOError, inst:
                raise IOError(inst.args[0], _('template file %s: %s') %
                              (self.map[t][1], inst.args[1]))
        return self.cache[t]

    def __call__(self, t, **map):
        # Pick (and lazily instantiate) the engine declared for template t,
        # then stream its output, re-chunked for efficient writes.
        ttype = t in self.map and self.map[t][0] or 'default'
        proc = self.engines.get(ttype)
        if proc is None:
            proc = engines[ttype](self.load, self.filters, self.defaults)
            self.engines[ttype] = proc
        stream = proc.process(t, map)
        if self.minchunk:
            stream = util.increasingchunks(stream, min=self.minchunk,
                                           max=self.maxchunk)
        return stream
def templatepath(name=None):
    '''return location of template file or directory (if no name).
    With no name, returns the list of existing template directories.'''
    normpaths = []
    # executable version (py2exe) doesn't support __file__
    if hasattr(sys, 'frozen'):
        module = sys.executable
    else:
        module = __file__
    for f in path:
        if f.startswith('/'):
            p = f
        else:
            # relative entries in `path` are resolved next to this module
            fl = f.split('/')
            p = os.path.join(os.path.dirname(module), *fl)
        if name:
            p = os.path.join(p, name)
        if name and os.path.exists(p):
            return os.path.normpath(p)
        elif os.path.isdir(p):
            normpaths.append(os.path.normpath(p))
    return normpaths
def stylemap(style, paths=None):
    """Return path to mapfile for a given style.

    Searches mapfile in the following locations:
     1. templatepath/style/map
     2. templatepath/map-style
     3. templatepath/map
    """
    if paths is None:
        paths = templatepath()
    elif isinstance(paths, str):
        paths = [paths]

    # Candidate file names, most specific first; bare "map" is always last.
    candidates = []
    if style:
        candidates = [os.path.join(style, "map"), "map-" + style]
    candidates.append("map")

    for directory in paths:
        for candidate in candidates:
            mapfile = os.path.join(directory, candidate)
            if os.path.isfile(mapfile):
                return mapfile

    raise RuntimeError("No hgweb templates found in %r" % paths)
| dkrisman/Traipse | mercurial/templater.py | Python | gpl-2.0 | 7,996 |
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2011 Nick Hall
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Gtk modules
#
#-------------------------------------------------------------------------
from gi.repository import Gtk
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from gramps.gui.listmodel import ListModel
from gramps.gen.utils.db import navigation_label
from gramps.gen.plug import Gramplet
from gramps.gui.utils import edit_object
from gramps.gen.const import GRAMPS_LOCALE as glocale
from gramps.gen.datehandler import displayer
_ = glocale.translation.gettext
class Backlinks(Gramplet):
    """
    Displays the back references for an object.
    """
    def init(self):
        # Replace the default textview of the gramplet with our treeview.
        self.date_column = None
        self.evts = False  # whether any Event rows were added (controls columns)
        self.gui.WIDGET = self.build_gui()
        self.gui.get_container_widget().remove(self.gui.textview)
        self.gui.get_container_widget().add(self.gui.WIDGET)
        self.gui.WIDGET.show()

    def build_gui(self):
        """
        Build the GUI interface.
        """
        self.top = Gtk.TreeView()
        titles = [(_('Type'), 1, 100),
                  (_('Name'), 2, 100),
                  (_('Date'), 4, 200),
                  ('sd', 4, 120), # sorted date column
                  ('', 5, 1), #hidden column for the handle
                  ('', 6, 1), #hidden column for non-localized object type
                  ]
        self.model = ListModel(self.top, titles,
                               event_func=self.cb_double_click)
        self.date_column = self.top.get_column(2)
        self.sdate = self.top.get_column(3)
        self.top.get_column(1).set_expand(True) # The name use the max
                                                # possible size
        return self.top

    def display_backlinks(self, active_handle):
        """
        Display the back references for an object.
        """
        self.evts = False
        sdcolumn = None
        for classname, handle in \
                self.dbstate.db.find_backlink_handles(active_handle):
            name = navigation_label(self.dbstate.db, classname, handle)[0]
            sdcolumn = self.top.get_column(3)
            dcolumn = self.top.get_column(2)
            if classname == "Event":
                # Events get a display date plus a zero-padded sort key so
                # that sorting on the visible date column orders by date.
                obj = self.dbstate.db.get_event_from_handle(handle)
                o_date = obj.get_date_object()
                date = displayer.display(o_date)
                sdate = "%09d" % o_date.get_sort_value()
                sdcolumn.set_sort_column_id(3)
                dcolumn.set_sort_column_id(3)
                self.evts = True
            else:
                sdcolumn.set_sort_column_id(1)
                date = sdate = ""
            self.model.add((_(classname), name, date, sdate, handle, classname))
        if self.evts:
            # show the date column only when at least one event was listed
            self.date_column.set_visible(True)
            sdcolumn.set_visible(False)
        else:
            self.date_column.set_visible(False)
            if sdcolumn:
                sdcolumn.set_visible(False)
        self.set_has_data(self.model.count > 0)

    def get_has_data(self, active_handle):
        """
        Return True if the gramplet has data, else return False.
        """
        if not active_handle:
            return False
        # find_backlink_handles is a generator: any yielded item means data
        for handle in self.dbstate.db.find_backlink_handles(active_handle):
            return True
        return False

    def cb_double_click(self, treeview):
        """
        Handle double click on treeview.
        """
        (model, iter_) = treeview.get_selection().get_selected()
        if not iter_:
            return
        # columns 5/4 hold the non-localized class name and the handle
        (objclass, handle) = (model.get_value(iter_, 5),
                              model.get_value(iter_, 4))
        edit_object(self.dbstate, self.uistate, objclass, handle)
class PersonBacklinks(Backlinks):
    """Show every object that refers back to the active person."""

    def db_changed(self):
        self.connect(self.dbstate.db, 'person-update', self.update)

    def active_changed(self, handle):
        self.update()

    def update_has_data(self):
        active = self.get_active('Person')
        self.set_has_data(self.get_has_data(active))

    def main(self):
        active = self.get_active('Person')
        self.model.clear()
        if not active:
            self.set_has_data(False)
            return
        self.display_backlinks(active)
class EventBacklinks(Backlinks):
    """Show every object that refers back to the active event."""

    def db_changed(self):
        self.connect(self.dbstate.db, 'event-update', self.update)
        self.connect_signal('Event', self.update)

    def update_has_data(self):
        active = self.get_active('Event')
        self.set_has_data(self.get_has_data(active))

    def main(self):
        active = self.get_active('Event')
        self.model.clear()
        if not active:
            self.set_has_data(False)
            return
        self.display_backlinks(active)
class FamilyBacklinks(Backlinks):
    """Show every object that refers back to the active family."""

    def db_changed(self):
        self.connect(self.dbstate.db, 'family-update', self.update)
        self.connect_signal('Family', self.update)

    def update_has_data(self):
        active = self.get_active('Family')
        self.set_has_data(self.get_has_data(active))

    def main(self):
        active = self.get_active('Family')
        self.model.clear()
        if not active:
            self.set_has_data(False)
            return
        self.display_backlinks(active)
class PlaceBacklinks(Backlinks):
    """Show every object that refers back to the active place."""

    def db_changed(self):
        self.connect(self.dbstate.db, 'place-update', self.update)
        self.connect_signal('Place', self.update)

    def update_has_data(self):
        active = self.get_active('Place')
        self.set_has_data(self.get_has_data(active))

    def main(self):
        active = self.get_active('Place')
        self.model.clear()
        if not active:
            self.set_has_data(False)
            return
        self.display_backlinks(active)
class SourceBacklinks(Backlinks):
    """Show every object that refers back to the active source."""

    def db_changed(self):
        self.connect(self.dbstate.db, 'source-update', self.update)
        self.connect_signal('Source', self.update)

    def update_has_data(self):
        active = self.get_active('Source')
        self.set_has_data(self.get_has_data(active))

    def main(self):
        active = self.get_active('Source')
        self.model.clear()
        if not active:
            self.set_has_data(False)
            return
        self.display_backlinks(active)
class CitationBacklinks(Backlinks):
    """Show every object that refers back to the active citation."""

    def db_changed(self):
        self.connect(self.dbstate.db, 'citation-update', self.update)
        self.connect_signal('Citation', self.update)

    def update_has_data(self):
        active = self.get_active('Citation')
        self.set_has_data(self.get_has_data(active))

    def main(self):
        active = self.get_active('Citation')
        self.model.clear()
        if not active:
            self.set_has_data(False)
            return
        self.display_backlinks(active)
class RepositoryBacklinks(Backlinks):
    """Show every object that refers back to the active repository."""

    def db_changed(self):
        self.connect(self.dbstate.db, 'repository-update', self.update)
        self.connect_signal('Repository', self.update)

    def update_has_data(self):
        active = self.get_active('Repository')
        self.set_has_data(self.get_has_data(active))

    def main(self):
        active = self.get_active('Repository')
        self.model.clear()
        if not active:
            self.set_has_data(False)
            return
        self.display_backlinks(active)
class MediaBacklinks(Backlinks):
    """Show every object that refers back to the active media object."""

    def db_changed(self):
        self.connect(self.dbstate.db, 'media-update', self.update)
        self.connect_signal('Media', self.update)

    def update_has_data(self):
        active = self.get_active('Media')
        self.set_has_data(self.get_has_data(active))

    def main(self):
        active = self.get_active('Media')
        self.model.clear()
        if not active:
            self.set_has_data(False)
            return
        self.display_backlinks(active)
class NoteBacklinks(Backlinks):
    """Show every object that refers back to the active note."""

    def db_changed(self):
        self.connect(self.dbstate.db, 'note-update', self.update)
        self.connect_signal('Note', self.update)

    def update_has_data(self):
        active = self.get_active('Note')
        self.set_has_data(self.get_has_data(active))

    def main(self):
        active = self.get_active('Note')
        self.model.clear()
        if not active:
            self.set_has_data(False)
            return
        self.display_backlinks(active)
| Nick-Hall/gramps | gramps/plugins/gramplet/backlinks.py | Python | gpl-2.0 | 10,199 |
# -*-python-*-
# GemRB - Infinity Engine Emulator
# Copyright (C) 2003-2005 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# GUISTORE.py - script to open the store/inn/temple windows
###################################################
import GemRB
import GUICommon
import GUICommonWindows
from GUIDefines import *
from ie_stats import *
from ie_slots import *
# --- Module state -------------------------------------------------------
# Window handles; None while the corresponding window is not open.
StoreWindow = None
MessageWindow = None
ActionWindow = None
PortraitWindow = None
StoreShoppingWindow = None
StoreIdentifyWindow = None
StoreStealWindow = None
StoreDonateWindow = None
StoreHealWindow = None
StoreRumourWindow = None
StoreRentWindow = None
OldPortraitWindow = None
RentConfirmWindow = None
LeftButton = None
RightButton = None
# Item-owner selectors passed to GemRB.IsValidStoreItem:
# party inventory (ITEM_PC) vs. store stock (ITEM_STORE).
ITEM_PC = 0
ITEM_STORE = 1
Inventory = None
RentIndex = -1
Store = None
Buttons = [-1,-1,-1,-1]
inventory_slots = ()
total_price = 0
total_income = 0
# Items shown per page; IWD2 windows have two extra item buttons.
if GUICommon.GameIsIWD2():
	ItemButtonCount = 6
else:
	ItemButtonCount = 4
RepModTable = None
PreviousPC = 0
BarteringPC = 0
# Store types (index into storebams):
# 0 - Store
# 1 - Tavern
# 2 - Inn
# 3 - Temple
# 4 - Container
# 5 - Container
# Store actions (index into storetips/store_funcs):
# 0 - buy/sell
# 1 - identify
# 2 - steal
# 3 - heal
# 4 - donate
# 5 - drink
# 6 - rent
if GUICommon.GameIsIWD1():
	# no bam for bags
	storebams = ("STORSTOR","STORTVRN","STORINN","STORTMPL","STORSTOR","STORSTOR")
else:
	storebams = ("STORSTOR","STORTVRN","STORINN","STORTMPL","STORBAG","STORBAG")
# tooltip strrefs for the action buttons; room name strrefs for rent
storetips = (14288,14292,14291,12138,15013,14289,14287)
roomtypes = (17389,17517,17521,17519)
store_funcs = None
def CloseWindows ():
	"""Unload every open store subwindow and clear its handle."""
	global StoreShoppingWindow, StoreIdentifyWindow, StoreStealWindow
	global StoreHealWindow, StoreDonateWindow, StoreRumourWindow, StoreRentWindow
	subwindows = (StoreShoppingWindow, StoreIdentifyWindow, StoreStealWindow,
		StoreHealWindow, StoreDonateWindow, StoreRumourWindow, StoreRentWindow)
	for window in subwindows:
		if window:
			window.Unload ()
	StoreShoppingWindow = StoreIdentifyWindow = StoreStealWindow = None
	StoreHealWindow = StoreDonateWindow = StoreRumourWindow = StoreRentWindow = None
	return
def CloseStoreWindow ():
	"""Tear down all store windows, leave the store, and hand control
	back to either the inventory screen (bag mode) or the game view."""
	import GUIINV
	global StoreWindow, ActionWindow, PortraitWindow
	global OldPortraitWindow
	GemRB.SetVar ("Inventory", 0)
	CloseWindows ()
	if StoreWindow:
		StoreWindow.Unload ()
	if ActionWindow:
		ActionWindow.Unload ()
	if not GUICommon.GameIsBG1():
		if PortraitWindow:
			PortraitWindow.Unload ()
	StoreWindow = None
	GemRB.LeaveStore ()
	# restore the portrait window we replaced in OpenStoreWindow
	if not GUICommon.GameIsBG1():
		GUICommonWindows.PortraitWindow = OldPortraitWindow
	if Inventory:
		GUIINV.OpenInventoryWindow ()
	else:
		GUICommon.GameWindow.SetVisible(WINDOW_VISIBLE) #enabling the game control screen
		GemRB.UnhideGUI () #enabling the other windows
		GUICommonWindows.SetSelectionChangeHandler( None )
	return
def OpenStoreWindow ():
	"""Open the main store frame and the action bar, then jump to the
	store's first available action (buy/sell, identify, ...)."""
	global Store
	global StoreWindow, ActionWindow, PortraitWindow
	global OldPortraitWindow
	global store_funcs
	global Inventory, RepModTable, BarteringPC
	#these are function pointers, not strings
	#can't put this in global init, doh!
	store_funcs = (OpenStoreShoppingWindow,
		OpenStoreIdentifyWindow,OpenStoreStealWindow,
		OpenStoreHealWindow, OpenStoreDonateWindow,
		OpenStoreRumourWindow,OpenStoreRentWindow )
	RepModTable = GemRB.LoadTable ("repmodst")
	GemRB.HideGUI ()
	GUICommon.GameWindow.SetVisible(WINDOW_INVISIBLE) #removing the game control screen
	# "Inventory" mode means we are opening a container/bag, not a shop
	if GemRB.GetVar ("Inventory"):
		Inventory = 1
	else:
		Inventory = None
	GemRB.SetVar ("Action", 0)
	if GUICommon.GameIsIWD2():
		GemRB.LoadWindowPack ("GUISTORE", 800, 600)
	else:
		GemRB.LoadWindowPack ("GUISTORE", 640, 480)
	StoreWindow = Window = GemRB.LoadWindow (3)
	#saving the original portrait window
	OldPortraitWindow = GUICommonWindows.PortraitWindow
	if GUICommon.GameIsIWD2() or GUICommon.GameIsBG1():
		#PortraitWindow = GUICommonWindows.OpenPortraitWindow ()
		pass
	else:
		PortraitWindow = GUICommonWindows.OpenPortraitWindow (0)
	ActionWindow = GemRB.LoadWindow (0)
	#this window is static and grey, but good to stick the frame onto
	ActionWindow.SetFrame ()
	Store = GemRB.GetStore ()
	BarteringPC = GemRB.GameGetFirstSelectedPC ()
	# Done
	Button = Window.GetControl (0)
	Button.SetText (11973)
	Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, CloseStoreWindow)
	#Store type icon
	if not GUICommon.GameIsIWD2():
		Button = Window.GetControl (5)
		Button.SetSprites (storebams[Store['StoreType']],0,0,0,0,0)
	#based on shop type, these buttons will change
	store_type = Store['StoreType']
	store_buttons = Store['StoreButtons']
	# wire up to four action buttons; a negative action disables the slot
	for i in range (4):
		Buttons[i] = Button = Window.GetControl (i+1)
		Action = store_buttons[i]
		Button.SetVarAssoc ("Action", i)
		if Action>=0:
			Button.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_OR)
			if GUICommon.GameIsIWD1() or GUICommon.GameIsIWD2():
				Button.SetSprites ("GUISTBBC", Action, 1,2,0,0)
			else:
				Button.SetSprites ("GUISTBBC", Action, 0,1,2,0)
			Button.SetTooltip (storetips[Action])
			Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, store_funcs[Action])
			Button.SetState (IE_GUI_BUTTON_ENABLED)
		else:
			Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
			Button.SetTooltip ("")
			Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, None)
			Button.SetState (IE_GUI_BUTTON_DISABLED)
	ActionWindow.SetVisible (WINDOW_VISIBLE)
	Window.SetVisible (WINDOW_VISIBLE)
	# open the store's default (first) action panel
	store_funcs[store_buttons[0]] ()
	if not GUICommon.GameIsIWD2():
		if GUICommon.GameIsBG1():
			GUICommonWindows.PortraitWindow.SetVisible (WINDOW_VISIBLE)
		else:
			PortraitWindow.SetVisible (WINDOW_VISIBLE)
	return
def OpenStoreShoppingWindow ():
	"""Open the buy/sell panel (or, in Inventory mode, the container
	transfer panel): left pane is store stock, right pane is the PC's
	backpack."""
	global StoreShoppingWindow
	global LeftButton, RightButton
	CloseWindows()
	StoreShoppingWindow = Window = GemRB.LoadWindow (2)
	# left scrollbar
	ScrollBarLeft = Window.GetControl (11)
	ScrollBarLeft.SetEvent (IE_GUI_SCROLLBAR_ON_CHANGE, RedrawStoreShoppingWindow)
	# right scrollbar
	ScrollBarRight = Window.GetControl (12)
	ScrollBarRight.SetEvent (IE_GUI_SCROLLBAR_ON_CHANGE, RedrawStoreShoppingWindow)
	if Inventory:
		# containers show no prices; blank all price labels
		# Title
		Label = Window.GetControl (0xfffffff)
		if GUICommon.GameIsIWD1() or GUICommon.GameIsIWD2():
			Label.SetText (26291)
		elif GUICommon.GameIsBG2():
			Label.SetText (51881)
		else:
			Label.SetText ("")
		# buy price ...
		Label = Window.GetControl (0x1000002b)
		Label.SetText ("")
		# sell price ...
		Label = Window.GetControl (0x1000002c)
		Label.SetText ("")
		# buy price ...
		Label = Window.GetControl (0x1000002f)
		Label.SetText ("")
		# sell price ...
		Label = Window.GetControl (0x10000030)
		Label.SetText ("")
	else:
		# buy price ...
		Label = Window.GetControl (0x1000002b)
		Label.SetText ("0")
		# sell price ...
		Label = Window.GetControl (0x1000002c)
		Label.SetText ("0")
	# item buttons: i+5 = store side, i+13 = backpack side
	for i in range (ItemButtonCount):
		Button = Window.GetControl (i+5)
		if GUICommon.GameIsBG2():
			Button.SetBorder (0,0,0,0,0,0,0,128,160,0,1)
		else:
			Button.SetBorder (0,0,0,0,0,32,32,192,128,0,1)
		Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, SelectBuy)
		Button.SetEvent (IE_GUI_BUTTON_ON_RIGHT_PRESS, InfoLeftWindow)
		Button.AttachScrollBar (ScrollBarLeft)
		Button = Window.GetControl (i+13)
		if GUICommon.GameIsBG2():
			Button.SetBorder (0,0,0,0,0,0,0,128,160,0,1)
			Button.SetSprites ("GUIBTBUT", 0, 0,1,2,5)
		else:
			Button.SetBorder (0,0,0,0,0,32,32,192,128,0,1)
		if Store['StoreType'] != 3: # can't sell to temples
			Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, SelectSell)
		Button.SetEvent (IE_GUI_BUTTON_ON_RIGHT_PRESS, InfoRightWindow)
		Button.AttachScrollBar (ScrollBarRight)
	# Buy
	LeftButton = Button = Window.GetControl (2)
	if Inventory:
		if GUICommon.GameIsIWD2():
			Button.SetText (26287)
		elif GUICommon.GameIsIWD1():
			Button.SetText (26288)
		elif GUICommon.GameIsBG2():
			Button.SetText (51882)
		else:
			Button.SetText ("")
		Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, ToBackpackPressed)
	else:
		Button.SetText (13703)
		Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, BuyPressed)
	# Sell
	RightButton = Button = Window.GetControl (3)
	if Inventory:
		if GUICommon.GameIsIWD1() or GUICommon.GameIsIWD2():
			Button.SetText (26288)
		elif GUICommon.GameIsBG2():
			Button.SetText (51883)
		else:
			Button.SetText ("")
		Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, ToBagPressed)
	else:
		Button.SetText (13704)
		if Store['StoreType'] != 3: # can't sell to temples
			Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, SellPressed)
	# inactive button
	if GUICommon.GameIsBG2():
		Button = Window.GetControl (50)
		Button.SetState (IE_GUI_BUTTON_LOCKED)
		Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_SET)
	#backpack
	Button = Window.GetControl (44)
	Button.SetState (IE_GUI_BUTTON_LOCKED)
	# encumbrance
	Label = Window.CreateLabel (0x10000043, 15,325,60,15,"NUMBER","0:",IE_FONT_ALIGN_LEFT|IE_FONT_ALIGN_TOP)
	Label = Window.CreateLabel (0x10000044, 15,365,80,15,"NUMBER","0:",IE_FONT_ALIGN_RIGHT|IE_FONT_ALIGN_TOP)
	GUICommonWindows.SetSelectionChangeHandler( UpdateStoreShoppingWindow )
	UpdateStoreShoppingWindow ()
	Window.SetVisible (WINDOW_VISIBLE)
	return
def OpenStoreIdentifyWindow ():
	"""Open the identify panel listing the PC's unidentified items."""
	global StoreIdentifyWindow
	global LeftButton
	GemRB.SetVar ("Index", -1)
	GemRB.SetVar ("TopIndex", 0)
	CloseWindows()
	StoreIdentifyWindow = Window = GemRB.LoadWindow (4)
	ScrollBar = Window.GetControl (7)
	ScrollBar.SetEvent (IE_GUI_SCROLLBAR_ON_CHANGE, RedrawStoreIdentifyWindow)
	# text area showing the description of newly identified items
	TextArea = Window.GetControl (23)
	TextArea.SetFlags (IE_GUI_TEXTAREA_AUTOSCROLL)
	# Identify
	LeftButton = Button = Window.GetControl (5)
	Button.SetText (14133)
	Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, IdentifyPressed)
	Button.SetEvent (IE_GUI_BUTTON_ON_RIGHT_PRESS, InfoIdentifyWindow)
	# price ...
	Label = Window.GetControl (0x10000003)
	Label.SetText ("0")
	# 8-11 item slots, 0x1000000c-f labels
	for i in range (ItemButtonCount):
		Button = Window.GetControl (i+8)
		Button.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_OR)
		if GUICommon.GameIsIWD1() or GUICommon.GameIsIWD2():
			Button.SetSprites ("GUISTMSC", 0, 1,2,0,3)
			Button.SetBorder (0,0,0,0,0,32,32,192,128,0,1)
		elif GUICommon.GameIsBG1():
			Button.SetBorder (0,0,0,0,0,32,32,192,128,0,1)
		else:
			Button.SetBorder (0,0,0,0,0,0,0,128,160,0,1)
		Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, SelectID)
		Button.SetEvent (IE_GUI_BUTTON_ON_RIGHT_PRESS, InfoIdentifyWindow)
		Button.AttachScrollBar (ScrollBar)
	GUICommonWindows.SetSelectionChangeHandler( UpdateStoreIdentifyWindow )
	UpdateStoreIdentifyWindow ()
	Window.SetVisible (WINDOW_VISIBLE)
	return
def OpenStoreStealWindow ():
	"""Open the steal panel: store stock on the left, PC backpack on the
	right."""
	global StoreStealWindow
	global LeftButton
	GemRB.SetVar ("RightIndex", 0)
	GemRB.SetVar ("LeftIndex", 0)
	GemRB.SetVar ("RightTopIndex", 0)
	GemRB.SetVar ("LeftTopIndex", 0)
	CloseWindows()
	StoreStealWindow = Window = GemRB.LoadWindow (6)
	# left scrollbar
	ScrollBarLeft = Window.GetControl (9)
	ScrollBarLeft.SetEvent (IE_GUI_SCROLLBAR_ON_CHANGE, RedrawStoreStealWindow)
	# right scrollbar
	ScrollBarRight = Window.GetControl (10)
	ScrollBarRight.SetEvent (IE_GUI_SCROLLBAR_ON_CHANGE, RedrawStoreStealWindow)
	# item buttons: i+4 = store side, i+11 = backpack side
	for i in range (ItemButtonCount):
		Button = Window.GetControl (i+4)
		if GUICommon.GameIsBG2():
			Button.SetBorder (0,0,0,0,0,0,0,128,160,0,1)
		else:
			Button.SetBorder (0,0,0,0,0,32,32,192,128,0,1)
		Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, RedrawStoreStealWindow)
		Button.AttachScrollBar (ScrollBarLeft)
		Button = Window.GetControl (i+11)
		if GUICommon.GameIsBG2():
			Button.SetBorder (0,0,0,0,0,0,0,128,160,0,1)
		else:
			Button.SetBorder (0,0,0,0,0,32,32,192,128,0,1)
		Button.SetEvent (IE_GUI_BUTTON_ON_RIGHT_PRESS, InfoRightWindow)
		Button.AttachScrollBar (ScrollBarRight)
	# Steal
	LeftButton = Button = Window.GetControl (1)
	Button.SetText (14179)
	Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, StealPressed)
	Button = Window.GetControl (37)
	Button.SetState (IE_GUI_BUTTON_LOCKED)
	# encumbrance
	Label = Window.CreateLabel (0x10000043, 15,325,60,15,"NUMBER","0:",IE_FONT_ALIGN_LEFT|IE_FONT_ALIGN_TOP)
	Label = Window.CreateLabel (0x10000044, 15,365,80,15,"NUMBER","0:",IE_FONT_ALIGN_RIGHT|IE_FONT_ALIGN_TOP)
	GUICommonWindows.SetSelectionChangeHandler( UpdateStoreStealWindow )
	UpdateStoreStealWindow ()
	Window.SetVisible (WINDOW_VISIBLE)
	return
def OpenStoreDonateWindow ():
	"""Open the temple donation panel with an amount entry field and
	increment/decrement buttons."""
	global StoreDonateWindow
	CloseWindows ()
	StoreDonateWindow = Window = GemRB.LoadWindow (9)
	# graphics
	Button = Window.GetControl (10)
	Button.SetFlags (IE_GUI_BUTTON_PICTURE|IE_GUI_BUTTON_ANIMATED|IE_GUI_BUTTON_PLAYONCE, OP_OR)
	Button.SetState (IE_GUI_BUTTON_LOCKED)
	# Donate
	Button = Window.GetControl (3)
	Button.SetText (15101)
	Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, DonateGold)
	Button.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	# Entry
	Field = Window.GetControl (5)
	Field.SetText ("0")
	Field.SetEvent (IE_GUI_EDIT_ON_CHANGE, UpdateStoreDonateWindow)
	Field.SetStatus (IE_GUI_EDIT_NUMBER|IE_GUI_CONTROL_FOCUSED)
	# +
	Button = Window.GetControl (6)
	Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, IncrementDonation)
	# -
	Button = Window.GetControl (7)
	Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, DecrementDonation)
	GUICommonWindows.SetSelectionChangeHandler( UpdateStoreDonateWindow )
	UpdateStoreDonateWindow ()
	Window.SetVisible (WINDOW_VISIBLE)
	return
def OpenStoreHealWindow ():
	"""Open the temple healing panel listing purchasable cures."""
	global StoreHealWindow
	GemRB.SetVar ("Index", -1)
	GemRB.SetVar ("TopIndex", 0)
	CloseWindows()
	StoreHealWindow = Window = GemRB.LoadWindow (5)
	ScrollBar = Window.GetControl (7)
	ScrollBar.SetEvent (IE_GUI_SCROLLBAR_ON_CHANGE, UpdateStoreHealWindow)
	#spell buttons
	for i in range (ItemButtonCount):
		Button = Window.GetControl (i+8)
		Button.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_OR)
		Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, UpdateStoreHealWindow)
		Button.SetEvent (IE_GUI_BUTTON_ON_RIGHT_PRESS, InfoHealWindow)
		#Button.AttachScrollBar (ScrollBar)
	# price tag
	Label = Window.GetControl (0x10000003)
	Label.SetText ("0")
	# Heal
	Button = Window.GetControl (5)
	Button.SetText (13703)
	Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, BuyHeal)
	Button.SetState (IE_GUI_BUTTON_DISABLED)
	# scroll range; NOTE(review): hard-coded 4 even though IWD2 shows
	# 6 buttons per page (ItemButtonCount) — confirm against upstream
	Count = Store['StoreCureCount']
	if Count>4:
		Count = Count-4
	else:
		Count = 0
	ScrollBar.SetVarAssoc ("TopIndex", Count+1)
	UpdateStoreHealWindow ()
	Window.SetVisible (WINDOW_VISIBLE)
	return
def OpenStoreRumourWindow ():
	"""Open the tavern drink/rumour panel."""
	global StoreRumourWindow
	GemRB.SetVar ("TopIndex", 0)
	CloseWindows()
	StoreRumourWindow = Window = GemRB.LoadWindow (8)
	#removing those pesky labels
	for i in range (5):
		Window.DeleteControl (0x10000005+i)
	TextArea = Window.GetControl (11)
	TextArea.SetText (14144)
	#tavern quality image
	if GUICommon.GameIsBG1() or GUICommon.GameIsBG2():
		# quality is stored in bits 9-10 of the store flags
		BAM = "TVRNQUL%d"% ((Store['StoreFlags']>>9)&3)
		Button = Window.GetControl (12)
		Button.SetSprites (BAM, 0, 0, 0, 0, 0)
		Button.SetState (IE_GUI_BUTTON_LOCKED)
	ScrollBar = Window.GetControl (5)
	ScrollBar.SetEvent (IE_GUI_SCROLLBAR_ON_CHANGE, UpdateStoreRumourWindow)
	# scroll range; NOTE(review): hard-coded 4 even where ItemButtonCount
	# is 6 (IWD2) — confirm against upstream
	Count = Store['StoreDrinkCount']
	if Count>4:
		Count = Count-4
	else:
		Count = 0
	ScrollBar.SetVarAssoc ("TopIndex", Count+1)
	UpdateStoreRumourWindow ()
	Window.SetVisible (WINDOW_VISIBLE)
	return
def OpenStoreRentWindow ():
	"""Open the inn room-rental panel; rooms with a negative price are
	not offered by this inn."""
	global StoreRentWindow, RentIndex
	CloseWindows()
	StoreRentWindow = Window = GemRB.LoadWindow (7)
	# room types
	RentIndex = -1
	for i in range (4):
		ok = Store['StoreRoomPrices'][i]
		Button = Window.GetControl (i)
		Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, UpdateStoreRentWindow)
		if ok<0:
			Button.SetState (IE_GUI_BUTTON_DISABLED) #disabled room icons are selected, not disabled
		else:
			Button.SetVarAssoc ("RentIndex", i)
			# remember the first available room as the default choice
			if RentIndex==-1:
				RentIndex = i
		Button = Window.GetControl (i+4)
		Button.SetText (14294+i)
		Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, UpdateStoreRentWindow)
		Button.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_OR)
		Button.SetVarAssoc ("RentIndex", i)
		if GUICommon.GameIsBG1():
			#these bioware guys screw up everything possible
			#remove this line if you fixed guistore
			Button.SetSprites ("GUISTROC",0, 1,2,0,3)
		if ok<0:
			Button.SetState (IE_GUI_BUTTON_DISABLED)
	# Rent
	Button = Window.GetControl (11)
	Button.SetText (14293)
	Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, RentRoom)
	GemRB.SetVar ("RentIndex", RentIndex)
	UpdateStoreRentWindow ()
	Window.SetVisible (WINDOW_VISIBLE)
	return
def UpdateStoreCommon (Window, title, name, gold):
	"""Refresh the labels shared by every store subwindow: store name,
	(optionally) the selected character's name, and the party gold."""
	Window.GetControl (title).SetText (Store['StoreName'])
	if name:
		pc = GemRB.GameGetSelectedPCSingle ()
		Window.GetControl (name).SetText (GemRB.GetPlayerName (pc, 0))
	Window.GetControl (gold).SetText (str(GemRB.GameGetPartyGold ()))
	return
def GetPC():
	"""Return the currently selected PC, resetting all store list
	positions whenever the selection changed since the last call."""
	global PreviousPC
	current = GemRB.GameGetSelectedPCSingle ()
	if PreviousPC and PreviousPC != current:
		# selection changed: reset the store indices, to prevent overscrolling
		for var in ("RightIndex", "LeftIndex", "RightTopIndex",
				"LeftTopIndex", "Index", "TopIndex"):
			GemRB.SetVar (var, 0)
	PreviousPC = current
	return current
def UpdateStoreShoppingWindow ():
	"""Resync the shopping window with the store and the selected PC.

	Reloads the store (stock may have changed), clamps both scrollbars
	to the current item counts and redraws all the item buttons.
	"""
	global Store, inventory_slots
	Window = StoreShoppingWindow
	#reget store in case of a change
	Store = GemRB.GetStore ()
	# GetPC returns the selected PC and resets the scroll variables on a
	# selection change, so there is no need to query the PC a second time
	pc = GetPC()
	# left pane: store stock
	LeftCount = Store['StoreItemCount'] - ItemButtonCount + 1
	if LeftCount<0:
		LeftCount=0
	ScrollBar = Window.GetControl (11)
	ScrollBar.SetVarAssoc ("LeftTopIndex", LeftCount)
	LeftTopIndex = GemRB.GetVar ("LeftTopIndex")
	if LeftTopIndex>LeftCount:
		GemRB.SetVar ("LeftTopIndex", LeftCount)
	# right pane: the PC's backpack
	inventory_slots = GemRB.GetSlots (pc, SLOT_INVENTORY)
	RightCount = len(inventory_slots) - ItemButtonCount + 1
	if RightCount<0:
		RightCount=0
	ScrollBar = Window.GetControl (12)
	ScrollBar.SetVarAssoc ("RightTopIndex", RightCount)
	RightTopIndex = GemRB.GetVar ("RightTopIndex")
	if RightTopIndex>RightCount:
		GemRB.SetVar ("RightTopIndex", RightCount)
	RedrawStoreShoppingWindow ()
	return
def SelectID ():
	"""Toggle the identify-selection mark on the clicked inventory item."""
	idx = GemRB.GetVar ("Index")
	pc = GemRB.GameGetSelectedPCSingle ()
	GemRB.ChangeStoreItem (pc, inventory_slots[idx], SHOP_ID|SHOP_SELECT)
	RedrawStoreIdentifyWindow ()
def SelectBuy ():
	"""Toggle the buy-selection mark on the clicked store item and
	redraw the totals. (The unused window binding was removed.)"""
	pc = GemRB.GameGetSelectedPCSingle ()
	LeftIndex = GemRB.GetVar ("LeftIndex")
	GemRB.ChangeStoreItem (pc, LeftIndex, SHOP_BUY|SHOP_SELECT)
	RedrawStoreShoppingWindow ()
	return
def ToBackpackPressed ():
	"""Move every selected container item into the PC's backpack.
	(The unused window binding was removed.)"""
	pc = GemRB.GameGetSelectedPCSingle ()
	LeftCount = Store['StoreItemCount']
	#going backwards because removed items shift the slots
	for i in range (LeftCount, 0, -1):
		Flags = GemRB.IsValidStoreItem (pc, i-1, ITEM_STORE)&SHOP_SELECT
		if Flags:
			GemRB.ChangeStoreItem (pc, i-1, SHOP_BUY)
	UpdateStoreShoppingWindow ()
	return
def BuyPressed ():
	"""Buy all selected store items, deducting each item's price from
	the party gold. Aborts with an error window when the running total
	(BuySum) exceeds the party's gold. (Unused window binding removed.)"""
	if (BuySum>GemRB.GameGetPartyGold ()):
		ErrorWindow (11047)
		return
	pc = GemRB.GameGetSelectedPCSingle ()
	LeftCount = Store['StoreItemCount']
	#going backwards because removed items shift the slots
	for i in range (LeftCount, 0, -1):
		Flags = GemRB.IsValidStoreItem (pc, i-1, ITEM_STORE)&SHOP_SELECT
		if Flags:
			Slot = GemRB.GetStoreItem (i-1)
			Item = GemRB.GetItem (Slot['ItemResRef'])
			# "sell" column: the store's selling (= our buying) price
			Price = GetRealPrice (pc, "sell", Item, Slot)
			if Price <= 0:
				Price = 1
			if GemRB.ChangeStoreItem (pc, i-1, SHOP_BUY):
				GemRB.GameSetPartyGold (GemRB.GameGetPartyGold ()-Price)
	UpdateStoreShoppingWindow ()
	return
def SelectSell ():
	"""Toggle the sell-selection mark on the clicked backpack item and
	redraw the totals. (The unused window binding was removed.)"""
	pc = GemRB.GameGetSelectedPCSingle ()
	RightIndex = GemRB.GetVar ("RightIndex")
	GemRB.ChangeStoreItem (pc, inventory_slots[RightIndex], SHOP_SELL|SHOP_SELECT)
	RedrawStoreShoppingWindow ()
	return
def ToBagPressed ():
	"""Move every selected backpack item into the open container.
	(The unused window binding was removed.)"""
	pc = GemRB.GameGetSelectedPCSingle ()
	RightCount = len (inventory_slots)
	#no need to go reverse
	for Slot in range (RightCount):
		Flags = GemRB.IsValidStoreItem (pc, inventory_slots[Slot], ITEM_PC)
		if Flags & SHOP_SELECT:
			GemRB.ChangeStoreItem (pc, inventory_slots[Slot], SHOP_SELL)
	UpdateStoreShoppingWindow ()
	return
def SellPressed ():
	"""Sell all selected backpack items and credit the precomputed
	total (SellSum) to the party gold. (Unused window binding removed.)"""
	pc = GemRB.GameGetSelectedPCSingle ()
	RightCount = len (inventory_slots)
	#no need to go reverse
	for Slot in range (RightCount):
		Flags = GemRB.IsValidStoreItem (pc, inventory_slots[Slot], ITEM_PC) & SHOP_SELECT
		if Flags:
			GemRB.ChangeStoreItem (pc, inventory_slots[Slot], SHOP_SELL)
	GemRB.GameSetPartyGold (GemRB.GameGetPartyGold ()+SellSum)
	UpdateStoreShoppingWindow ()
	return
def RedrawStoreShoppingWindow ():
	"""Recompute the buy/sell totals and repaint both item panes.

	Side effects: updates the module globals BuySum and SellSum, which
	BuyPressed/SellPressed later apply to the party gold.
	"""
	global BuySum, SellSum
	Window = StoreShoppingWindow
	UpdateStoreCommon (Window, 0x10000003, 0x1000002e, 0x1000002a)
	pc = GemRB.GameGetSelectedPCSingle ()
	LeftTopIndex = GemRB.GetVar ("LeftTopIndex")
	LeftIndex = GemRB.GetVar ("LeftIndex")
	RightTopIndex = GemRB.GetVar ("RightTopIndex")
	RightIndex = GemRB.GetVar ("RightIndex")
	idx = [ LeftTopIndex, RightTopIndex, LeftIndex, RightIndex ]
	# total price of everything marked for buying
	LeftCount = Store['StoreItemCount']
	BuySum = 0
	for i in range (LeftCount):
		if GemRB.IsValidStoreItem (pc, i, ITEM_STORE) & SHOP_SELECT:
			Slot = GemRB.GetStoreItem (i)
			Item = GemRB.GetItem (Slot['ItemResRef'])
			if Inventory:
				Price = 1
			else:
				Price = GetRealPrice (pc, "sell", Item, Slot)
			if Price <= 0:
				Price = 1
			BuySum = BuySum + Price
	# total income of everything marked for selling
	RightCount = len(inventory_slots)
	SellSum = 0
	for i in range (RightCount):
		Flags = GemRB.IsValidStoreItem (pc, inventory_slots[i], ITEM_PC)
		if Flags & SHOP_SELECT:
			Slot = GemRB.GetSlotItem (pc, inventory_slots[i])
			Item = GemRB.GetItem (Slot['ItemResRef'])
			if Inventory:
				Price = 1
			else:
				Price = GetRealPrice (pc, "buy", Item, Slot)
			# unidentified items fetch only a token price
			if Flags & SHOP_ID:
				Price = 1
			SellSum = SellSum + Price
	Label = Window.GetControl (0x1000002b)
	if Inventory:
		Label.SetText ("")
	else:
		Label.SetText (str(BuySum) )
	if BuySum:
		LeftButton.SetState (IE_GUI_BUTTON_ENABLED)
	else:
		LeftButton.SetState (IE_GUI_BUTTON_DISABLED)
	Label = Window.GetControl (0x1000002c)
	if Inventory:
		Label.SetText ("")
	else:
		Label.SetText (str(SellSum) )
	if SellSum:
		RightButton.SetState (IE_GUI_BUTTON_ENABLED)
	else:
		RightButton.SetState (IE_GUI_BUTTON_DISABLED)
	# repaint the visible page of both panes (SetupItems is defined
	# further down in this file)
	for i in range (ItemButtonCount):
		if i+LeftTopIndex<LeftCount:
			Slot = GemRB.GetStoreItem (i+LeftTopIndex)
		else:
			Slot = None
		Button = Window.GetControl (i+5)
		Label = Window.GetControl (0x10000012+i)
		Button.SetVarAssoc ("LeftIndex", LeftTopIndex+i)
		SetupItems (pc, Slot, Button, Label, i, ITEM_STORE, idx)
		if i+RightTopIndex<RightCount:
			Slot = GemRB.GetSlotItem (pc, inventory_slots[i+RightTopIndex])
		else:
			Slot = None
		Button = Window.GetControl (i+13)
		Label = Window.GetControl (0x1000001e+i)
		Button.SetVarAssoc ("RightIndex", RightTopIndex+i)
		SetupItems (pc, Slot, Button, Label, i, ITEM_PC, idx)
	return
def UpdateStoreIdentifyWindow ():
	"""Refresh the identify window for the currently selected PC."""
	global inventory_slots
	pc = GetPC()
	inventory_slots = GemRB.GetSlots (pc, SLOT_INVENTORY)
	bar = StoreIdentifyWindow.GetControl (7)
	bar.SetVarAssoc ("TopIndex", len(inventory_slots)-ItemButtonCount+1)
	GemRB.SetVar ("Index", -1)
	RedrawStoreIdentifyWindow ()
def RedrawStoreIdentifyWindow ():
	"""Repaint the identify item list, enabling the Identify button and
	the price label according to the current multi-selection."""
	Window = StoreIdentifyWindow
	UpdateStoreCommon (Window, 0x10000000, 0x10000005, 0x10000001)
	TopIndex = GemRB.GetVar ("TopIndex")
	Index = GemRB.GetVar ("Index")
	pc = GemRB.GameGetSelectedPCSingle ()
	Count = len(inventory_slots)
	IDPrice = Store['IDPrice']
	# count items that are both unidentified and selected
	Selected = 0
	for Slot in range (0, Count):
		flags = GemRB.IsValidStoreItem (pc, inventory_slots[Slot], ITEM_PC)
		if flags & SHOP_ID and flags & SHOP_SELECT:
			Selected += 1
	for i in range (ItemButtonCount):
		if TopIndex+i<Count:
			Slot = GemRB.GetSlotItem (pc, inventory_slots[TopIndex+i])
		else:
			Slot = None
		Button = Window.GetControl (i+8)
		# TODO: recheck they really differ
		if GUICommon.GameIsIWD2():
			Label = Window.GetControl (0x1000000d+i)
		else:
			Label = Window.GetControl (0x1000000c+i)
		Button.SetVarAssoc ("Index", TopIndex+i)
		if Slot:
			Flags = GemRB.IsValidStoreItem (pc, inventory_slots[TopIndex+i], ITEM_PC)
			Item = GemRB.GetItem (Slot['ItemResRef'])
			Button.SetItemIcon (Slot['ItemResRef'], 0)
			Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_NAND)
			Button.SetFlags (IE_GUI_BUTTON_PICTURE, OP_OR)
			if Flags & SHOP_ID:
				# unidentified: selectable, shows the generic name
				if Flags & SHOP_SELECT:
					Button.SetState (IE_GUI_BUTTON_SELECTED)
				else:
					Button.SetState (IE_GUI_BUTTON_ENABLED)
				GemRB.SetToken ("ITEMNAME", GemRB.GetString (Item['ItemName']))
				GemRB.SetToken ("ITEMCOST", str(IDPrice) )
				Button.EnableBorder (0, 1)
			else:
				# already identified: greyed out, zero cost
				Button.SetState (IE_GUI_BUTTON_DISABLED)
				GemRB.SetToken ("ITEMNAME", GemRB.GetString (Item['ItemNameIdentified']))
				GemRB.SetToken ("ITEMCOST", str(0) )
				Button.EnableBorder (0, 0)
			Label.SetText (10162)
		else:
			# empty slot on this page
			Button.SetState (IE_GUI_BUTTON_DISABLED)
			Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
			Button.SetFlags (IE_GUI_BUTTON_PICTURE, OP_NAND)
			Label.SetText ("")
	Button = Window.GetControl (5)
	Label = Window.GetControl (0x10000003)
	if Selected:
		Button.SetState (IE_GUI_BUTTON_ENABLED)
		Label.SetText (str(IDPrice * Selected) )
	else:
		Button.SetState (IE_GUI_BUTTON_DISABLED)
		Label.SetText (str(0) )
	return
def IdentifyPressed ():
	"""Identify all selected items, charging IDPrice per item, and dump
	each item's identified description into the window's text area.
	Silently refuses when the party cannot afford the whole batch."""
	pc = GemRB.GameGetSelectedPCSingle ()
	Count = len(inventory_slots)
	# get all the selected items
	toID = []
	for Slot in range (0, Count):
		Flags = GemRB.IsValidStoreItem (pc, inventory_slots[Slot], ITEM_PC)
		if Flags & SHOP_SELECT and Flags & SHOP_ID:
			toID.append(Slot)
	# enough gold?
	EndGold = GemRB.GameGetPartyGold () - Store['IDPrice'] * len(toID)
	if EndGold < 0:
		return
	# identify
	Window = StoreIdentifyWindow
	TextArea = Window.GetControl (23)
	for i in toID:
		GemRB.ChangeStoreItem (pc, inventory_slots[i], SHOP_ID)
		Slot = GemRB.GetSlotItem (pc, inventory_slots[i])
		Item = GemRB.GetItem (Slot['ItemResRef'])
		# FIXME: some items have the title, some don't - figure it out
		TextArea.Append(Item['ItemNameIdentified'])
		TextArea.Append("\n\n")
		TextArea.Append(Item['ItemDescIdentified'])
		TextArea.Append("\n\n\n")
	GemRB.GameSetPartyGold (EndGold)
	UpdateStoreIdentifyWindow ()
	return
def InfoIdentifyWindow ():
    """Open the item-info window for the pc item selected on the identify screen."""
    idx = GemRB.GetVar ("Index")
    pc = GemRB.GameGetSelectedPCSingle ()
    if idx >= len(inventory_slots):
        # selection points past the inventory: nothing to show
        return
    slot = GemRB.GetSlotItem (pc, inventory_slots[idx])
    InfoWindow (slot, GemRB.GetItem (slot['ItemResRef']))
    return
def InfoLeftWindow ():
    """Open the item-info window for the selected store-side (left) item."""
    slot = GemRB.GetStoreItem (GemRB.GetVar ("LeftIndex"))
    InfoWindow (slot, GemRB.GetItem (slot['ItemResRef']))
    return
def InfoRightWindow ():
    """Open the item-info window for the selected pc-side (right) item."""
    idx = GemRB.GetVar ("RightIndex")
    pc = GemRB.GameGetSelectedPCSingle ()
    if idx >= len(inventory_slots):
        # selection points past the inventory: nothing to show
        return
    slot = GemRB.GetSlotItem (pc, inventory_slots[idx])
    InfoWindow (slot, GemRB.GetItem (slot['ItemResRef']))
    return
def InfoWindow (Slot, Item):
    """Show a modal window describing *Item*; identified items reveal full info."""
    global MessageWindow
    identified = Slot['Flags'] & IE_INV_ITEM_IDENTIFIED
    MessageWindow = win = GemRB.LoadWindow (12)
    # fake label (kept empty)
    win.GetControl (0x10000000).SetText ("")
    # description bam (only BG1/BG2 windows have this control)
    if GUICommon.GameIsBG1() or GUICommon.GameIsBG2():
        desc_icon = win.GetControl (7)
        desc_icon.SetItemIcon (Slot['ItemResRef'], 2)
    # slot bam
    slot_icon = win.GetControl (2)
    slot_icon.SetItemIcon (Slot['ItemResRef'], 0)
    name_label = win.GetControl (0x10000007)
    desc_area = win.GetControl (5)
    if identified:
        name_label.SetText (Item['ItemNameIdentified'])
        desc_area.SetText (Item['ItemDescIdentified'])
    else:
        name_label.SetText (Item['ItemName'])
        desc_area.SetText (Item['ItemDesc'])
    # done button
    done = win.GetControl (4)
    done.SetText (11973)
    done.SetEvent (IE_GUI_BUTTON_ON_PRESS, ErrorDone)
    # hide the empty button
    if GUICommon.GameIsBG2() or GUICommon.GameIsIWD2():
        win.DeleteControl (9)
    win.ShowModal (MODAL_SHADOW_GRAY)
    return
def UpdateStoreStealWindow ():
    """Refresh the steal screen: rebind scrollbars, clear selection, redraw."""
    global Store, inventory_slots
    win = StoreStealWindow
    # reget store in case of a change
    Store = GemRB.GetStore ()
    store_count = Store['StoreItemCount']
    left_bar = win.GetControl (9)
    left_bar.SetVarAssoc ("LeftTopIndex", store_count - ItemButtonCount + 1)
    pc = GetPC()
    inventory_slots = GemRB.GetSlots (pc, SLOT_INVENTORY)
    pc_count = len(inventory_slots)
    right_bar = win.GetControl (10)
    right_bar.SetVarAssoc ("RightTopIndex", pc_count - ItemButtonCount + 1)
    # no store item selected yet, so the steal button starts disabled
    GemRB.SetVar ("LeftIndex", -1)
    LeftButton.SetState (IE_GUI_BUTTON_DISABLED)
    RedrawStoreStealWindow ()
    return
def StealPressed ():
    """Attempt to steal the selected store item with the active pc.

    On success the item is transferred; on failure the engine's steal-failed
    reaction fires and the store closes.
    (Removed the old unused local `Window = StoreShoppingWindow`, which
    misleadingly referenced the shopping window from the steal flow.)
    """
    LeftIndex = GemRB.GetVar ("LeftIndex")
    pc = GemRB.GameGetSelectedPCSingle ()
    #percentage skill check, if fails, trigger StealFailed
    #if difficulty = 0 and skill=100, automatic success
    #if difficulty = 0 and skill=50, 50% success
    #if difficulty = 50 and skill=50, 0% success
    #if skill>random(100)+difficulty - success
    if GUICommon.CheckStat100 (pc, IE_PICKPOCKET, Store['StealFailure']):
        GemRB.ChangeStoreItem (pc, LeftIndex, SHOP_STEAL)
        UpdateStoreStealWindow ()
    else:
        GemRB.StealFailed ()
        CloseStoreWindow ()
    return
def RedrawStoreStealWindow ():
    """Redraw both panels of the steal screen (store items left, pc items right)."""
    Window = StoreStealWindow
    UpdateStoreCommon (Window, 0x10000002, 0x10000027, 0x10000023)
    LeftTopIndex = GemRB.GetVar ("LeftTopIndex")
    LeftIndex = GemRB.GetVar ("LeftIndex")
    RightTopIndex = GemRB.GetVar ("RightTopIndex")
    RightIndex = GemRB.GetVar ("RightIndex")
    # packed scroll/selection state handed to SetupItems for both panels
    idx = [ LeftTopIndex, RightTopIndex, LeftIndex, RightIndex ]
    LeftCount = Store['StoreItemCount']  # NOTE(review): currently unused here
    pc = GemRB.GameGetSelectedPCSingle ()
    RightCount = len(inventory_slots)
    for i in range (ItemButtonCount):
        # left panel: store items (steal mode -> last arg 1)
        Slot = GemRB.GetStoreItem (i+LeftTopIndex)
        Button = Window.GetControl (i+4)
        Label = Window.GetControl (0x1000000f+i)
        Button.SetVarAssoc ("LeftIndex", LeftTopIndex+i)
        SetupItems (pc, Slot, Button, Label, i, ITEM_STORE, idx, 1)
        # right panel: pc inventory (may run past the end -> None clears the button)
        if i+RightTopIndex<RightCount:
            Slot = GemRB.GetSlotItem (pc, inventory_slots[i+RightTopIndex])
        else:
            Slot = None
        Button = Window.GetControl (i+11)
        Label = Window.GetControl (0x10000019+i)
        Button.SetVarAssoc ("RightIndex", RightTopIndex+i)
        SetupItems (pc, Slot, Button, Label, i, ITEM_PC, idx, 1)
    # steal button is only usable once a store item is selected
    if LeftIndex>=0:
        LeftButton.SetState (IE_GUI_BUTTON_ENABLED)
    else:
        LeftButton.SetState (IE_GUI_BUTTON_DISABLED)
    return
def SetupItems (pc, Slot, Button, Label, i, type, idx, steal=0):
    """Configure one item button/label pair on a store panel.

    pc     - index of the acting party member
    Slot   - store or inventory slot dict, or None for an empty row
    Button - the icon button to configure
    Label  - the price/name label under the button
    i      - row index on the panel
    type   - ITEM_STORE (left panel) or ITEM_PC (right panel)
    idx    - packed list [LeftTopIndex, RightTopIndex, LeftIndex, RightIndex]
    steal  - nonzero when called from the steal screen
    """
    if Slot == None:
        # empty row: blank the button and label
        Button.SetState (IE_GUI_BUTTON_DISABLED)
        Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
        Button.SetFlags (IE_GUI_BUTTON_PICTURE, OP_NAND)
        Label.SetText ("")
    else:
        LeftTopIndex = idx[0]
        RightTopIndex = idx[1]
        LeftIndex = idx[2]
        Item = GemRB.GetItem (Slot['ItemResRef'])
        Button.SetItemIcon (Slot['ItemResRef'], 0)
        Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_NAND)
        Button.SetFlags (IE_GUI_BUTTON_PICTURE, OP_OR)
        if type == ITEM_STORE:
            # store-side row: price is what the pc would pay
            Price = GetRealPrice (pc, "buy", Item, Slot)
            Flags = GemRB.IsValidStoreItem (pc, i+LeftTopIndex, type)
            if steal:
                # everything on the shelf is a potential steal target
                Button.SetState (IE_GUI_BUTTON_ENABLED)
            else:
                if Flags & SHOP_BUY:
                    if Flags & SHOP_SELECT:
                        Button.SetState (IE_GUI_BUTTON_SELECTED)
                    else:
                        Button.SetState (IE_GUI_BUTTON_ENABLED)
                else:
                    Button.SetState (IE_GUI_BUTTON_DISABLED)
                if not Inventory:
                    Price = GetRealPrice (pc, "sell", Item, Slot)
                if Price <= 0:
                    Price = 1
        else:
            # pc-side row
            Flags = GemRB.IsValidStoreItem (pc, inventory_slots[i+RightTopIndex], type)
            if Flags & SHOP_STEAL:
                if LeftIndex == LeftTopIndex + i:
                    Button.SetState (IE_GUI_BUTTON_SELECTED)
                else:
                    Button.SetState (IE_GUI_BUTTON_ENABLED)
            else:
                Button.SetState (IE_GUI_BUTTON_DISABLED)
            if steal:
                Price = Slot['Price']
            else:
                if Inventory:
                    Price = 1
                else:
                    Price = GetRealPrice (pc, "buy", Item, Slot)
                if (Price>0) and (Flags & SHOP_SELL):
                    if Flags & SHOP_SELECT:
                        Button.SetState (IE_GUI_BUTTON_SELECTED)
                    else:
                        Button.SetState (IE_GUI_BUTTON_DISABLED)
                else:
                    Button.SetState (IE_GUI_BUTTON_DISABLED)
        # unidentified items show the generic name and a highlighted border
        if Flags & SHOP_ID:
            GemRB.SetToken ("ITEMNAME", GemRB.GetString (Item['ItemName']))
            Button.EnableBorder (0, 1)
            if not steal and type == ITEM_PC:
                Price = 1
        else:
            GemRB.SetToken ("ITEMNAME", GemRB.GetString (Item['ItemNameIdentified']))
            Button.EnableBorder (0, 0)
        if Inventory:
            # container mode: label carries a game-specific strref, not a price
            if GUICommon.GameIsIWD1() or GUICommon.GameIsIWD2():
                Label.SetText (24890)
            elif GUICommon.GameIsBG2():
                Label.SetText (28337)
            else:
                Label.SetText ("")
        else:
            GemRB.SetToken ("ITEMCOST", str(Price) )
            Label.SetText (10162)
def GetRealPrice (pc, mode, Item, Slot):
    """Return the effective price of *Item* for this store.

    mode is "buy" (store sells to the pc, BuyMarkup applies) or anything else
    for "sell" (SellMarkup applies).  The result combines the store markup,
    depreciation, the bartering pc's charisma bonus and the party reputation
    modifier, all expressed in percent.
    (Removed the dead store `oc = count`, which was never read.)
    """
    # get the base from the item
    price = Item['Price']
    # modifier from store properties (in percent)
    if mode == "buy":
        mod = Store['BuyMarkup']
    else:
        mod = Store['SellMarkup']
    # depreciation works like this:
    # - if you sell the item the first time, SellMarkup is used;
    # - if you sell the item the second time, SellMarkup-DepreciationRate is used;
    # - if you sell the item any more times, SellMarkup-2*DepreciationRate is used.
    # If the storekeep has an infinite amount of the item, only SellMarkup is used.
    # The amount of items sold at the same time doesn't matter! Selling three bows
    # separately will produce less gold than selling them at the same time.
    # We don't care who is the seller, so if the store already has 2 items, there'll be no gain
    if mode == "buy":
        count = GemRB.FindStoreItem (Slot["ItemResRef"])
        if count:
            if count > 2:
                count = 2
            mod -= count * Store['Depreciation']
    # charisma modifier (in percent)
    mod += GemRB.GetAbilityBonus (IE_CHR, GemRB.GetPlayerStat (BarteringPC, IE_CHR) - 1, 0)
    # reputation modifier (in percent, but absolute)
    mod = mod * RepModTable.GetValue (0, GemRB.GameGetReputation()/10 - 1) / 100
    return price * mod / 100
def UpdateStoreDonateWindow ():
    """Refresh the donation screen: clamp the entry to party gold, toggle the button."""
    win = StoreDonateWindow
    UpdateStoreCommon (win, 0x10000007, 0, 0x10000008)
    field = win.GetControl (5)
    amount = int("0" + field.QueryText ())
    party_gold = GemRB.GameGetPartyGold ()
    if amount > party_gold:
        # can't donate more than the party owns
        amount = party_gold
        field.SetText (str(party_gold))
    donate_button = win.GetControl (3)
    if amount:
        donate_button.SetState (IE_GUI_BUTTON_ENABLED)
    else:
        donate_button.SetState (IE_GUI_BUTTON_DISABLED)
    return
def IncrementDonation ():
    """Raise the donation amount by one, clamped to the party's gold."""
    field = StoreDonateWindow.GetControl (5)
    amount = int("0" + field.QueryText ())
    if amount < GemRB.GameGetPartyGold ():
        field.SetText (str(amount + 1))
    else:
        field.SetText (str(GemRB.GameGetPartyGold ()))
    UpdateStoreDonateWindow ()
    return
def DecrementDonation ():
    """Lower the donation amount by one, never below zero."""
    field = StoreDonateWindow.GetControl (5)
    amount = int("0" + field.QueryText ())
    if amount > 0:
        field.SetText (str(amount - 1))
    else:
        field.SetText (str(0))
    UpdateStoreDonateWindow ()
    return
def DonateGold ():
    """Donate the entered gold; report whether reputation improved."""
    win = StoreDonateWindow
    log_area = win.GetControl (0)
    log_area.SetFlags (IE_GUI_TEXTAREA_AUTOSCROLL)
    anim_button = win.GetControl (10)
    anim_button.SetAnimation ("DONATE")
    field = win.GetControl (5)
    amount = int("0" + field.QueryText ())
    GemRB.GameSetPartyGold (GemRB.GameGetPartyGold () - amount)
    # pick feedback strref and sound depending on whether reputation rose
    if GemRB.IncreaseReputation (amount):
        feedback, sound = 10468, "act_03"
    else:
        feedback, sound = 10469, "act_03e"
    log_area.Append (feedback, -1)
    GemRB.PlaySound (sound)
    UpdateStoreDonateWindow ()
    return
def UpdateStoreHealWindow ():
    """Refresh the temple (healing) screen: one button per purchasable cure."""
    Window = StoreHealWindow
    UpdateStoreCommon (Window, 0x10000000, 0, 0x10000001)
    TopIndex = GemRB.GetVar ("TopIndex")
    Index = GemRB.GetVar ("Index")
    pc = GemRB.GameGetSelectedPCSingle ()
    for i in range (ItemButtonCount):
        Cure = GemRB.GetStoreCure (TopIndex+i)
        Button = Window.GetControl (i+8)
        Label = Window.GetControl (0x1000000c+i)
        Button.SetVarAssoc ("Index", TopIndex+i)
        if Cure:
            Spell = GemRB.GetSpell (Cure['CureResRef'])
            Button.SetSpellIcon (Cure['CureResRef'], 1)
            Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_NAND)
            Button.SetFlags (IE_GUI_BUTTON_PICTURE, OP_OR)
            dead = GemRB.GetPlayerStat (pc, IE_STATE_ID) & STATE_DEAD
            # toggle raise dead/resurrect based on state
            # unfortunately the flags are not set properly in iwd
            print "UpdateStoreHealWindow", dead, Cure['CureResRef'], Spell["SpellTargetType"]
            if (dead and Spell["SpellTargetType"] != 3) or \
               (not dead and Spell["SpellTargetType"] == 3): # 3 - non-living
                # locked and shaded
                Button.SetState (IE_GUI_BUTTON_DISABLED)
                Button.SetBorder (0, 0,0, 0,0, 200,0,0,100, 1,1)
            else:
                Button.SetState (IE_GUI_BUTTON_ENABLED)
                Button.SetBorder (0, 0,0, 0,0, 0,0,0,0, 0,0)
            GemRB.SetToken ("ITEMNAME", GemRB.GetString (Spell['SpellName']))
            GemRB.SetToken ("ITEMCOST", str(Cure['Price']) )
            Label.SetText (10162)
        else:
            # empty row: blank everything
            Button.SetState (IE_GUI_BUTTON_DISABLED)
            Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_OR)
            Button.SetFlags (IE_GUI_BUTTON_PICTURE, OP_NAND)
            Button.SetBorder (0, 0,0, 0,0, 0,0,0,0, 0,0)
            Label.SetText ("")
        # show details of the selected cure and enable the buy button
        # NOTE(review): assumes the selection always points at a row with a
        # valid Cure; a stale Index on an empty row would raise - confirm.
        if TopIndex+i==Index:
            TextArea = Window.GetControl (23)
            TextArea.SetText (Cure['Description'])
            Label = Window.GetControl (0x10000003)
            Label.SetText (str(Cure['Price']) )
            Button = Window.GetControl (5)
            Button.SetState (IE_GUI_BUTTON_ENABLED)
    return
def InfoHealWindow ():
    """Show a modal description of the selected cure spell."""
    global MessageWindow
    UpdateStoreHealWindow ()
    cure = GemRB.GetStoreCure (GemRB.GetVar ("Index"))
    spell = GemRB.GetSpell (cure['CureResRef'])
    MessageWindow = win = GemRB.LoadWindow (14)
    win.GetControl (0x10000000).SetText (spell['SpellName'])
    win.GetControl (2).SetSpellIcon (cure['CureResRef'], 1)
    win.GetControl (3).SetText (spell['SpellDesc'])
    # done button
    done = win.GetControl (5)
    done.SetText (11973)
    done.SetEvent (IE_GUI_BUTTON_ON_PRESS, ErrorDone)
    win.ShowModal (MODAL_SHADOW_GRAY)
    return
def BuyHeal ():
    """Purchase the selected cure and cast it on the active pc."""
    cure = GemRB.GetStoreCure (GemRB.GetVar ("Index"))
    party_gold = GemRB.GameGetPartyGold ()
    if party_gold < cure['Price']:
        # not enough gold
        ErrorWindow (11048)
        return
    GemRB.GameSetPartyGold (party_gold - cure['Price'])
    pc = GemRB.GameGetSelectedPCSingle ()
    GemRB.ApplySpell (pc, cure['CureResRef'])
    UpdateStoreHealWindow ()
    return
def UpdateStoreRumourWindow ():
    """Refresh the tavern screen: one button per available drink."""
    win = StoreRumourWindow
    UpdateStoreCommon (win, 0x10000011, 0, 0x10000012)
    top = GemRB.GetVar ("TopIndex")
    for row in range (5):
        drink = GemRB.GetStoreDrink (top + row)
        btn = win.GetControl (row)
        btn.SetVarAssoc ("Index", row)
        if drink:
            GemRB.SetToken ("ITEMNAME", GemRB.GetString (drink['DrinkName']))
            GemRB.SetToken ("ITEMCOST", str(drink['Price']))
            btn.SetText (10162)
            btn.SetState (IE_GUI_BUTTON_ENABLED)
            btn.SetEvent (IE_GUI_BUTTON_ON_PRESS, GulpDrink)
        else:
            # empty row
            btn.SetText ("")
            btn.SetState (IE_GUI_BUTTON_DISABLED)
    return
def GulpDrink ():
    """Buy and drink the selected tavern drink, then print a rumour."""
    Window = StoreRumourWindow
    TextArea = Window.GetControl (13)
    TextArea.SetFlags (IE_GUI_TEXTAREA_AUTOSCROLL)
    pc = GemRB.GameGetSelectedPCSingle ()
    intox = GemRB.GetPlayerStat (pc, IE_INTOXICATION)
    # NOTE(review): this override makes the "too drunk" branch below
    # unreachable and resets the intoxication base for SetPlayerStat;
    # looks deliberate (unlimited drinking) but confirm before relying on it.
    intox = 0
    if intox > 80:
        TextArea.Append (10832, -1)
        return
    gold = GemRB.GameGetPartyGold ()
    Index = GemRB.GetVar ("TopIndex")+GemRB.GetVar ("Index")
    Drink = GemRB.GetStoreDrink (Index)
    if gold < Drink['Price']:
        # not enough gold
        ErrorWindow (11049)
        return
    GemRB.GameSetPartyGold (gold-Drink['Price'])
    GemRB.SetPlayerStat (pc, IE_INTOXICATION, intox+Drink['Strength'])
    # stronger drinks loosen better rumours
    text = GemRB.GetRumour (Drink['Strength'], Store['TavernRumour'])
    TextArea.Append (text, -1)
    GemRB.PlaySound ("gam_07")
    UpdateStoreRumourWindow ()
    return
def UpdateStoreRentWindow ():
    """Refresh the inn screen: show the selected room's description and price."""
    global RentIndex
    win = StoreRentWindow
    UpdateStoreCommon (win, 0x10000008, 0, 0x10000009)
    RentIndex = GemRB.GetVar ("RentIndex")
    rent_button = win.GetControl (11)
    price_label = win.GetControl (0x1000000d)
    if RentIndex >= 0:
        desc_area = win.GetControl (12)
        desc_area.SetText (roomtypes[RentIndex])
        price_label.SetText (str(Store['StoreRoomPrices'][RentIndex]))
        rent_button.SetState (IE_GUI_BUTTON_ENABLED)
    else:
        # nothing selected yet
        price_label.SetText ("0")
        rent_button.SetState (IE_GUI_BUTTON_DISABLED)
    return
def RentConfirm ():
    """Pay for the selected room, rest the party and report the result."""
    room = GemRB.GetVar ("RentIndex")
    cost = Store['StoreRoomPrices'][room]
    GemRB.GameSetPartyGold (GemRB.GameGetPartyGold () - cost)
    GemRB.RestParty (13, 1, room + 1)
    if RentConfirmWindow:
        RentConfirmWindow.Unload ()
    win = StoreRentWindow
    #is there any way to change this???
    GemRB.SetToken ("HOUR", "8")
    GemRB.SetToken ("HP", "%d"%(room+1))
    win.GetControl (12).SetText (16476)
    # clear the selection and re-enable the room button
    GemRB.SetVar ("RentIndex", -1)
    win.GetControl (room + 4).SetState (IE_GUI_BUTTON_ENABLED)
    UpdateStoreRentWindow ()
    return
def RentDeny ():
    """Dismiss the rent-confirmation dialog without renting."""
    win = RentConfirmWindow
    if win:
        win.Unload ()
    UpdateStoreRentWindow ()
    return
def RentRoom ():
    """Open the rent-confirmation dialog for the selected room, if affordable."""
    global RentIndex, RentConfirmWindow
    RentIndex = GemRB.GetVar ("RentIndex")
    cost = Store['StoreRoomPrices'][RentIndex]
    if GemRB.GameGetPartyGold () < cost:
        # not enough gold
        ErrorWindow (11051)
        return
    RentConfirmWindow = win = GemRB.LoadWindow (11)
    #confirm
    ok_button = win.GetControl (0)
    ok_button.SetText (17199)
    ok_button.SetEvent (IE_GUI_BUTTON_ON_PRESS, RentConfirm)
    ok_button.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
    #deny
    cancel_button = win.GetControl (1)
    cancel_button.SetText (13727)
    cancel_button.SetEvent (IE_GUI_BUTTON_ON_PRESS, RentDeny)
    cancel_button.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
    #textarea
    win.GetControl (3).SetText (15358)
    win.ShowModal (MODAL_SHADOW_GRAY)
    return
def ErrorWindow (strref):
    """Show a modal error dialog displaying the given string reference."""
    global MessageWindow
    MessageWindow = win = GemRB.LoadWindow (10)
    win.GetControl (3).SetText (strref)
    #done
    done = win.GetControl (0)
    done.SetText (11973)
    done.SetEvent (IE_GUI_BUTTON_ON_PRESS, ErrorDone)
    win.ShowModal (MODAL_SHADOW_GRAY)
    return
def ErrorDone ():
    """Close the message window, if one is open."""
    win = MessageWindow
    if win:
        win.Unload ()
    return
###################################################
# End of file GUISTORE.py
| NickDaly/GemRB-FixConfig-Branch | gemrb/GUIScripts/GUISTORE.py | Python | gpl-2.0 | 42,696 |
"""
Top-level conftest.py does a couple of things:
1) Add cfme_pages repo to the sys.path automatically
2) Load a number of plugins and fixtures automatically
"""
from pkgutil import iter_modules
import pytest
import requests
import cfme.fixtures
import fixtures
import markers
import metaplugins
from fixtures.artifactor_plugin import art_client, appliance_ip_address
from cfme.fixtures.rdb import Rdb
from fixtures.pytest_store import store
from utils import ports
from utils.conf import rdb
from utils.log import logger
from utils.path import data_path
from utils.net import net_check
from utils.wait import TimedOutError
class _AppliancePoliceException(Exception):
def __init__(self, message, port, *args, **kwargs):
super(_AppliancePoliceException, self).__init__(message, port, *args, **kwargs)
self.message = message
self.port = port
def __str__(self):
return "{} (port {})".format(self.message, self.port)
@pytest.mark.hookwrapper
def pytest_addoption(parser):
    """Hook wrapper: register the 'cfme' option group, then let other plugins run."""
    # Create the cfme option group for use in other plugins
    parser.getgroup('cfme', 'cfme: options related to cfme/miq appliances')
    # yield so the wrapped pytest_addoption implementations execute
    yield
@pytest.fixture(scope="session", autouse=True)
def set_session_timeout():
    """Raise the appliance UI session timeout to 86400 s (24 h) for the whole run."""
    store.current_appliance.set_session_timeout(86400)
@pytest.fixture(scope="session", autouse=True)
def fix_merkyl_workaround():
    """Workaround around merkyl not opening an iptables port for communication"""
    ssh_client = store.current_appliance.ssh_client
    # `test -s` fails when the init script is missing or empty; install our copy then
    if ssh_client.run_command('test -s /etc/init.d/merkyl').rc != 0:
        logger.info('Rudely overwriting merkyl init.d on appliance;')
        local_file = data_path.join("bundles").join("merkyl").join("merkyl")
        remote_file = "/etc/init.d/merkyl"
        ssh_client.put_file(local_file.strpath, remote_file)
        ssh_client.run_command("service merkyl restart")
    # tell the artifactor that merkyl is available on this appliance
    art_client.fire_hook('setup_merkyl', ip=appliance_ip_address)
@pytest.fixture(scope="session", autouse=True)
def fix_missing_hostname():
    """Fix for hostname missing from the /etc/hosts file

    Note: Affects RHOS-based appliances but can't hurt the others so
    it's applied on all.
    """
    ssh_client = store.current_appliance.ssh_client
    logger.info("Checking appliance's /etc/hosts for its own hostname")
    # grep returns nonzero when the hostname is absent from /etc/hosts
    if ssh_client.run_command('grep $(hostname) /etc/hosts').rc != 0:
        logger.info("Adding it's hostname to its /etc/hosts")
        # Append hostname to the first line (127.0.0.1)
        ret = ssh_client.run_command('sed -i "1 s/$/ $(hostname)/" /etc/hosts')
        if ret.rc == 0:
            logger.info("Hostname added")
        else:
            logger.error("Failed to add hostname")
@pytest.fixture(autouse=True, scope="function")
def appliance_police():
    """Before every test, probe the appliance's ports and UI.

    On failure, attempt a rude EVM restart for the frozen-UI (443) case,
    otherwise page a human through the slave manager / Rdb breakpoint.
    Only runs under a distributed (slave-managed) session.
    """
    if not store.slave_manager:
        return
    try:
        # basic TCP reachability for ssh, the UI and the database
        port_numbers = {
            'ssh': ports.SSH,
            'https': store.current_appliance.ui_port,
            'postgres': ports.DB}
        port_results = {pn: net_check(pp, force=True) for pn, pp in port_numbers.items()}
        for port, result in port_results.items():
            if not result:
                raise _AppliancePoliceException('Unable to connect', port_numbers[port])
        # the UI must actually answer with HTTP 200
        try:
            status_code = requests.get(store.current_appliance.url, verify=False,
                                       timeout=120).status_code
        except Exception:
            raise _AppliancePoliceException('Getting status code failed', port_numbers['https'])
        if status_code != 200:
            raise _AppliancePoliceException('Status code was {}, should be 200'.format(
                status_code), port_numbers['https'])
        return
    except _AppliancePoliceException as e:
        # special handling for known failure conditions
        if e.port == 443:
            # Lots of rdbs lately where evm seems to have entirely crashed
            # and (sadly) the only fix is a rude restart
            store.current_appliance.restart_evm_service(rude=True)
            try:
                store.current_appliance.wait_for_web_ui(900)
                store.write_line('EVM was frozen and had to be restarted.', purple=True)
                return
            except TimedOutError:
                pass
        e_message = str(e)
    except Exception as e:
        e_message = str(e)

    # Regardless of the exception raised, we didn't return anywhere above
    # time to call a human
    msg = 'Help! My appliance {} crashed with: {}'.format(store.current_appliance.url, e_message)
    store.slave_manager.message(msg)
    # drop into a (possibly emailed) remote debugger session
    if 'appliance_police_recipients' in rdb:
        rdb_kwargs = {
            'subject': 'RDB Breakpoint: Appliance failure',
            'recipients': rdb.appliance_police_recipients,
        }
    else:
        rdb_kwargs = {}
    Rdb(msg).set_trace(**rdb_kwargs)
    store.slave_manager.message('Resuming testing following remote debugging')
def _pytest_plugins_generator(*extension_pkgs):
# Finds all submodules in pytest extension packages and loads them
for extension_pkg in extension_pkgs:
path = extension_pkg.__path__
prefix = '%s.' % extension_pkg.__name__
for importer, modname, is_package in iter_modules(path, prefix):
yield modname
# Expose every submodule of the fixture/marker/meta-plugin packages to pytest.
pytest_plugins = tuple(_pytest_plugins_generator(fixtures, markers, cfme.fixtures, metaplugins))
# Keep pytest from collecting the scenario tests by default.
collect_ignore = ["tests/scenarios"]
| lehinevych/cfme_tests | conftest.py | Python | gpl-2.0 | 5,434 |
class Config:
    """Process-wide configuration holder: DB connection settings plus the
    currently active table name (shared via class attributes)."""

    # Connection parameters consumed by the database layer.
    db_config = {
        "user": "root",
        "password": "root",
        "host": "localhost",
        "database": "dita",
    }

    # Name of the table currently being worked on; None until one is chosen.
    table = None

    @classmethod
    def get_table(cls):
        """Return the currently selected table name (or None)."""
        return cls.table

    @classmethod
    def set_table(cls, table):
        """Remember *table* as the active table for subsequent queries."""
        cls.table = table
from termcolor import colored
import cherrywasp.logger
class CherryAccessPoint:
    """A wireless access point observed through its beacon frames.

    Inputs:
      - bssid(str): the MAC address of the device sending beacon frames
      - file_prefix(str): the file prefix to use when creating the .csv file.

    add_new_beaconed_essid records each newly seen ESSID (network name) for
    this access point, logging it and returning a colored console message.
    """

    def __init__(self, bssid, file_prefix):
        self.type = "access_point"
        self.bssid = bssid
        self.beaconed_essid = set()
        self.log = cherrywasp.logger.CherryLogger()
        self.log.file_name_prefix = file_prefix

    # TODO: Add channel to the output & file.
    def add_new_beaconed_essid(self, new_essid):
        """Record *new_essid* if unseen; return a console message, else None."""
        if new_essid in self.beaconed_essid:
            return None
        self.beaconed_essid.add(new_essid)
        self.log.write_to_file("beacon", self.bssid, new_essid)
        return "[+] <{0}> is beaconing as {1}".format(colored(self.bssid, 'red'),
                                                     colored(new_essid, 'green'))
| ajackal/cherry-wasp | cherrywasp/accesspoint.py | Python | gpl-2.0 | 1,136 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2007-2008 Brian G. Matherly
# Copyright (C) 2009 Gary Burton
# Contribution 2009 by Reinhard Mueller <[email protected]>
# Copyright (C) 2010 Jakim Friant
# Copyright (C) 2013-2014 Paul Franklin
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""Reports/Text Reports/Kinship Report"""
#------------------------------------------------------------------------
#
# python modules
#
#------------------------------------------------------------------------
#------------------------------------------------------------------------
#
# gramps modules
#
#------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
from gramps.gen.errors import ReportError
from gramps.gen.relationship import get_relationship_calculator
from gramps.gen.plug.docgen import (IndexMark, FontStyle, ParagraphStyle,
FONT_SANS_SERIF, INDEX_TYPE_TOC,
PARA_ALIGN_CENTER)
from gramps.gen.plug.menu import NumberOption, BooleanOption, PersonOption
from gramps.gen.plug.report import Report
from gramps.gen.plug.report import utils as ReportUtils
from gramps.gen.plug.report import MenuReportOptions
from gramps.gen.plug.report import stdoptions
from gramps.gen.utils.db import get_birth_or_fallback, get_death_or_fallback
#------------------------------------------------------------------------
#
# KinshipReport
#
#------------------------------------------------------------------------
class KinshipReport(Report):
    """Text report listing a center person's kin, grouped by relationship."""

    def __init__(self, database, options, user):
        """
        Create the KinshipReport object that produces the report.

        The arguments are:

        database        - the GRAMPS database instance
        options         - instance of the Options class for this report
        user            - a gen.user.User() instance

        This report needs the following parameters (class variables)
        that come in the options class.

        maxdescend    - Maximum generations of descendants to include.
        maxascend     - Maximum generations of ancestors to include.
        incspouses    - Whether to include spouses.
        inccousins    - Whether to include cousins.
        incaunts      - Whether to include aunts/uncles/nephews/nieces.
        pid           - The Gramps ID of the center person for the report.
        name_format   - Preferred format to display names
        incl_private  - Whether to include private data
        """
        Report.__init__(self, database, options, user)
        menu = options.menu

        stdoptions.run_private_data_option(self, menu)
        self.__db = self.database

        self.max_descend = menu.get_option_by_name('maxdescend').get_value()
        self.max_ascend = menu.get_option_by_name('maxascend').get_value()
        self.inc_spouses = menu.get_option_by_name('incspouses').get_value()
        self.inc_cousins = menu.get_option_by_name('inccousins').get_value()
        self.inc_aunts = menu.get_option_by_name('incaunts').get_value()
        pid = menu.get_option_by_name('pid').get_value()
        self.person = self.database.get_person_from_gramps_id(pid)
        if (self.person == None) :  # NOTE(review): prefer 'is None'
            raise ReportError(_("Person %s is not in the Database") % pid )

        rlocale = self.set_locale(menu.get_option_by_name('trans').get_value())

        stdoptions.run_name_format_option(self, menu)

        self.rel_calc = get_relationship_calculator(reinit=True,
                                                    clocale=rlocale)

        # maps of {Ga: {Gb: [person_handle, ...]}} filled by the traversals
        self.kinship_map = {}
        self.spouse_map = {}

    def write_report(self):
        """
        The routine that actually creates the report. At this point, the
        document is opened and ready for writing.
        """
        pname = self._name_display.display(self.person)

        self.doc.start_paragraph("KIN-Title")
        # feature request 2356: avoid genitive form
        title = self._("Kinship Report for %s") % pname
        mark = IndexMark(title, INDEX_TYPE_TOC, 1)
        self.doc.write_text(title, mark)
        self.doc.end_paragraph()

        if self.inc_spouses:
            spouse_handles = self.get_spouse_handles(self.person.get_handle())
            if spouse_handles:
                self.write_people(self._("Spouses"), spouse_handles)

        # Collect all descendants of the person
        self.traverse_down(self.person.get_handle(), 0, 1)

        # Collect all ancestors/aunts/uncles/nephews/cousins of the person
        self.traverse_up(self.person.get_handle(), 1, 0)

        # Write Kin
        for Ga, Gbs in self.kinship_map.items():
            for Gb in Gbs:
                # To understand these calculations, see:
                # http://en.wikipedia.org/wiki/Cousin#Mathematical_definitions
                x = min (Ga, Gb)
                y = abs(Ga-Gb)
                # Skip unrequested people
                if x == 1 and y > 0 and not self.inc_aunts:
                    continue
                elif x > 1 and not self.inc_cousins:
                    continue

                get_rel_str = self.rel_calc.get_plural_relationship_string

                title = get_rel_str(Ga, Gb, in_law_b=False)
                self.write_people(self._(title), self.kinship_map[Ga][Gb])

                if (self.inc_spouses and
                    Ga in self.spouse_map and
                    Gb in self.spouse_map[Ga]):
                    title = get_rel_str(Ga, Gb, in_law_b=True)
                    self.write_people(self._(title), self.spouse_map[Ga][Gb])

    def traverse_down(self, person_handle, Ga, Gb, skip_handle=None):
        """
        Populate a map of arrays containing person handles for the descendants
        of the passed person. This function calls itself recursively until it
        reaches max_descend.

        Parameters:
        person_handle: the handle of the person to go to next
        Ga: The number of generations from the main person to the common
           ancestor. This should be incremented when going up generations, and
           left alone when going down generations.
        Gb: The number of generations from this person (person_handle) to the
           common ancestor. This should be incremented when going down
           generations and set back to zero when going up generations.
        skip_handle: an optional handle to skip when going down. This is useful
           to skip the descendant that brought you this generation in the first
           place.
        """
        for child_handle in self.get_children_handles(person_handle):
            if child_handle != skip_handle:
                self.add_kin(child_handle, Ga, Gb)

                if self.inc_spouses:
                    for spouse_handle in self.get_spouse_handles(child_handle):
                        self.add_spouse(spouse_handle, Ga, Gb)

                if Gb < self.max_descend:
                    self.traverse_down(child_handle, Ga, Gb+1)

    def traverse_up(self, person_handle, Ga, Gb):
        """
        Populate a map of arrays containing person handles for the ancestors
        of the passed person. This function calls itself recursively until it
        reaches max_ascend.

        Parameters:
        person_handle: the handle of the person to go to next
        Ga: The number of generations from the main person to the common
           ancestor. This should be incremented when going up generations, and
           left alone when going down generations.
        Gb: The number of generations from this person (person_handle) to the
           common ancestor. This should be incremented when going down
           generations and set back to zero when going up generations.
        """
        parent_handles = self.get_parent_handles(person_handle)
        for parent_handle in parent_handles:
            self.add_kin(parent_handle, Ga, Gb)
            # walk back down this parent's other children (siblings, etc.),
            # skipping the branch we came up through
            self.traverse_down(parent_handle, Ga, Gb+1, person_handle)
            if Ga < self.max_ascend:
                self.traverse_up(parent_handle, Ga+1, 0)

    def add_kin(self, person_handle, Ga, Gb):
        """
        Add a person handle to the kin map.
        """
        if Ga not in self.kinship_map:
            self.kinship_map[Ga] = {}
        if Gb not in self.kinship_map[Ga]:
            self.kinship_map[Ga][Gb] = []
        if person_handle not in self.kinship_map[Ga][Gb]:
            self.kinship_map[Ga][Gb].append(person_handle)

    def add_spouse(self, spouse_handle, Ga, Gb):
        """
        Add a person handle to the spouse map.
        """
        if Ga not in self.spouse_map:
            self.spouse_map[Ga] = {}
        if Gb not in self.spouse_map[Ga]:
            self.spouse_map[Ga][Gb] = []
        if spouse_handle not in self.spouse_map[Ga][Gb]:
            self.spouse_map[Ga][Gb].append(spouse_handle)

    def get_parent_handles(self, person_handle):
        """
        Return an array of handles for all the parents of the
        given person handle.
        """
        parent_handles = []
        person = self.__db.get_person_from_handle(person_handle)
        family_handle = person.get_main_parents_family_handle()
        if family_handle:
            family = self.__db.get_family_from_handle(family_handle)
            father_handle = family.get_father_handle()
            if father_handle:
                parent_handles.append(father_handle)
            mother_handle = family.get_mother_handle()
            if mother_handle:
                parent_handles.append(mother_handle)
        return parent_handles

    def get_spouse_handles(self, person_handle):
        """
        Return an array of handles for all the spouses of the
        given person handle.
        """
        spouses = []
        person = self.__db.get_person_from_handle(person_handle)
        for family_handle in person.get_family_handle_list():
            family = self.__db.get_family_from_handle(family_handle)
            father_handle = family.get_father_handle()
            mother_handle = family.get_mother_handle()
            # the spouse is whichever partner is not the person themselves
            spouse_handle = None
            if mother_handle and father_handle == person_handle:
                spouse_handle = mother_handle
            elif father_handle and mother_handle == person_handle:
                spouse_handle = father_handle
            if spouse_handle and spouse_handle not in spouses:
                spouses.append(spouse_handle)
        return spouses

    def get_children_handles(self, person_handle):
        """
        Return an array of handles for all the children of the
        given person handle.
        """
        children = []
        person = self.__db.get_person_from_handle(person_handle)
        for family_handle in person.get_family_handle_list():
            family = self.__db.get_family_from_handle(family_handle)
            for child_ref in family.get_child_ref_list():
                children.append(child_ref.get_reference_handle())
        return children

    def write_people(self, title, people_handles):
        """
        Write information about a group of people - including the title.
        """
        cap_title = title[0].upper() + title[1:]
        subtitle = "%s (%d)" % (cap_title, len(people_handles))
        self.doc.start_paragraph("KIN-Subtitle")
        mark = IndexMark(cap_title, INDEX_TYPE_TOC, 2)
        self.doc.write_text(subtitle, mark)
        self.doc.end_paragraph()
        list(map(self.write_person, people_handles))

    def write_person(self, person_handle):
        """
        Write information about the given person.
        """
        person = self.database.get_person_from_handle(person_handle)

        name = self._name_display.display(person)
        mark = ReportUtils.get_person_mark(self.database, person)
        # birth/death fall back to christening/burial-style events when absent
        birth_date = ""
        birth = get_birth_or_fallback(self.database, person)
        if birth:
            birth_date = self._get_date(birth.get_date_object())

        death_date = ""
        death = get_death_or_fallback(self.database, person)
        if death:
            death_date = self._get_date(death.get_date_object())
        dates = self._(" (%(birth_date)s - %(death_date)s)") % {
            'birth_date' : birth_date,
            'death_date' : death_date }

        self.doc.start_paragraph('KIN-Normal')
        self.doc.write_text(name, mark)
        self.doc.write_text(dates)
        self.doc.end_paragraph()
#------------------------------------------------------------------------
#
# KinshipOptions
#
#------------------------------------------------------------------------
class KinshipOptions(MenuReportOptions):
    """
    Defines options and provides handling interface.
    """

    def __init__(self, name, dbase):
        MenuReportOptions.__init__(self, name, dbase)

    def add_menu_options(self, menu):
        """
        Add options to the menu for the kinship report.
        """
        category_name = _("Report Options")

        def register(key, option, help_text):
            # Attach the help string and file the option under the
            # common category in one step.
            option.set_help(help_text)
            menu.add_option(category_name, key, option)

        register("pid", PersonOption(_("Center Person")),
                 _("The center person for the report"))

        stdoptions.add_name_format_option(menu, category_name)

        register("maxdescend",
                 NumberOption(_("Max Descendant Generations"), 2, 1, 20),
                 _("The maximum number of descendant generations"))
        register("maxascend",
                 NumberOption(_("Max Ancestor Generations"), 2, 1, 20),
                 _("The maximum number of ancestor generations"))
        register("incspouses",
                 BooleanOption(_("Include spouses"), True),
                 _("Whether to include spouses"))
        register("inccousins",
                 BooleanOption(_("Include cousins"), True),
                 _("Whether to include cousins"))
        register("incaunts",
                 BooleanOption(_("Include aunts/uncles/nephews/nieces"), True),
                 _("Whether to include aunts/uncles/nephews/nieces"))

        stdoptions.add_private_data_option(menu, category_name)
        stdoptions.add_localization_option(menu, category_name)

    def make_default_style(self, default_style):
        """Make the default output style for the Kinship Report."""
        title_font = FontStyle()
        title_font.set_size(16)
        title_font.set_type_face(FONT_SANS_SERIF)
        title_font.set_bold(1)
        title_para = ParagraphStyle()
        title_para.set_header_level(1)
        title_para.set_bottom_border(1)
        title_para.set_bottom_margin(ReportUtils.pt2cm(8))
        title_para.set_font(title_font)
        title_para.set_alignment(PARA_ALIGN_CENTER)
        title_para.set_description(_("The style used for the title of the page."))
        default_style.add_paragraph_style("KIN-Title", title_para)

        sub_font = FontStyle()
        sub_font.set_size(12)
        sub_font.set_bold(True)
        sub_para = ParagraphStyle()
        sub_para.set_header_level(3)
        sub_para.set_font(sub_font)
        sub_para.set_top_margin(ReportUtils.pt2cm(6))
        sub_para.set_description(_('The basic style used for sub-headings.'))
        default_style.add_paragraph_style("KIN-Subtitle", sub_para)

        normal_font = FontStyle()
        normal_font.set_size(10)
        normal_para = ParagraphStyle()
        normal_para.set_font(normal_font)
        normal_para.set_left_margin(0.5)
        normal_para.set_description(_('The basic style used for the text display.'))
        default_style.add_paragraph_style("KIN-Normal", normal_para)
| pmghalvorsen/gramps_branch | gramps/plugins/textreport/kinshipreport.py | Python | gpl-2.0 | 16,756 |
#
# Copyright 2001 - 2011 Ludek Smid [http://www.ospace.net/]
#
# This file is part of IGE - Outer Space.
#
# IGE - Outer Space is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# IGE - Outer Space is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with IGE - Outer Space; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
from ige import *
from ige import log
from ige.IObject import IObject
from ige.IDataHolder import IDataHolder
from Const import *
import Rules, Utils, math, ShipUtils, time
import re
from ai_parser import AIList
class IPlayer(IObject):
typeID = T_PLAYER
resignTo = T_AIPLAYER
forums = {"INBOX": 56, "OUTBOX": 56, "EVENTS": 4}
    def init(self, obj):
        """Initialize a freshly created player object with default state."""
        IObject.init(self, obj)
        # identity
        obj.login = u''
        obj.fullName = u''
        # owned objects and knowledge
        obj.buoys = {}
        obj.alliedBuoys = {}
        obj.planets = []
        obj.fleets = []
        obj.techs = {} # techs and their sublevel
        obj.obsoleteTechs = set()
        obj.rsrchQueue = []
        obj.sciPoints = 0
        obj.effSciPoints = 0
        obj.techLevel = 1
        obj.shipDesigns = {}
        obj.race = "H" # race Bionic, Human, Cyborg
        # bonuses
        obj.prodEff = 1.0
        obj.sciEff = 1.0
        # government
        obj.govPwr = 0
        obj.govPwrCtrlRange = 1
        # fleet support
        obj.fleetUpgradePool = 0.0
        obj.fleetUpgradeInProgress = 0
        # production
        obj.prodQueues = [[],[],[],[],[]]
        obj.prodIncreasePool = 0.0
        # diplomacy
        obj.diplomacyRels = {}
        obj.defaultRelation = Rules.defaultRelation
        obj.voteFor = OID_NONE
        obj.governorOf = OID_NONE
        obj.governors = []
        obj.alliance = OID_NONE
        obj.imperator = 0
        # combat
        # anti-small, anti-medium, anti-large, shield generator
        obj.planetWeapons = [None, None, None, None, None]
        # scanner knowledge
        obj.staticMap = {}
        obj.dynamicMap = {}
        obj.galaxies = []
        obj.validSystems = []
        # statistics / session
        obj.stats = IDataHolder()
        obj.stats.type = T_STATS
        obj.timeEnabled = 0
        obj.stratRes = {}
        obj.lastLogin = 0.0
        # ship routing
        obj.shipRedirections = {}
        # NOTE(review): obj.buoys is assigned twice in this method; this
        # second assignment is redundant.
        obj.buoys = {}
        # client-reported statistics
        obj.clientStats = {}
    def update(self, tran, obj):
        """Validate and repair the player object after load or upgrade.

        Rebuilds ship design specs, prunes stale references (diplomacy,
        scanner maps, planets, buoys, fleets, techs, research queue),
        re-adds the player to the universe list if missing, and recomputes
        derived attributes such as the best planetary weapons.
        """
        # update all designs
        for designID in obj.shipDesigns:
            old = obj.shipDesigns[designID]
            new = ShipUtils.makeShipMinSpec(obj, old.name, old.hullID,
                old.eqIDs, old.improvements, raiseExs = False)
            new.built = old.built
            if hasattr(old, "upgradeTo"):
                new.upgradeTo = old.upgradeTo
            obj.shipDesigns[designID] = new
        # check all diplomacyRels
        for partyID in obj.diplomacyRels.keys():
            party = tran.db.get(partyID, None)
            if not party or party.type not in PLAYER_TYPES:
                log.debug("Deleting party", obj.oid, partyID)
                del obj.diplomacyRels[partyID]
        # delete obj with low scan pwr
        # check type of the objects in the map
        for objID in obj.staticMap.keys():
            obj.staticMap[objID] = min(obj.staticMap[objID], Rules.maxScanPwr)
            if obj.staticMap[objID] < Rules.level1InfoScanPwr:
                del obj.staticMap[objID]
            # NOTE(review): if the entry was already deleted just above AND
            # the object is not a system, this second delete raises KeyError
            # - confirm whether this branch should be an elif.
            if not tran.db.has_key(objID) or tran.db[objID].type not in (T_SYSTEM, T_WORMHOLE):
                log.debug("Deleting non system %d from static map of player %d" % (objID, obj.oid))
                del obj.staticMap[objID]
        for objID in obj.dynamicMap.keys():
            if obj.dynamicMap[objID] < Rules.level1InfoScanPwr:
                del obj.dynamicMap[objID]
            # NOTE(review): same potential double-delete as the static map above.
            if not tran.db.has_key(objID) or tran.db[objID].type not in (T_FLEET, T_ASTEROID):
                log.debug("Deleting obj %d from dynamic map of player %d" % (objID, objID))
                del obj.dynamicMap[objID]
        # check if all planets are planets
        for objID in obj.planets[:]:
            try:
                if not tran.db.has_key(objID):
                    log.debug("Planet does not exists - removing", obj.oid, objID)
                    obj.planets.remove(objID)
                if tran.db[objID].type != T_PLANET:
                    log.debug("Planet is not a planet - removing", obj.oid, objID)
                    obj.planets.remove(objID)
            except:
                log.warning("There is a problem when processing planet - removing", obj.oid, objID)
                obj.planets.remove(objID)
        # check if systems in buoys are systems
        for objID in obj.buoys.keys():
            try:
                if not tran.db.has_key(objID):
                    log.debug("System for buoy does not exists - removing", obj.oid, objID)
                    del obj.buoys[objID]
                if tran.db[objID].type not in (T_SYSTEM, T_WORMHOLE):
                    log.debug("System for buoy is not a system - removing", obj.oid, objID)
                    del obj.buoys[objID]
            except:
                log.warning("There is a problem when processing system for buoy - removing", obj.oid, objID)
                del obj.buoys[objID]
        # check if fleets are fleets
        for objID in obj.fleets[:]:
            try:
                if not tran.db.has_key(objID):
                    log.debug("Fleet does not exists - removing", obj.oid, objID)
                    obj.fleets.remove(objID)
                if tran.db[objID].type not in (T_FLEET, T_ASTEROID):
                    log.debug("Fleet is not a fleet - removing", obj.oid, objID)
                    obj.fleets.remove(objID)
            except:
                log.warning("There is a problem when processing planet - removing", obj.oid, objID)
        # check accessible technologies
        # repeat until a full pass deletes nothing (wip == "work in progress")
        wip = 1
        while wip:
            wip = 0
            for techID in obj.techs.keys():
                if techID not in Rules.techs:
                    wip = 1
                    log.debug("Deleting nonexistent tech", techID, "player", obj.oid)
                    del obj.techs[techID]
                    continue
                tech = Rules.techs[techID]
                # check tech level
                if tech.level > obj.techLevel:
                    wip = 1
                    log.debug("Deleting tech", techID, "player", obj.oid)
                    if techID in obj.techs: del obj.techs[techID]
                # disabled?
                #for tmpTechID in obj.techs.keys():
                #    if techID in Rules.techs[tmpTechID].researchDisables:
                #        wip = 1
                #        log.debug("Deleting tech", techID, "player", obj.oid)
                #        if techID in obj.techs: del obj.techs[techID]
                #        break
                # check requirements
                #for tmpTechID, improvement in tech.researchRequires:
                #    if not obj.techs.has_key(tmpTechID) or obj.techs[tmpTechID] < improvement:
                #        wip = 1
                #        log.debug("Deleting tech", techID, "player", obj.oid)
                #        if techID in obj.techs: del obj.techs[techID]
                #        break
        # drop research tasks for techs that vanished or are unreachable
        for rTask in obj.rsrchQueue[:]:
            if rTask.techID not in Rules.techs:
                log.debug("Deleting res task for nonexistent tech", rTask.techID, "player", obj.oid)
                obj.rsrchQueue.remove(rTask)
                continue
            tech = Rules.techs[rTask.techID]
            if tech.level == 99:
                log.debug("Deleting res task", rTask.techID, "player", obj.oid)
                obj.rsrchQueue.remove(rTask)
        # check if player is in the universe
        universe = tran.db[OID_UNIVERSE]
        if obj.oid not in universe.players and obj.oid not in (OID_NATURE, OID_ADMIN):
            log.debug(obj.oid, "Adding player to the universe")
            universe.players.append(obj.oid)
        # check nick (TODO remove in 0.5.33)
        if not hasattr(obj, "fullName"):
            obj.fullName = obj.name
        # TODO remove in 0.5.69
        if not hasattr(obj, "prodQueues"):
            obj.prodQueues = [[],[],[],[],[]]
        # check if player is a leader
        if not obj.galaxies:
            log.debug(obj.oid, obj.name, "IS NOT IN ANY GALAXY")
        else:
            galaxy = tran.db[obj.galaxies[0]]
            if galaxy.imperator != obj.oid and obj.imperator > 0:
                log.debug(obj.oid, "Removing imperator/leader bonus")
                obj.imperator = 0
        ## NON VALIDATING CODE (DERIVED ATTRS AND SO ON)
        # get best technologies for planet weapons
        bestScores = [0, 0, 0, 0]
        obj.planetWeapons = [None, None, None, None, None]
        for techID in obj.techs:
            tech = Rules.techs[techID]
            if tech.isShipEquip and tech.weaponDmgMin > 0 and not tech.buildSRes\
                and tech.weaponGoodForFlak:
                # compute score
                weaponEff = Rules.techImprEff[obj.techs.get(techID, Rules.techBaseImprovement)]
                score = (tech.weaponDmgMin + tech.weaponDmgMax) / 2.0 * \
                    tech.weaponROF * (tech.weaponAtt + 10.0)/10 * weaponEff
                if score > bestScores[tech.weaponClass]:
                    obj.planetWeapons[tech.weaponClass] = techID
                    bestScores[tech.weaponClass] = score
        #@log.debug(obj.oid, "Planet weapons", obj.planetWeapons)
        # update all ship designs
        for designID in obj.shipDesigns:
            old = obj.shipDesigns[designID]
            new = ShipUtils.makeShipMinSpec(obj, old.name, old.hullID,
                old.eqIDs, old.improvements, raiseExs = False)
            new.built = old.built
            new.upgradeTo = old.upgradeTo
            obj.shipDesigns[designID] = new
        if not hasattr(obj, 'obsoleteTechs'):
            obj.obsoleteTechs = set()
    update.public = 0
def startGlobalConstruction(self, tran, player, techID, quantity, isShip, reportFinished, queue):
if len(player.prodQueues) <= queue:
raise GameException('Invalid queue.')
if len(player.prodQueues[queue]) > Rules.maxProdQueueLen:
raise GameException('Queue is full.')
if quantity < 1:
raise GameException("Quantity must be greater than 0")
if not player.techs.has_key(techID) and isShip == 0:
raise GameException('You do not own this kind of technology.')
if not player.shipDesigns.has_key(techID) and isShip == 1:
raise GameException('You do not own this ship design.')
if isShip:
tech = player.shipDesigns[techID]
if tech.upgradeTo:
raise GameException("You cannot build obsolete ship design.")
else:
tech = Rules.techs[techID]
if tech.isStructure or not tech.isProject:
raise GameException('You cannot construct this technology.')
elif tech.globalDisabled:
raise GameException('You cannot construct targeted project.')
neededSR = {}
for sr in tech.buildSRes:
if player.stratRes.get(sr, 0) < neededSR.get(sr, 0) + quantity:
raise GameException("You do not own required strategic resource(s)")
neededSR[sr] = neededSR.get(sr, 0) + quantity
# consume strategic resources
for sr in neededSR:
player.stratRes[sr] -= neededSR[sr]
# start construction
item = IDataHolder()
item.techID = techID
item.quantity = int(quantity)
item.changePerc = 0
item.isShip = bool(isShip)
item.reportFin = bool(reportFinished)
item.type = T_TASK
player.prodQueues[queue].append(item)
return player.prodQueues[queue], player.stratRes
startGlobalConstruction.public = 1
startGlobalConstruction.accLevel = AL_FULL
    def changeGlobalConstruction(self, tran, player, queue, index, quantity):
        """Change the quantity of an existing global construction task.

        Consumes (or refunds) the strategic-resource difference implied by
        the quantity change and recomputes the task's total cost.
        Raises GameException on bad index/quantity or missing resources.
        """
        if index < 0 or index >= len(player.prodQueues[queue]):
            raise GameException("No such item in the construction queue.")
        if quantity < 1:
            raise GameException("Quantity must be greater than 0")
        # NOTE(review): `queue` itself is not range-checked here (unlike
        # abortGlobalConstruction) - an out-of-range queue raises IndexError.
        item = player.prodQueues[queue][index]
        if item.isShip:
            tech = player.shipDesigns[item.techID]
        else:
            tech = Rules.techs[item.techID]
        # positive when the task grows, negative when it shrinks
        quantityChange = quantity - player.prodQueues[queue][index].quantity
        neededSR = {}
        for sr in tech.buildSRes:
            if player.stratRes.get(sr, 0) < neededSR.get(sr, 0) + quantityChange:
                raise GameException("You do not own required strategic resource(s)")
            neededSR[sr] = neededSR.get(sr, 0) + quantityChange
        # consume strategic resources (acts as a refund when the change is negative)
        for sr in neededSR:
            player.stratRes[sr] += (-1 * neededSR[sr])
        player.prodQueues[queue][index].quantity = quantity
        player.prodQueues[queue][index].const = tech.buildProd * quantity
        return player.prodQueues[queue], player.stratRes
    changeGlobalConstruction.public = 1
    changeGlobalConstruction.accLevel = AL_FULL
    def abortGlobalConstruction(self, tran, player, queue, index):
        """Cancel a global construction task and refund its strategic resources."""
        if len(player.prodQueues) <= queue or queue < 0:
            raise GameException('Invalid queue.')
        if len(player.prodQueues[queue]) <= index or index < 0:
            raise GameException('Invalid task.')
        item = player.prodQueues[queue][index]
        # return strategic resources
        # NOTE(review): IDs below 1000 are assumed to be ship designs, IDs
        # >= 1000 regular technologies - confirm against Rules.techs numbering.
        if item.techID < 1000:
            tech = player.shipDesigns[item.techID]
        else:
            tech = Rules.techs[item.techID]
        for sr in tech.buildSRes:
            player.stratRes[sr] += item.quantity
        player.prodQueues[queue].pop(index)
        return player.prodQueues[queue], player.stratRes
    abortGlobalConstruction.public = 1
    abortGlobalConstruction.accLevel = AL_FULL
def moveGlobalConstrItem(self, tran, player, queue, index, rel):
if index >= len(player.prodQueues[queue]):
raise GameException('No such item in the construction queue.')
if index + rel < 0 or index + rel >= len(player.prodQueues[queue]):
raise GameException('Cannot move.')
item = player.prodQueues[queue][index]
del player.prodQueues[queue][index]
player.prodQueues[queue].insert(index + rel, item)
return player.prodQueues[queue]
moveGlobalConstrItem.public = 1
moveGlobalConstrItem.accLevel = AL_FULL
    def getReferences(self, tran, obj):
        """Return all object IDs referenced by this player (its fleets)."""
        return obj.fleets
    getReferences.public = 0
    def loggedIn(self, tran, obj):
        """Record the wall-clock time of the player's login."""
        obj.lastLogin = time.time()
    loggedIn.public = 0
    def resign(self, tran, obj):
        """Remove player from the game. Give remaining planets, ... to the REBELS"""
        # cannot resign when time is stopped
        # TODO smarted conditions (like cannot resign twice a week or so)
        if not obj.timeEnabled:
            raise GameException('You cannot resign current game - time is stopped.')
        log.debug("Resigning player", obj.oid)
        # morph player to AI (resignTo is T_AIPLAYER on this class)
        obj.type = self.resignTo
        self.cmd(obj).upgrade(tran, obj)
        self.cmd(obj).update(tran, obj)
        # reregister under the new (AI) type
        tran.gameMngr.removePlayer(obj.oid)
        self.cmd(obj).reregister(tran, obj)
    resign.public = 1
    resign.accLevel = AL_OWNER
    def delete(self, tran, obj):
        """Remove the player from the game entirely.

        AI accounts are also deregistered from the AI bookkeeping; all
        diplomacy records pointing at this player are dropped and the
        player's fleets disbanded.
        """
        # check whether it is AI or normal player
        if obj.type in AI_PLAYER_TYPES:
            # remove AI account from the game, and record in the AI list
            log.debug("Removing AI account from the AI list", obj.oid)
            tran.gameMngr.clientMngr.removeAiAccount(obj.login)
            aiList = AIList(tran.gameMngr.configDir)
            aiList.remove(obj.login)
        log.debug("Deleting player", obj.oid)
        # delete relations other players hold towards this one
        for playerID in tran.db[OID_UNIVERSE].players:
            player = tran.db[playerID]
            self.cmd(player).deleteDiplomacyWith(tran, player, obj.oid)
        # delete fleets
        for fleetID in obj.fleets:
            fleet = tran.db[fleetID]
            self.cmd(fleet).disbandFleet(tran, fleet)
        # best effort - the player may already be deregistered
        try:
            tran.gameMngr.removePlayer(obj.oid)
        except Exception:
            log.warning("Cannot remove player")
    delete.public = 1
    delete.accLevel = AL_ADMIN
    def giveUp(self, tran, obj, playerID):
        """Remove player from the game. Give remaining planets, ... to the specified player"""
        # cannot resign when time is stopped
        # TODO smarted conditions (like cannot resign twice a week or so)
        if not obj.timeEnabled:
            raise GameException('You cannot resign current game - time is stopped.')
        player = tran.db[playerID]
        # give planets
        for planetID in obj.planets[:]: # needs a copy - changeOwner modifies this
            planet = tran.db[planetID]
            self.cmd(planet).changeOwner(tran, planet, playerID, force = 1)
        # give fleets
        for fleetID in obj.fleets[:]:
            fleet = tran.db[fleetID]
            fleet.owner = playerID
            player.fleets.append(fleetID)
        # remove player from the game manager and the universe roster
        tran.gameMngr.removePlayer(obj.oid)
        try:
            tran.db[OID_UNIVERSE].players.remove(obj.oid)
        except ValueError:
            # already absent from the roster
            pass
    giveUp.public = 1
    giveUp.accLevel = AL_ADMIN
def addShipDesign(self, tran, obj, name, hullID, eqIDs):
"""Add ship design to the database of designs."""
# normalize design
name = name.strip()
# check technologies
if hullID not in obj.techs:
raise GameException("You do not posses this hull type.")
for techID in eqIDs:
if techID not in obj.techs:
raise GameException("You do not posses technology(ies) to construct this ship.")
# create spec (throws exception for invad ones)
spec = ShipUtils.makeShipMinSpec(obj, name, hullID, eqIDs, [])
# check number of designs
if len(obj.shipDesigns) > Rules.shipMaxDesigns:
raise GameException("No space to store design.")
# check name of designs
for designID in obj.shipDesigns:
if obj.shipDesigns[designID].name == name:
raise GameException("Design name is already used.")
if re.match("^\s*$",name):
raise GameException("Design name must not be entirely whitespace.")
# find free design id
index = 1
ids = obj.shipDesigns.keys()
while 1:
if index not in ids:
break
index += 1
# add design
obj.shipDesigns[index] = spec
return obj.shipDesigns, index
addShipDesign.public = 1
addShipDesign.accLevel = AL_OWNER
    def addBuoy(self, tran, obj, systemID, text, type):
        """Add new buoy to player buoys.

        Empty *text* deletes an existing buoy; an existing buoy is edited
        in place. At most 30 buoys may exist and they may only be placed
        on systems or wormholes. Returns the buoy dictionary.
        """
        # delete buoy
        if not text:
            if systemID in obj.buoys:
                del obj.buoys[systemID]
                return obj.buoys
            else:
                raise GameException("Buoy at specified system does not exist.")
        if type not in (BUOY_PRIVATE, BUOY_TO_ALLY, BUOY_TO_SCANNERSHARE):
            raise GameException("Wrong bouy type.")
        # edit buoy
        if systemID in obj.buoys:
            obj.buoys[systemID] = (text, type)
            return obj.buoys
        if len(obj.buoys) >= 30:
            raise GameException("You cannot add more than 30 buoys.")
        if tran.db[systemID].type not in (T_SYSTEM, T_WORMHOLE):
            raise GameException("You can add buoy only to system.")
        # new buoy (text is known non-empty here - the guard is redundant)
        if len(text) > 0:
            obj.buoys[systemID] = (text, type)
        return obj.buoys
    addBuoy.public = 1
    addBuoy.accLevel = AL_OWNER
    def scrapShipDesign(self, tran, obj, designID):
        """Remove ship design from the database of designs and remove all
        active ships using this design."""
        # check design ID
        if designID not in obj.shipDesigns:
            raise GameException("No such design.")
        # delete ships
        for fleetID in obj.fleets[:]: # make copy, fleet can be deleted
            fleet = tran.db[fleetID]
            self.cmd(fleet).deleteDesign(tran, fleet, designID)
        # delete tasks
        for planetID in obj.planets:
            planet = tran.db[planetID]
            self.cmd(planet).deleteDesign(tran, planet, designID)
        # clear upgradeTo on designs that pointed at the scrapped one
        for tmpDesignID in obj.shipDesigns:
            spec = obj.shipDesigns[tmpDesignID]
            if spec.upgradeTo == designID:
                spec.upgradeTo = 0
        # delete design
        del obj.shipDesigns[designID]
        return obj.shipDesigns, obj.fleets, obj.stratRes
    scrapShipDesign.public = 1
    scrapShipDesign.accLevel = AL_OWNER
def getShipDesign(self,tran,obj,designID):
if designID not in obj.shipDesigns:
raise GameException("No such design.")
return obj.shipDesigns[designID]
def upgradeShipDesign(self, tran, obj, oldDesignID, newDesignID):
# check designs ID
if oldDesignID not in obj.shipDesigns:
raise GameException("No such design.")
if newDesignID not in obj.shipDesigns:
raise GameException("No such design.")
if oldDesignID == newDesignID:
raise GameException("Designs are the same.")
oldSpec = obj.shipDesigns[oldDesignID]
newSpec = obj.shipDesigns[newDesignID]
if oldSpec.upgradeTo:
raise GameException("Old design has already been made obsolete.")
if newSpec.upgradeTo:
raise GameException("New design has already been made obsolete.")
if oldSpec.combatClass != newSpec.combatClass:
raise GameException("Designs must be of the same combat class.")
# set old design as upgradable
oldSpec.upgradeTo = newDesignID
# if something is upgraded to oldDesign change it to new design
for desID in obj.shipDesigns:
if obj.shipDesigns[desID].upgradeTo == oldDesignID:
obj.shipDesigns[desID].upgradeTo = newDesignID
# compute strat res difference
stratRes = {}
for sr in oldSpec.buildSRes:
stratRes[sr] = stratRes.get(sr, 0) - 1
for sr in newSpec.buildSRes:
stratRes[sr] = stratRes.get(sr, 0) + 1
if stratRes[sr] == 0:
del stratRes[sr]
log.debug("upgradeShipDesign", obj.oid, stratRes)
# modify tasks
tasksUpgraded = False
if not stratRes:
log.debug("upgradeShipDesign - upgrading tasks")
for planetID in obj.planets:
planet = tran.db[planetID]
self.cmd(planet).changeShipDesign(tran, planet, oldDesignID, newDesignID)
# upgrade global queue as well
for queue in obj.prodQueues:
for task in queue:
if task.techID == oldDesignID:
task.techID = newDesignID
tasksUpgraded = True
else:
log.debug("upgradeShipDesing - NOT upgrading tasks")
return obj.shipDesigns, obj.stratRes, tasksUpgraded, obj.prodQueues
upgradeShipDesign.public = 1
upgradeShipDesign.accLevel = AL_OWNER
    def cancelUpgradeShipDesign(self, tran, obj, designID):
        """Clear the obsolete/upgrade mark on a ship design."""
        # check designs ID
        if designID not in obj.shipDesigns:
            raise GameException("No such design.")
        obj.shipDesigns[designID].upgradeTo = OID_NONE
        return obj.shipDesigns
    cancelUpgradeShipDesign.public = 1
    cancelUpgradeShipDesign.accLevel = AL_OWNER
def startResearch(self, tran, obj, techID, improveToMax = 0):
if len(obj.rsrchQueue) > Rules.maxRsrchQueueLen:
GameException('Queue is full.')
tech = Rules.techs[techID]
# player has to be a right race
if obj.race not in tech.researchRaces:
raise GameException("Your race cannot research this technology.")
# item cannot be researched twice
for tmpTech in obj.rsrchQueue:
if tmpTech.techID == techID:
raise GameException('Technology is already sheduled for research.')
# disabled?
for tmpTechID in obj.techs:
if techID in Rules.techs[tmpTechID].researchDisables:
raise GameException("Previous research has disabled this technology.")
# check requirements
for tmpTechID, improvement in tech.researchRequires:
if not obj.techs.has_key(tmpTechID) or obj.techs[tmpTechID] < improvement:
raise GameException('You cannot research this technology yet.')
improvement = obj.techs.get(techID, Rules.techBaseImprovement - 1) + 1
if improvement > Rules.techMaxImprovement or improvement > tech.maxImprovement:
raise GameException('You cannot improve this technology further.')
if tech.level > obj.techLevel:
raise GameException("Your technological level is insufficient.")
# check strategic resources
if improvement == 1:
for stratRes in tech.researchReqSRes:
if obj.stratRes.get(stratRes, 0) < 1:
raise GameException("Required strategy resource missing.")
item = IDataHolder()
item.techID = techID
item.improvement = improvement
item.currSci = 0
item.changeSci = 0
item.improveToMax = improveToMax
item.type = T_RESTASK
obj.rsrchQueue.append(item)
return obj.rsrchQueue
startResearch.public = 1
startResearch.accLevel = AL_FULL
def abortResearch(self, tran, obj, index):
if index >= len(obj.rsrchQueue) or index < 0:
GameException('No such item in queue.')
del obj.rsrchQueue[index]
return obj.rsrchQueue
abortResearch.public = 1
abortResearch.accLevel = AL_FULL
def editResearch(self, tran, obj, index, improveToMax = 0):
if index >= len(obj.rsrchQueue) or index < 0:
GameException('No such item in queue.')
obj.rsrchQueue[index].improveToMax = improveToMax
return obj.rsrchQueue
editResearch.public = 1
editResearch.accLevel = AL_FULL
def moveResearch(self, tran, obj, index, rel):
if index >= len(obj.rsrchQueue):
raise GameException('No such item in the researcg queue.')
if index + rel < 0 or index + rel >= len(obj.rsrchQueue):
raise GameException('Cannot move.')
item = obj.rsrchQueue[index]
del obj.rsrchQueue[index]
obj.rsrchQueue.insert(index + rel, item)
return obj.rsrchQueue
moveResearch.public = 1
moveResearch.accLevel = AL_FULL
    def redirectShips(self, tran, obj, sourceSystemID, targetSystemID):
        """Set or clear a ship redirection from one system to another.

        Redirecting a system to itself clears the redirection. The player
        must own a planet in the source system.
        """
        # check sourceSystemID
        ok = 0
        if sourceSystemID == targetSystemID:
            # self-redirection means "clear the redirection"
            targetSystemID = OID_NONE
        for planetID in tran.db[sourceSystemID].planets:
            if tran.db[planetID].owner == obj.oid:
                ok = 1
        if not ok:
            raise GameException("You must own planet in the source system")
        # check targetSystemID
        # NOTE: `and 0` keeps this validation permanently disabled
        if targetSystemID != OID_NONE and 0: # TODO: switch on
            ok = 0
            for planetID in tran.db[targetSystemID].planets:
                if tran.db[planetID].owner == obj.oid:
                    ok = 1
            if not ok:
                raise GameException("You must own planet in the target system")
        # fine - record it
        log.debug(obj.oid, "Adding redirection", sourceSystemID, targetSystemID)
        if targetSystemID:
            obj.shipRedirections[sourceSystemID] = targetSystemID
        else:
            try:
                del obj.shipRedirections[sourceSystemID]
            except KeyError:
                # nothing to clear
                pass
        return obj.shipRedirections
    redirectShips.public = 1
    redirectShips.accLevel = AL_FULL
    def getPublicInfo(self, tran, obj):
        """Return the subset of player data visible to everybody."""
        result = IObject.getPublicInfo(self, tran, obj)
        result.type = obj.type
        result.name = obj.name
        return result
    getPublicInfo.public = 1
    getPublicInfo.accLevel = AL_NONE
    def changePactCond(self, tran, obj, playerID, pactID, state, conditions):
        """Offer, alter or withdraw a pact with another party.

        Stores [state, condition...] in the player's diplomacy record;
        when the pact is switched off, the partner's side is switched off
        as well. Returns the player's diplomacy relations.
        """
        log.debug("changePactCond", obj.oid, playerID, pactID)
        # must have a contact
        if playerID not in obj.diplomacyRels:
            raise GameException('No contact with this player.')
        player = tran.db[playerID]
        # must be a player
        if player.type not in PLAYER_TYPES and player.type != T_ALLIANCE:
            raise GameException('Pacts can be offered to players and aliances only.')
        # check pactID
        pact = Rules.pactDescrs.get(pactID, None)
        if not pact:
            raise GameException('No such pact type.')
        # check state
        if state not in (PACT_OFF, PACT_INACTIVE, PACT_ACTIVE):
            raise GameException("Wrong pact state")
        # check conditions (each condition is itself a pact id)
        for tmpPactID in conditions:
            pact = Rules.pactDescrs.get(tmpPactID, None)
            if not pact:
                raise GameException('No such pact type.')
        # record pact as [state, condition, condition, ...]
        dipl = self.cmd(obj).getDiplomacyWith(tran, obj, playerID)
        dipl.pacts[pactID] = [state]
        dipl.pacts[pactID].extend(conditions)
        # if state if PACT_OFF, disable state on partner's side
        if state == PACT_OFF:
            partner = tran.db[playerID]
            dipl = self.cmd(partner).getDiplomacyWith(tran, partner, obj.oid)
            if pactID in dipl.pacts:
                dipl.pacts[pactID][0] = PACT_OFF
            else:
                dipl.pacts[pactID] = [PACT_OFF]
        return obj.diplomacyRels
    changePactCond.public = 1
    changePactCond.accLevel = AL_OWNER
def getDiplomacyWith(self, tran, obj, playerID):
if obj.governorOf:
# player is a governor
leader = tran.db[obj.governorOf]
return self.cmd(leader).getDiplomacyWith(tran, leader, objID)
# player is independent
dipl = obj.diplomacyRels.get(playerID, None)
if not dipl:
# make default
dipl = IDataHolder()
dipl.type = T_DIPLREL
dipl.pacts = {
PACT_ALLOW_CIVILIAN_SHIPS: [PACT_ACTIVE, PACT_ALLOW_CIVILIAN_SHIPS]
}
dipl.relation = obj.defaultRelation
dipl.relChng = 0
dipl.lastContact = tran.db[OID_UNIVERSE].turn
dipl.contactType = CONTACT_NONE
dipl.stats = None
if playerID != obj.oid:
obj.diplomacyRels[playerID] = dipl
else:
log.debug("getDiplomacyWith myself", obj.oid)
return dipl
def getPartyDiplomacyRels(self, tran, obj, partyID):
if partyID not in obj.diplomacyRels:
return None, None
if obj.diplomacyRels[partyID].contactType == CONTACT_NONE:
return obj.diplomacyRels[partyID], None
party = tran.db[partyID]
return obj.diplomacyRels[partyID], party.diplomacyRels.get(obj.oid, None)
getPartyDiplomacyRels.public = 1
getPartyDiplomacyRels.accLevel = AL_OWNER
def isPactActive(self, tran, obj, partnerID, pactID):
#@log.debug("isPactActive", obj.oid, partnerID, pactID)
if partnerID not in obj.diplomacyRels:
return 0
partner = tran.db[partnerID]
partnerDipl = partner.diplomacyRels.get(obj.oid, None)
if not partnerDipl:
return 0
return partnerDipl.pacts.get(pactID, [PACT_OFF])[0] == PACT_ACTIVE
def deleteDiplomacyWith(self, tran, obj, playerID):
if playerID in obj.diplomacyRels:
del obj.diplomacyRels[playerID]
def getRelationTo(self, tran, obj, objID):
if objID == OID_NONE:
return REL_UNDEF
if obj.oid == objID:
return REL_UNITY
if obj.governorOf:
leader = tran.db[obj.governorOf]
return self.cmd(leader).getRelationTo(tran, leader, objID)
dipl = obj.diplomacyRels.get(objID, None)
if dipl:
return dipl.relation
else:
return obj.defaultRelation
getRelationTo.public = 1
getRelationTo.accLevel = AL_FULL
    def setVoteFor(self, tran, obj, playerID):
        """Set the player's vote for the imperator/leader election.

        OID_NONE and the player's own OID are always allowed; other
        targets must be contacted regular players.
        """
        if playerID not in obj.diplomacyRels and playerID != obj.oid and playerID != OID_NONE:
            raise GameException("No contact with this commander.")
        # check type
        if playerID != OID_NONE:
            player = tran.db[playerID]
            if player.type != T_PLAYER:
                raise GameException("You cannot vote for this player.")
        # set
        obj.voteFor = playerID
        return obj.voteFor
    setVoteFor.public = 1
    setVoteFor.accLevel = AL_OWNER
    def processDIPLPhase(self, tran, obj, data):
        """Per-turn diplomacy processing.

        Applies the base relation drift and the effects of active pacts,
        then clamps relations into [REL_ENEMY_LO, REL_ALLY_HI].
        Skipped entirely while the player's time is disabled.
        """
        if not obj.timeEnabled:
            return
        # NOTE(review): `turn` is assigned but unused in this method.
        turn = tran.db[OID_UNIVERSE].turn
        # record changes from valid pacts
        for partyID in obj.diplomacyRels:
            dipl = obj.diplomacyRels[partyID]
            # check contact
            if dipl.contactType == CONTACT_NONE:
                #@log.debug("Skipping contact", obj.oid, partyID)
                continue
            # base change of relation
            dipl.relChng += Rules.baseRelationChange
            # process pacts
            for pactID in dipl.pacts:
                #@log.debug("Processing pact", obj.oid, partyID, pactID, dipl.pacts[pactID])
                if dipl.pacts[pactID][0] != PACT_ACTIVE:
                    continue
                pactSpec = Rules.pactDescrs[pactID]
                if dipl.relation < pactSpec.validityInterval[0] or \
                    dipl.relation > pactSpec.validityInterval[1] or \
                    dipl.relChng < Rules.relLostWhenAttacked / 2:
                    # skip this non active pact, mark it as off
                    # mark all pact off when attacked
                    dipl.pacts[pactID][0] = PACT_OFF
                    # TODO report it
                    continue
                # pact is valid - pull relation towards the pact's target
                if dipl.relation < pactSpec.targetRel:
                    #@log.debug("Affecting relation", pactSpec.relChng)
                    dipl.relChng += min(pactSpec.targetRel - dipl.relation, pactSpec.relChng)
        # apply relation changes
        for partyID in obj.diplomacyRels:
            dipl = obj.diplomacyRels[partyID]
            dipl.relation += dipl.relChng
            dipl.relation = min(dipl.relation, REL_ALLY_HI)
            dipl.relation = max(dipl.relation, REL_ENEMY_LO)
            #@log.debug('IPlayer', 'Final relation', obj.oid, partyID, dipl.relation, dipl.relChng)
    processDIPLPhase.public = 1
    processDIPLPhase.accLevel = AL_ADMIN
    def getScannerMap(self, tran, obj):
        """Assemble the scanner map visible to the player.

        Combines: an all-seeing map for the admin, buoy positions, the
        player's own static/dynamic maps, and maps shared by partners with
        an active PACT_SHARE_SCANNER. Returns {oid: scan info} keeping the
        highest scan power per object.
        """
        scanLevels = {}
        # full map for the admin
        if obj.oid == OID_ADMIN:
            universe = tran.db[OID_UNIVERSE]
            for galaxyID in universe.galaxies:
                galaxy = tran.db[galaxyID]
                for systemID in galaxy.systems:
                    system = tran.db[systemID]
                    # 111111 acts as an "infinite" scan power sentinel
                    obj.staticMap[systemID] = 111111
                    for planetID in system.planets:
                        obj.staticMap[planetID] = 111111
        # adding systems with buoys
        for objID in obj.buoys:
            scanLevels[objID] = Rules.level1InfoScanPwr
        # fixing system scan level for mine fields
        systems = {}
        for planetID in obj.planets:
            systems[tran.db[planetID].compOf] = None
        for systemID in systems.keys():
            scanLevels[systemID] = Rules.partnerScanPwr
        # player's map
        for objID in obj.staticMap:
            scanLevels[objID] = max(scanLevels.get(objID, 0), obj.staticMap[objID])
        for objID in obj.dynamicMap:
            scanLevels[objID] = max(scanLevels.get(objID, 0), obj.dynamicMap[objID])
        # parties' map
        for partnerID in obj.diplomacyRels:
            if self.cmd(obj).isPactActive(tran, obj, partnerID, PACT_SHARE_SCANNER):
                # load partner's map
                partner = tran.db[partnerID]
                for objID in partner.staticMap:
                    scanLevels[objID] = max(scanLevels.get(objID, 0), partner.staticMap[objID])
                for objID in partner.dynamicMap:
                    scanLevels[objID] = max(scanLevels.get(objID, 0), partner.dynamicMap[objID])
                # partner's fleets and planets
                for objID in partner.fleets:
                    scanLevels[objID] = Rules.partnerScanPwr
                for objID in partner.planets:
                    scanLevels[objID] = Rules.partnerScanPwr
        # create map (local name shadows the builtin `map`)
        map = dict()
        for objID, level in scanLevels.iteritems():
            tmpObj = tran.db.get(objID, None)
            if not tmpObj:
                continue
            # add movement validation data
            if tmpObj.type in (T_SYSTEM,T_WORMHOLE) and objID not in obj.validSystems:
                obj.validSystems.append(objID)
            for info in self.cmd(tmpObj).getScanInfos(tran, tmpObj, level, obj):
                if (info.oid not in map) or (info.scanPwr > map[info.oid].scanPwr):
                    map[info.oid] = info
        return map
    getScannerMap.public = 1
    getScannerMap.accLevel = AL_OWNER
    def mergeScannerMap(self, tran, obj, map):
        """Merge freshly scanned objects into the player's persistent maps.

        map -- dict scanned object -> scan level.  Systems/wormholes go into
        obj.staticMap, fleets/asteroids into obj.dynamicMap.  Every foreign
        player spotted this way gets a CONTACT_DYNAMIC diplomacy record on
        both sides (observer and observed).
        """
        #@log.debug(obj.oid, "Merging scanner map")
        contacts = {}
        for object, level in map.iteritems():
            objID = object.oid
            if object.type in (T_SYSTEM, T_WORMHOLE):
                obj.staticMap[objID] = max(obj.staticMap.get(objID, 0), level)
                # everyone scanning this system becomes a potential contact
                contacts.update(object.scannerPwrs)
            elif object.type in (T_FLEET, T_ASTEROID):
                obj.dynamicMap[objID] = max(obj.dynamicMap.get(objID, 0), level)
                contacts[object.owner] = None
            else:
                raise GameException("Unsupported type %d" % object.type)
        # never record a contact with ourselves or with "nobody"
        if obj.oid in contacts:
            del contacts[obj.oid]
        if OID_NONE in contacts:
            del contacts[OID_NONE]
        for partyID in contacts:
            # add to player's contacts
            dipl = self.cmd(obj).getDiplomacyWith(tran, obj, partyID)
            dipl.contactType = max(dipl.contactType, CONTACT_DYNAMIC)
            dipl.lastContact = tran.db[OID_UNIVERSE].turn
            # add to detected owner's contacts
            owner = tran.db[partyID]
            assert owner.type in PLAYER_TYPES
            dipl = self.cmd(obj).getDiplomacyWith(tran, owner, obj.oid)
            dipl.contactType = max(dipl.contactType, CONTACT_DYNAMIC)
            dipl.lastContact = tran.db[OID_UNIVERSE].turn
    mergeScannerMap.public = 0
    mergeScannerMap.accLevel = AL_OWNER
    def processRSRCHPhase(self, tran, obj, data):
        """Process one turn of the research phase for this player.

        Collects research points (own + science cooperation pacts), pays the
        per-citizen upkeep, then spends the remaining points on the research
        queue.  If the balance is negative, already-invested points are drained
        from queued projects and, as a last resort, known technologies are
        degraded (highest level / highest id first).
        """
        if not obj.timeEnabled:
            return
        # sci pts from allies
        pts = obj.sciPoints
        for partnerID in obj.diplomacyRels:
            if self.cmd(obj).isPactActive(tran, obj, partnerID, PACT_MINOR_SCI_COOP):
                partner = tran.db[partnerID]
                pactSpec = Rules.pactDescrs[PACT_MINOR_SCI_COOP]
                # bonus is capped by the weaker partner's output
                pts += min(
                    int(partner.sciPoints * pactSpec.effectivity),
                    int(obj.sciPoints * pactSpec.effectivity),
                )
            if self.cmd(obj).isPactActive(tran, obj, partnerID, PACT_MAJOR_SCI_COOP):
                partner = tran.db[partnerID]
                pactSpec = Rules.pactDescrs[PACT_MAJOR_SCI_COOP]
                pts += min(
                    int(partner.sciPoints * pactSpec.effectivity),
                    int(obj.sciPoints * pactSpec.effectivity),
                )
        # compute effective sci pts
        obj.effSciPoints = epts = pts - int(obj.stats.storPop * Rules.sciPtsPerCitizen[obj.techLevel])
        index = 0
        while epts > 0 and obj.rsrchQueue and index < len(obj.rsrchQueue):
            item = obj.rsrchQueue[index]
            tech = Rules.techs[item.techID]
            # check requirements
            canResearch = 1
            # player has to be a right race
            if obj.race not in tech.researchRaces:
                canResearch = 0
            for stratRes in tech.researchReqSRes:
                if obj.stratRes.get(stratRes, 0) < 1 and item.improvement == 1:
                    # missing strategic resource blocks only the first improvement
                    Utils.sendMessage(tran, obj, MSG_MISSING_STRATRES, OID_NONE, stratRes)
                    canResearch = 0
                    break
            for tmpTechID in obj.techs:
                if item.techID in Rules.techs[tmpTechID].researchDisables:
                    # a known tech disables this research -> drop it from the queue
                    canResearch = 0
                    Utils.sendMessage(tran, obj, MSG_DELETED_RESEARCH, OID_NONE, item.techID)
                    del obj.rsrchQueue[index]
                    index -= 1
                    break
            if tech.level > obj.techLevel:
                canResearch = 0
                Utils.sendMessage(tran, obj, MSG_DELETED_RESEARCH, OID_NONE, item.techID)
                del obj.rsrchQueue[index]
                index -= 1
            if not canResearch:
                index += 1
                continue
            researchSci = Utils.getTechRCost(obj, item.techID)
            # spend no more than one researchTurns-th of the cost per turn
            wantSci = min(epts, researchSci - item.currSci,
                researchSci / tech.researchTurns)
            item.currSci += wantSci
            item.changeSci = wantSci
            epts -= wantSci
            if item.currSci >= researchSci:
                del obj.rsrchQueue[index]
                obj.techs[item.techID] = item.improvement
                # call finish handler
                tech = Rules.techs[item.techID]
                tech.finishResearchHandler(tran, obj, tech)
                Utils.sendMessage(tran, obj, MSG_COMPLETED_RESEARCH, OID_NONE, item.techID)
                # update derived attributes of player
                self.cmd(obj).update(tran, obj)
                # repeat research if required by player
                if item.improveToMax == 1 and item.improvement < Rules.techMaxImprovement:
                    # reinsert the item on the top of the queue
                    self.cmd(obj).startResearch(tran, obj, item.techID, improveToMax = 1)
                    idx = len(obj.rsrchQueue) - 1
                    self.cmd(obj).moveResearch(tran, obj, idx, - idx)
        if epts > 0 and 0: # TODO: remove me
            Utils.sendMessage(tran, obj, MSG_WASTED_SCIPTS, OID_NONE, epts)
            return
        # oops we have negative epts
        while epts < 0:
            log.debug("Not enought RP", epts, obj.oid)
            if obj.rsrchQueue:
                item = obj.rsrchQueue[0]
                if item.currSci > 0:
                    # take invested points back from the head of the queue
                    wantSci = min(item.currSci, - epts)
                    item.currSci -= wantSci
                    item.changeSci = - wantSci
                    epts += wantSci
                if item.currSci == 0:
                    # remove item from the queue - TODO send message to player
                    del obj.rsrchQueue[0]
                # at this point, epts can be zero
                if epts == 0:
                    log.debug("RP deficit satisfied", obj.oid)
                    break
                # try next project
                if obj.rsrchQueue:
                    continue
            # oops we must find technology to degrade
            avail = obj.techs.keys()
            # do not degrade technologies, which enables others
            for techID in obj.techs:
                tech = Rules.techs[techID]
                for tmpTechID, impr in tech.researchRequires:
                    if tmpTechID in avail:
                        avail.remove(tmpTechID)
            log.debug("Techs avialable for degradation", avail)
            if not avail:
                # no technology...
                break
            # from hight to low IDs
            avail.sort()
            avail.reverse()
            degraded = 0
            for level in range(obj.techLevel, 0, -1):
                for techID in avail:
                    tech = Rules.techs[techID]
                    # check level
                    if tech.level != level:
                        continue
                    # do not touch starting technologies
                    if tech.isStarting and obj.techs[techID] <= 3:
                        continue
                    # ok we have one to degrade
                    item = IDataHolder()
                    item.techID = techID
                    item.improvement = obj.techs[techID]
                    item.currSci = Utils.getTechRCost(obj, techID, obj.techs[techID])
                    item.changeSci = 0
                    item.improveToMax = 0
                    item.type = T_RESTASK
                    obj.rsrchQueue.append(item)
                    # degrade tech
                    if obj.techs[techID] == 1:
                        # TODO send message
                        del obj.techs[techID]
                    else:
                        # TODO send message
                        obj.techs[techID] -= 1
                    if tech.recheckWhenTechLost:
                        # reset some attributes
                        plLevel = obj.techLevel
                        obj.techLevel = 1
                        # recheck remaining techs
                        for level in range(1, plLevel + 1):
                            for techID in obj.techs:
                                tech = Rules.techs[techID]
                                if tech.level != level:
                                    continue
                                # call finish handler again
                                tech.finishResearchHandler(tran, obj, tech)
                    degraded = 1
                    break
                if degraded: break
        return
    processRSRCHPhase.public = 1
    processRSRCHPhase.accLevel = AL_ADMIN
def processACTIONPhase(self, tran, obj, data):
return NotImplementedError()
processACTIONPhase.public = 1
processACTIONPhase.accLevel = AL_ADMIN
    def processINITPhase(self, tran, obj, data):
        """Start-of-turn reset for this player.

        Zeroes the per-turn statistics, clears per-turn caches (dynamic map,
        production pool, population distribution), then -- only when time is
        enabled for the player -- expires old diplomatic contacts and slowly
        "forgets" high scan levels in the static map.
        """
        # reset stats
        obj.stats.storPop = 0
        obj.stats.prodProd = 0
        obj.stats.effProdProd = 0
        obj.stats.prodSci = 0
        obj.stats.effProdSci = 0
        obj.stats.slots = 0
        obj.stats.structs = 0
        obj.stats.planets = 0
        obj.stats.fleetPwr = 0
        obj.stats.fleetSupportProd = 0
        obj.govPwr = Rules.baseGovPwr
        # update galaxies
        obj.galaxies = []
        # remove old messages
        self.cmd(obj).deleteOldMsgs(tran, obj)
        # clear fleet upgrade flag
        obj.fleetUpgradeInProgress = 0
        # clear production pool
        obj.prodIncreasePool = 0
        # clear map
        obj.dynamicMap.clear()
        # set empty population distribution
        obj.tmpPopDistr = {}
        # do not process other cmds if time disabled
        if not obj.timeEnabled:
            return
        # clear contacts and delete too old rels
        # (.keys() makes a copy, so deleting while iterating is safe here)
        turn = tran.db[OID_UNIVERSE].turn
        for objID in obj.diplomacyRels.keys():
            dipl = obj.diplomacyRels[objID]
            # reset contact type
            obj.diplomacyRels[objID].contactType = CONTACT_NONE
            # delete old contacts
            if dipl.lastContact + Rules.contactTimeout < turn:
                del obj.diplomacyRels[objID]
                continue
        # lower scan powers in static map
        for objID in obj.staticMap:
            level = obj.staticMap[objID]
            if level > Rules.level3InfoScanPwr:
                obj.staticMap[objID] = max(
                    Rules.level3InfoScanPwr,
                    int(level * Rules.mapForgetScanPwr),
                )
                #@log.debug(obj.oid, "player static map fix", objID, level - obj.staticMap[objID])
        # clear relations change indicator
        for partyID in obj.diplomacyRels:
            obj.diplomacyRels[partyID].relChng = 0
        # reset science points
        obj.sciPoints = 0
    processINITPhase.public = 1
    processINITPhase.accLevel = AL_ADMIN
    def processFINALPhase(self, tran, obj, data):
        """End-of-turn bookkeeping for this player.

        Runs the research and diplomacy sub-phases (errors are logged, not
        propagated), computes production/science efficiency (leader/imperator
        bonus, fleet support penalty, small-empire bonus, cooperation pacts),
        resigns inactive players, recomputes government power and control
        range, prunes stale ship redirections and rebuilds the allied buoy
        list.  NOTE: local names `sum` and `range` shadow builtins below --
        kept as-is in this documentation-only pass.
        """
        if obj.timeEnabled:
            #try/except so that entire final process doesn't break on error in sub-phase
            try:
                self.cmd(obj).processRSRCHPhase(tran, obj, data)
            except:
                log.warning('Cannot execute FINAL/RSRCH on %d' % (obj.oid))
            try:
                self.cmd(obj).processDIPLPhase(tran, obj, data)
            except:
                log.warning('Cannot execute FINAL/DIPL on %d' % (obj.oid))
        # efficiency
        obj.prodEff = 1.0
        obj.sciEff = 1.0
        if obj.imperator == 1:
            log.debug(obj.oid, "Leader bonus")
            obj.prodEff += Rules.galLeaderBonus
            obj.sciEff += Rules.galLeaderBonus
        elif obj.imperator >= 2:
            log.debug(obj.oid, "Imperator bonus")
            obj.prodEff += Rules.galImperatorBonus
            obj.sciEff += Rules.galImperatorBonus
        #@log.debug("Fleet upgrade pool", obj.oid, obj.fleetUpgradePool, obj.fleetUpgradeInProgress)
        # compute some stats
        # TODO remove, RAW SCI PTS represented now obj.stats.prodSci = obj.effSciPoints
        obj.stats.planets = len(obj.planets)
        # fleet support
        #@log.debug("Fleet support", obj.oid, obj.stats.fleetSupportProd, obj.stats.prodProd)
        if obj.stats.fleetSupportProd > 0 and obj.stats.prodProd > 0:
            # TODO 0.1 shall be dependend on the race / government type
            obj.prodEff += min(0.1 - float(obj.stats.fleetSupportProd + obj.fleetUpgradePool * Rules.operProdRatio) / obj.stats.prodProd, 0.0)
        # delete non active player
        if obj.lastLogin + Rules.playerTimeout < time.time():
            log.message("Resigning inactive player", obj.name, obj.oid)
            # TODO send a message?
            self.cmd(obj).resign(tran, obj)
        # delete nonactive newbie player
        if obj.lastLogin + Rules.novicePlayerTimeout < time.time() \
            and len(obj.planets) < 4:
            log.message("Resigning inactive novice player", obj.name, obj.oid)
            # TODO send a message?
            self.cmd(obj).resign(tran, obj)
        # acquire government power
        # (taken from the best powered government structure on the home planet)
        if obj.planets:
            planet = tran.db[obj.planets[0]]
            for slot in planet.slots:
                tech = Rules.techs[slot[STRUCT_IDX_TECHID]]
                if tech.govPwr > 0 and slot[STRUCT_IDX_STATUS] & STRUCT_STATUS_ON:
                    eff = Utils.getTechEff(tran, slot[STRUCT_IDX_TECHID], obj.oid)
                    obj.govPwr = max(int(tech.govPwr * eff * (slot[STRUCT_IDX_OPSTATUS] / 100.0)), obj.govPwr)
        # compute government controll range
        if not hasattr(obj,"tmpPopDistr"): #when player is force-resigned, tmpPopDistr is unset. This is easiest fix.
            obj.tmpPopDistr = {}
        ranges = obj.tmpPopDistr.keys()
        ranges.sort()
        sum = 0
        range = 1
        for range in ranges:
            sum += obj.tmpPopDistr[range]
            if sum > obj.govPwr:
                break
        obj.govPwrCtrlRange = max(1, range)
        if sum < obj.govPwr and sum > 0:
            #@log.debug(obj.oid, "GovPwr compensation", obj.govPwrCtrlRange, obj.govPwr, sum)
            obj.govPwrCtrlRange = int(obj.govPwrCtrlRange * obj.govPwr / float(sum))
        #@log.debug(obj.oid, "GovPwr control range", obj.govPwrCtrlRange)
        # compute prodBonus and sciBonus
        sum = 0
        for range in ranges:
            sum += obj.tmpPopDistr[range]
        if sum < obj.govPwr and sum > 0:
            ratio = float(obj.govPwr - sum) / obj.govPwr
            #@log.debug(obj.oid, "SMALL EMPIRE BONUS", ratio, "govPwr", obj.govPwr, "sum", sum)
            # TODO let user to define how much to invest into prod and to sci
            obj.prodEff += ratio / 2
            obj.sciEff += ratio / 2
        del obj.tmpPopDistr # delete temporary attribute
        # increase prod eff from pacts
        # CPs from allies
        sum = 0
        for partnerID in obj.diplomacyRels:
            if self.cmd(obj).isPactActive(tran, obj, partnerID, PACT_MINOR_CP_COOP):
                partner = tran.db[partnerID]
                pactSpec = Rules.pactDescrs[PACT_MINOR_CP_COOP]
                sum += min(
                    partner.stats.prodProd * pactSpec.effectivity,
                    obj.stats.prodProd * pactSpec.effectivity,
                )
            if self.cmd(obj).isPactActive(tran, obj, partnerID, PACT_MAJOR_CP_COOP):
                partner = tran.db[partnerID]
                pactSpec = Rules.pactDescrs[PACT_MAJOR_CP_COOP]
                sum += min(
                    partner.stats.prodProd * pactSpec.effectivity,
                    obj.stats.prodProd * pactSpec.effectivity,
                )
        # apply production increase pool
        obj.prodIncreasePool += sum
        if obj.stats.prodProd > 0:
            ratio = (Rules.unusedProdMod * obj.prodIncreasePool) / obj.stats.prodProd
            real = min(ratio, math.sqrt(ratio))
            #@log.debug(obj.oid, "Increase production by", ratio, "real", real)
            obj.prodEff += real
        # clean up prodEff if prodEff < 0 (prevent abuse)
        if obj.prodEff < 0:
            obj.prodEff = 0.0
        # clean up ship redirections
        systems = {}
        for planetID in obj.planets:
            systems[tran.db[planetID].compOf] = None
        for systemID in obj.shipRedirections.keys():
            if systemID not in systems:
                del obj.shipRedirections[systemID]
        # delete allied bouys
        obj.alliedBuoys = {}
        # find all allies
        for partnerID in obj.diplomacyRels.keys():
            dipl = obj.diplomacyRels[partnerID]
            getAllyBuoys = False
            getScannerBuoys = False
            if dipl.relation >= REL_ALLY_LO:
                getAllyBuoys = True
            if self.isPactActive(tran, obj, partnerID, PACT_SHARE_SCANNER):
                getScannerBuoys = True
            if (getAllyBuoys or getScannerBuoys):
                partner = tran.db[partnerID]
                if hasattr(partner, "buoys"):
                    for systemID in partner.buoys.keys():
                        toAllyBuoy = BUOY_NONE
                        if getAllyBuoys and partner.buoys[systemID][1] == BUOY_TO_ALLY:
                            toAllyBuoy = (partner.buoys[systemID][0], BUOY_FROM_ALLY, partner.name)
                        elif getScannerBuoys and partner.buoys[systemID][1] == BUOY_TO_SCANNERSHARE:
                            toAllyBuoy = (partner.buoys[systemID][0], BUOY_FROM_ALLY, partner.name)
                        if toAllyBuoy != BUOY_NONE:
                            if systemID in obj.alliedBuoys:
                                obj.alliedBuoys[systemID].append(toAllyBuoy)
                            else:
                                obj.alliedBuoys[systemID] = [toAllyBuoy]
        return None
    processFINALPhase.public = 1
    processFINALPhase.accLevel = AL_ADMIN
## messaging
def canSendMsg(self, tran, obj, oid, forum):
if forum == "INBOX":
sender = tran.db[oid]
return oid == OID_ADMIN or (oid in obj.diplomacyRels) or \
(obj.oid in sender.diplomacyRels)
if forum == "OUTBOX":
return obj.oid == oid
return 0
canSendMsg.public = 0
def cleanUpMsgs(self, tran, obj):
# get messages
msgs = self.cmd(obj).getMsgs(tran, obj)
# build list of events
delete = []
for msg in msgs:
if msg["forum"] == "EVENTS":
delete.append(msg["id"])
# delete
self.cmd(obj).deleteMsgs(tran, obj, delete)
return 1
cleanUpMsgs.public = 1
cleanUpMsgs.accLevel = AL_OWNER
def setResolution(self, tran, obj, x, y):
if not hasattr(obj,'clientStats'):
obj.clientStats = {}
obj.clientStats['x'] = x;
obj.clientStats['y'] = y;
setResolution.public = 1
setResolution.accLevel = AL_OWNER
def getResolution(self, obj):
if not hasattr(obj,'clientStats'):
obj.clientStats = {}
if obj.clientStats.has_key('x') and obj.clientStats.has_key('y'):
return ("%s,%s" % (obj.clientStats['x'],obj.clientStats['y']))
else:
return "0,0"
getResolution.public = 0
def addObsoleteTechs(self, tran, player, techID):
# add tech
temp = set([techID])
player.obsoleteTechs = player.obsoleteTechs | temp
return player.obsoleteTechs
addObsoleteTechs.public = 1
addObsoleteTechs.accLevel = AL_FULL
def delObsoleteTechs(self, tran, player, techID):
# del tech
temp = set([techID])
player.obsoleteTechs = player.obsoleteTechs - temp
return player.obsoleteTechs
delObsoleteTechs.public = 1
delObsoleteTechs.accLevel = AL_FULL
| Lukc/ospace-lukc | server/lib/ige/ospace/IPlayer.py | Python | gpl-2.0 | 47,542 |
# pygsear
# Copyright (C) 2003 Lee Harr
#
#
# This file is part of pygsear.
#
# pygsear is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# pygsear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygsear; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import random
import pygame
from pygsear import Game, Drawable, Path, conf
from pygsear.locals import *
class Wing(Drawable.RotatedImage):
    """A named arrow sprite that wanders the screen on a random path."""
    def __init__(self, name):
        # name shown in the "I am for ..." message
        self.name = name
        Drawable.RotatedImage.__init__(self, filename='arrow/right.png', steps=36)
        p = Path.RandomAccelerationPathBounded(minSpeed=2)
        self.set_path(p)
        self.center()
    def move(self):
        # keep the arrow image rotated to face the direction of travel
        self.set_rotation(self.path.get_direction())
        Drawable.RotatedImage.move(self)
class Ship(Drawable.RotatedImage):
    """Penguin sprite that chases a chosen enemy sprite."""
    def __init__(self):
        Drawable.RotatedImage.__init__(self, filename='penguin2.png', steps=36)
        self.path.set_speed(100)
        self.set_position((50, 50))
    def set_enemy(self, enemy):
        # sprite this ship will chase; must be set before move() is called
        self.enemy = enemy
    def follow_enemy(self):
        # steer towards the enemy's current position
        self.path.turn_towards(self.enemy.get_position())
    def move(self):
        self.follow_enemy()
        # face the direction of travel
        self.set_rotation(self.path.get_direction())
        Drawable.RotatedImage.move(self)
class AGame(Game.Game):
    """Chase game: the penguin hunts one randomly chosen wing at a time."""

    def initialize(self):
        self.makeWings()
        self.makeShip()

    def makeWings(self):
        """(Re)populate the enemy group with one Wing per crew member."""
        self.enemies = self.addGroup()
        names = ['Kirk', 'Spock', 'McCoy', 'Scotty', 'Chekov']
        for name in names:
            wing = Wing(name)
            self.sprites.add(wing)
            self.enemies.add(wing)

    def makeShip(self):
        """Create the chasing ship and give it its first target."""
        self.ship = Ship()
        self.sprites.add(self.ship)
        self.chooseEnemy()

    def chooseEnemy(self):
        """Pick a random surviving wing and announce it on screen."""
        enemy = random.choice(self.enemies.sprites())
        self.ship.set_enemy(enemy)
        # replace any previous announcement
        if hasattr(self, 'message'):
            self.message.clear()
        label = Drawable.String(message='"I am for %s"' % enemy.name,
                                fontSize=30)
        label.center()
        self.message = Drawable.Stationary(sprite=label)
        self.message.draw()

    def checkCollisions(self):
        """Kill a caught enemy, respawn the fleet when empty, retarget."""
        if self.ship.collide(self.ship.enemy):
            self.ship.enemy.kill()
            if not len(self.enemies.sprites()):
                self.makeWings()
            self.chooseEnemy()
if __name__ == '__main__':
    # create the game and hand control to the pygsear main loop
    g = AGame()
    g.mainloop()
| davesteele/pygsear-debian | examples/wings_chase.py | Python | gpl-2.0 | 2,919 |
# Routines for handling fasta sequences and tab sep files
# std packages
import sys, textwrap, operator, types, doctest,logging, gzip, struct, cPickle, gc, itertools, math
from collections import defaultdict
from types import *
from os.path import basename, splitext
# external packages
try:
import namedtuple
except:
pass
try:
import dist, math
except:
pass
# --- CONVENIENCE ---
def openFile(fname, mode="r"):
    """Open fname; recognizes file-like objects, "stdout"/"stdin" and None.

    Returns the object unchanged if it is already file-like, sys.stdout /
    sys.stdin for the magic names, None for None/"none", a gzip handle for
    *.gz paths and a regular file handle otherwise.

    Bug fix: the None check now precedes the .endswith(".gz") test -- the
    original called fname.endswith() first, so openFile(None) raised
    AttributeError and the documented None branch was unreachable.
    """
    if hasattr(fname, "read") or hasattr(fname, "write"):
        # already an open file-like object
        return fname
    if fname == "stdout":
        return sys.stdout
    if fname == "stdin":
        return sys.stdin
    if fname == None or fname.lower() == "none":
        return None
    if fname.endswith(".gz"):
        return gzip.open(fname, mode)
    return open(fname, mode)
def flattenValues(dict):
    """Return all values in dictionary (key -> list) as one long flat list."""
    # note: the parameter name shadows the builtin `dict`; kept for API
    # compatibility.  Fixed the local that shadowed the builtin `list`.
    flat = []
    for value in dict.values():
        flat.extend(value)
    return flat
def writeToTsv(fileObj, rec):
    """Write a record (sequence of unicode strings) as a utf8 tab-sep line.

    Empty/None records are silently ignored.
    """
    if not rec:
        return
    encoded = [field.encode("utf-8") for field in rec]
    fileObj.write("\t".join(encoded) + "\n")
# --- FASTA FILES ---
def parseFastaAsDict(fname, inDict=None):
    """Parse a fasta file into a dict seqId -> sequence.

    fname  -- filename, file-like object, "stdin" or *.gz path
              (anything FastaReader accepts).
    inDict -- optional existing dict to extend.
    Raises Exception on duplicate sequence ids.
    """
    if inDict == None:
        inDict = {}
    logging.info("Parsing %s" % fname)
    fr = FastaReader(fname)
    for (id, seq) in fr.parse():
        if id in inDict:
            # fix: the original used py2-only `print` statements to dump the
            # whole dict to stdout before raising; put the id in the message
            raise Exception("duplicate sequence id %s in %s" % (id, fname))
        inDict[id] = seq
    return inDict
def parseFasta(fname):
    """Generator yielding (id, sequence) tuples from a fasta file."""
    reader = FastaReader(fname)
    for seqId, seq in reader.parse():
        yield seqId, seq
class FastaReader:
    """ a class to parse a fasta file
    Example:
    fr = maxbio.FastaReader(filename)
    for (id, seq) in fr.parse():
        print id,seq """
    def __init__(self, fname):
        # accept open file-like objects, "stdin", gzipped or plain paths
        if hasattr(fname, 'read'):
            self.f = fname
        elif fname=="stdin":
            self.f=sys.stdin
        elif fname.endswith(".gz"):
            self.f=gzip.open(fname)
        else:
            self.f=open(fname)
        # id of the record currently being accumulated
        self.lastId=None
    def parse(self):
        """ Generator: returns sequences as tuple (id, sequence) """
        lines = []
        for line in self.f:
            # skip blank lines and comments
            if line.startswith("\n") or line.startswith("#"):
                continue
            elif not line.startswith(">"):
                # sequence line: strip whitespace and embedded spaces
                lines.append(line.replace(" ","").strip())
                continue
            else:
                if len(lines)!=0: # on first >, seq is empty
                    faseq = (self.lastId, "".join(lines))
                    self.lastId=line.strip(">").strip()
                    lines = []
                    yield faseq
                else:
                    # two consecutive headers -> previous record had no sequence
                    if self.lastId!=None:
                        sys.stderr.write("warning: when reading fasta file: empty sequence, id: %s\n" % line)
                    self.lastId=line.strip(">").strip()
                    lines=[]
        # if it's the last sequence in a file, loop will end on the last line
        if len(lines)!=0:
            faseq = (self.lastId, "".join(lines))
            yield faseq
        else:
            # NOTE(review): an empty or headers-only input yields a single
            # (None, None) record; callers appear to rely on this sentinel
            yield (None, None)
def outputFasta(id, seq, fh=sys.stdout, width=80):
    """Write one fasta record to fh, wrapping the sequence at `width` chars."""
    fh.write(">" + id + "\n")
    # slice the sequence into fixed-width chunks and join with newlines
    chunks = [seq[i:i + width] for i in range(0, len(seq), width)]
    fh.write("\n".join(chunks))
    fh.write("\n")
def outputFastaFile(id, seq, fname, width=80):
    """Write one fasta record to fname and close the file.

    Bug fixes: the file is now opened for writing (openFile defaulted to
    read mode, so writing failed) and the caller's `width` is honoured
    (it was hardcoded to 80).
    """
    fh = openFile(fname, "w")
    outputFasta(id, seq, fh, width=width)
    fh.close()
### functions for handling lists of tuples
def _makeGetter(var):
    """Return an attrgetter for string field names, an itemgetter otherwise.

    String fields address named-tuple attributes; anything else (typically
    an int) is treated as a positional index into plain tuples/lists.
    """
    if type(var) == types.StringType:
        # named field -> attribute access on record objects
        return operator.attrgetter(var)
    # positional index -> item access on plain sequences
    return operator.itemgetter(var)
def sortList(list, field, reverse=True, key=None):
    """Sort a list of tuples in place by the given field; return the list.

    If no key callable is supplied, one is derived from `field` via
    _makeGetter (attribute name or positional index).
    """
    getter = key or _makeGetter(field)
    list.sort(key=getter, reverse=reverse)
    return list
def bestIdentifiers(scoredList):
    """
    given a list of tuples with a numeric last field and an id field, return the id fields with
    the highest last field.
    >>> bestIdentifiers ([("clinton", 1), ("obama", 3), ("washington", 10), ("lincoln", 10)])
    ['washington', 'lincoln']
    """
    # sort in place, best score first, then take every id tied for the top
    scoredList.sort(key=lambda rec: rec[-1], reverse=True)
    topScore = scoredList[0][-1]
    return [rec[0] for rec in scoredList if rec[-1] >= topScore]
def bestScoreElements(list, scoreField):
    """ return only those tuples in a list that contain a score >= the best score in the list
    >>> import namedtuple
    >>> tuple = namedtuple.namedtuple("test", "f1, f2")
    >>> tuples = [tuple(1, 6), tuple(4, 7), tuple(2, 7)]
    >>> print bestScoreElements(tuples, scoreField="f2")
    [test(f1=4, f2=7), test(f1=2, f2=7)]
    """
    scoreGetter = _makeGetter(scoreField)
    # sort best-first so the top score sits at index 0
    sortList(list, scoreField, reverse=True, key=scoreGetter)
    topScore = scoreGetter(list[0])
    return [el for el in list if scoreGetter(el) >= topScore]
def indexByField(list, field):
    """Group a list of tuples into a dict fieldValue -> list of tuples."""
    getter = _makeGetter(field)
    index = {}
    for rec in list:
        index.setdefault(getter(rec), []).append(rec)
    return index
def bestTuples(list, idField, scoreField):
    """ Index a list of a key-value-tuples, keep only the best tuples per value and return their keys.
    >>> import namedtuple
    >>> tuple = namedtuple.namedtuple("test", "f1, f2")
    >>> tuples = [tuple(1, 6), tuple(1, 3), tuple(2, 7), tuple(2,1000)]
    >>> print bestTuples(tuples, idField="f1", scoreField="f2")
    [test(f1=1, f2=6), test(f1=2, f2=1000)]
    """
    byId = indexByField(list, idField)
    best = []
    # per id, keep only the top-scoring tuples
    for id, recs in byId.iteritems():
        best.extend(bestScoreElements(recs, scoreField))
    return best
def removeBigSets(predDict, limit):
    """Given a dict string -> set, keep only entries where len(set) < limit.

    Bug fix: the original iterated `for key, predSet in predDict`, which
    yields only keys and raised ValueError on every call; it must iterate
    the (key, value) pairs via .items().
    """
    result = {}
    for key, predSet in predDict.items():
        if len(predSet) < limit:
            result[key] = predSet
    return result
# return types for benchmark(): aggregate counts/metrics for a whole run, plus
# one ErrorDetails record (expected vs predicted sets) per mispredicted object
BenchmarkResult = namedtuple.namedtuple("BenchResultRec", "TP, FN, FP, Prec, Recall, F, errList, objCount")
ErrorDetails = namedtuple.namedtuple("ErrorDetails", "id, expected, predicted")
def benchmark(predDict, refDict):
    """ returns a class with attributes for TP, FN, FP and various other counts and information about prediction errors
    >>> benchmark({"a" : set([1,2,3]), "b" : set([3,4,5])}, {"a":set([1]), "b":set([4])})
    BenchResultRec(TP=2, FN=0, FP=4, Prec=0.3333333333333333, Recall=1.0, F=0.5, errList=[ErrorDetails(id='a', expected=set([1]), predicted=set([1, 2, 3])), ErrorDetails(id='b', expected=set([4]), predicted=set([3, 4, 5]))], objCount=2)
    """
    OBJECTNAME="documents"
    TP, FN, FP = 0, 0, 0
    objCount = 0
    atLeastOneHit = 0
    errDetails = []
    completeMatch = 0
    completeMismatch = 0
    tooManyPred = 0
    notEnoughPred = 0
    # NOTE(review): limitPassed, atLeastOneHit, completeMatch etc. are counted
    # but never reported in the returned record
    limitPassed = 0
    predCount = 0
    # iterate over objects and update counters
    for obj, predSet in predDict.iteritems():
        # only objects present in both prediction and reference are scored
        if obj not in refDict:
            logging.debug("%s not in reference, skipping" % obj)
            continue
        refSet = refDict[obj]
        objCount+=1
        perfectMatch=False
        partialMatch=False
        predCount += len(predSet)
        tpSet = predSet.intersection(refSet) # true positives: are in pred and in reference
        fnSet = refSet.difference(predSet) # false negatives: are in reference but not in prediction
        fpSet = predSet.difference(refSet) # false positives: are in prediction but not in refernce
        TP += len (tpSet)
        FN += len (fnSet)
        FP += len (fpSet)
        if len(tpSet)>0:
            atLeastOneHit+=1
            partialMatch=True
        if len(tpSet)==len(predSet)==len(refSet):
            completeMatch+=1
            perfectMatch=True # set flag to avoid checking several times below
        if len(tpSet)==0:
            completeMismatch+=1
        if len(predSet)>len(refSet):
            tooManyPred+=1
        if len(predSet)<len(refSet):
            notEnoughPred+=1
        if not perfectMatch:
            errDetails.append(ErrorDetails(id=obj, expected=refSet, predicted=predSet))
    if objCount==0:
        logging.debug("number of %s in common between prediction and reference is zero" % OBJECTNAME)
        return None
    # precision / recall / F1 with divide-by-zero guards (py2 print statements)
    if TP+FP > 0:
        Prec = float(TP) / (TP + FP)
    else:
        print "Warning: Cannot calculate Prec because TP+FP = 0"
        Prec = 0
    if TP+FN > 0:
        Recall = float(TP) / (TP + FN)
    else:
        print "Warning: Cannot calculate Recall because TP+FN = 0"
        Recall = 0
    if Recall>0 and Prec>0:
        F = 2 * (Prec * Recall) / (Prec + Recall)
    else:
        print "Warning: Cannot calculate F because Recall and Prec = 0"
        F = 0
    return BenchmarkResult(TP=TP, FN=FN, FP=FP, Prec=Prec, Recall=Recall, F=F, errList=errDetails, objCount=objCount)
def allToString(list):
    """Convert every member of a list to a string.

    Numbers become str(); lists and sets become comma-separated strings.
    """
    converted = []
    sample = set()
    for item in list:
        if type(item) == types.ListType or type(item) == type(sample):
            converted.append(",".join(item))
        else:
            converted.append(str(item))
    return converted
def prettyPrintDict(dict):
    """ print dict to stdout """
    # one "key value" pair per line (py2 print statement)
    for key, value in dict.iteritems():
        print key, value
def calcBinomScore(background, foreground, genes, backgroundProb):
    """Return -log10 of the binomial p-value for the genes/foreground overlap.

    NOTE: `background` is accepted for interface compatibility but unused;
    the p-value comes from dist.pbinom(overlap, len(genes), backgroundProb).
    """
    overlap = len(genes.intersection(foreground))
    pValue = dist.pbinom(overlap, len(genes), backgroundProb)
    return -math.log10(pValue)
def packCoord(start, end):
    """Pack (start, end) into 8 little-endian bytes (two signed int32)."""
    startInt, endInt = int(start), int(end)
    return struct.pack("<ll", startInt, endInt)
def unpackCoord(arr):
    """Undo packCoord: return the (start, end) tuple packed in `arr`.

    Bug fix: the original signature was (start, end) but the body unpacked
    the undefined name `arr`, so every call raised NameError; the function
    now takes the packed byte string, mirroring unpackChromCoord.
    """
    start, end = struct.unpack("<ll", arr)
    return start, end
def packChromCoord(chrom, start, end):
    """ pack chrom,start,end into 9 little-endian bytes, return a byte string
    >>> s = packChromCoord("chr21", 1233,123232299)
    >>> unpackChromCoord(s)
    ('chr21', 1233, 123232299)
    >>> unpackChromCoord(packChromCoord("chrM", 1233,123232299))
    ('chrM', 1233, 123232299)
    >>> packChromCoord("chr6_hap", 1,2)
    >>> len(packChromCoord("chr6", 1,2))
    9
    """
    # alternate haplotypes and unplaced contigs cannot be packed
    if "_gl" in chrom or "hap" in chrom:
        return None
    chromName = chrom.replace("chr", "")
    # M/X/Y are encoded as their ord() value, numeric chromosomes directly
    if chromName in ["M", "X", "Y"]:
        code = ord(chromName)
    else:
        code = int(chromName)
    return struct.pack("<bll", code, int(start), int(end))
def unpackChromCoord(arr):
    """Undo packChromCoord: return (chrom, start, end)."""
    code, start, end = struct.unpack("<bll", arr)
    # codes above 22 hold the ord() of M/X/Y; numeric chromosomes otherwise
    name = chr(code) if code > 22 else str(code)
    return "chr" + name, start, end,
def revComp(seq):
    """Return the reverse complement of a nucleotide sequence.

    Handles upper/lower case and IUPAC ambiguity codes; any other character
    raises KeyError (same contract as the original lookup table).
    """
    comp = { "a":"t", "A":"T", "t" :"a", "T":"A", "c":"g", "C":"G", "g":"c", "G":"C", "N":"N", "n":"n",
            "Y":"R", "R" : "Y", "M" : "K", "K" : "M", "W":"W", "S":"S",
            "H":"D", "B":"V", "V":"B", "D":"H", "y":"r", "r":"y","m":"k",
            "k":"m","w":"w","s":"s","h":"d","b":"v","d":"h","v":"b" }
    return "".join([comp[nucl] for nucl in reversed(seq)])
# -----
if __name__ == "__main__":
    # run the embedded doctests when this module is executed directly
    import doctest
    doctest.testmod()
| maximilianh/maxtools | lib/maxbio.py | Python | gpl-2.0 | 12,239 |
#!/usr/bin/env python
###############################################################################
#
# searchMappedPairs.py
#
# Given a bam file, this srcipt will calculate where the mate of the read
# is mapped or unmappedand in which contig that mate is mapped to. The aim
# being to generate a graphviz image and report file of all of the pairs that
# map to a different contigs and to no contigs at all. Hopefully this will
# help with assemblies and stuff like that to piece together contigs.
#
# Copyright (C) 2011, 2012, 2014 Connor Skennerton
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import argparse
import sys
import pysam
import networkx as nx
from operator import itemgetter
def getSubList(subFile):
    """Read one contig name per line from subFile; return them as a set."""
    return set(line.rstrip() for line in subFile)
def findEndLinks(G, bamFile, contig, length, endLength=500):
    """Add graph links for read pairs whose mate maps to a different contig.

    Only reads aligned within `endLength` of either end of `contig` are
    examined, since those are the pairs that can join contig ends.

    Refactor: the start-window and end-window loops had identical bodies;
    they now share one loop over the two (start, stop) windows.
    """
    for windowStart, windowStop in ((1, endLength), (length - endLength, length)):
        for read in bamFile.fetch(contig, windowStart, windowStop):
            if checkLink(bamFile, read, length, contig):
                mate_contig = bamFile.getrname(read.mrnm)
                G.add_node(contig, length=length)
                G.add_node(mate_contig, length=bamFile.lengths[read.mrnm])
                addLink(G, contig, mate_contig)
def addLink(G, contig, mate_contig):
    """Increment (or create) the weighted edge between two contigs.

    The endpoints are ordered lexicographically so both directions of the
    same contig pair always land on a single edge.

    Fixes: the bare `except:` clauses (which also hid real errors) are
    replaced by an explicit dict.get() initialisation, and the duplicated
    if/else branches are collapsed into one code path.
    """
    if contig < mate_contig:
        u, v = contig, mate_contig
    else:
        u, v = mate_contig, contig
    G.add_edge(u, v)
    data = G[u][v]
    data['weight'] = data.get('weight', 0) + 1
def checkLink(bamFile, read, length, contig):
    """Return True when `read` has a mapped mate located on another contig.

    Returns None (falsy) otherwise.  `length` is unused but kept for
    interface compatibility with callers.
    """
    if isMated(read) and hasMissingMates(bamFile, read, contig):
        # mate is on a different contig
        return True
# checks for a number of features for each aligned read. If a read's mate is
# on a different contig then it returns that contig name. For all other
# possibilities returns None
def hasMissingMates(bamFile, alignedRead, contig):
    """Return True when the read's mate maps to a contig other than `contig`."""
    mateContig = bamFile.getrname(alignedRead.mrnm)
    return mateContig != contig
# checks the position of the read and it's mate to see if they are on oposite
# ends of a contig. Returns True if they are, False if not
def isCircularLink(alignedReadPos, matePos, contigLength, endLength=500):
    """Return True when the read and its mate sit on opposite contig ends."""
    readNearStart = alignedReadPos < endLength
    readNearEnd = alignedReadPos > (contigLength - endLength)
    mateNearStart = matePos < endLength
    mateNearEnd = matePos > contigLength - endLength
    return (readNearStart and mateNearEnd) or (readNearEnd and mateNearStart)
def isMated(alignedRead):
    """Return True when the read is paired and its mate is mapped."""
    return bool(alignedRead.is_paired and not alignedRead.mate_is_unmapped)
if __name__ =='__main__':
    # intialise the options parser
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("bam", help="the name of the input bam file")
    parser.add_argument('outfile', help='Name of output file of graph in GML format')
    parser.add_argument('-f', '--wanted-contigs', dest='wantedContigs', type=argparse.FileType(),
            help='A file of contig names to be considered')
    parser.add_argument("-n","--numberLinks", type=int, dest="numOfLinks", default=3,
            help="the number of links that two contigs must share for the links to even be considered 'real'")
    parser.add_argument('-m', '--min-contig-len', type=int, dest='minContigLen', default=500,
            help='The minimum length of the contig to be considered for adding links')
    # get and check options
    args = parser.parse_args()
    endLength = 500
    sublist = None
    if args.wantedContigs is not None:
        sublist = getSubList(args.wantedContigs)
    try:
        # NOTE(review): bare except hides unrelated errors (e.g. missing file)
        bamFile = pysam.Samfile(args.bam, 'rb')
    except:
        print "The input file must be in bam format"
        sys.exit(1)
    G = nx.Graph()
    # collect inter-contig links for every sufficiently long, wanted contig
    for contig, length in zip(bamFile.references, bamFile.lengths):
        if length < args.minContigLen:
            continue
        # NOTE(review): when -f is not given, sublist stays None and this
        # membership test raises TypeError -- confirm intended usage
        if contig not in sublist:
            continue
        findEndLinks(G, bamFile, contig, length)
    # now subset the graph based on the edge conditions
    SG = nx.Graph( [ (u,v,d) for u,v,d in G.edges(data=True) if d ['weight'] >= args.numOfLinks] )
    nx.write_gml(SG, args.outfile)
| JoshDaly/scriptShed | searchMappedPairs.py | Python | gpl-2.0 | 5,548 |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# This module will be shared with other services. Therefore, please refrain from
# importing anything from Mercurial and creating a dependency on Mercurial. This
# module is only for specifying classes based on simple types to represent the
# data required for creating commits.
from __future__ import absolute_import
from typing import Any, Dict, List, Optional, Union
class params(object):
    """Bundle of everything needed to create one commit: the file changes,
    the commit metadata and the destination it should land on."""

    def __init__(
        self, changelist: "changelist", metadata: "metadata", destination: "destination"
    ) -> None:
        self.changelist = changelist
        self.metadata = metadata
        self.destination = destination

    def todict(self) -> "Dict[str, Any]":
        # Serialize each component through its own todict().
        return {
            "changelist": self.changelist.todict(),
            "metadata": self.metadata.todict(),
            "destination": self.destination.todict(),
        }

    @classmethod
    def fromdict(cls, d: "Dict[str, Any]") -> "params":
        """Rebuild a params object from a dict produced by todict()."""
        return cls(
            changelist=changelist.fromdict(d["changelist"]),
            metadata=metadata.fromdict(d["metadata"]),
            destination=destination.fromdict(d["destination"]),
        )
class metadata(object):
    """Commit metadata: author, message, parent hashes and extra key/values."""

    def __init__(
        self,
        author: "Optional[str]",
        description: "Optional[str]",
        parents: "Optional[List[str]]",
        extra: "Optional[Dict[str, str]]" = None,
    ) -> None:
        self.author = author
        self.description = description
        self.parents = parents
        self.extra = extra

    def todict(self) -> "Dict[str, Any]":
        return {
            "author": self.author,
            "description": self.description,
            "parents": self.parents,
            "extra": self.extra,
        }

    @classmethod
    def fromdict(cls, d: "Dict[str, Any]") -> "metadata":
        """Rebuild metadata from a dict; absent keys become None."""
        return cls(
            d.get("author"),
            d.get("description"),
            d.get("parents"),
            d.get("extra"),
        )
class destination(object):
    """Where the commit should land: an optional bookmark plus a flag
    selecting pushrebase semantics."""

    def __init__(
        self, bookmark: "Optional[str]" = None, pushrebase: "Optional[bool]" = False
    ) -> None:
        self.bookmark = bookmark
        self.pushrebase = pushrebase

    def todict(self) -> "Dict[str, Any]":
        return {"bookmark": self.bookmark, "pushrebase": self.pushrebase}

    @classmethod
    def fromdict(cls, d: "Dict[str, Any]") -> "destination":
        """Rebuild a destination from a dict; absent keys become None."""
        return cls(d.get("bookmark"), d.get("pushrebase"))
class changelistbuilder(object):
    """Accumulates per-file changes and finally freezes them into a changelist."""
    def __init__(self, parent: str) -> None:
        self.parent = parent
        # path -> fileinfo for every file touched so far
        self.files: "Dict[str, fileinfo]" = {}
    def addfile(self, path: str, fileinfo: "fileinfo") -> None:
        # NOTE: the parameter intentionally shadows the module-level
        # `fileinfo` class; only the passed instance is used here.
        self.files[path] = fileinfo
    def build(self) -> "changelist":
        """Return a changelist over the collected files."""
        return changelist(self.parent, self.files)
class changelist(object):
    """Description of file changes applied on top of a parent commit."""

    def __init__(self, parent: "Optional[str]", files: "Dict[str, fileinfo]") -> None:
        self.parent = parent
        self.files = files

    def todict(self) -> "Dict[str, Any]":
        # Each fileinfo serializes itself; we only map paths to those dicts.
        return {
            "parent": self.parent,
            "files": {name: info.todict() for name, info in self.files.items()},
        }

    @classmethod
    def fromdict(cls, d: "Dict[str, Any]") -> "changelist":
        """Rebuild a changelist from a dict produced by todict()."""
        rebuilt = {name: fileinfo.fromdict(info) for name, info in d["files"].items()}
        return cls(d.get("parent"), rebuilt)
class fileinfo(object):
    """State of a single file within a changelist: deletion marker, mode
    flags ('l' symlink, 'x' executable), new content and copy source."""

    def __init__(
        self,
        deleted: "Optional[bool]" = False,
        flags: "Optional[str]" = None,
        content: "Optional[str]" = None,
        copysource: "Optional[str]" = None,
    ) -> None:
        self.deleted = deleted
        self.flags = flags
        self.content = content
        self.copysource = copysource

    def islink(self) -> bool:
        """True if the flags mark this file as a symlink."""
        return bool(self.flags is not None and "l" in self.flags)

    def isexec(self) -> bool:
        """True if the flags mark this file as executable."""
        return bool(self.flags is not None and "x" in self.flags)

    def todict(self) -> "Dict[str, Union[bool, str]]":
        return {
            "deleted": self.deleted,
            "flags": self.flags,
            "content": self.content,
            "copysource": self.copysource,
        }

    @classmethod
    def fromdict(cls, d: "Dict[str, Any]") -> "fileinfo":
        """Rebuild a fileinfo from a dict; absent keys become None."""
        return cls(
            d.get("deleted"),
            d.get("flags"),
            d.get("content"),
            d.get("copysource"),
        )
| facebookexperimental/eden | eden/hg-server/edenscm/hgext/memcommit/commitdata.py | Python | gpl-2.0 | 5,031 |
import pyterpol
import matplotlib.pyplot as plt
# Wavelength window (Angstrom) for the comparison.
wmin = 3600
wmax = 4100

sygri = pyterpol.SyntheticGrid(flux_type='absolute')

# Sample three grid points around teff ~ 10000 K at z = 1.0 to see how
# the absolute spectra react to small teff/logg changes.
param_sets = [
    dict(teff=9950, logg=3.7, z=1.0),
    dict(teff=10000, logg=3.5, z=1.0),
    dict(teff=10000, logg=4.0, z=1.0),
]
spectra = [sygri.get_synthetic_spectrum(pars, [wmin, wmax], order=4, step=0.1)
           for pars in param_sets]

# Overplot all three spectra in a single axes object.
ax = plt.subplot(111)
for spec in spectra:
    spec.plot(ax=ax)
plt.show()
#!/usr/bin/env python
# -*- coding: <utf-8> -*-
"""
This file is part of Spartacus project
Copyright (C) 2016 CSE
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
from Configuration.Configuration import MEMORY_START_AT
__author__ = "CSE"
__copyright__ = "Copyright 2015, CSE"
__credits__ = ["CSE"]
__license__ = "GPL"
__version__ = "2.0"
__maintainer__ = "CSE"
__status__ = "Dev"
DEFAULT_LOAD_ADDRESS = MEMORY_START_AT
UNDEFINED = "&&undefined&&"
| CommunicationsSecurityEstablishment/spartacus | ToolChain/Linker/Constants.py | Python | gpl-2.0 | 1,090 |
# Portions Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# convert.py Foreign SCM converter
#
# Copyright 2005-2007 Matt Mackall <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""import revisions from foreign VCS repositories into Mercurial"""
from __future__ import absolute_import
from edenscm.mercurial import registrar
from edenscm.mercurial.i18n import _
from . import convcmd, subversion
cmdtable = {}
command = registrar.command(cmdtable)  # decorator registering each command below into cmdtable
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = "ships-with-hg-core"
# Commands definition was moved elsewhere to ease demandload job.
@command(
    "convert",
    [
        (
            "",
            "authors",
            "",
            _("username mapping filename (DEPRECATED) (use --authormap instead)"),
            _("FILE"),
        ),
        ("s", "source-type", "", _("source repository type"), _("TYPE")),
        ("d", "dest-type", "", _("destination repository type"), _("TYPE")),
        ("r", "rev", [], _("import up to source revision REV"), _("REV")),
        ("A", "authormap", "", _("remap usernames using this file"), _("FILE")),
        ("", "filemap", "", _("remap file names using contents of file"), _("FILE")),
        ("", "full", None, _("apply filemap changes by converting all files again")),
        ("", "splicemap", "", _("splice synthesized history into place"), _("FILE")),
        ("", "branchmap", "", _("change branch names while converting"), _("FILE")),
        ("", "branchsort", None, _("try to sort changesets by branches")),
        ("", "datesort", None, _("try to sort changesets by date")),
        ("", "sourcesort", None, _("preserve source changesets order")),
        ("", "closesort", None, _("try to reorder closed revisions")),
    ],
    _("hg convert [OPTION]... SOURCE [DEST [REVMAP]]"),
    norepo=True,
)
def convert(ui, src, dest=None, revmapfile=None, **opts):
    """convert a foreign SCM repository to a Mercurial one.
    Accepted source formats [identifiers]:
    - Mercurial [hg]
    - git [git]
    - Subversion [svn]
    - Perforce [p4]
    - Google Repo [repo]
    Accepted destination formats [identifiers]:
    - Mercurial [hg]
    - Subversion [svn] (history on branches is not preserved)
    If no revision is given, all revisions will be converted.
    Otherwise, convert will only import up to the named revision
    (given in a format understood by the source).
    If no destination directory name is specified, it defaults to the
    basename of the source with ``-hg`` appended. If the destination
    repository doesn't exist, it will be created.
    By default, all sources except Mercurial will use --branchsort.
    Mercurial uses --sourcesort to preserve original revision numbers
    order. Sort modes have the following effects:
    --branchsort convert from parent to child revision when possible,
    which means branches are usually converted one after
    the other. It generates more compact repositories.
    --datesort sort revisions by date. Converted repositories have
    good-looking changelogs but are often an order of
    magnitude larger than the same ones generated by
    --branchsort.
    --sourcesort try to preserve source revisions order, only
    supported by Mercurial sources.
    --closesort try to move closed revisions as close as possible
    to parent branches, only supported by Mercurial
    sources.
    If ``REVMAP`` isn't given, it will be put in a default location
    (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple
    text file that maps each source commit ID to the destination ID
    for that revision, like so::
    <source ID> <destination ID>
    If the file doesn't exist, it's automatically created. It's
    updated on each commit copied, so :hg:`convert` can be interrupted
    and can be run repeatedly to copy new commits.
    The authormap is a simple text file that maps each source commit
    author to a destination commit author. It is handy for source SCMs
    that use unix logins to identify authors (e.g.: CVS). One line per
    author mapping and the line format is::
    source author = destination author
    Empty lines and lines starting with a ``#`` are ignored.
    The filemap is a file that allows filtering and remapping of files
    and directories. Each line can contain one of the following
    directives::
    include path/to/file-or-dir
    exclude path/to/file-or-dir
    rename path/to/source path/to/destination
    Comment lines start with ``#``. A specified path matches if it
    equals the full relative name of a file or one of its parent
    directories. The ``include`` or ``exclude`` directive with the
    longest matching path applies, so line order does not matter.
    The ``include`` directive causes a file, or all files under a
    directory, to be included in the destination repository. The default
    if there are no ``include`` statements is to include everything.
    If there are any ``include`` statements, nothing else is included.
    The ``exclude`` directive causes files or directories to
    be omitted. The ``rename`` directive renames a file or directory if
    it is converted. To rename from a subdirectory into the root of
    the repository, use ``.`` as the path to rename to.
    ``--full`` will make sure the converted changesets contain exactly
    the right files with the right content. It will make a full
    conversion of all files, not just the ones that have
    changed. Files that already are correct will not be changed. This
    can be used to apply filemap changes when converting
    incrementally. This is currently only supported for Mercurial and
    Subversion.
    The splicemap is a file that allows insertion of synthetic
    history, letting you specify the parents of a revision. This is
    useful if you want to e.g. give a Subversion merge two parents, or
    graft two disconnected series of history together. Each entry
    contains a key, followed by a space, followed by one or two
    comma-separated values::
    key parent1, parent2
    The key is the revision ID in the source
    revision control system whose parents should be modified (same
    format as a key in .hg/shamap). The values are the revision IDs
    (in either the source or destination revision control system) that
    should be used as the new parents for that node. For example, if
    you have merged "release-1.0" into "trunk", then you should
    specify the revision on "trunk" as the first parent and the one on
    the "release-1.0" branch as the second.
    The branchmap is a file that allows you to rename a branch when it is
    being brought in from whatever external repository. When used in
    conjunction with a splicemap, it allows for a powerful combination
    to help fix even the most badly mismanaged repositories and turn them
    into nicely structured Mercurial repositories. The branchmap contains
    lines of the form::
    original_branch_name new_branch_name
    where "original_branch_name" is the name of the branch in the
    source repository, and "new_branch_name" is the name of the branch
    is the destination repository. No whitespace is allowed in the new
    branch name. This can be used to (for instance) move code in one
    repository from "default" to a named branch.
    Mercurial Source
    ################
    The Mercurial source recognizes the following configuration
    options, which you can set on the command line with ``--config``:
    :convert.hg.ignoreerrors: ignore integrity errors when reading.
    Use it to fix Mercurial repositories with missing revlogs, by
    converting from and to Mercurial. Default is False.
    :convert.hg.saverev: store original revision ID in changeset
    (forces target IDs to change). It takes a boolean argument and
    defaults to False.
    :convert.hg.startrev: specify the initial Mercurial revision.
    The default is 0.
    :convert.hg.revs: revset specifying the source revisions to convert.
    Subversion Source
    #################
    Subversion source detects classical trunk/branches layouts. By default, the
    supplied ``svn://repo/path/`` source URL is converted as a single branch. If
    ``svn://repo/path/trunk`` exists it replaces the default branch. If
    ``svn://repo/path/branches`` exists, its subdirectories are listed as
    possible branches. Default ``trunk`` and ``branches`` values can be
    overridden with following options. Set them to paths relative to the source
    URL, or leave them blank to disable auto detection.
    The following options can be set with ``--config``:
    :convert.svn.branches: specify the directory containing branches.
    The default is ``branches``.
    :convert.svn.trunk: specify the name of the trunk branch. The
    default is ``trunk``.
    :convert.localtimezone: use local time (as determined by the TZ
    environment variable) for changeset date/times. The default
    is False (use UTC).
    Source history can be retrieved starting at a specific revision,
    instead of being integrally converted. Only single branch
    conversions are supported.
    :convert.svn.startrev: specify start Subversion revision number.
    The default is 0.
    Git Source
    ##########
    The Git importer converts commits from all reachable branches (refs
    in refs/heads) and remotes (refs in refs/remotes) to Mercurial.
    Branches are converted to bookmarks with the same name, with the
    leading 'refs/heads' stripped. Git submodules are not supported.
    The following options can be set with ``--config``:
    :convert.git.similarity: specify how similar files modified in a
    commit must be to be imported as renames or copies, as a
    percentage between ``0`` (disabled) and ``100`` (files must be
    identical). For example, ``90`` means that a delete/add pair will
    be imported as a rename if more than 90% of the file hasn't
    changed. The default is ``50``.
    :convert.git.findcopiesharder: while detecting copies, look at all
    files in the working copy instead of just changed ones. This
    is very expensive for large projects, and is only effective when
    ``convert.git.similarity`` is greater than 0. The default is False.
    :convert.git.renamelimit: perform rename and copy detection up to this
    many changed files in a commit. Increasing this will make rename
    and copy detection more accurate but will significantly slow down
    computation on large projects. The option is only relevant if
    ``convert.git.similarity`` is greater than 0. The default is
    ``400``.
    :convert.git.committeractions: list of actions to take when processing
    author and committer values.
    Git commits have separate author (who wrote the commit) and committer
    (who applied the commit) fields. Not all destinations support separate
    author and committer fields (including Mercurial). This config option
    controls what to do with these author and committer fields during
    conversion.
    A value of ``messagedifferent`` will append a ``committer: ...``
    line to the commit message if the Git committer is different from the
    author. The prefix of that line can be specified using the syntax
    ``messagedifferent=<prefix>``. e.g. ``messagedifferent=git-committer:``.
    When a prefix is specified, a space will always be inserted between the
    prefix and the value.
    ``messagealways`` behaves like ``messagedifferent`` except it will
    always result in a ``committer: ...`` line being appended to the commit
    message. This value is mutually exclusive with ``messagedifferent``.
    ``dropcommitter`` will remove references to the committer. Only
    references to the author will remain. Actions that add references
    to the committer will have no effect when this is set.
    ``replaceauthor`` will replace the value of the author field with
    the committer. Other actions that add references to the committer
    will still take effect when this is set.
    The default is ``messagedifferent``.
    :convert.git.extrakeys: list of extra keys from commit metadata to copy to
    the destination. Some Git repositories store extra metadata in commits.
    By default, this non-default metadata will be lost during conversion.
    Setting this config option can retain that metadata. Some built-in
    keys such as ``parent`` and ``branch`` are not allowed to be copied.
    :convert.git.remoteprefix: remote refs are converted as bookmarks with
    ``convert.git.remoteprefix`` as a prefix followed by a /. The default
    is 'remote'.
    :convert.git.saverev: whether to store the original Git commit ID in the
    metadata of the destination commit. The default is True.
    :convert.git.skipsubmodules: does not convert root level .gitmodules files
    or files with 160000 mode indicating a submodule. Default is False.
    Perforce Source
    ###############
    The Perforce (P4) importer can be given a p4 depot path or a
    client specification as source. It will convert all files in the
    source to a flat Mercurial repository, ignoring labels, branches
    and integrations. Note that when a depot path is given you then
    usually should specify a target directory, because otherwise the
    target may be named ``...-hg``.
    The following options can be set with ``--config``:
    :convert.p4.encoding: specify the encoding to use when decoding standard
    output of the Perforce command line tool. The default is default system
    encoding.
    :convert.p4.startrev: specify initial Perforce revision (a
    Perforce changelist number).
    Mercurial Destination
    #####################
    The following options are supported:
    :convert.hg.clonebranches: dispatch source branches in separate
    clones. The default is False.
    :convert.hg.usebranchnames: preserve branch names. The default is
    True.
    :convert.hg.sourcename: records the given string as a 'convert_source' extra
    value on each commit made in the target repository. The default is None.
    """
    # The actual conversion pipeline lives in convcmd; this function only
    # declares the command-line interface and forwards the options.
    return convcmd.convert(ui, src, dest, revmapfile, **opts)
@command("debugsvnlog", [], "hg debugsvnlog", norepo=True)
def debugsvnlog(ui, **opts):
return subversion.debugsvnlog(ui, **opts)
def kwconverted(ctx, name):
    """Return one component of a changeset's convert_revision extra.

    For Subversion revisions ("svn:...") the uuid/path/rev part selected by
    *name* is extracted; for any other source (or an unknown *name*) the raw
    value is returned unchanged.
    """
    rev = ctx.extra().get("convert_revision", "")
    if rev.startswith("svn:"):
        part_index = {"svnuuid": 0, "svnpath": 1, "svnrev": 2}
        if name in part_index:
            value = subversion.revsplit(rev)[part_index[name]]
            # the revision number is reported as a string
            return str(value) if name == "svnrev" else value
    return rev
templatekeyword = registrar.templatekeyword()  # registry for the svn* template keywords below
@templatekeyword("svnrev")
def kwsvnrev(repo, ctx, **args):
    """String. Converted subversion revision number."""
    return kwconverted(ctx, "svnrev")
@templatekeyword("svnpath")
def kwsvnpath(repo, ctx, **args):
    """String. Converted subversion revision project path."""
    return kwconverted(ctx, "svnpath")
@templatekeyword("svnuuid")
def kwsvnuuid(repo, ctx, **args):
    """String. Converted subversion revision repository identifier."""
    return kwconverted(ctx, "svnuuid")
# tell hggettext to extract docstrings from these functions:
i18nfunctions = [kwsvnrev, kwsvnpath, kwsvnuuid]
| facebookexperimental/eden | eden/scm/edenscm/hgext/convert/__init__.py | Python | gpl-2.0 | 16,126 |
import unittest
from model.session import *
from model.dbhandler import *
class SessionTest(unittest.TestCase):
    """Integration tests for Session persistence (require a live database)."""
    def test_instance(self):
        # adding any key must leave the session with a non-empty data store
        session = Session('123')
        session.addData('key', 'vall')
        self.assertNotEqual(session.data, None)
    def test_add_data(self):
        # two Session objects created with the same sid must share the data
        # persisted by the first one
        session1 = Session('1234')
        session1.addData('testKey', 'testVal')
        session1.addData('list', ['val1, val2'])
        session2 = Session('1234')
        self.assertEqual(session1.data, session2.data)
    def test_destroy(self):
        # destroying both sids must leave no matching rows in user_session
        session = Session('123')
        session.addData('key', 'vall')
        Session.destroy('1234')
        Session.destroy('123')
        dbh = DbHandler.getInstance()
        cur = dbh.cur
        cur.execute("""SELECT data FROM `user_session`
                    WHERE sid IN ('123', '1234')""")
        self.assertEqual(cur.rowcount, 0)
| gantonov/restaurant-e-menu | cgi-bin/tests/test_session.py | Python | gpl-3.0 | 891 |
'''
Created on Mar 25, 2016
Created on Jan 27, 2016
3.3V pin : 1,17
5V pin : 2,4
Ground : 6,9,14,20,25,30,34,39
EPROM : 27,28
GPIO : 3,5,7,8,10,11,12,13,15,16,18,10,21,22,23,24,26,29,31,32,33,35,36,37,38,40
Motor Control : 29,31,33,35
front 7,8
left 11,12
right 15,16
back 21,22
top 23,24
signal 26
sigt 10
wireless IMU
import socket, traceback
host = ''
port = 5555
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.bind((host, port))
while 1:
try:
message, address = s.recvfrom(8192)
print message
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
@author: kumaran
@author: kumaran
'''
import RPi.GPIO as GPIO
import time
from string import atoi,atof
import sys,tty,termios
import random
class EchoSensor(object):
    """HC-SR04-style ultrasonic distance sensor driven via two GPIO pins.

    trigger: BOARD pin used to fire the ultrasonic pulse (output).
    echo:    BOARD pin that is high while the echo is in flight (input).
    """
    def __init__(self,trigger,echo):
        self.trigger = trigger
        self.echo = echo
        #print "Sensor configured with t,e",self.trigger,self.echo
        GPIO.setup(self.trigger,GPIO.OUT)
        GPIO.setup(self.echo,GPIO.IN,pull_up_down = GPIO.PUD_DOWN)
        time.sleep(0.5)  # let the sensor settle after (re)configuration
    def measure(self):
        """Fire one pulse and return the measured distance in centimetres.

        NOTE(review): the two busy-wait loops below have no timeout; if the
        echo line never changes state this call blocks forever -- confirm
        the wiring guarantees a response.
        """
        GPIO.output(self.trigger,0)
        #GPIO.input(self.echo,pull_up_down = GPIO.PUD_DOWN)
        GPIO.output(self.trigger,True)
        time.sleep(0.00001)  # ~10 us trigger pulse
        GPIO.output(self.trigger,False)
        self.startTime=time.time()
        while GPIO.input(self.echo) == False:
            self.startTime = time.time()
        while GPIO.input(self.echo) == True:
            self.stopTime = time.time()
        self.elapsedTime=self.stopTime-self.startTime
        # distance = elapsed time * speed of sound (34000 cm/s) / 2 round trips
        self.distance=(self.elapsedTime*34000.0)/2.0
        return self.distance
    def avgDistance(self,trails):
        """Return the mean of `trails` consecutive measurements, 0.1 s apart."""
        self.avgdist=0.0
        for i in range(trails):
            time.sleep(0.1)
            self.avgdist+=self.measure()
        return self.avgdist/trails
class Engine(object):
    """Two-motor drive controller with ultrasonic obstacle avoidance.

    `status` is a one-letter state: 'x' initial, 'f' forward, 'r' reverse,
    's' stopped, 'h' halted for good. lm1/lm2 and rm1/rm2 are the GPIO pins
    of the left and right motor, t the base turn duration (seconds) and dc
    the obstacle distance cutoff in centimetres.
    """
    def __init__(self,lm1,lm2,rm1,rm2,t,dc,ft=0,fc=0,bt=0,bc=0):
        self.status="x"
        # candidate turn durations centred on t; one is picked at random
        self.turnDelay=[t-0.2,t-0.1,t,t+0.1,t+0.2]
        self.leftMotor=[lm1,lm2]
        self.rightMotor=[rm1,rm2]
        self.motors=self.leftMotor+self.rightMotor
        self.DistanceCutoff=dc
        self.Maxturns = 5  # consecutive fruitless turns before giving up
        GPIO.setup(self.motors,GPIO.OUT)
        # front sensor: trigger pin ft, echo pin fc (both must be non-zero)
        if ft and fc:
            self.FronSensor=True
            self.FS=EchoSensor(ft,fc)
        else:
            self.FronSensor=False
        # back sensor: trigger pin bt, echo pin bc
        if bt and bc:
            self.BackSensor=True
            self.BS=EchoSensor(bt,bc)
        else:
            self.BackSensor=False
    def Scan(self):
        """Refresh the distance reading(s) relevant to the current state."""
        if self.FronSensor and self.BackSensor:
            if self.status in ["x","s","t"]:
                self.FS.measure()
                self.BS.measure()
            elif self.status == 'f':
                self.FS.measure()
            elif self.status == 'r':
                self.BS.measure()
            else:
                print "Problem with Echo sensors"
    def Stop(self):
        """Cut power to all motor pins and mark the engine stopped."""
        self.status='s'
        GPIO.output(self.motors,0)
    def Run(self):
        """Main loop: scan and move until no direction is left ('h')."""
        self.turns=0
        while self.status != 'h':
            time.sleep(0.01)
            self.Scan()
            self.Move()
            #print self.status,self.FS.distance,self.BS.distance
        self.Stop()
        GPIO.cleanup()
        print 'No way to go.. stopping....'
    def Move(self):
        """Choose the next action from the state and last measured distances."""
        if self.status in ["s","x","t","f"] and self.FS.distance > self.DistanceCutoff:
            self.MoveForward()
            self.turns=0
        elif self.status in ["s","x","t","r"] and self.BS.distance > self.DistanceCutoff:
            self.MoveBackward()
            self.turns = 0
        elif self.status == "f" and self.FS.distance < self.DistanceCutoff:
            # blocked while driving forward: turn away
            self.Turn()
        elif self.status == "r" and self.BS.distance < self.DistanceCutoff:
            # blocked while reversing: turn away
            self.Turn()
        else:
            # no clear path either way; count the attempt and turn again
            self.turns+=1
            self.Turn()
            if self.turns > self.Maxturns:
                self.status = 'h'
    def MoveForward(self):
        self.status = 'f'
        GPIO.output(self.motors,(0,1,1,0))
    def MoveBackward(self):
        self.status = 'r'
        GPIO.output(self.motors,(1,0,0,1))
    def Turn(self):
        """Pivot left or right (random choice) for a random duration, then stop."""
        if random.choice(['L','R'])=='R':
            GPIO.output(self.motors,(0,0,0,1))
            time.sleep(random.choice(self.turnDelay))
            self.Stop()
        else:
            GPIO.output(self.motors,(1,0,0,0))
            time.sleep(random.choice(self.turnDelay))
            self.Stop()
if __name__=="__main__":
GPIO.setmode(GPIO.BOARD)
Neo=Engine(29,31,33,35,0.5,30.0,22,21,24,23)
Neo.Run()
| kumar-physics/pi | ObstacleAvoidance/ObstacleAvoidance/Jeno.py | Python | gpl-3.0 | 4,812 |
def check_voter(name):
    """Let a person vote exactly once (hash-table membership demo).

    Uses the module-level `voted` dict as the record of who already voted.
    """
    already_voted = voted.get(name)
    if already_voted:
        print("kick them out!")
        return
    voted[name] = True
    print("let them vote!")
# --- dict basics: a tiny grocery price book --------------------------------
book = dict()
book["apple"] = 0.67
book["milk"] = 1.49
book["avocado"] = 1.49
print(book)
print(book["avocado"])
# --- dict as a phone book ---------------------------------------------------
phone_book = {}
phone_book["jenny"] = 711256
phone_book["emergency"] = 1
print(phone_book["jenny"])
# --- dict for deduplication: voter check-in ---------------------------------
voted ={}
check_voter("tom")
check_voter("kate")
check_voter("tom")  # second attempt by "tom" is rejected
| serggrom/python-algorithms | Hash-tables.py | Python | gpl-3.0 | 446 |
#!/usr/bin/python
# (c) Nelen & Schuurmans. GPL licensed, see LICENSE.txt.
from optparse import make_option
from django.core.management.base import BaseCommand
from lizard_wbconfiguration.models import AreaField
from django.db import transaction
from django.db.models import get_model
import logging
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """
    (Remove and re-)insert model field names to wb configuration.
    """
    help = ("Example: bin/django wb_configuration --app=app "\
            "--model_name=model")

    option_list = BaseCommand.option_list + (
        make_option('--app',
                    help='app',
                    type='str',
                    default=None),
        make_option('--model_name',
                    help='Model name.',
                    type='str',
                    default=None))

    @transaction.commit_on_success
    def handle(self, *args, **options):
        """Create an AreaField row for every field of the given model."""
        if not options['app'] or not options['model_name']:
            # BUGFIX: the message previously referred to a non-existent
            # "--model" option and to "-help"; the actual names are
            # --model_name and --help.
            logger.error("Expected --app and --model_name args. "\
                         "Use --help for example.")
            return

        model = get_model(options['app'], options['model_name'])
        for field in model._meta.fields:
            # code uniquely identifies the field as "app.model.field"
            code = ".".join([options['app'],
                             options['model_name'],
                             field.name])
            AreaField.objects.get_or_create(
                code=code,
                app_name=options['app'].lower(),
                model_name=options['model_name'].lower(),
                field_name=field.name)
            logger.debug('Inserting "%s" field', field.name)
| lizardsystem/lizard-wbconfiguration | lizard_wbconfiguration/management/commands/wb_configuration.py | Python | gpl-3.0 | 1,672 |
#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""Module to upload standin plans.
This module is there in order to parse, figure out and uploads
standin plans for the FLS Wiesbaden framework.
"""
__all__ = []
__version__ = '4.36.1'
__author__ = 'Lukas Schreiner'
import urllib.parse
import urllib.error
import traceback
import sys
import os
import os.path
import json
import base64
import configparser
import shutil
import pickle
import requests
import glob
from requests.auth import HTTPBasicAuth
from threading import Thread
from datetime import datetime
from PyQt5.QtWidgets import QApplication, QSystemTrayIcon, QMenu
from PyQt5.QtCore import pyqtSignal, pyqtSlot, QObject
from PyQt5.QtGui import QIcon
from searchplaner import SearchPlaner
from errorlog import ErrorDialog
from planparser import getParser
from planparser.untis import Parser as UntisParser
import sentry_sdk
# absolute hack, but required for cx_Freeze to work properly.
if sys.platform == 'win32':
import PyQt5.sip
APP = None
APPQT = None
class WatchFile(object):
	"""Snapshot of a watched file: its location plus mtime/atime at scan time."""

	def __init__(self, path, fname):
		self.path = path
		self.name = fname
		stat_result = os.stat(self.getFullName)
		self.mtime = stat_result.st_mtime
		self.atime = stat_result.st_atime

	@property
	def getFullName(self):
		"""Full path of the file (forward-slash join, as used by the uploader)."""
		return '{:s}/{:s}'.format(self.path, self.name)
class Vertretungsplaner(QObject):
	showDlg = pyqtSignal()  # ask the GUI thread to show the error dialog
	hideDlg = pyqtSignal()  # ask the GUI thread to hide the error dialog
	# title, message, icon type (QSystemTrayIcon enum value), timeout in ms
	message = pyqtSignal(str, str, int, int)
	cleanupDlg = pyqtSignal()  # ask the GUI thread to reset the dialog

	def getWatchPath(self):
		"""Directory that is polled for new standin-plan files."""
		return self.config.get("default", "path")

	def getSendURL(self):
		"""Upload endpoint of the FLS Wiesbaden framework."""
		return self.config.get("default", "url")

	def getAPIKey(self):
		"""API key used to authenticate uploads."""
		return self.config.get("default", "api")

	def getStatus(self):
		"""True while a scan/upload cycle is in progress."""
		return self.locked

	def getOption(self, name):
		"""True if boolean option *name* is enabled in the [options] section."""
		return self.config.has_option('options', name) and self.config.get('options', name) in ['True', True]

	def getIntervall(self):
		"""Polling interval in seconds, as float."""
		return float(self.config.get("default", "intervall"))

	def isProxyEnabled(self):
		"""True if an HTTP proxy is enabled in the [proxy] section."""
		return self.config.getboolean('proxy', 'enable', fallback=False)

	def getRun(self):
		"""Flag telling the background search thread whether to keep running."""
		return self.run

	def setRun(self, run):
		self.run = run

	def showInfo(self, title, msg):
		"""Show an informational tray balloon."""
		self.showToolTip(title, msg, 'info')

	def showError(self, title, msg):
		"""Show an error tray balloon."""
		self.showToolTip(title, msg, 'error')

	def showToolTip(self, title, msg, msgtype):
		"""Emit the `message` signal so the tray icon shows a balloon tip."""
		trayIcon = QSystemTrayIcon.Critical if msgtype == 'error' else QSystemTrayIcon.Information
		timeout = 10000  # balloon display time in milliseconds
		self.message.emit(title, msg, trayIcon, timeout)
def getHandler(self, fileName):
extension = os.path.splitext(fileName)[-1].lower()
ptype = self.config.get('vplan', 'type')
return getParser(extension, self.config)
	@pyqtSlot()
	def getNewFiles(self):
		"""Poll the watch directory once and process all new/changed plan files."""
		print('Starte suche...')
		self.locked = True
		pathToWatch = self.getWatchPath()
		try:
			after = dict([(f, WatchFile(pathToWatch, f)) for f in os.listdir(pathToWatch)])
		except FileNotFoundError:
			print('\nCould not poll directory %s (does not exist!)' % (pathToWatch,))
			# try recreate the directory (maybe it does not exist in base path:
			try:
				os.makedirs(pathToWatch)
			except:
				pass
			self.locked = False
			return
		# diff the new snapshot against the previous one (self.before)
		added = [f for f in after if not f in self.before]
		removed = [f for f in self.before if not f in after]
		same = [f for f in after if f in self.before]
		changed = [f for f in same if self.before[f].mtime != after[f].mtime]
		todo = added + changed
		if todo:
			print("\nChanged/Added new Files: ", ", ".join(todo))
			for f in todo:
				f = f.strip()
				handler = self.getHandler(f)
				if handler:
					# one Sentry transaction per processed file
					transName = '{}_{}_{}'.format(
						datetime.now().strftime('%Y-%m-%dT%H%M%S'),
						self.config.get('sentry', 'transPrefix', fallback='n'),
						f.replace(' ', '_')
					)
					with sentry_sdk.start_transaction(op='parseUploadPlan', name=transName) as transaction:
						try:
							self.parsePlanByHandler(transaction, handler, f)
						except Exception as e:
							# report the failure, notify the user, then re-raise
							sentry_sdk.capture_exception(e)
							self.showError(
								'Neuer Vertretungsplan',
								'Vertretungsplan konnte nicht verarbeitet ' + \
								'werden, weil die Datei fehlerhaft ist.'
							)
							print('Error: %s' % (str(e),))
							traceback.print_exc()
							self.dlg.addError(str(e))
							#FIXME: self.showDlg.emit()
							raise
						print('Ending transaction {}'.format(transName))
						transaction.finish()
					# for untis, we parse only the first one!
					if handler.onlyFirstFile():
						break
				else:
					print('"%s" will be ignored.' % (f,))
		if removed:
			print("\nRemoved files: ", ", ".join(removed))
		# remember this snapshot for the next poll
		self.before = after
		self.locked = False
	def initPlan(self):
		"""Record the initial directory snapshot and start the polling thread."""
		pathToWatch = self.getWatchPath()
		try:
			if not os.path.exists(pathToWatch):
				os.makedirs(pathToWatch)
			self.before = dict([(f, WatchFile(pathToWatch, f)) for f in os.listdir(pathToWatch)])
		except FileNotFoundError:
			print('\nCould not poll directory %s (does not exist!)' % (pathToWatch,))
			self.before = {}
		# Now start Looping
		# NOTE(review): Thread.start() returns None, so self.search is always
		# None here -- confirm whether keeping the Thread object was intended.
		self.search = Thread(target=SearchPlaner, args=(self,)).start()
def sendPlan(self, transaction, table, absFile, planType='all'):
data = json.dumps(table).encode('utf-8')
# check what we need to do.
# 1st we need to save the data?
if self.config.getboolean('options', 'saveResult'):
destFileName = os.path.join(
self.config.get('default', 'resultPath'),
'vplan-result-{:s}.json'.format(datetime.now().strftime('%Y-%m-%d_%H%M%S_%f'))
)
if not os.path.exists(os.path.dirname(destFileName)):
os.makedirs(os.path.dirname(destFileName))
with open(destFileName, 'wb') as f:
f.write('Type: {:s}\n'.format(planType).encode('utf-8'))
f.write(data)
if self.config.getboolean('options', 'upload'):
data = base64.b64encode(data).decode('utf-8').replace('\n', '')
values = {
'apikey': base64.b64encode(self.getAPIKey().encode('utf-8')).decode('utf-8').replace('\n', ''),
'data': data,
'type': planType
}
values = urllib.parse.urlencode(values)
if self.getOption('debugOnline'):
values['XDEBUG_SESSION_START'] = '1'
proxies = None
if self.isProxyEnabled():
print('Proxy is activated')
httpproxy = "http://"+self.config.get("proxy", "phost")+":"+self.config.get("proxy", "pport")
proxies = {
"http" : httpproxy,
"https": httpproxy
}
transaction.set_data('http.proxy_uri', httpproxy)
transaction.set_tag('http.proxy', True)
else:
print('Proxy is deactivated')
transaction.set_tag('http.proxy', False)
headers = {}
httpauth = None
if self.config.has_option("siteauth", "enable") and self.config.get("siteauth", "enable") == 'True':
httpauth = HTTPBasicAuth(
self.config.get('siteauth', 'username'),
self.config.get('siteauth', 'password')
)
transaction.set_tag('http.basic_auth', True)
else:
transaction.set_tag('http.basic_auth', False)
# add post info
headers['Content-Type'] = 'application/x-www-form-urlencoded;charset=utf-8'
errorMessage = None
errObj = None
try:
req = requests.post(self.getSendURL(), data=values, proxies=proxies, headers=headers, auth=httpauth)
except requests.exceptions.ConnectionError as err:
self.createCoreDump(err)
errorMessage = (
'Warnung',
'Der Vertretungsplan konnte eventuell nicht korrekt hochgeladen werden. '
'Bitte kontaktieren Sie das Website-Team der FLS! '
'Beim Hochladen konnte keine Verbindung zum Server aufgebaut werden.'
)
errObj = err
print('HTTP-Fehler aufgetreten: {:s}'.format(str(err)))
sentry_sdk.capture_exception(err)
except urllib.error.URLError as err:
self.createCoreDump(err)
errorMessasge = (
'Warnung',
'Der Vertretungsplan konnte eventuell nicht korrekt hochgeladen werden. \
Bitte kontaktieren Sie das Website-Team der FLS!'
)
errObj = err
print('URL-Fehler aufgetreten: {:s}'.format(err.reason))
sentry_sdk.capture_exception(err)
except Exception as err:
self.createCoreDump(err)
errorMessage = (
'Warnung',
'Der Vertretungsplan konnte eventuell nicht korrekt hochgeladen werden. \
Bitte kontaktieren Sie das Website-Team der FLS!'
)
errObj = err
print("Unbekannter Fehler aufgetreten: ", err)
sentry_sdk.capture_exception(err)
else:
transaction.set_tag('http.status_code', req.status_code)
transaction.set_data('http.text', req.text)
if req.status_code != 204:
errorMessage = (
'Warnung',
'Der Vertretungsplan konnte eventuell nicht korrekt hochgeladen werden. '
'Es wurde ein abweichender Statuscode erhalten: {:d}'.format(req.status_code)
)
errObj = req.text
else:
print(req.text)
print('Erfolgreich hochgeladen.')
# any error to show in detail to user?
if errorMessage:
transaction.set_data('vplan.send_error', errorMessage)
if errObj:
self.dlg.addData(str(errObj))
self.showError(*errorMessage)
self.dlg.addError(errorMessage[1])
else:
self.showInfo('Vertretungsplan hochgeladen', 'Die Datei wurde erfolgreich hochgeladen.')
# now move the file and save an backup. Also delete the older one.
self.moveAndDeleteVPlanFile(absFile)
    def createCoreDump(self, err):
        """Write a pickled diagnostic dump of *err* and the current traceback.

        Does nothing unless the 'createCoreDump' option is enabled. The dump
        folder is wiped on every call, so only the most recent dump survives.
        """
        if not self.getOption('createCoreDump'):
            return
        try:
            __file__
        except NameError:
            # __file__ can be undefined (e.g. frozen/interactive runs);
            # fall back to the script name. The assignment creates a local.
            __file__ = 'flsvplan.py'
        path = os.path.dirname(__file__) if os.path.dirname(__file__) else sys.path[0]
        if path and not os.path.isdir(path):
            path = os.path.dirname(path)
        path = '%s%scoredump' % (path, os.sep)
        filename = '%s%s%s-%s.dump' % (path, os.sep, __file__, datetime.now().strftime('%Y%m%d%H%M%S%f'))
        # truncate folder
        if os.path.exists(path):
            shutil.rmtree(path, ignore_errors=False, onerror=None)
        os.makedirs(path)
        dump = {}
        dump['tb'] = traceback.format_exc()
        dump['tbTrace'] = {}
        # snapshot of the exception object's public attributes
        dump['err'] = self.dumpObject(err)
        excInfo = sys.exc_info()
        i = 0
        # store the (type, value, traceback) triple as printable strings
        while i < len(excInfo):
            dump['tbTrace'][i] = 'No args available: %s' % (excInfo[i],)
            i += 1
        with open(filename, 'wb') as f:
            pickle.dump(dump, f, protocol=pickle.HIGHEST_PROTOCOL)
        print('Coredump created in %s' % (filename,))
def dumpObject(self, obj):
struc = {}
for k, v in vars(obj).items():
if not k.startswith('_') and k != 'fp':
try:
struc[k] = self.dumpObject(v)
except:
struc[k] = v
return struc
def moveAndDeleteVPlanFile(self, absFile):
# file => Actual file (move to lastFile)
# self.lastFile => last File (delete)
path = absFile
if os.path.exists(self.lastFile) and self.lastFile != '':
# delete
os.remove(self.lastFile)
print('File %s removed' % (self.lastFile))
# move
newFile = ''
if self.config.get('options', 'backupFiles') == 'True':
newFile = "%s.backup" % (path)
if self.config.get('options', 'backupFolder') != 'False':
backdir = self.config.get('options', 'backupFolder')
if backdir[-1:] is not os.sep:
backdir = '%s%s' % (backdir, os.sep)
newFile = '%s%s%s%s.backup' % (self.getWatchPath(), os.sep, backdir, path)
# before: check if folder eixsts.
backdir = '%s%s%s' % (self.getWatchPath(), os.sep, backdir)
if not os.path.exists(backdir):
os.makedirs(backdir)
print('Copy %s to %s for backup.' % (path, newFile))
shutil.copyfile(path, newFile)
if self.config.get('options', 'delUpFile') == 'True' and os.path.exists(path):
print('Delete uploaded file %s' % (path))
os.remove(path)
folderPath = os.path.dirname(path)
if self.config.get('options', 'delFolder') == 'True' and os.path.exists(folderPath):
for filename in glob.iglob(folderPath + '/*'):
try:
os.remove(filename)
except:
pass
self.lastFile = newFile
def parsePlanByHandler(self, transaction, hdl, fileName):
# send a notification
self.showInfo('Neuer Vertretungsplan', 'Es wurde eine neue Datei gefunden und wird jetzt verarbeitet.')
absPath = os.path.join(self.config.get('default', 'path'), fileName)
djp = hdl(self.config, self.dlg, absPath)
djp.planFileLoaded.connect(self.planFileLoaded)
djp.planParserPrepared.connect(self.planParserPrepared)
with transaction.start_child(op='parse::loadFile', description=fileName) as transChild:
djp.loadFile(transChild)
with transaction.start_child(op='parse::preParse', description=fileName):
djp.preParse(transChild)
with transaction.start_child(op='parse::parse', description=fileName):
djp.parse(transChild)
with transaction.start_child(op='parse::postParse', description=fileName):
djp.postParse(transChild)
data = djp.getResult()
data['system'] = {
'version': __version__,
'handler': hdl.__name__,
'fname': absPath
}
self.showInfo('Neuer Vertretungsplan', 'Vertretungsplan wurde verarbeitet und wird nun hochgeladen.')
with transaction.start_child(op='parse::sendPlan', description=fileName):
self.sendPlan(transChild, data, absPath)
# something to show?
if self.dlg.hasData:
self.showDlg.emit()
    @pyqtSlot()
    def planFileLoaded(self):
        # Qt slot: notified by the plan parser once the file was loaded.
        # Nothing to do here at the moment.
        pass
    @pyqtSlot()
    def planParserPrepared(self):
        # Qt slot: once the parser is set up, hide and reset any previously
        # shown error dialog so the new run starts with a clean state.
        if self.dlg.isVisible():
            self.hideDlg.emit()
        self.cleanupDlg.emit()
    def loadConfig(self):
        # Read config.ini from the current working directory (UTF-8 encoded).
        # configparser.read() silently ignores a missing file.
        self.config = configparser.ConfigParser()
        self.config.read(["config.ini"], encoding='utf-8')
    @pyqtSlot()
    def bye(self):
        # Qt slot: stop the watcher loop and terminate the application.
        # NOTE(review): APPQT is declared global but never used here.
        global APPQT
        self.run = False
        sys.exit(0)
    def initTray(self):
        """Create the system tray icon with its context menu and show a
        startup notification."""
        self.tray = QSystemTrayIcon(QIcon('logo.ico'), self)
        menu = QMenu('FLS Vertretungsplaner')
        menu.addAction('Planer hochladen', self.getNewFiles)
        menu.addAction('Beenden', self.bye)
        self.tray.setContextMenu(menu)
        # route message() signal emissions to the tray's balloon notifications
        self.message.connect(self.tray.showMessage)
        self.tray.show()
        self.showInfo(
            'Vertretungsplaner startet...',
            'Bei Problemen wenden Sie sich bitte an das Website-Team der Friedrich-List-Schule Wiesbaden.'
        )
def initSentry(self):
# check if sentry is enabled.
if not self.config.getboolean('sentry', 'enable', fallback=False) \
or not self.config.get('sentry', 'sendsn', fallback=None):
return
try:
import sentry_sdk
except:
pass
else:
# proxy settings?
if self.isProxyEnabled():
httpproxy = "http://"+self.config.get("proxy", "phost")+":"+self.config.get("proxy", "pport")
else:
httpproxy = None
def logSentrySend(event, hint):
print('Now sending sentry data!!!')
sentry_sdk.init(
self.config.get('sentry', 'sendsn'),
max_breadcrumbs=self.config.getint('sentry', 'maxBreadcrumbs', fallback=50),
debug=self.config.getboolean('sentry', 'debug', fallback=False),
send_default_pii=self.config.getboolean('sentry', 'pii', fallback=False),
environment=self.config.get('sentry', 'environment', fallback=None),
sample_rate=self.config.getfloat('sentry', 'sampleRate', fallback=1.0),
traces_sample_rate=self.config.getfloat('sentry', 'tracesSampleRate', fallback=1.0),
http_proxy=httpproxy,
https_proxy=httpproxy,
before_send=logSentrySend,
release=__version__
)
self._sentryEnabled = True
    def __init__(self):
        """Set up state, configuration, sentry, tray icon and file watcher."""
        super().__init__()
        self.lastFile = ''              # previously uploaded file (deletion candidate)
        self.run = True                 # watcher loop keeps running while True
        self.config = None              # populated by loadConfig()
        self.tray = None                # populated by initTray()
        self.search = None              # background search/watcher thread
        self.before = None              # directory snapshot of the watch folder
        self.locked = False             # guards against concurrent polls
        self._sentryEnabled = False
        self.loadConfig()
        self.initSentry()
        self.initTray()
        debugLog = self.config.getboolean('options', 'debugLogs', fallback=False)
        self.dlg = ErrorDialog(debugLog)
        # dialog control happens via signals so it works across threads
        self.showDlg.connect(self.dlg.open)
        self.hideDlg.connect(self.dlg.close)
        self.cleanupDlg.connect(self.dlg.cleanup)
        self.initPlan()
if __name__ == '__main__':
    # Start the Qt application. The tray icon must keep the app alive even
    # when no window is open, hence quit-on-last-window-closed is disabled.
    APPQT = QApplication(sys.argv)
    APPQT.setQuitOnLastWindowClosed(False)
    APP = Vertretungsplaner()
    sys.exit(APPQT.exec_())
| FLS-Wiesbaden/vplanUploader | flsvplan.py | Python | gpl-3.0 | 15,692 |
from Social import *
class Comment(db.Model):
    """A comment written by a user underneath a post."""
    __tablename__ = "Comments"
    Id = db.Column(db.Integer, unique=True, primary_key=True)
    post = db.Column(db.Integer)        # Id of the Post being commented on
    author = db.Column(db.String(20))   # UserId of the comment's author
    text = db.Column(db.String(500))    # comment body
    date = db.Column(db.DateTime(25))   # creation timestamp (naive local time)
    def __init__(self, post, author, text):
        """Create a comment.

        post   -- Id of the Post being commented on
        author -- User object; its UserId is stored
        text   -- comment body
        """
        self.post = post
        self.text = text
        self.author = author.UserId
        self.date = datetime.datetime.now()
class Post(db.Model):
    """A post published by a user."""
    __tablename__ = "Posts"
    Id = db.Column(db.Integer, unique=True, primary_key=True)
    author = db.Column(db.String(20))   # UserId of the post's author
    text = db.Column(db.String(500))    # post body
    date = db.Column(db.DateTime(25))   # creation timestamp (naive local time)
    def __init__(self, author, text):
        """Create a post; *author* is a User object whose UserId is stored."""
        self.author = author.UserId
        self.text = text
        self.date = datetime.datetime.now()
class Hashtag(db.Model):
    """A hashtag and the post(s) it appeared in."""
    __tablename__ = "Hashtag"
    Id = db.Column(db.Integer, unique=True, primary_key=True)
    name = db.Column(db.String(25))    # tag text without the leading '#'
    posts = db.Column(db.String(25))   # presumably post id(s), string-encoded — TODO confirm
    def __init__(self, name, FirstPost):
        self.name = name
        self.posts = FirstPost
    def __repr__(self):
        return "<Hashtag #"+self.name+">"
class User(db.Model):
    """Account record; the is_* properties and get_id() match the API
    expected by flask-login (presumably — verify against the app setup)."""
    __tablename__ = "Users"
    UserId = db.Column(db.String(20), unique=True, primary_key=True)  # login name
    name = db.Column(db.String(10))       # first name
    cognome = db.Column(db.String(10))    # surname
    img = db.Column(db.String(255))       # avatar URL
    desc = db.Column(db.String(255))      # profile description
    Hash = db.Column(db.String(60))       # bcrypt password hash
    Salt = db.Column(db.String(100))      # bcrypt salt *without* the '$2a$12$' prefix
    follows = db.Column(db.String(1000))  # followed UserIds (string-encoded)
    FeedKey = db.Column(db.String(70), unique=True)  # secret key for the personal feed
    def __init__(self, name, cognome, UserName, password, desc):
        """Create the user, hash the password and persist the row.

        NOTE(review): committing inside __init__ couples model creation to
        the session, and the str()/bcrypt usage (plus `unicode` below)
        indicates this code targets Python 2 — confirm before porting.
        """
        self.UserId = UserName
        self.name = name
        self.cognome = cognome
        self.img = "/static/NewUser.png"
        self.desc = desc
        # gensalt() returns '$2a$12$<salt>'; only the salt part is stored
        # and the fixed prefix is re-added below when hashing.
        self.Salt = bcrypt.gensalt()[7:]
        self.Hash = bcrypt.hashpw(str(password), "$2a$12$"+self.Salt)
        self.follows = ""
        # NOTE(review): FeedKey is derived from password+username; treat it
        # as a secret equivalent to a session token.
        self.FeedKey = bcrypt.hashpw(str(password+UserName), bcrypt.gensalt())
        db.session.add(self)
        db.session.commit()
    def __repr__(self):
        return "<User {id}>".format(id=self.UserId)
    @property
    def is_authenticated(self):
        # every stored user counts as authenticated
        return True
    @property
    def is_active(self):
        return True
    @property
    def is_anonymous(self):
        return False
    def get_id(self):
        # `unicode` is a Python 2 builtin — TODO confirm runtime version
        return unicode(self.UserId)
| JackSpera/NapNap | Models.py | Python | gpl-3.0 | 2,444 |
# -*- coding: utf-8 -*-
"""
Started on thu, jun 21st, 2018
@author: carlos.arana
"""
# Librerias utilizadas
import pandas as pd
import sys
module_path = r'D:\PCCS\01_Dmine\Scripts'
if module_path not in sys.path:
sys.path.append(module_path)
from VarInt.VarInt import VarInt
from classes.Meta import Meta
from Compilador.Compilador import compilar
"""
Las librerias locales utilizadas renglones arriba se encuentran disponibles en las siguientes direcciones:
SCRIPT: | DISPONIBLE EN:
------ | ------------------------------------------------------------------------------------
VarInt | https://github.com/INECC-PCCS/01_Dmine/tree/master/Scripts/VarInt
Meta | https://github.com/INECC-PCCS/01_Dmine/tree/master/Scripts/Classes
Compilador | https://github.com/INECC-PCCS/01_Dmine/tree/master/Scripts/Compilador
"""
# Documentacion del Parametro ---------------------------------------------------------------------------------------
# Descripciones del Parametro
M = Meta
M.ClaveParametro = 'P9902'
M.NombreParametro = 'Personal Docente'
M.DescParam = 'Personal docente en educación básica y media superior de la modalidad escolarizada'
M.UnidadesParam = 'Personas'
M.TituloParametro = 'PD' # Para nombrar la columna del parametro
M.PeriodoParam = '2015'
M.TipoInt = 1 # 1: Binaria; 2: Multivariable, 3: Integral
# Handlings
M.ParDtype = 'float'
M.TipoVar = 'C' # (Tipos de Variable: [C]ontinua, [D]iscreta [O]rdinal, [B]inaria o [N]ominal)
M.array = []
M.TipoAgr = 'sum'
# Descripciones del proceso de Minería
M.nomarchivodataset = 'P9902'
M.extarchivodataset = 'xlsx'
M.ContenidoHojaDatos = 'Número de docentes por nivel educativo'
M.ClaveDataset = 'INEGI'
M.ActDatos = '2015'
M.Agregacion = 'Suma de unidades para los municipios que componen cada ciudad del SUN' \
# Descripciones generadas desde la clave del parámetro
M.getmetafromds = 1
Meta.fillmeta(M)
# Construccion del Parámetro -----------------------------------------------------------------------------------------
# Cargar dataset inicial
dataset = pd.read_excel(M.DirFuente + '\\' + M.ArchivoDataset,
sheetname='DATOS', dtype={'CVE_MUN': 'str'})
dataset.set_index('CVE_MUN', inplace=True)
del(dataset['Nombre'])
dataset = dataset.rename_axis('CVE_MUN')
dataset.head(2)
list(dataset)
# Generar dataset para parámetro y Variable de Integridad
var1 = 'Docentes Total'
par_dataset = dataset[var1]
par_dataset = dataset[var1].astype('float')
par_dataset = par_dataset.to_frame(name = M.ClaveParametro)
par_dataset, variables_dataset = VarInt(par_dataset, dataset, tipo=M.TipoInt)
# Compilacion
compilar(M, dataset, par_dataset, variables_dataset)
| Caranarq/01_Dmine | 99_Descentralizacion/P9902/P9902.py | Python | gpl-3.0 | 2,753 |
import configparser
import logging
import os
from shutil import copyfile
class wordclock_config:
    """Thin wrapper around the wordclock's INI configuration.

    Loads the user configuration (creating it from the shipped reference
    configuration on first run) and answers lookups, falling back to the
    reference configuration whenever an option is missing or malformed.
    """
    def __init__(self, basePath):
        self.loadConfig(basePath)

    def loadConfig(self, basePath):
        """Read user and reference configuration from <basePath>/wordclock_config."""
        cfg_path = basePath + '/wordclock_config/wordclock_config.cfg'
        ref_path = basePath + '/wordclock_config/wordclock_config.reference.cfg'
        if not os.path.exists(cfg_path):
            # Without a reference file there is nothing to bootstrap from.
            if not os.path.exists(ref_path):
                logging.error('No config-file available!')
                logging.error('  Expected ' + cfg_path + ' or ' + ref_path)
                raise Exception('Missing config-file')
            copyfile(ref_path, cfg_path)
            logging.warning('No config-file specified! Was created from reference-config!')
        logging.info('Parsing ' + cfg_path)
        self.config = configparser.ConfigParser()
        self.config.read(cfg_path)
        self.reference_config = configparser.ConfigParser()
        self.reference_config.read(ref_path)
        # Expose the current base path through the loaded configuration so
        # other classes/plugins can make use of it.
        self.config.set('wordclock', 'base_path', basePath)

    def request(self, method, *args):
        """Invoke *method* on the user config; fall back to the reference config."""
        try:
            return getattr(self.config, method)(*args)
        except:
            logging.warning("Defaulting to reference value for [" + str(args[0]) + "] " + str(args[1]))
            return getattr(self.reference_config, method)(*args)

    def getboolean(self, *args):
        """Boolean lookup with reference fallback."""
        return self.request("getboolean", *args)

    def getint(self, *args):
        """Integer lookup with reference fallback."""
        return self.request("getint", *args)

    def get(self, *args):
        """String lookup with reference fallback."""
        return self.request("get", *args)
| bk1285/rpi_wordclock | wordclock_tools/wordclock_config.py | Python | gpl-3.0 | 1,847 |
import enum
from zope.schema.interfaces import IBaseVocabulary
from zope.interface import directlyProvides
from isu.enterprise.enums import vocabulary
@vocabulary('mural')
@enum.unique
class Mural(enum.IntEnum):
    """Mode of study: distance (extramural) vs. on-campus (intramural)."""
    Extramural = 0
    Intramural = 1
@vocabulary('degree')
@enum.unique
class Degree(enum.IntEnum):
    """Academic degrees, ordered by seniority (gaps left for future values)."""
    NoDegree = 0
    # NOTE(review): 'Bacheloir' is a misspelling of 'Bachelor'; renaming
    # would break existing references, so it is only flagged here.
    Bacheloir = 5  # Bachelor
    Specialist = 6  # Specialist
    Master = 7  # Master
    PhD = 8
    MD = 9
    Professor = 10
@vocabulary('academicity')
@enum.unique
class AcademicRelevance(enum.IntEnum):
    """
    Applied bachelor programmes are designed so that the graduate
    acquires mostly practical skills, completes a long internship and
    can start working right after leaving university.
    Academic bachelor programmes provide more theoretical knowledge,
    and their graduates are more oriented towards continuing their
    studies in a master's programme.
    """
    # NOTE(review): the final character of 'Academс' is the Cyrillic letter
    # U+0441, not the Latin 'c'. Renaming would break existing references,
    # so it is only flagged here — confirm before changing.
    Academс = 1
    Applied = 2
| isu-enterprise/isu.college | src/isu/college/enums.py | Python | gpl-3.0 | 1,242 |
import base64
from django.contrib.auth import authenticate
import logging
def basic_http_authentication(request):
    """Authenticate a Django request via an HTTP Basic 'Authorization' header.

    Returns the authenticated user, or None when the header is absent,
    malformed, not Basic, or the credentials are rejected.
    """
    if 'HTTP_AUTHORIZATION' not in request.META:
        return None
    auth = request.META['HTTP_AUTHORIZATION'].split()
    user = None
    if len(auth) == 2 and auth[0].lower() == "basic":
        try:
            # Bugfix: b64decode returns bytes on Python 3; decode to str
            # before splitting (the old bytes.split(':') raised TypeError).
            decoded = base64.b64decode(auth[1]).decode('utf-8')
        except (ValueError, UnicodeDecodeError):
            # invalid base64 / non-UTF-8 payload: treat as unauthenticated
            return None
        # Bugfix: split on the first ':' only — passwords may themselves
        # contain colons (RFC 7617).
        uname, sep, passwd = decoded.partition(':')
        if sep:
            user = authenticate(username=uname, password=passwd)
    return user
| efornal/shoal | app/http_auth.py | Python | gpl-3.0 | 468 |
import sys
if sys.version_info > (3,):
from builtins import chr
import unittest
import os
import re
from adsft import extraction, rules, utils
from adsft.tests import test_base
from adsputils import load_config
import unittest
import httpretty
from requests.exceptions import HTTPError
class TestXMLExtractorBase(test_base.TestUnit):
    """
    Base class for XML Extractor unit tests
    """

    def setUp(self):
        """Load the list of XML parser names the concrete tests iterate over."""
        super(TestXMLExtractorBase, self).setUp()
        self.preferred_parser_names = load_config().get('PREFERRED_XML_PARSER_NAMES') # Iterate through all the parsers defined in config.py
        #self.preferred_parser_names = (None,) # Iterate through the parsers (as defined in config.py) until one succeeds
class TestXMLExtractor(TestXMLExtractorBase):
    """
    Checks the basic functionality of the XML extractor. The content that is to
    be extracted is defined within a dictionary inside settings.py. If this is
    modified, these tests should first be changed to reflect the needed updates.
    """

    def setUp(self):
        """
        Generic setup of the test class. Makes a dictionary item that the worker
        would expect to receive from the RabbitMQ instance. Loads the relevant
        worker as well into a class attribute so it is easier to access.
        :return:
        """
        super(TestXMLExtractor, self).setUp()
        self.dict_item = {'ft_source': self.test_stub_xml,
                          'file_format': 'xml',
                          'provider': 'MNRAS'}
        self.extractor = extraction.EXTRACTOR_FACTORY['xml'](self.dict_item)

    def test_that_we_can_open_an_xml_file(self):
        """
        Tests the open_xml method. Checks that it opens and reads the XML file
        correctly by comparing with the expected content of the file.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        self.assertIn(
            '<journal-title>JOURNAL TITLE</journal-title>',
            full_text_content
        )

    def test_that_we_can_parse_the_xml_content(self):
        """
        Tests the parse_xml method. Checks that the parsed content allows access
        to the XML marked-up content, and that the content extracted matches
        what is expected.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            journal_title = self.extractor.extract_string('//journal-title')
            self.assertEqual(journal_title, 'JOURNAL TITLE')

    def test_that_we_correctly_remove_inline_fomulas_from_the_xml_content(self):
        """
        Tests the parse_xml method. Checks that the parsed content allows access
        to the XML marked-up content, and that the content extracted matches
        what is expected.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            section = self.extractor.extract_string('//body//sec[@id="s1"]//p')
            self.assertEqual(section, 'INTRODUCTION GOES HERE')

    def test_iso_8859_1_xml(self):
        """
        Test that we properly read iso 8859 formatted file.
        Since we are not reading the default file we must recreate the extractor object.
        :return: no return
        """
        self.dict_item['ft_source'] = self.test_stub_iso8859
        self.extractor = extraction.EXTRACTOR_FACTORY['xml'](self.dict_item)
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            article_number = self.extractor.extract_string('//article-number')
            self.assertEqual(article_number, '483879')

    def test_multi_file(self):
        """
        some entries in fulltext/all.links specify multiple files
        typically the first has text from the article while the rest have the text from tables
        :return: no return
        """
        self.dict_item = {'ft_source': self.test_multi_file,
                          'file_format': 'xml',
                          'provider': 'MNRAS',
                          'bibcode': 'test'}
        content = extraction.extract_content([self.dict_item])
        # does the fulltext contain two copies of the file's contents
        self.assertEqual(2, content[0]['fulltext'].count('Entry 1'))

    def test_that_we_can_extract_using_settings_template(self):
        """
        Tests the extract_multi_content method. This checks that all the meta
        data extracted is what we expect. The expected meta data to be extracted
        is defined in settings.py by the user.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            content = self.extractor.extract_multi_content(preferred_parser_names=(parser_name,))
            # Bugfix: list.sort() returns None, so the previous
            # assertEqual(list(...).sort(), list(...).sort()) compared
            # None to None and always passed. Use sorted() instead.
            self.assertEqual(sorted(rules.META_CONTENT['xml'].keys()), sorted(content.keys()))

    def test_that_we_can_extract_all_content_from_payload_input(self):
        """
        Tests the extract_content method. This checks that all of the XML meta
        data defined in settings.py is extracted from the stub XML data.
        :return: no return
        """
        pay_load = [self.dict_item]
        content = extraction.extract_content(pay_load)
        self.assertTrue(
            set(rules.META_CONTENT['xml'].keys()).issubset(content[0].keys())
        )

    def test_that_the_correct_extraction_is_used_for_the_datatype(self):
        """
        Ensure that the defined data type in the settings.py dictionary loads
        the correct method for extraction
        :return: no return
        """
        extract_string = self.extractor.data_factory['string']
        extract_list = self.extractor.data_factory['list']
        if sys.version_info > (3,):
            es_name = extract_string.__name__
            el_name = extract_list.__name__
        else:
            es_name = extract_string.func_name
            el_name = extract_list.func_name
        self.assertTrue(
            es_name == 'extract_string',
        )
        self.assertTrue(
            el_name == 'extract_list',
        )

    def test_that_we_can_extract_a_list_of_datasets(self):
        """
        Within an XML document there may exist more than one dataset. To
        ensure that they are all extracted, we should check that this works
        otherwise there will be missing content
        :return: no return
        """
        self.dict_item['bibcode'] = 'test'
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            content = self.extractor.extract_multi_content(preferred_parser_names=(parser_name,))
            full_text = content['fulltext']
            acknowledgements = content['acknowledgements']
            data_set = content['dataset']
            data_set_length = len(data_set)
            if sys.version_info > (3,):
                test_type = str
            else:
                test_type = unicode
            self.assertIs(test_type, type(acknowledgements))
            self.assertIs(test_type, type(full_text))
            expected_full_text = 'INTRODUCTION'
            self.assertTrue(
                expected_full_text in full_text,
                u'Full text is wrong: {0} [expected: {1}, data: {2}]'
                .format(full_text,
                        expected_full_text,
                        full_text)
            )
            self.assertIs(list, type(data_set))
            expected_dataset = 2
            self.assertTrue(
                data_set_length == expected_dataset,
                u'Number of datasets is wrong: {0} [expected: {1}, data: {2}]'
                .format(data_set_length,
                        expected_dataset,
                        data_set)
            )

    def test_that_we_can_parse_html_entity_correctly(self):
        """
        Tests the parse_xml method. Checks that the HTML entities are parsed
        without errors caused by escaped ambersands.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            section = self.extractor.extract_string('//body//sec[@id="s2"]//p')
            self.assertEqual(section, u'THIS SECTION TESTS HTML ENTITIES LIKE \xc5 >.')

    def test_that_the_tail_is_preserved(self):
        """
        Tests that when a tag is removed any trailing text is preserved by appending
        it to the previous or parent element.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            section = self.extractor.extract_string('//body//sec[@id="s3"]//p')
            self.assertEqual(section, u'THIS SECTION TESTS THAT THE TAIL IS PRESERVED .')

    def test_that_comments_are_ignored(self):
        """
        Tests that parsing the xml file ignores any comments like <!-- example comment -->.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            section = self.extractor.extract_string('//body//sec[@id="s4"]//p')
            self.assertEqual(section, u'THIS SECTION TESTS THAT COMMENTS ARE REMOVED.')

    def test_that_cdata_is_removed(self):
        """
        Tests that parsing the xml file either removes CDATA tags like in the case of
        <?CDATA some data?> where it is in the form of a "processing instruction" or ignores
        the cdata content when in this <![CDATA] some data]]> form, which BeautifulSoup
        calls a "declaration".
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            section = self.extractor.extract_string('//body//sec[@id="s5"]//p')
            self.assertEqual(section, u'THIS SECTION TESTS THAT CDATA IS REMOVED.')

    def test_that_table_is_extracted_correctly(self):
        """
        Tests that the labels/comments for tables are kept while the content of
        the table is removed. Table footers are being removed in some cases where
        a graphic tag with no closing tag like <graphic xlink:href="example.gif">
        is reconciled with a closing tag that encompasses additonal content like the
        table footer. Since graphics are one of the tags we remove to avoid garbage
        text in our output, it correctly gets removed but takes content that should remain
        in the fulltext output with it (like the table footer).
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        s = u"TABLE I. TEXT a NOTES a TEXT"
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            section = self.extractor.extract_string('//body//table-wrap')
            self.assertEqual(section, s)

    def test_body_tag(self):
        """
        This tests that the parsers correctly extract the body tag.
        This is important for parsers lxml-xml and direct-lxml-xml
        which remove the body tag if they are not in the format:
        <html>
        <head></head>
        <body></body>
        </html>
        Also important for parsers lxml-xml and direct-lxml-xml, which are
        affected by namespaces in tags of the form namespace:name (e.g. ja:body).
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            section = self.extractor.extract_string('//body')
            self.assertEqual(section, u"I. INTRODUCTION INTRODUCTION GOES HERE "
                                      u"II. SECTION II THIS SECTION TESTS HTML ENTITIES LIKE \xc5 >. "
                                      u"III. SECTION III THIS SECTION TESTS THAT THE TAIL IS PRESERVED . "
                                      u"IV. SECTION IV THIS SECTION TESTS THAT COMMENTS ARE REMOVED. "
                                      u"V. SECTION V THIS SECTION TESTS THAT CDATA IS REMOVED. "
                                      u"Manual Entry 1 Manual Entry 2 TABLE I. TEXT a NOTES a TEXT"
                             )

    def test_extraction_of_acknowledgments(self):
        """
        This tests that acknowledgments are extracted, acknowledgments should include the
        facilities as of issue #100 in github. There are cases in which the acknowledgments
        are found inside the body tag, but the body tag should not include the acknowledgments
        as of issue #18. An acknowledgement field is included in the body tag in test.xml to
        test that these are being moved outside of the body tag, which is why we see 'ACK
        INSIDE BODY TAG.' appended to this acknowledgment.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            content = self.extractor.extract_multi_content(preferred_parser_names=(parser_name,))
            self.assertEqual(content['acknowledgements'], u"Acknowledgments WE ACKNOWLEDGE. Facilities: FacilityName1 , "
                                  "FacilityName2 , FacilityName3 , FacilityName4 , FacilityName5 , "
                                  "FacilityName6 , FacilityName7\nACK INSIDE BODY TAG.")

    def test_extraction_of_facilities(self):
        """
        This tests that we can extract the faciltites field.The first facility is
        a test to make sure we are not extracting facilities where the xlink:href is
        missing, and the second facilities where xlink:href is empty. This is
        discussed here: https://github.com/adsabs/ADSfulltext/issues/107
        :return: no return
        """
        facilities = [u'FacilityID3',
                      u'FacilityID4',
                      u'FacilityID5',
                      u'FacilityID6',
                      u'FacilityID7']
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            content = self.extractor.extract_multi_content(preferred_parser_names=(parser_name,))
            self.assertEqual(sorted(content['facility']), facilities)

    def test_removal_of_comment_syntax_around_body(self):
        """
        This tests the removal of comment syntax indicating the body of
        the article used by some publishers (AGU, sometimes Wiley).
        See https://github.com/adsabs/ADSfulltext/issues/104
        :return: no return
        """
        raw_xml = "<!-- body <body><p>body content</p></body> endbody -->"
        for parser_name in self.preferred_parser_names:
            self.assertEqual(self.extractor._remove_special_elements(raw_xml, parser_name), "<body><p>body content</p></body> ")
class TestNonStandardXMLExtractor(TestXMLExtractorBase):
    """
    Checks the basic functionality of the XML extractor. The content that is to
    be extracted is defined within a dictionary inside settings.py. If this is
    modified, these tests should first be changed to reflect the needed updates.
    """

    def setUp(self):
        """
        Generic setup of the test class. Makes a dictionary item that the worker
        would expect to receive from the RabbitMQ instance. Loads the relevant
        worker as well into a class attribute so it is easier to access.
        :return:
        """
        super(TestNonStandardXMLExtractor, self).setUp()
        # stub file that none of the configured parsers can handle
        self.dict_item = {'ft_source': self.test_stub_nonstandard_xml,
                          'file_format': 'xml',
                          'provider': 'MNRAS'}
        self.extractor = extraction.EXTRACTOR_FACTORY['xml'](self.dict_item)

    def test_failure_of_all_parsers_in_loop(self):
        """
        This ensures that articles that fail to get extracted by all of the parsers
        we loop through (defined in config) will return an empty body.
        See https://github.com/adsabs/ADSfulltext/issues/101
        """
        self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            content = self.extractor.extract_string('//body')
            # every parser fails on this stub, so the body must stay empty
            self.assertEqual(content, u'')
class TestTEIXMLExtractor(TestXMLExtractorBase):
    """
    Checks the basic functionality of the TEI XML extractor (content generated by Grobid).
    """

    def setUp(self):
        """
        Generic setup of the test class. Makes a dictionary item that the worker
        would expect to receive from the RabbitMQ instance. Loads the relevant
        worker as well into a class attribute so it is easier to access.
        :return:
        """
        super(TestTEIXMLExtractor, self).setUp()
        self.dict_item = {'ft_source': self.test_stub_teixml,
                          'file_format': 'teixml',
                          'provider': 'A&A',
                          'bibcode': 'TEST'}
        self.extractor = extraction.EXTRACTOR_FACTORY['teixml'](self.dict_item)

    def test_that_we_can_open_an_xml_file(self):
        """
        Tests the open_xml method. Checks that it opens and reads the XML file
        correctly by comparing with the expected content of the file.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        self.assertIn(
            '<title level="a" type="main">ASTRONOMY AND ASTROPHYSICS The NASA Astrophysics Data System: Architecture</title>',
            full_text_content
        )

    def test_that_we_can_parse_the_xml_content(self):
        """
        Tests the parse_xml method. Checks that the parsed content allows access
        to the XML marked-up content, and that the content extracted matches
        what is expected.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        # Each configured parser must yield the same marked-up title.
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            journal_title = self.extractor.extract_string('//title')
            self.assertEqual(journal_title, 'ASTRONOMY AND ASTROPHYSICS The NASA Astrophysics Data System: Architecture')

    def test_that_we_can_extract_using_settings_template(self):
        """
        Tests the extract_multi_content method. This checks that all the meta
        data extracted is what we expect. The expected meta data to be extracted
        is defined in settings.py by the user.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            content = self.extractor.extract_multi_content(preferred_parser_names=(parser_name,))
            self.assertEqual(rules.META_CONTENT['teixml'].keys(), content.keys())

    def test_that_we_can_extract_all_content_from_payload_input(self):
        """
        Tests the extract_content method. This checks that all of the XML meta
        data defined in settings.py is extracted from the stub XML data.
        :return: no return
        """
        pay_load = [self.dict_item]
        content = extraction.extract_content(pay_load)
        self.assertTrue(
            set(rules.META_CONTENT['teixml'].keys()).issubset(content[0].keys())
        )

    def test_that_we_can_extract_acknowledgments(self):
        """
        Tests that the acknowledgements section of a TEI XML document is
        returned verbatim by extract_multi_content for every preferred parser.
        :return: no return
        """
        ack = u"Acknowledgements. The usefulness of a bibliographic service is only as good as the quality and quantity of the data it contains . The ADS project has been lucky in benefitting from the skills and dedication of several people who have significantly contributed to the creation and management of the underlying datasets. In particular, we would like to acknowledge the work of Elizabeth Bohlen, Donna Thompson, Markus Demleitner, and Joyce Watson. Funding for this project has been provided by NASA under grant NCC5-189."
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            content = self.extractor.extract_multi_content(preferred_parser_names=(parser_name,))
            self.assertEqual(content['acknowledgements'], ack)
class TestXMLElsevierExtractor(TestXMLExtractorBase):
    """
    Checks the basic functionality of the Elsevier XML extractor.
    The content that is to be extracted is defined within a dictionary inside
    settings.py. This does inherit from the normal XML extractor, but has
    different requirements for extraction XPATHs due to the name spaces
    used within the XML.
    """

    def setUp(self):
        """
        Generic setup of the test class. Makes a dictionary item that the worker
        would expect to receive from the RabbitMQ instance. Loads the relevant
        worker as well into a class attribute so it is easier to access.
        :return:
        """
        super(TestXMLElsevierExtractor, self).setUp()
        # No 'file_format'/'provider' keys: the Elsevier extractor is selected
        # explicitly through the factory below.
        self.dict_item = {'ft_source': self.test_stub_exml,
                          'bibcode': 'TEST'
                          }
        self.extractor = extraction.EXTRACTOR_FACTORY['elsevier'](self.dict_item)

    def test_that_we_can_open_an_xml_file(self):
        """
        Tests the open_xml method. Checks that it opens and reads the XML file
        correctly by comparing with the expected content of the file. This is
        different to opening a normal XML file.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        self.assertIn('JOURNAL CONTENT', full_text_content)

    def test_that_we_can_parse_the_xml_content(self):
        """
        Tests the parse_xml method. Checks that the parsed content allows access
        to the XML marked-up content, and that the content extracted matches
        what is expected.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            # local-name() sidesteps the Elsevier namespace prefixes.
            journal_title = self.extractor.extract_string('//*[local-name()=\'title\']')
            self.assertIn('JOURNAL TITLE', journal_title)

    def test_that_we_can_extract_using_settings_template(self):
        """
        Tests the extract_multi_content method. This checks that all the meta
        data keywords extracted are the same as those expected. The expected
        meta data to be extracted is defined in settings.py by the user.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            content = self.extractor.extract_multi_content(preferred_parser_names=(parser_name,))
            # assertItemsEqual was renamed assertCountEqual in Python 3.
            if sys.version_info > (3,):
                func = self.assertCountEqual
            else:
                func = self.assertItemsEqual
            func(['fulltext', 'acknowledgements', 'dataset'],
                 content.keys(),
                 content.keys())
            self.assertIn('JOURNAL CONTENT', content['fulltext'])

    def test_that_the_correct_extraction_is_used_for_the_datatype(self):
        """
        Ensure that the defined data type in the settings.py dictionary loads
        the correct method for extraction
        :return: no return
        """
        extract_string = self.extractor.data_factory['string']
        extract_list = self.extractor.data_factory['list']
        # func_name was renamed __name__ in Python 3.
        if sys.version_info > (3,):
            es_name = extract_string.__name__
            el_name = extract_list.__name__
        else:
            es_name = extract_string.func_name
            el_name = extract_list.func_name
        self.assertTrue(
            es_name == 'extract_string',
        )
        self.assertTrue(
            el_name == 'extract_list',
        )

    def test_that_we_can_extract_a_list_of_datasets(self):
        """
        Within an XML document there may exist more than one dataset. To
        ensure that they are all extracted, we should check that this works
        otherwise there will be missing content
        :return: no return
        """
        self.dict_item['bibcode'] = 'test'
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            content = self.extractor.extract_multi_content(preferred_parser_names=(parser_name,))
            full_text = content['fulltext']
            acknowledgements = content['acknowledgements']
            data_set = content['dataset']
            data_set_length = len(data_set)
            # unicode was unified into str in Python 3.
            if sys.version_info > (3,):
                test_type = str
            else:
                test_type = unicode
            self.assertIs(test_type, type(acknowledgements))
            self.assertIs(test_type, type(full_text))
            expected_full_text = 'CONTENT'
            self.assertTrue(
                expected_full_text in full_text,
                u'Full text is wrong: {0} [expected: {1}, data: {2}]'
                .format(full_text,
                        expected_full_text,
                        full_text)
            )
            self.assertIs(list, type(data_set))
            expected_dataset = 2
            self.assertTrue(
                data_set_length == expected_dataset,
                u'Number of datasets is wrong: {0} [expected: {1}, data: {2}]'
                .format(data_set_length,
                        expected_dataset,
                        data_set)
            )

    def test_that_we_can_parse_html_entity_correctly(self):
        """
        Tests the parse_xml method. Checks that the HTML entities are parsed
        without errors caused by escaped ambersands.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            section = self.extractor.extract_string('//body//section[@id="s2"]//para')
            self.assertEqual(section, u'THIS SECTION TESTS HTML ENTITIES LIKE \xc5 >.')

    def test_that_the_tail_is_preserved(self):
        """
        Tests that when a tag is removed any trailing text is preserved by appending
        it to the previous or parent element.
        This test currently only works with the lxml-xml parser when extracting
        Elsevier XML data.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            section = self.extractor.extract_string('//body//section[@id="s3"]//para')
            self.assertEqual(section, u'THIS SECTION TESTS THAT THE TAIL IS PRESERVED .')

    def test_that_comments_are_ignored(self):
        """
        Tests that parsing the xml file ignores any comments like <!-- example comment -->.
        :return: no return
        """
        full_text_content = self.extractor.open_xml()
        for parser_name in self.preferred_parser_names:
            self.extractor.parse_xml(preferred_parser_names=(parser_name,))
            section = self.extractor.extract_string('//body//section[@id="s4"]//para')
            self.assertEqual(section, u'THIS SECTION TESTS THAT COMMENTS ARE REMOVED.')
class TestHTMLExtractor(test_base.TestUnit):
    """
    Tests class to ensure the methods for opening and extracting content from
    HTML files works correctly.
    """

    def setUp(self):
        """
        Generic setup of the test class. Makes a dictionary item that the worker
        would expect to receive from the RabbitMQ instance. Loads the relevant
        worker as well into a class attribute so it is easier to access.
        :return: no return
        """
        super(TestHTMLExtractor, self).setUp()
        # The HTML stub links to a companion table file; both paths are passed
        # as a single comma-separated ft_source string.
        self.dict_item = {
            'ft_source': u'{0},{1}'.format(self.test_stub_html,
                                           self.test_stub_html_table),
            'bibcode': 'TEST'
        }
        self.extractor = extraction.EXTRACTOR_FACTORY['html'](self.dict_item)

    def test_that_we_can_open_an_html_file(self):
        """
        Tests the open_html method. Checks the content loaded matches what is
        inside the file.
        :return: no return
        """
        full_text_content = self.extractor.open_html()
        self.assertIn('TITLE', full_text_content)

    def test_can_parse_an_html_file(self):
        """
        Tests the parse_html method. Checks that the HTML is parsed correctly,
        and that it allows relevant content to be extracted in the way we expect
        it to be.
        :return: no return
        """
        raw_html = self.extractor.open_html()
        parsed_html = self.extractor.parse_html()
        header = parsed_html.xpath('//h2')[0].text
        self.assertIn('TITLE', header, self.app.conf['PROJ_HOME'])

    def test_that_we_can_extract_table_contents_correctly(self):
        """
        Tests the collate_tables method. This checks that the tables linked
        inside the HTML file are found and aggregated into a dictionary, where
        each entry in the dictionary has the table name as the keyword and the
        table content as the value. This just ensures they exist and that they
        can be searched as expect.
        :return: no return
        """
        raw_html = self.extractor.open_html()
        parsed_html = self.extractor.parse_html()
        table_content = self.extractor.collate_tables()
        for key in table_content.keys():
            self.assertTrue(table_content[key].xpath('//table'))
            self.assertTrue(self.extractor.parsed_html.xpath('//h2'))

    def test_that_we_can_extract_using_settings_template(self):
        """
        Tests the extract_multi_content. This checks that the full text that was
        extracted from the HTML document includes the content of the HTML tables
        that are linked from within the parent HTML document.
        :return: no return
        """
        content = self.extractor.extract_multi_content()
        self.assertEqual(list(content.keys()), ['fulltext'])
        self.assertIn(
            'ONLY IN TABLE',
            content['fulltext'],
            u'Table is not in the fulltext: {0}'.format(content['fulltext'])
        )
class TestOCRandTXTExtractor(test_base.TestUnit):
    """
    Class that test the methods of loading and extracting full text content
    from text and optical character recognition files.
    """

    def setUp(self):
        """
        Generic setup of the test class. Makes a dictionary item that the worker
        would expect to receive from the RabbitMQ instance. Loads the relevant
        worker as well into a class attribute so it is easier to access.
        :return: no return
        """
        super(TestOCRandTXTExtractor, self).setUp()
        self.dict_item = {'ft_source': self.test_stub_text,
                          'bibcode': 'TEST'}
        self.dict_item_ocr = {'ft_source': self.test_stub_ocr,
                              'bibcode': 'TEST'}
        self.extractor = extraction.EXTRACTOR_FACTORY['txt'](self.dict_item)
        # TextCleaner instance used to test the translation map directly.
        self.TC = utils.TextCleaner(text='')

    def test_open_txt_file(self):
        """
        Tests the open_text method. Checks that the content loaded matches what
        is in the file.
        :return: no return
        """
        raw_text = self.extractor.open_text()
        self.assertIn('Introduction', raw_text)

    def test_parse_txt_file(self):
        """
        Tests the parse_text method. Checks that the text is parsed correctly,
        specifically, it should be decoded, translated, and normalised, so it
        should not contain certain escape characters. This checks it does not
        have strange escape characters. This is for a 'txt' file.
        :return: no return
        """
        raw_text = self.extractor.open_text()
        parsed_text = self.extractor.parse_text(translate=True, decode=True)
        self.assertIn('Introduction', parsed_text)
        self.assertNotIn("\x00", parsed_text)

    def test_parse_ocr_file(self):
        """
        Tests the parse_text method. Checks that the text is parsed correctly,
        specifically, it should be decoded, translated, and normalised, so it
        should not contain certain escape characters. This checks it does not
        have strange escape characters. This is for a 'ocr' file.
        :return: no return
        """
        # Point the extractor at the OCR stub instead of the plain-text one.
        self.extractor.dict_item = self.dict_item_ocr
        raw_text = self.extractor.open_text()
        parsed_text = self.extractor.parse_text(translate=True, decode=True)
        self.assertIn('introduction', parsed_text.lower())
        self.assertIn('THIS IS AN INTERESTING TITLE', parsed_text)
        self.assertNotIn("\x00", parsed_text)

    def test_ASCII_parsing(self):
        """
        Tests the parse_text method. Checks that escape characters are removed
        as expected for ASCII characters.
        :return: no return
        """
        self.extractor.raw_text \
            = 'Tab\t CarriageReturn\r New line\n Random Escape characters:' \
            + chr(1) + chr(4) + chr(8)
        expected_out_string = 'Tab CarriageReturn New line Random Escape characters:'
        new_instring = self.extractor.parse_text(translate=True, decode=True)
        self.assertEqual(new_instring, expected_out_string)

    def test_Unicode_parsing(self):
        """
        Tests the parse_text method. Checks that escape characters are removed
        as expected for unicode characters.
        :return: no return
        """
        self.extractor.raw_text = \
            u'Tab\t CarriageReturn New line\n Random Escape characters:' \
            + u'\u0000'
        expected_out_string = u'Tab CarriageReturn New line Random Escape characters:'
        new_instring = self.extractor.parse_text(translate=True, decode=True)
        self.assertEqual(new_instring, expected_out_string)

    def test_translation_map_works(self):
        """
        Tests the translation map from the utils.py module. Ensures that
        escape characters are removed from the string.
        :return: no return
        """
        # test replace with spaces
        instring = \
            'Tab\t CarriageReturn\r New line\n Random Escape characters:'\
            + chr(0x0B) + chr(0xA0) + chr(0x1680)
        expected_out_string = \
            'Tab\t CarriageReturn  New line\n Random Escape characters:   '
        new_instring = instring.translate(self.TC.master_translate_map)
        self.assertEqual(new_instring, expected_out_string)
        # test replace with None
        instring = \
            'Tab\t CarriageReturn\r New line\n Random Escape characters:' \
            + chr(0x00) + chr(0xAD) + chr(0xE000)
        expected_out_string = \
            'Tab\t CarriageReturn New line\n Random Escape characters:'
        new_instring = instring.translate(self.TC.master_translate_map)
        self.assertEqual(new_instring, expected_out_string)
        # test both
        instring = \
            'Tab\t CarriageReturn\r New line\n Random Escape characters:' \
            + chr(0x202F) + chr(0xFDD0)
        expected_out_string = \
            'Tab\t CarriageReturn  New line\n Random Escape characters: '
        new_instring = instring.translate(self.TC.master_translate_map)
        self.assertEqual(new_instring, expected_out_string)

    def test_extract_multi_content_on_text_data(self):
        """
        Tests the extract_multi_content method. Checks that the full text
        extracted matches what we expect it should extract.
        :return: no return
        """
        content = self.extractor.extract_multi_content()
        self.assertIn('introduction', content['fulltext'].lower())
class TestHTTPExtractor(test_base.TestUnit):
    """
    Class that tests the methods used to extract full text content from HTTP
    sources behaves as expected.
    """

    def setUp(self):
        """
        Generic setup of the test class. Makes a dictionary item that the worker
        would expect to receive from the RabbitMQ instance. Loads the relevant
        worker as well into a class attribute so it is easier to access.
        :return: no return
        """
        super(TestHTTPExtractor, self).setUp()
        self.dict_item = {'ft_source': 'http://fake/http/address',
                          'bibcode': 'TEST'}
        self.extractor = extraction.EXTRACTOR_FACTORY['http'](self.dict_item)
        # Body returned by every mocked HTTP response below.
        self.body_content = 'Full text extract'

    def tearDown(self):
        """
        Generic teardown of the test class. It closes down all the instances of
        HTTPretty that is used to mock HTTP responses.
        :return: no return
        """
        # disable afterwards, so that you will have no problems in code that
        # uses that socket module
        httpretty.disable()
        # reset HTTPretty state (clean up registered urls and request history)
        httpretty.reset()

    @httpretty.activate
    def test_http_can_be_open(self):
        """
        Tests the open_http method. Checks that the HTTP content is loaded
        correctly.
        :return: no return
        """
        httpretty.register_uri(httpretty.GET,
                               self.dict_item['ft_source'],
                               body=self.body_content)
        response = self.extractor.open_http()
        self.assertEqual(
            response,
            self.body_content,
            u'Expected response: {0}\n but got: {1}'
            .format(self.body_content, response)
        )

    @httpretty.activate
    def test_http_response_not_200(self):
        """
        Tests the open_http method. Checks that an HTTPError is thrown if it
        receives a response from the server that is not equal to 200.
        :return: no return
        """
        httpretty.register_uri(httpretty.GET,
                               self.dict_item['ft_source'],
                               body=self.body_content,
                               status=304)
        self.assertRaises(HTTPError, self.extractor.open_http)

    @httpretty.activate
    def test_http_parses(self):
        """
        Tests the parse_http method. Checks that the content received from the
        server is parsed as we expect it to be. The result is compared to the
        expected output.
        :return: no return
        """
        httpretty.register_uri(httpretty.GET,
                               self.dict_item['ft_source'],
                               body=self.body_content,
                               status=200)
        self.extractor.open_http()
        parsed_content = self.extractor.parse_http()
        self.assertEqual(parsed_content, self.body_content)

    @httpretty.activate
    def test_http_multi_content(self):
        """
        Tests the extract_multi_content method. Checks that the full text
        content is extracted from the HTTP resource correctly, by comparing to
        what we expect the content to be.
        :return: no return
        """
        httpretty.register_uri(httpretty.GET,
                               self.dict_item['ft_source'],
                               body=self.body_content,
                               status=200)
        content = self.extractor.extract_multi_content()
        self.assertEqual(content['fulltext'], self.body_content)
# Allow the extraction test suite to be run directly as a script.
if __name__ == '__main__':
    unittest.main()
| adsabs/ADSfulltext | adsft/tests/test_extraction.py | Python | gpl-3.0 | 40,776 |
'''
Created on Sep 02, 2014
:author: svakulenko
'''
# Bing API Version 2.0
# sample URL for web search
# https://api.datamarket.azure.com/Bing/Search/Web?$format=json&Query=%27Xbox%
# 27&$top=2
from eWRT.ws.rest import RESTClient
from eWRT.ws import AbstractIterableWebSource
class BingSearch(AbstractIterableWebSource):
    """wrapper for the Bing Search API (Azure DataMarket, API version 2.0)"""

    NAME = "Bing Search"
    ROOT_URL = 'https://api.datamarket.azure.com/Bing/Search'
    DEFAULT_MAX_RESULTS = 50  # requires only 1 api access
    SUPPORTED_PARAMS = ['command', 'output_format']
    DEFAULT_COMMAND = 'Web'  # Image, News
    DEFAULT_FORMAT = 'json'
    DEFAULT_START_INDEX = 0
    # NOTE(review): declared as a plain lambda class attribute; accessing it on
    # an instance would bind it like a method. search_documents() uses its own
    # local copy instead — confirm whether this attribute is still needed.
    RESULT_PATH = lambda x: x['d']['results']  # path to the results in json
    # NOTE(review): MAPPING is not consulted by convert_item() below — verify
    # whether the base class uses it before removing.
    MAPPING = {'date': ('valid_from', 'convert_date'),
               'text': ('content', None),
               'title': 'Title',
               }

    def __init__(self, api_key, username, api_url=ROOT_URL):
        """fixes the credentials and initiates the RESTClient

        :param api_key: Azure DataMarket account key (used as the password).
        :param username: account user name for HTTP basic authentication.
        :param api_url: service root URL; defaults to ROOT_URL.
        """
        assert(api_key)
        self.api_key = api_key
        self.api_url = api_url
        self.username = username
        self.client = RESTClient(
            self.api_url, password=self.api_key, user=self.username,
            authentification_method='basic')

    def search_documents(self, search_terms, max_results=DEFAULT_MAX_RESULTS,
                         from_date=None, to_date=None, command=DEFAULT_COMMAND,
                         output_format=DEFAULT_FORMAT):
        """calls iterator and results' post-processor

        :param search_terms: terms to query the API with.
        :param max_results: maximum number of results per term.
        :param command: API endpoint; 'Web' search is the default.
        :returns: the post-processed results from process_output.
        """
        # Web search is by default
        fetched = self.invoke_iterator(search_terms, max_results, from_date,
                                       to_date, command, output_format)
        # local copy of RESULT_PATH (see NOTE above on the class attribute)
        result_path = lambda x: x['d']['results']
        return self.process_output(fetched, result_path)

    def request(self, search_term, current_index,
                max_results=DEFAULT_MAX_RESULTS, from_date=None,
                to_date=None, command=DEFAULT_COMMAND,
                output_format=DEFAULT_FORMAT):
        """calls Bing Search API

        :param search_term: single query string.
        :param current_index: offset of the first result ($skip parameter).
        :returns: the raw API response from the REST client.
        """
        parameters = {'Query': search_term,
                      '$format': output_format,
                      '$top': max_results,
                      '$skip': current_index}
        # for testing purposes
        # print(current_index, max_results, search_term)
        response = self.client.execute(command, query_parameters=parameters)
        return response

    @classmethod
    def convert_item(cls, item):
        """output convertor: applies a mapping to convert
        the result to the required format

        :param item: one raw result dict containing 'Url' and 'Title' keys.
        :returns: dict with lower-case 'url' and 'title' keys.
        """
        result = {'url': item['Url'],
                  'title': item['Title'],
                  }
        return result
from vsg.rules import token_prefix as Rule
from vsg import token
# Tokens inspected by this rule: the designator of an alias declaration.
lTokens = []
lTokens.append(token.alias_declaration.alias_designator)
class rule_600(Rule):
    '''
    Checks that alias designators carry a valid prefix.
    The default required prefix is *a\_*.

    |configuring_prefix_and_suffix_rules_link|

    **Violation**

    .. code-block:: vhdl

       alias header is name;
       alias footer is name;

    **Fix**

    .. code-block:: vhdl

       alias a_header is name;
       alias a_footer is name;
    '''

    def __init__(self):
        # Register as rule 600 of the alias_declaration group over the
        # alias-designator tokens.
        Rule.__init__(self, 'alias_declaration', '600', lTokens)
        self.solution = 'Alias designators'
        self.prefixes = ['a_']
| jeremiah-c-leary/vhdl-style-guide | vsg/rules/alias_declaration/rule_600.py | Python | gpl-3.0 | 689 |
import json
import mailbox
import numpy as np
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer
from sklearn.feature_extraction.text import TfidfVectorizer
from lib.analysis.author import ranking
from lib.util import custom_stopwords
from lib.util.read import *
def get_top_authors(top_n, json_filename):
    """
    Gets the top n authors based on the ranking generated from generate_author_ranking
    in analysis.author.ranking.

    :param top_n: The number of top authors to be returned.
    :param json_filename: The JSON file from which author scores are generated.
    :return: Tuple of (set of top author emails, dict mapping email -> 1-based rank).
    """
    author_scores = ranking.get(json_filename, output_filename=None,
                                active_score=2, passive_score=1,
                                write_to_file=False)
    top_authors_index = dict()
    # Ranks are 1-based; stop once top_n authors have been collected.
    for rank, (email_addr, _score) in enumerate(author_scores, start=1):
        top_authors_index[email_addr] = rank
        if rank == top_n:
            break
    return set(top_authors_index), top_authors_index
def save_sparse_csr(filename, array):
    """
    Persist a CSR sparse matrix to ``filename`` in NumPy ``.npz`` format.

    The four CSR components (data, indices, indptr, shape) are stored as
    named arrays so the matrix can be reconstructed later.

    :param filename: Destination path (``np.savez`` appends ``.npz`` if missing).
    :param array: The CSR matrix to store.
    """
    components = {'data': array.data,
                  'indices': array.indices,
                  'indptr': array.indptr,
                  'shape': array.shape}
    np.savez(filename, **components)
def get_message_body(message):
    """
    Extract and clean the body text of an email message.

    For multipart messages the payload of the last (sub)part visited wins.
    Lines are then cleaned: everything from a ``---`` marker onwards is cut,
    quoted/diff/patch lines (starting with ``>``, ``+``, ``-`` or ``@``) are
    blanked together with the line preceding them, and common patch metadata
    headers (Cc:, Signed-off-by:, ...) are blanked.

    :param message: The message whose body is to be extracted.
    :return: The cleaned message body as a single newline-joined string.
    """
    body = None
    if message.is_multipart():
        for part in message.walk():
            if part.is_multipart():
                for subpart in part.walk():
                    body = subpart.get_payload(decode=False)
            else:
                body = part.get_payload(decode=False)
    else:
        body = message.get_payload(decode=False)

    lines = body.splitlines()
    metadata_prefixes = ("Cc:", "Signed-off-by:", "Acked-by:", "From:",
                         "Tested-by:", "Reported-by:", "Reviewed-by:",
                         "Link:", "Suggested-by:")
    for idx, line in enumerate(lines):
        if not line:
            continue
        if line == "---":
            # Signature/diffstat separator: drop the rest of the message.
            lines = lines[:idx]
            break
        if line[0] in ('>', '+', '-', '@'):
            # Quoted or diff content: blank it and its introducing line.
            lines[idx] = ""
            if idx > 0:
                lines[idx - 1] = ""
        elif line.startswith(metadata_prefixes):
            lines[idx] = ""
    stripped = [entry.strip() for entry in lines]
    return '\n'.join(entry for entry in stripped if entry)
def generate_keyword_digest(mbox_filename, output_filename, author_uid_filename, json_filename, top_n=None, console_output=True):
    """
    Build per-author keyword digests from an MBOX archive via tf-idf analysis.

    Mail bodies are read with the stdlib mailbox/Message classes, each
    author's tokens are lemmatized and stripped of stop words, then a
    term-document matrix (rows: terms, columns: authors) is produced with
    scikit-learn's TfidfVectorizer. The top keywords of each sufficiently
    verbose author are written to ``output_filename``.

    :param mbox_filename: Absolute or relative path of the MBOX file to read.
    :param output_filename: File to which the per-author keyword lists are written.
    :param author_uid_filename: JSON file mapping author email -> UID.
    :param json_filename: JSON file used to rank authors (see get_top_authors).
    :param top_n: If given, restrict the digest to the top_n ranked authors.
    :param console_output: Console output is not implemented; pass False to
                           write results to ``output_filename``.
    :return: Tuple (top_authors_index, term_document_matrix, feature_names).
             The first two elements are None when ``top_n`` is None.
             (BUG FIX: the original raised NameError on return in that case,
             because both names were only bound in the ``top_n`` branch.)
    """
    english_stopwords = set(stopwords.words('english')) | custom_stopwords.common_words | custom_stopwords.custom_words
    email_re = re.compile(r'[\w\.-]+@[\w\.-]+')
    wnl = WordNetLemmatizer()
    print("Reading messages from MBOX file...")
    mailbox_obj = mailbox.mbox(mbox_filename)
    with open(author_uid_filename, 'r') as map_file:
        author_uid_map = json.load(map_file)

    top_authors_index = None
    term_document_matrix = None
    if top_n is None:
        print("Reading author UIDs from JSON file...")
        top_authors = None
        # One slot per UID; UIDs index directly into this list.
        keywords_list = [list() for _ in range(max(author_uid_map.values()) + 1)]
    else:
        top_n = min(len(author_uid_map), top_n)
        top_authors, top_authors_index = get_top_authors(top_n, json_filename)
        # Ranks are 1-based, so slot 0 stays empty.
        keywords_list = [list() for _ in range(top_n + 1)]

    i = 0  # number of emails processed (progress reporting only)
    with open(output_filename, 'w') as out_file:
        for message in mailbox_obj:
            temp = email_re.search(str(message['From']))
            from_addr = temp.group(0) if temp is not None else message['From']
            # Skip senders outside the requested author set.
            if top_n is not None and from_addr not in top_authors:
                continue
            if top_n is None and from_addr not in author_uid_map:
                continue
            msg_body = get_message_body(message)
            if from_addr is None:
                from_addr = message['From']
            # Keep lowercased words of length 3..29, lemmatized, excluding
            # digits, fragments of the sender address and stop words.
            msg_tokens = [x.lower() for x in re.sub(r'\W+', ' ', msg_body).split() if 2 < len(x) < 30]
            # Toggle comment below if numbers and underscores should also be removed.
            # msg_tokens = [x for x in re.sub('[^a-zA-Z]+', ' ', msg_body).split() if 2 < len(x) < 30]
            msg_tokens = [wnl.lemmatize(x) for x in msg_tokens if not x.isdigit() and x not in from_addr]
            msg_tokens = [x for x in msg_tokens if x not in english_stopwords]
            if top_n is None:
                keywords_list[author_uid_map[from_addr]].extend(msg_tokens)
            else:
                keywords_list[top_authors_index[from_addr]].extend(msg_tokens)
            if not console_output:
                i += 1
                if not i % 10000:
                    print(i, "of", len(mailbox_obj), "messages processed.")

        # Collapse each author's token list into a single document string.
        for num in range(len(keywords_list)):
            keywords_list[num] = " ".join(keywords_list[num])

        print("Performing tf-idf analysis on the term-document matrix...")
        vectorizer = TfidfVectorizer(analyzer='word', stop_words=english_stopwords, max_df=0.9, min_df=0.05,
                                     use_idf=True, ngram_range=(1, 4))
        tfidf_matrix = vectorizer.fit_transform(keywords_list).toarray()
        feature_names = vectorizer.get_feature_names()

        if top_n is None:
            author_items = author_uid_map.items()
        else:
            # NOTE(review): this matrix is returned but never populated,
            # exactly as in the original implementation — confirm intended
            # use before filling it with the tf-idf coefficients.
            term_document_matrix = np.zeros((len(feature_names), top_n), dtype=float)
            author_items = top_authors_index.items()
        for author_email, author_uid in author_items:
            # BUG FIX: the original top_n-is-None branch tested
            # len(keywords_list[num]) with `num` left over from the join loop
            # above, i.e. it always measured the *last* document instead of
            # the current author's.
            if max(tfidf_matrix[author_uid]) > 0 and len(keywords_list[author_uid]) > 99:
                _write_author_keywords(out_file, author_email,
                                       tfidf_matrix[author_uid],
                                       feature_names, console_output)
    return top_authors_index, term_document_matrix, feature_names


def _write_author_keywords(out_file, author_email, author_row, feature_names, console_output):
    """
    Write one author's top-20 tf-idf keywords to ``out_file``.

    Keywords are sorted longest-first; a keyword is skipped when a later
    (shorter) keyword occurs inside it as a substring. Errors are swallowed
    (best effort, mirroring the original); the ``-----`` section separator
    is always emitted.
    """
    try:
        indices = author_row.argsort()[-20:][::-1]
        if console_output:
            print("ERROR: Console Output not implemented! Please write to file.")
        else:
            out_file.write(author_email + "\n")
            author_features = [feature_names[i] for i in indices]
            author_features.sort(key=lambda x: -1 * len(x))
            for i2 in range(len(author_features)):
                contains_shorter = any(
                    author_features[i1] in author_features[i2]
                    for i1 in range(i2 + 1, len(author_features)))
                if not contains_shorter:
                    out_file.write(author_features[i2] + ", ")
    except Exception:
        # Best-effort: keyword extraction failures for one author must not
        # abort the whole digest (original used a bare except).
        pass
    finally:
        if console_output:
            print("\n-----\n")
        else:
            out_file.write("\n-----\n")
# generate_keyword_digest("lkml.mbox")
| prasadtalasila/MailingListParser | lib/input/mbox/keyword_digest.py | Python | gpl-3.0 | 11,492 |
#!/usr/bin/env python2
import netsnmp
import argparse
def getCAM(DestHost, Version = 2, Community='public'):
    """
    Query a switch over SNMP and return its CAM (forwarding) table.

    BUG FIX: the ``Version`` parameter was previously ignored — the session
    was always created with a hard-coded ``Version = 2``. It is now passed
    through to the SNMP session.

    :param DestHost: address of the switch to query.
    :param Version: SNMP protocol version (default 2).
    :param Community: SNMP community string (default 'public').
    :return: dict keyed by FDB entry id; each value is a list of
             [MAC address, bridge port, status, ifIndex, interface name].
    :raises Exception: when the SNMP walks return no data.
    """
    sess = netsnmp.Session(Version=Version, DestHost=DestHost, Community=Community)
    sess.UseLongNames = 1
    sess.UseNumeric = 1  # to have <tags> returned by the 'get' methods untranslated (i.e. dotted-decimal). Best used with UseLongNames
    # dot1dTpFdbTable: learned MAC address, bridge port and status per entry.
    Vars1 = netsnmp.VarList(netsnmp.Varbind('.1.3.6.1.2.1.17.4.3.1'))
    result = sess.walk(Vars1)
    #result = sess.getbulk(0, 10, Vars1)#get vars in one req, but dont stop...
    # dot1dBasePortIfIndex: bridge port -> ifIndex mapping.
    Vars2 = netsnmp.VarList(netsnmp.Varbind('.1.3.6.1.2.1.17.1.4.1.2'))
    result += sess.walk(Vars2)
    # ifName: ifIndex -> interface name.
    Vars3 = netsnmp.VarList(netsnmp.Varbind('.1.3.6.1.2.1.31.1.1.1.1'))
    result += sess.walk(Vars3)
    if result == ():
        raise Exception('Error : ' + sess.ErrorStr + ' ' + str(sess.ErrorNum) + ' ' + str(sess.ErrorInd))
    entries = {}
    for v in Vars1:
        # Strip the common OID prefix; the column number sits at offset 22.
        myid = (v.tag + '.' + v.iid)[24:]
        if v.tag[22] == '1':    # dot1dTpFdbAddress
            entries[myid] = [v.val]
        elif v.tag[22] == '2':  # dot1dTpFdbPort
            entries[myid] += [v.val]
        elif v.tag[22] == '3':  # dot1dTpFdbStatus
            entries[myid] += [v.val]
    # Get the bridge port to ifIndex mapping, dot1dBasePortIfIndex (.1.3.6.1.2.1.17.1.4.1.2)
    dot1dBasePortIfIndex = {}
    for v in Vars2:
        dot1dBasePortIfIndex[v.iid] = v.val
    for entry in entries.values():
        entry += [dot1dBasePortIfIndex[entry[1]]]
    ifName = {}
    for v in Vars3:
        ifName[v.iid] = v.val
    for entry in entries.values():
        entry += [ifName[entry[3]]]
    return entries
# Command-line entry point: walk the switch's CAM table and print each
# learned MAC address alongside the interface it was seen on.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Describe this program')
    #parser.add_argument('-v', '--version', dest = 'version', type = int, default = 2, help = 'Version of the protocol. Only v2 is implemented at the moment.')
    parser.add_argument('desthost', action = 'store', default = 'localhost', help = 'Address of the switch')
    parser.add_argument('-c', '--community', dest = 'community', action = 'store', default = 'public', help = 'The community string, default is "public".')
    args = parser.parse_args()
    try:
        l = getCAM(Version = 2, DestHost=args.desthost, Community=args.community)
    except Exception as e:
        print(e)
    else:
        # t[0] is the raw MAC address bytes, t[4] the interface name;
        # format the MAC as colon-separated hex (trailing ':' stripped).
        for t in l.values():
            print(''.join('%02x:' % ord(b) for b in bytes(t[0]))[:-1] + ' = ' + t[4])
| c4ffein/snmp-cam-table-logger | getCAM.py | Python | gpl-3.0 | 2,240 |
#castle script for minecraft by joshua cartwright
from mcpi import minecraft
from mcpi import block
import time
# Connect to the running Minecraft game.
mc = minecraft.Minecraft.create()

# Everything is built relative to the player's current position.
pos = mc.player.getPos()
x, y, z = pos.x, pos.y, pos.z

# Clear the build volume.
mc.setBlocks(x - 5, y - 1, z - 5, x + 5, y + 50, z + 5, block.AIR)
# Floor.
mc.setBlocks(x - 5, y - 1, z - 5, x + 5, y, z + 5, block.COBBLESTONE)
# Four outer walls, from ground level up to y+4.
mc.setBlocks(x - 5, y, z - 5, x + 5, y + 4, z - 5, block.COBBLESTONE)
mc.setBlocks(x - 5, y, z + 5, x + 5, y + 4, z + 5, block.COBBLESTONE)
mc.setBlocks(x - 5, y, z - 5, x - 5, y + 4, z + 5, block.COBBLESTONE)
mc.setBlocks(x + 5, y, z - 5, x + 5, y + 4, z + 5, block.COBBLESTONE)
# Walkway: a solid layer at y+3 hollowed in the middle, leaving a ring.
mc.setBlocks(x - 4, y + 3, z - 4, x + 4, y + 3, z + 4, block.COBBLESTONE)
mc.setBlocks(x - 3, y + 3, z - 3, x + 3, y + 3, z + 3, block.AIR)
# Door opening in the +x wall.
mc.setBlocks(x + 5, y + 1, z, x + 5, y + 2, z, block.AIR)

# Corner stairs up to the walkway.
# Block id 67 is cobblestone stairs; the data value (0-3) sets the facing.
# +x/+z corner (data 0).
mc.setBlocks(x + 4, y + 1, z + 3, x + 4, y + 2, z + 3, block.COBBLESTONE)
mc.setBlocks(x + 3, y + 1, z + 3, x + 3, y + 2, z + 3, block.COBBLESTONE)
mc.setBlock(x + 3, y + 3, z + 3, 67, 0)
mc.setBlock(x + 2, y + 1, z + 3, block.COBBLESTONE)
mc.setBlock(x + 2, y + 2, z + 3, 67, 0)
mc.setBlock(x + 1, y + 1, z + 3, 67, 0)
# -x/+z corner (data 2).
mc.setBlocks(x - 3, y + 1, z + 4, x - 3, y + 2, z + 4, block.COBBLESTONE)
mc.setBlocks(x - 3, y + 1, z + 3, x - 3, y + 2, z + 3, block.COBBLESTONE)
mc.setBlock(x - 3, y + 3, z + 3, 67, 2)
mc.setBlock(x - 3, y + 1, z + 2, block.COBBLESTONE)
mc.setBlock(x - 3, y + 2, z + 2, 67, 2)
mc.setBlock(x - 3, y + 1, z + 1, 67, 2)
# +x/-z corner (data 3).
mc.setBlocks(x + 3, y + 1, z - 4, x + 3, y + 2, z - 4, block.COBBLESTONE)
mc.setBlocks(x + 3, y + 1, z - 3, x + 3, y + 2, z - 3, block.COBBLESTONE)
mc.setBlock(x + 3, y + 3, z - 3, 67, 3)
mc.setBlock(x + 3, y + 1, z - 2, block.COBBLESTONE)
mc.setBlock(x + 3, y + 2, z - 2, 67, 3)
mc.setBlock(x + 3, y + 1, z - 1, 67, 3)
# -x/-z corner (data 1).
mc.setBlocks(x - 4, y + 1, z - 3, x - 4, y + 2, z - 3, block.COBBLESTONE)
mc.setBlocks(x - 3, y + 1, z - 3, x - 3, y + 2, z - 3, block.COBBLESTONE)
mc.setBlock(x - 3, y + 3, z - 3, 67, 1)
mc.setBlock(x - 2, y + 1, z - 3, block.COBBLESTONE)
mc.setBlock(x - 2, y + 2, z - 3, 67, 1)
mc.setBlock(x - 1, y + 1, z - 3, 67, 1)

# Arrow slits: fence blocks set into the walls at y+2, every other column.
for d in (4, 2, 0, -2, -4):
    mc.setBlock(x + d, y + 2, z + 5, block.FENCE)
    mc.setBlock(x + d, y + 2, z - 5, block.FENCE)
for d in (4, 2, -2, -4):
    # No slit at z on the +x wall: that column is the door opening.
    mc.setBlock(x + 5, y + 2, z + d, block.FENCE)
for d in (4, 2, 0, -2, -4):
    mc.setBlock(x - 5, y + 2, z + d, block.FENCE)
# Crenellations: open gaps along the wall tops at y+4, above each slit.
for d in (4, 2, 0, -2, -4):
    mc.setBlock(x + d, y + 4, z + 5, block.AIR)
    mc.setBlock(x + d, y + 4, z - 5, block.AIR)
    mc.setBlock(x + 5, y + 4, z + d, block.AIR)
    mc.setBlock(x - 5, y + 4, z + d, block.AIR)

# Well in the courtyard (block 63 with data 4 is a standing sign).
time.sleep(.25)
mc.setBlocks(x + 1, y + 1, z + 1, x - 1, y + 3, z - 1, block.COBBLESTONE)
mc.setBlocks(x + 1, y + 2, z, x - 1, y + 2, z, block.AIR)
mc.setBlocks(x, y + 2, z - 1, x, y + 2, z + 1, block.AIR)
mc.setBlocks(x, y + 1, z, x, y + 2, z, block.AIR)
mc.setBlock(x, y + 4, z, block.COBBLESTONE)
mc.setBlock(x + 1, y + 1, z, 63, 4)

# Basement: hollow room below the castle; a bookshelf ring at floor level,
# wool accents (block 35, data 2 and 0) and glowstone corner lights.
time.sleep(.25)
mc.setBlocks(x - 5, y - 2, z - 5, x + 5, y - 6, z + 5, block.COBBLESTONE)
mc.setBlocks(x - 4, y - 2, z - 4, x + 4, y - 5, z + 4, block.AIR)
mc.setBlocks(x - 4, y - 5, z - 4, x + 4, y - 5, z + 4, block.BOOKSHELF)
mc.setBlocks(x - 3, y - 5, z - 3, x + 3, y - 5, z + 3, block.AIR)
mc.setBlocks(x + 1, y - 5, z - 1, x + 3, y - 5, z + 1, 35, 2)
mc.setBlocks(x + 4, y - 5, z - 1, x + 4, y - 5, z + 1, 35, 0)
mc.setBlock(x + 4, y - 5, z + 4, block.GLOWSTONE_BLOCK)
mc.setBlock(x + 4, y - 5, z - 4, block.GLOWSTONE_BLOCK)
mc.setBlock(x - 4, y - 5, z + 4, block.GLOWSTONE_BLOCK)
mc.setBlock(x - 4, y - 5, z - 4, block.GLOWSTONE_BLOCK)
mc.setBlock(x, y - 6, z, block.AIR)
mc.setBlock(x, y - 7, z, block.COBBLESTONE)
time.sleep(.25)
# Fill the well with water, then open the shaft down to the basement.
mc.setBlock(x, y + 1, z, block.WATER)
time.sleep(.25)
mc.setBlocks(x, y, z, x, y - 4, z, block.AIR)

# Place the player just outside the door on a fresh cobblestone step.
mc.setBlock(x + 6, y, z, block.COBBLESTONE)
mc.setBlocks(x + 6, y + 1, z, x + 6, y + 2, z, block.AIR)
time.sleep(.25)
mc.player.setPos(x + 6, y + 1, z)
| UTC-Sheffield/mcpi_ideas | misc/Castle.py | Python | gpl-3.0 | 5,725 |
# generated from genmsg/cmake/pkg-genmsg.context.in
# Semicolon-separated list of .msg files this package generates code for.
messages_str = "/home/mkhuthir/learnROS/src/chessbot/src/nasa_r2_simulator/gazebo_taskboard/msg/TaskboardPanelA.msg"
# Semicolon-separated list of .srv files this package generates code for.
services_str = "/home/mkhuthir/learnROS/src/chessbot/src/nasa_r2_simulator/gazebo_taskboard/srv/ManipulateNumPad.srv;/home/mkhuthir/learnROS/src/chessbot/src/nasa_r2_simulator/gazebo_taskboard/srv/ManipulatePowerCover.srv;/home/mkhuthir/learnROS/src/chessbot/src/nasa_r2_simulator/gazebo_taskboard/srv/ManipulatePowerSwitch.srv;/home/mkhuthir/learnROS/src/chessbot/src/nasa_r2_simulator/gazebo_taskboard/srv/ManipulateRockerSwitch.srv;/home/mkhuthir/learnROS/src/chessbot/src/nasa_r2_simulator/gazebo_taskboard/srv/ManipulateSafeToggle.srv"
# Name of the package that owns these interface definitions.
pkg_name = "gazebo_taskboard"
# Packages whose message definitions this package depends on.
dependencies_str = "std_msgs"
# Generator packages to invoke.
langs = "gencpp;genlisp;genpy"
# package;path pairs used to resolve message includes across packages.
dep_include_paths_str = "gazebo_taskboard;/home/mkhuthir/learnROS/src/chessbot/src/nasa_r2_simulator/gazebo_taskboard/msg;std_msgs;/opt/ros/indigo/share/std_msgs/cmake/../msg"
# Interpreter used to run the generator scripts.
PYTHON_EXECUTABLE = "/usr/bin/python"
# True when the package ships pre-generated (static) sources.
package_has_static_sources = '' == 'TRUE'
# Script that verifies message dependencies are declared.
genmsg_check_deps_script = "/opt/ros/indigo/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
| mkhuthir/catkin_ws | src/chessbot/build/nasa_r2_simulator/gazebo_taskboard/cmake/gazebo_taskboard-genmsg-context.py | Python | gpl-3.0 | 1,163 |
from brainforge.learner import Backpropagation
from brainforge.layers import Dense
from brainforge.optimizers import Momentum
from brainforge.util import etalon
class DNI:
    """Decoupled Neural Interface: a predictor network trained from gradients
    that are approximated ("synthesized") by a second network instead of the
    true backpropagated error."""

    def __init__(self, bpropnet, synth):
        """
        bpropnet : the main (predictor) network, updated from synthetic gradients
        synth : the synthesizer network that predicts those gradients
        """
        self.bpropnet = bpropnet
        self.synth = synth
        self._predictor = None  # lazily-created coroutine, see predictor_coro()

    def predictor_coro(self):
        """Coroutine driving one DNI step per send().

        Receives a batch, forward-passes it through the predictor, asks the
        synthesizer for a synthetic gradient of the prediction, applies the
        predictor's weight update and backpropagates the synthetic gradient.
        Yields (prediction, delta_backwards) for every batch sent in.
        """
        prediction = None
        delta_backwards = None
        while 1:
            inputs = yield prediction, delta_backwards
            prediction = self.bpropnet.predict(inputs)
            synthesized_delta = self.synth.predict(prediction)
            # NOTE(review): update() is invoked before backpropagate(), as in
            # the original implementation — confirm this ordering is intended.
            self.bpropnet.update(len(inputs))
            delta_backwards = self.bpropnet.backpropagate(synthesized_delta)

    def predict(self, X):
        """Run one DNI step on batch X; returns (prediction, delta)."""
        if self._predictor is None:
            self._predictor = self.predictor_coro()
            next(self._predictor)  # prime the coroutine up to its first yield
        return self._predictor.send(X)

    def update(self, true_delta):
        """Train the synthesizer towards the true gradient of the last batch."""
        synthesizer_delta = self.synth.cost.derivative(
            self.synth.output, true_delta
        )
        self.synth.backpropagate(synthesizer_delta)
        self.synth.update(len(true_delta))

    # Backward-compatible alias: this method was originally (mis)spelled
    # "udpate"; keep the old name working for existing callers.
    udpate = update
def build_net(inshape, outshape):
    """Build the predictor network: a 30-unit tanh hidden layer followed by a
    softmax output, trained with cross-entropy loss and momentum SGD."""
    stack = [
        Dense(30, activation="tanh"),
        Dense(outshape, activation="softmax"),
    ]
    return Backpropagation(input_shape=inshape, layerstack=stack,
                           cost="cxent", optimizer=Momentum(0.01))
def build_synth(inshape, outshape):
    """Build the gradient synthesizer: a single Dense layer trained with MSE
    loss and momentum SGD."""
    stack = [Dense(outshape)]
    return Backpropagation(input_shape=inshape, layerstack=stack,
                           cost="mse", optimizer=Momentum(0.01))
return synth
# Demo: run one DNI prediction step on brainforge's bundled etalon dataset.
X, Y = etalon
predictor = build_net(X.shape[1:], Y.shape[1:])
# The synthesizer maps the predictor's output to a gradient of the same shape.
pred_os = predictor.layers[-1].outshape
synthesizer = build_synth(pred_os, pred_os)
dni = DNI(predictor, synthesizer)
pred, delta = dni.predict(X)
| csxeba/brainforge | xperiments/xp_decoupled.py | Python | gpl-3.0 | 1,775 |
__author__ = 'Davide'
import win32api
import win32con
import socket
# stop not defined
VK_MEDIA_STOP = 0xB2
class RemoteController:
    """Controls the local media player by injecting Windows media/volume
    key events through win32api.keybd_event."""

    @staticmethod
    def _press(vk_code):
        # keybd_event(vk, scan): 34 is passed as the hardware scan code,
        # kept exactly as in the original implementation.
        win32api.keybd_event(vk_code, 34)

    def play_pause(self):
        self._press(win32con.VK_MEDIA_PLAY_PAUSE)

    def stop(self):
        # VK_MEDIA_STOP is a module-level constant (missing from win32con).
        self._press(VK_MEDIA_STOP)

    def next(self):
        self._press(win32con.VK_MEDIA_NEXT_TRACK)

    def prev(self):
        self._press(win32con.VK_MEDIA_PREV_TRACK)

    def vol_up(self):
        self._press(win32con.VK_VOLUME_UP)

    def vol_down(self):
        self._press(win32con.VK_VOLUME_DOWN)

    def vol_mute(self):
        self._press(win32con.VK_VOLUME_MUTE)
class Handler:
    """Maps single-byte command messages to RemoteController actions."""

    def __init__(self):
        self.controller = RemoteController()
        # Dispatch table: command byte -> bound controller method.
        # Replaces the original if/elif chain with a dict lookup.
        self._actions = {
            b"p": self.controller.play_pause,
            b"n": self.controller.next,
            b"s": self.controller.stop,
            b"v": self.controller.prev,
            b"+": self.controller.vol_up,
            b"-": self.controller.vol_down,
            b"m": self.controller.vol_mute,
        }

    def dispatch(self, msg):
        """Execute the action registered for msg; unknown messages are
        silently ignored (matching the original behavior)."""
        action = self._actions.get(msg)
        if action is not None:
            action()
if __name__ == "__main__":
HOST, PORT = "localhost", 9999
handler = Handler()
server = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
server.bind((HOST, PORT))
while True:
data, addr = server.recvfrom(256)
print(data, addr)
handler.dispatch(data)
| DavideCanton/Python3 | wmp_remote/server.py | Python | gpl-3.0 | 1,561 |
import theano
import theano.tensor as T
import numpy as np
class RNN:
    """
    Base class containing the RNN weights used by both the encoder and decoder
    """
    def __init__(self,
                 K,
                 embedding_size,
                 hidden_layer=8,
                 use_context_vector=False,
                 E=None):
        """
        K : vocabulary size (length of the one-hot input vectors;
            E has shape embedding_size x K)
        embedding_size : dimensionality of the word embeddings
        hidden_layer : size of hidden layer
        use_context_vector : whether or not to use a context vector
        E : a word embedding to use (optional)
        """
        # state of the hidden layer
        # NOTE(review): compute() takes the previous state explicitly as
        # h_tm1 and never reads self.h; confirm whether callers use it.
        self.h = theano.shared(np.zeros(hidden_layer), name='h')
        # input weights to the hidden layer
        self.W = theano.shared(np.random.uniform(
            size=(hidden_layer, embedding_size),
            low=-0.1, high=0.1), name='W')
        # recurrent weights for the hidden layer
        self.U = theano.shared(np.random.uniform(
            size=(hidden_layer, hidden_layer),
            low=-0.1, high=0.1), name='U')
        # the extra transformation between the encoder and decoder
        # (initialized to the identity matrix)
        self.V = theano.shared(np.eye(hidden_layer))
        # word embedding matrix: random-uniform unless one was supplied
        if E is None:
            self.E = theano.shared(np.random.uniform(
                size=(embedding_size, K),
                low=-0.1, high=0.1), name='E')
        else:
            self.E = E
        # trainable parameters (hidden state h is state, not a parameter)
        self.params = [self.W, self.U, self.V, self.E]
        # additional weights for the context vector
        if use_context_vector:
            self.C = theano.shared(np.random.uniform(
                size=(hidden_layer, hidden_layer),
                low=-0.1, high=0.1), name='C')
            self.params.extend([self.C])

    def compute(self, x_t, h_tm1, c=None):
        """
        Input
            x_t : the current word (a K-dimensional vector)
            h_tm1 : the state of the hidden layer before the current step
            c : optional context vector; requires use_context_vector=True
                so that the C weights exist
        Output
            h_t : the state of the hidden layer after the current step
        """
        if c is None:
            return T.tanh(self.W.dot(self.E.dot(x_t)) + self.U.dot(h_tm1))
        else:
            return T.tanh(self.W.dot(self.E.dot(x_t)) + self.U.dot(h_tm1) + self.C.dot(c))
| pepijnkokke/ull2 | code/rnn.py | Python | gpl-3.0 | 2,367 |
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: elb_application_lb
short_description: Manage an Application load balancer
description:
- Manage an AWS Application Elastic Load Balancer. See U(https://aws.amazon.com/blogs/aws/new-aws-application-load-balancer/) for details.
version_added: "2.4"
requirements: [ boto3 ]
author: "Rob White (@wimnat)"
options:
access_logs_enabled:
description:
- "Whether or not to enable access logs. When true, I(access_logs_s3_bucket) must be set."
required: false
type: bool
access_logs_s3_bucket:
description:
- The name of the S3 bucket for the access logs. This attribute is required if access logs in Amazon S3 are enabled. The bucket must exist in the same
region as the load balancer and have a bucket policy that grants Elastic Load Balancing permission to write to the bucket.
required: false
access_logs_s3_prefix:
description:
- The prefix for the location in the S3 bucket. If you don't specify a prefix, the access logs are stored in the root of the bucket.
required: false
deletion_protection:
description:
- Indicates whether deletion protection for the ELB is enabled.
required: false
default: no
type: bool
http2:
description:
- Indicates whether to enable HTTP2 routing.
required: false
default: no
type: bool
version_added: 2.6
idle_timeout:
description:
- The number of seconds to wait before an idle connection is closed.
required: false
default: 60
listeners:
description:
- A list of dicts containing listeners to attach to the ELB. See examples for detail of the dict required. Note that listener keys
are CamelCased.
required: false
name:
description:
- The name of the load balancer. This name must be unique within your AWS account, can have a maximum of 32 characters, must contain only alphanumeric
characters or hyphens, and must not begin or end with a hyphen.
required: true
purge_listeners:
description:
- If yes, existing listeners will be purged from the ELB to match exactly what is defined by I(listeners) parameter. If the I(listeners) parameter is
not set then listeners will not be modified
default: yes
type: bool
purge_tags:
description:
- If yes, existing tags will be purged from the resource to match exactly what is defined by I(tags) parameter. If the I(tags) parameter is not set then
tags will not be modified.
required: false
default: yes
type: bool
subnets:
description:
- A list of the IDs of the subnets to attach to the load balancer. You can specify only one subnet per Availability Zone. You must specify subnets from
at least two Availability Zones. Required if state=present.
required: false
security_groups:
description:
- A list of the names or IDs of the security groups to assign to the load balancer. Required if state=present.
required: false
default: []
scheme:
description:
- Internet-facing or internal load balancer. An ELB scheme can not be modified after creation.
required: false
default: internet-facing
choices: [ 'internet-facing', 'internal' ]
state:
description:
- Create or destroy the load balancer.
required: true
choices: [ 'present', 'absent' ]
tags:
description:
- A dictionary of one or more tags to assign to the load balancer.
required: false
wait:
description:
- Wait for the load balancer to have a state of 'active' before completing. A status check is
performed every 15 seconds until a successful state is reached. An error is returned after 40 failed checks.
default: no
type: bool
version_added: 2.6
wait_timeout:
description:
- The time in seconds to use in conjunction with I(wait).
version_added: 2.6
purge_rules:
description:
- When set to no, keep the existing load balancer rules in place. Will modify and add, but will not delete.
default: yes
type: bool
version_added: 2.7
extends_documentation_fragment:
- aws
- ec2
notes:
- Listeners are matched based on port. If a listener's port is changed then a new listener will be created.
- Listener rules are matched based on priority. If a rule's priority is changed then a new rule will be created.
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Create an ELB and attach a listener
- elb_application_lb:
name: myelb
security_groups:
- sg-12345678
- my-sec-group
subnets:
- subnet-012345678
- subnet-abcdef000
listeners:
- Protocol: HTTP # Required. The protocol for connections from clients to the load balancer (HTTP or HTTPS) (case-sensitive).
Port: 80 # Required. The port on which the load balancer is listening.
# The security policy that defines which ciphers and protocols are supported. The default is the current predefined security policy.
SslPolicy: ELBSecurityPolicy-2015-05
      Certificates: # The ARN of the certificate (only one certificate ARN should be provided)
- CertificateArn: arn:aws:iam::12345678987:server-certificate/test.domain.com
DefaultActions:
- Type: forward # Required. Only 'forward' is accepted at this time
TargetGroupName: # Required. The name of the target group
state: present
# Create an ELB and attach a listener with logging enabled
- elb_application_lb:
access_logs_enabled: yes
access_logs_s3_bucket: mybucket
access_logs_s3_prefix: "/logs"
name: myelb
security_groups:
- sg-12345678
- my-sec-group
subnets:
- subnet-012345678
- subnet-abcdef000
listeners:
- Protocol: HTTP # Required. The protocol for connections from clients to the load balancer (HTTP or HTTPS) (case-sensitive).
Port: 80 # Required. The port on which the load balancer is listening.
# The security policy that defines which ciphers and protocols are supported. The default is the current predefined security policy.
SslPolicy: ELBSecurityPolicy-2015-05
        Certificates: # The ARN of the certificate (only one certificate ARN should be provided)
- CertificateArn: arn:aws:iam::12345678987:server-certificate/test.domain.com
DefaultActions:
- Type: forward # Required. Only 'forward' is accepted at this time
TargetGroupName: # Required. The name of the target group
state: present
# Create an ALB with listeners and rules
- elb_application_lb:
name: test-alb
subnets:
- subnet-12345678
- subnet-87654321
security_groups:
- sg-12345678
scheme: internal
listeners:
- Protocol: HTTPS
Port: 443
DefaultActions:
- Type: forward
TargetGroupName: test-target-group
Certificates:
- CertificateArn: arn:aws:iam::12345678987:server-certificate/test.domain.com
SslPolicy: ELBSecurityPolicy-2015-05
Rules:
- Conditions:
- Field: path-pattern
Values:
- '/test'
Priority: '1'
Actions:
- TargetGroupName: test-target-group
Type: forward
state: present
# Remove an ELB
- elb_application_lb:
name: myelb
state: absent
'''
RETURN = '''
access_logs_s3_bucket:
description: The name of the S3 bucket for the access logs.
returned: when state is present
type: string
sample: mys3bucket
access_logs_s3_enabled:
description: Indicates whether access logs stored in Amazon S3 are enabled.
returned: when state is present
type: string
sample: true
access_logs_s3_prefix:
description: The prefix for the location in the S3 bucket.
returned: when state is present
type: string
sample: /my/logs
availability_zones:
description: The Availability Zones for the load balancer.
returned: when state is present
type: list
sample: "[{'subnet_id': 'subnet-aabbccddff', 'zone_name': 'ap-southeast-2a'}]"
canonical_hosted_zone_id:
description: The ID of the Amazon Route 53 hosted zone associated with the load balancer.
returned: when state is present
type: string
sample: ABCDEF12345678
created_time:
description: The date and time the load balancer was created.
returned: when state is present
type: string
sample: "2015-02-12T02:14:02+00:00"
deletion_protection_enabled:
description: Indicates whether deletion protection is enabled.
returned: when state is present
type: string
sample: true
dns_name:
description: The public DNS name of the load balancer.
returned: when state is present
type: string
sample: internal-my-elb-123456789.ap-southeast-2.elb.amazonaws.com
idle_timeout_timeout_seconds:
description: The idle timeout value, in seconds.
returned: when state is present
type: string
sample: 60
ip_address_type:
description: The type of IP addresses used by the subnets for the load balancer.
returned: when state is present
type: string
sample: ipv4
listeners:
description: Information about the listeners.
returned: when state is present
type: complex
contains:
listener_arn:
description: The Amazon Resource Name (ARN) of the listener.
returned: when state is present
type: string
sample: ""
load_balancer_arn:
description: The Amazon Resource Name (ARN) of the load balancer.
returned: when state is present
type: string
sample: ""
port:
description: The port on which the load balancer is listening.
returned: when state is present
type: int
sample: 80
protocol:
description: The protocol for connections from clients to the load balancer.
returned: when state is present
type: string
sample: HTTPS
certificates:
description: The SSL server certificate.
returned: when state is present
type: complex
contains:
certificate_arn:
description: The Amazon Resource Name (ARN) of the certificate.
returned: when state is present
type: string
sample: ""
ssl_policy:
description: The security policy that defines which ciphers and protocols are supported.
returned: when state is present
type: string
sample: ""
default_actions:
description: The default actions for the listener.
returned: when state is present
type: string
contains:
type:
description: The type of action.
returned: when state is present
type: string
sample: ""
target_group_arn:
description: The Amazon Resource Name (ARN) of the target group.
returned: when state is present
type: string
sample: ""
load_balancer_arn:
description: The Amazon Resource Name (ARN) of the load balancer.
returned: when state is present
type: string
sample: arn:aws:elasticloadbalancing:ap-southeast-2:0123456789:loadbalancer/app/my-elb/001122334455
load_balancer_name:
description: The name of the load balancer.
returned: when state is present
type: string
sample: my-elb
routing_http2_enabled:
description: Indicates whether HTTP/2 is enabled.
returned: when state is present
type: string
sample: true
scheme:
description: Internet-facing or internal load balancer.
returned: when state is present
type: string
sample: internal
security_groups:
description: The IDs of the security groups for the load balancer.
returned: when state is present
type: list
sample: ['sg-0011223344']
state:
description: The state of the load balancer.
returned: when state is present
type: dict
sample: "{'code': 'active'}"
tags:
description: The tags attached to the load balancer.
returned: when state is present
type: dict
sample: "{
'Tag': 'Example'
}"
type:
description: The type of load balancer.
returned: when state is present
type: string
sample: application
vpc_id:
description: The ID of the VPC for the load balancer.
returned: when state is present
type: string
sample: vpc-0011223344
'''
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import boto3_conn, get_aws_connection_info, camel_dict_to_snake_dict, ec2_argument_spec, \
boto3_tag_list_to_ansible_dict, compare_aws_tags, HAS_BOTO3
from ansible.module_utils.aws.elbv2 import ApplicationLoadBalancer, ELBListeners, ELBListener, ELBListenerRules, ELBListenerRule
from ansible.module_utils.aws.elb_utils import get_elb_listener_rules
def create_or_update_elb(elb_obj):
    """Create the ALB if absent, otherwise reconcile subnets, security groups,
    tags, attributes, listeners and listener rules; exits the module via
    exit_json with the resulting (snake_cased) ELB facts."""
    if elb_obj.elb:
        # ELB exists so check subnets, security groups and tags match what has been passed
        # Subnets
        if not elb_obj.compare_subnets():
            elb_obj.modify_subnets()

        # Security Groups
        if not elb_obj.compare_security_groups():
            elb_obj.modify_security_groups()

        # Tags - only need to play with tags if tags parameter has been set to something
        if elb_obj.tags is not None:

            # Delete necessary tags
            tags_need_modify, tags_to_delete = compare_aws_tags(boto3_tag_list_to_ansible_dict(elb_obj.elb['tags']),
                                                                boto3_tag_list_to_ansible_dict(elb_obj.tags), elb_obj.purge_tags)
            if tags_to_delete:
                elb_obj.delete_tags(tags_to_delete)

            # Add/update tags
            if tags_need_modify:
                elb_obj.modify_tags()

    else:
        # Create load balancer
        elb_obj.create_elb()

    # ELB attributes
    elb_obj.update_elb_attributes()
    elb_obj.modify_elb_attributes()

    # Listeners
    listeners_obj = ELBListeners(elb_obj.connection, elb_obj.module, elb_obj.elb['LoadBalancerArn'])

    listeners_to_add, listeners_to_modify, listeners_to_delete = listeners_obj.compare_listeners()

    # Delete listeners
    for listener_to_delete in listeners_to_delete:
        listener_obj = ELBListener(elb_obj.connection, elb_obj.module, listener_to_delete, elb_obj.elb['LoadBalancerArn'])
        listener_obj.delete()
        listeners_obj.changed = True

    # Add listeners
    for listener_to_add in listeners_to_add:
        listener_obj = ELBListener(elb_obj.connection, elb_obj.module, listener_to_add, elb_obj.elb['LoadBalancerArn'])
        listener_obj.add()
        listeners_obj.changed = True

    # Modify listeners
    for listener_to_modify in listeners_to_modify:
        listener_obj = ELBListener(elb_obj.connection, elb_obj.module, listener_to_modify, elb_obj.elb['LoadBalancerArn'])
        listener_obj.modify()
        listeners_obj.changed = True

    # If listeners changed, mark ELB as changed
    if listeners_obj.changed:
        elb_obj.changed = True

    # Rules of each listener
    for listener in listeners_obj.listeners:
        if 'Rules' in listener:
            rules_obj = ELBListenerRules(elb_obj.connection, elb_obj.module, elb_obj.elb['LoadBalancerArn'], listener['Rules'], listener['Port'])

            rules_to_add, rules_to_modify, rules_to_delete = rules_obj.compare_rules()

            # Delete rules (only when purge_rules was requested)
            if elb_obj.module.params['purge_rules']:
                for rule in rules_to_delete:
                    rule_obj = ELBListenerRule(elb_obj.connection, elb_obj.module, {'RuleArn': rule}, rules_obj.listener_arn)
                    rule_obj.delete()
                    elb_obj.changed = True

            # Add rules
            for rule in rules_to_add:
                rule_obj = ELBListenerRule(elb_obj.connection, elb_obj.module, rule, rules_obj.listener_arn)
                rule_obj.create()
                elb_obj.changed = True

            # Modify rules
            for rule in rules_to_modify:
                rule_obj = ELBListenerRule(elb_obj.connection, elb_obj.module, rule, rules_obj.listener_arn)
                rule_obj.modify()
                elb_obj.changed = True

    # Get the ELB again
    elb_obj.update()

    # Get the ELB listeners again
    listeners_obj.update()

    # Update the ELB attributes
    elb_obj.update_elb_attributes()

    # Convert to snake_case and merge in everything we want to return to the user
    snaked_elb = camel_dict_to_snake_dict(elb_obj.elb)
    snaked_elb.update(camel_dict_to_snake_dict(elb_obj.elb_attributes))
    snaked_elb['listeners'] = []
    for listener in listeners_obj.current_listeners:
        # For each listener, get listener rules
        listener['rules'] = get_elb_listener_rules(elb_obj.connection, elb_obj.module, listener['ListenerArn'])
        snaked_elb['listeners'].append(camel_dict_to_snake_dict(listener))

    # Change tags to ansible friendly dict
    snaked_elb['tags'] = boto3_tag_list_to_ansible_dict(snaked_elb['tags'])

    elb_obj.module.exit_json(changed=elb_obj.changed, **snaked_elb)
def delete_elb(elb_obj):
    """Remove the load balancer if it exists and report the change status.

    elb_obj.delete() is only invoked when the ELB is actually present; the
    module always exits via exit_json with the resulting changed flag.
    """
    existing = elb_obj.elb
    if existing:
        elb_obj.delete()
    elb_obj.module.exit_json(changed=elb_obj.changed)
def main():
    """Module entry point: build the argument spec, validate listener
    parameters, then create/update or delete the ALB based on 'state'."""

    argument_spec = ec2_argument_spec()

    argument_spec.update(
        dict(
            access_logs_enabled=dict(type='bool'),
            access_logs_s3_bucket=dict(type='str'),
            access_logs_s3_prefix=dict(type='str'),
            deletion_protection=dict(type='bool'),
            http2=dict(type='bool'),
            idle_timeout=dict(type='int'),
            listeners=dict(type='list',
                           elements='dict',
                           options=dict(
                               Protocol=dict(type='str', required=True),
                               Port=dict(type='int', required=True),
                               SslPolicy=dict(type='str'),
                               Certificates=dict(type='list'),
                               DefaultActions=dict(type='list', required=True),
                               Rules=dict(type='list')
                           )
                           ),
            name=dict(required=True, type='str'),
            purge_listeners=dict(default=True, type='bool'),
            purge_tags=dict(default=True, type='bool'),
            subnets=dict(type='list'),
            security_groups=dict(type='list'),
            scheme=dict(default='internet-facing', choices=['internet-facing', 'internal']),
            state=dict(choices=['present', 'absent'], type='str'),
            tags=dict(type='dict'),
            wait_timeout=dict(type='int'),
            wait=dict(default=False, type='bool'),
            purge_rules=dict(default=True, type='bool')
        )
    )

    # subnets/security_groups are only needed when creating; the three
    # access-log options must be supplied together.
    module = AnsibleAWSModule(argument_spec=argument_spec,
                              required_if=[
                                  ('state', 'present', ['subnets', 'security_groups'])
                              ],
                              required_together=(
                                  ['access_logs_enabled', 'access_logs_s3_bucket', 'access_logs_s3_prefix']
                              )
                              )

    # Quick check of listeners parameters: HTTPS listeners additionally
    # require SslPolicy and Certificates before we touch AWS.
    listeners = module.params.get("listeners")
    if listeners is not None:
        for listener in listeners:
            for key in listener.keys():
                if key == 'Protocol' and listener[key] == 'HTTPS':
                    if listener.get('SslPolicy') is None:
                        module.fail_json(msg="'SslPolicy' is a required listener dict key when Protocol = HTTPS")

                    if listener.get('Certificates') is None:
                        module.fail_json(msg="'Certificates' is a required listener dict key when Protocol = HTTPS")

    connection = module.client('elbv2')
    connection_ec2 = module.client('ec2')

    state = module.params.get("state")

    elb = ApplicationLoadBalancer(connection, connection_ec2, module)

    if state == 'present':
        create_or_update_elb(elb)
    else:
        delete_elb(elb)

if __name__ == '__main__':
    main()
| hryamzik/ansible | lib/ansible/modules/cloud/amazon/elb_application_lb.py | Python | gpl-3.0 | 21,506 |
#-*-:coding:utf-8-*-
from blindtex.latex2ast import converter
#from latex2ast import ast
from blindtex.interpreter import dictionary
# Spoken templates for mathematical structures.  "simple_*" variants are
# used when the argument is a single token; the matching "comp_*" variants
# bracket a compound argument with an explicit end marker.
to_read = {'simple_superscript' : 'super %s ',
            'comp_superscript' : 'super %s endSuper ',
            'simple_subscript' : 'sub %s ',
            'comp_subscript' : 'sub %s endSub ',
            'simple_frac' : '%s over %s ',
            'comp_frac' : 'fraction %s over %s endFraction ',
            'simple_sqrt' : 'squareRootOf %s ',
            'comp_sqrt' : 'squareRootOf %s endRoot ',
            'simple_root' : 'root %s of %s ',
            'comp_root' : 'root %s of %s endRoot ',
            'simple_choose' : 'from %s choose %s ',
            'comp_choose' : 'from %s choose %s endChoose ',
            'simple_modulo' : 'modulo %s ',
            'comp_modulo' : 'modulo %s endModulo ',
            'simple_text' : 'text %s ',
            'comp_text' : 'text %s endText ',
            'from_to' : 'from %s to %s of ',
            'over' : 'over %s of ',
            'to' : 'to %s of ',
            'end' : 'end%s ',
            'linebreak' : 'linebreak',
            'array' : 'array %s endArray ',
            'array_element' : 'element',}
'''dict: A dictionary with the strings to read some mathematical structures,
like: Fractions, roots, large operators, etc.'''
#Function to convert a math_object in a string.
def lineal_read(Node):
    '''Convert a mathObject tree into its spoken (linear) reading.

    Args:
        Node(mathObject): A mathObject created by parser.
    Returns:
        String: A string with the lineal read of the mathObject and of all
        of its children, scripts, style and accent.'''
    # Dispatch table: contents that carry children, mapped to the sibling
    # function that reads that structure's children list.
    with_children = {'block' : lineal_read_block,
                    'fraction' : lineal_read_fraction,
                    'root' : lineal_read_root,
                    'choose' : lineal_read_choose_binom,
                    'binom' : lineal_read_choose_binom,
                    'pmod' : lineal_read_pmod,
                    'text' : lineal_read_text,
                    'label' : lineal_read_label,
                    'array' : lineal_read_array}
    str_lineal_read = ''
    #The attributes will be read in this order:
    # accent -> content-> children* -> style -> superscript -> subscript
    #TODO: Add the option for the user to change this.
    #TODO: Add the option to ommit style or whatever.
    #TODO: Add the translation by dictionary.
    #I'll go for the easiest and less elegant way: A chain of ifs.
    if(Node.content in with_children): #If the node has children.
        #Identify the type of children the node has and act accordingly
        str_lineal_read = str_lineal_read + with_children[Node.content](Node.children)
    else:
        str_lineal_read = str_lineal_read + lineal_read_content(Node)
    if(Node.style != None):#Add the style of the node.
        str_lineal_read = lineal_read_style(Node, str_lineal_read)
    if(Node.accent != None): #The accent
        str_lineal_read = lineal_read_accent(Node, str_lineal_read)
    #This part is to read the limits of large Operators like integral or sum.
    if(Node.kind == 'LargeOperators'):
        # Large operators read their scripts as limits: "from ... to ...".
        if(Node.subscript != None and Node.superscript != None):
            str_lineal_read = str_lineal_read + to_read['from_to']%(lineal_read(Node.subscript[0]),
                                                                    lineal_read(Node.superscript[0]))
        elif(Node.subscript != None and Node.superscript == None):
            str_lineal_read = str_lineal_read + to_read['over']%lineal_read(Node.subscript[0])
        elif(Node.subscript == None and Node.superscript != None):
            str_lineal_read = str_lineal_read + to_read['to']%lineal_read(Node.superscript[0])
    else:#If the math_object is not a LargeOperator but has scripts.
        if(Node.superscript != None):
            str_lineal_read = str_lineal_read + lineal_read_superscript(Node.superscript[0])
        if(Node.subscript != None):
            str_lineal_read = str_lineal_read + lineal_read_subscript(Node.subscript[0])
    return str_lineal_read
#EndOfFunction
#Function to read the content of the math_object
def lineal_read_content(node):
    '''Return the reading of a leaf mathObject's content.
    Args:
        node(mathObject)
    Returns:
        string: The node content followed by a space; a bare LaTeX line
        break (a node with no kind whose content is a double backslash)
        is read as a line break instead.'''
    # Only an unclassified node carrying a double backslash is a line break;
    # everything else is read verbatim.
    if(node.kind is None and node.content == r'\\'):
        return to_read['linebreak']
    return '%s ' % node.content
#EOF
#To read the accent
def lineal_read_accent(node, str_read):
    '''Decorate a reading with the node's accent.
    Args:
        node(mathObject)
        str_read(string): String with the reading so far.
    Returns:
        string: Simple nodes get the accent appended; compound nodes are
        wrapped between the accent name and an end marker. '''
    accent_word = '%s ' % node.accent
    if(is_simple(node)):
        return str_read + accent_word
    return accent_word + str_read + to_read['end'] % node.accent
#EOF
#To read the style.
def lineal_read_style(node, str_read):
    '''Decorate a reading with the node's style.
    Args:
        node(mathObject)
        str_read(string): String with the reading so far.
    Returns:
        string: Simple nodes get the style appended; compound nodes are
        wrapped between the style name and an end marker. '''
    style_word = '%s ' % node.style
    if(is_simple(node)):
        return str_read + style_word
    return style_word + str_read + to_read['end'] % node.style
#EOF
def lineal_read_superscript(node_script):
    '''Read a superscript, marking whether it is simple or compound.
    Args:
        node_script(mathObject): the mathObject used as a superscript.
    Returns:
        string: The reading of the superscript.
    '''
    # A block script is always treated as compound: otherwise a^{b_c}
    # and a^b_c would be read exactly the same.
    if(node_script.content != 'block' and is_simple(node_script)):
        key = 'simple_superscript'
    else:
        key = 'comp_superscript'
    return to_read[key] % lineal_read(node_script)
#EndOfFunction
def lineal_read_subscript(node_script):
    '''Read a subscript, marking whether it is simple or compound.
    Args:
        node_script(mathObject): the mathObject used as a subscript.
    Returns:
        string: The reading of the subscript.
    '''
    # Block scripts are always compound; see lineal_read_superscript for
    # the a^{b_c} vs a^b_c ambiguity this avoids.
    if(node_script.content != 'block' and is_simple(node_script)):
        key = 'simple_subscript'
    else:
        key = 'comp_subscript'
    return to_read[key] % lineal_read(node_script)
#EndOfFunction
def is_simple(Node):
    '''Decide whether a node reads as a single token.
    A node is simple when it is a lone character e.g. "a", "2", "\alpha",
    or a block wrapping exactly one simple node, however deeply nested,
    e.g. "{a}", "{{\alpha}}".
    Args:
        Node(mathObject)
    Returns:
        bool: True if the node is simple.
    '''
    children = Node.get_children()
    if(children is None):
        return True
    if(Node.content == 'block' and len(children) == 1):
        # Unwrap nested blocks like {{a+b}} (not simple) or {{{\alpha}}} (simple).
        return is_simple(children[0])
    return False
#EndOfFunction
def lineal_read_block(list_children):
    '''Read every child of a block and concatenate the readings.
    Args:
        list_children(list of mathObject): The children of the block,
            each a mathObject of the contained formula.
    Returns:
        string: The linear reads of all the children, in order.
    '''
    return ''.join(lineal_read(child) for child in list_children)
#EndOfFunction
def lineal_read_fraction(list_children):
    '''Read a fraction; a compound fraction is wrapped with begin/end markers.
    A fraction is simple only when numerator and denominator both are.
    Args:
        list_children(list of mathObjects): Length 2 -- the numerator
            followed by the denominator.
    Returns:
        string: The reading of the fraction.'''
    numerator = list_children[0]
    denominator = list_children[1]
    if(is_simple(numerator) and is_simple(denominator)):
        key = 'simple_frac'
    else:
        key = 'comp_frac'
    return to_read[key] % (lineal_read(numerator), lineal_read(denominator))
#EndOfFunction
def lineal_read_root(list_children):
    '''Read a root: square or indexed, simple or compound.
    Compound roots are wrapped with begin/end markers.
    Args:
        list_children(list of mathObjects): One node (the radicand) for a
            square root \sqrt{k}, or two nodes (index, radicand) for an
            indexed root \sqrt[i]{k}.
    Returns:
        string: The reading of the root.
    '''
    if(len(list_children) == 1):
        # Square root: only the radicand is present.
        radicand = list_children[0]
        key = 'simple_sqrt' if is_simple(radicand) else 'comp_sqrt'
        return to_read[key] % (lineal_read(radicand))
    # Indexed root: the first child is the index, the second the radicand.
    index = list_children[0]
    radicand = list_children[1]
    key = 'simple_root' if is_simple(radicand) else 'comp_root'
    return to_read[key] % (lineal_read(index), lineal_read(radicand))
#EndOfFunction
def lineal_read_choose_binom(list_children):
    '''Read a binomial coefficient; compound ones get begin/end markers.
    The coefficient is simple only when both of its arguments are.
    Args:
        list_children(list of mathObjects): The two arguments of the
            binomial coefficient.
    Returns:
        string: The reading of the binomial coefficient.
    '''
    top = list_children[0]
    bottom = list_children[1]
    if(is_simple(top) and is_simple(bottom)):
        key = 'simple_choose'
    else:
        key = 'comp_choose'
    return to_read[key] % (lineal_read(top), lineal_read(bottom))
#EndOfFunction
def lineal_read_pmod(list_children):
    '''Read a modulo; a compound argument gets begin/end markers.
    Args:
        list_children(list of mathObjects): A single node, the argument
            of the modulo.
    Returns:
        string: The reading of the modulo.
    '''
    argument = list_children[0]
    key = 'simple_modulo' if is_simple(argument) else 'comp_modulo'
    return to_read[key] % (lineal_read(argument))
#EndOfFunction
def lineal_read_text(list_children):
    '''Read embedded plain text, marking where it begins and ends.
    Args:
        list_children(string): The raw text carried by the node; unlike
            other nodes the child here is a plain string, not a mathObject.
    Returns:
        string: "text ... endtext" wrapped around the given text.
    '''
    return 'text %s endtext' % list_children
#EndOfFunction
def lineal_read_label(list_children):
    '''Keep a LaTeX label untouched so self references keep working.
    Args:
        list_children(string): The label command carried by the node.
    Returns:
        string: The label preceded by a backslash, unaltered otherwise.
    '''
    return '\\%s' % list_children
#EndOFFunction
def lineal_read_formula(list_formula):
    '''Read a whole parsed formula.
    The parser splits a LaTeX formula into a list of mathObjects; this
    concatenates the linear reading of each of them.
    Args:
        list_formula(list of mathObjects): Every top-level node of the
            formula, in order.
    Returns:
        string: The complete linear reading of the formula.
    '''
    return ''.join(lineal_read(node) for node in list_formula)
#EndOfFunction
def lineal_read_array(list_array):
    '''Read an array, announcing each element's row and column.
    Every element is preceded by a title like "element 1_2" (first row,
    second column); the result also marks where the array begins and ends.
    Args:
        list_array(list of mathObjects): The elements of the array plus
            its delimiter nodes (strings like '&' and '\\').
    Returns:
        string: The linear reading of the whole array.
    '''
    row, col = 1, 1
    pieces = ['%s%d_%d ' % (to_read['array_element'], row, col)]
    for cell in list_array:
        reading = lineal_read(cell)
        if(reading == '& '):
            # Column separator: announce the next element's position.
            col += 1
            pieces.append('%s%d_%d ' % (to_read['array_element'], row, col))
        elif(reading == to_read['linebreak']):
            # Row separator: back to the first column of the next row.
            row += 1
            col = 1
            pieces.append('%s%d_%d ' % (to_read['array_element'], row, col))
        else:
            pieces.append('%s ' % reading)
    return to_read['array'] % ''.join(pieces)
#EndOfFunction
def lineal_read_formula_list(list_formula):
    '''Linear-read a parsed formula and split the result into words.
    Args:
        list_formula(list of mathObjects): The nodes that shape the whole
            formula, as produced by the parser.
    Returns:
        list of string: The whitespace-separated words of the linear
        reading of the formula.
    '''
    # Delegate to lineal_read_formula instead of duplicating its loop.
    return lineal_read_formula(list_formula).split()
#EndOfFunction
if __name__ == "__main__":
    # Tiny REPL: read one LaTeX formula per line, parse it and print
    # its linear reading; exits on EOF (Ctrl-D / Ctrl-Z).
    while True:
        try:
            try:
                s = raw_input()
            except NameError: # Python3
                s = input('spi> ')
            # NOTE(review): `converter` is presumably imported earlier in
            # this module (not visible here) -- confirm it is in scope.
            cv_s = converter.latex2list(s)
            print(lineal_read_formula(cv_s))
        except EOFError:
            break
| blindtex/blindtex | blindtex/interpreter/reader.py | Python | gpl-3.0 | 14,005 |
import logging
from agent import run
logger = logging.getLogger(__name__)
def example_experiment():
    """Run the agent several times and report each reward and the average."""
    num_runs = 5
    episodes = 100000
    sleep = 0.00
    params = run.getparams(episodes)
    rewards = []
    for _ in range(num_runs):
        (reward, allparams, totrewlist, totrewavglist,
         greedyrewlist, reward_threshold) = run.main(params.copy(), numavg=100, sleep=sleep)
        rewards.append(reward)
    # Only the parameters returned by the last run are printed.
    print(allparams)
    print("avg_rew", sum(rewards) / num_runs)
    print("rew list", rewards)
if __name__ == '__main__':
run.main(None) | davidenitti/ML | RL/run_agent.py | Python | gpl-3.0 | 604 |
timer = 500 # delay in milliseconds
def toggle():
    """Toggle the LED state.

    NOTE(review): stub -- no behaviour implemented yet.
    """
    pass
| librallu/RICM4Projet | tests/led/led.py | Python | gpl-3.0 | 57 |
# -*- coding: utf-8 -*-
#
# Copyright (C) Translate House and contributors.
#
# This file is a part of the PyLIFF project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import pytest
@pytest.mark.test
def test_pyliff_target(file_units_xliff):
    """The first unit's segments expose their targets in document order."""
    first_file = file_units_xliff.files.next()
    segments = first_file.units.next().segments
    # The first two segments carry translated text...
    for expected in ('Unit 1 target', 'Unit 1 target - part 2'):
        assert segments.next().target.text == expected
    # ...while the third has no target at all.
    assert segments.next().target is None
| translate/pyliff | tests/target.py | Python | gpl-3.0 | 668 |
from django.contrib import admin
from comun.models import Departamento, Partido
# Expose the geographic models in the Django admin site with the
# default ModelAdmin options.
admin.site.register(Departamento)
admin.site.register(Partido)
| mmanto/sstuv | comun/admin.py | Python | gpl-3.0 | 144 |
# coding: utf-8
""" Copyright (c) 2013 João Bernardo Vianna Oliveira
This file is part of Discoder.
Discoder is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Discoder is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Discoder. If not, see <http://www.gnu.org/licenses/>.
"""
from __future__ import print_function
import multiprocessing
import multiprocessing.dummy
import socket
import threading
from discoder.distributed import send_data, get_data
from discoder.lib.helper import star
__author__ = 'jb'
QUEUE_LOCK = threading.Lock()
KEEP_BALANCE_ORDER = False
def load_client(client, cmd):
    """ Open socket and write command to be executed remotely.

    Args:
        client: (host, port) address of the worker node.
        cmd: Command payload understood by the remote side.
    Returns:
        The remote node's response, as decoded by ``get_data``.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.connect(client)
        send_data(s, cmd)
        out = get_data(s)
    finally:
        # Close even when connect/send/recv raises, so failed requests
        # do not leak file descriptors.
        s.close()
    return out
class CommandInfo(object):
    """ Keep track of position of command after its execution.

    The load balancer hands commands to worker threads in arbitrary order;
    ``pos`` remembers the command's index in the original argument list so
    the results can be re-ordered afterwards (see ``list_numeric_order``).
    """
    def __init__(self, pos, data):
        self.pos = pos    # index of the command in the original list
        self.data = data  # the command payload itself

    def __repr__(self):
        # Helpful when inspecting queue contents while debugging.
        return 'CommandInfo(pos=%r, data=%r)' % (self.pos, self.data)
class LoadBalancer(object):
    """Pull commands from a shared queue and run them on one node.

    Several worker threads per node compete for commands from the same
    queue, so faster nodes automatically take on more work.
    """
    def __init__(self, queue):
        # queue: shared queue of CommandInfo objects, drained by every
        # node's LoadBalancer concurrently.
        self.queue = queue
    def run(self, client, num):
        """Run `num` worker threads against the node at address `client`.

        Returns a list with one sublist of [pos, result] pairs per thread.
        """
        done = []
        lock = threading.Lock()
        def queue_run(n):
            # Each thread owns one sublist of `done`; the lock keeps the
            # create-and-append of that sublist atomic.
            with lock:
                done_thread = []
                done.append(done_thread)
            while True:
                try:
                    with QUEUE_LOCK:
                        cmd = self.queue.get_nowait()
                except Exception:
                    # Queue is empty: nothing left for this thread to do.
                    break
                res = load_client(client, [cmd.data])[0]
                done_thread.append([cmd.pos, res])
        ths = []
        for x in range(num):
            th = threading.Thread(target=queue_run, args=(x,))
            th.daemon = True
            th.start()
            ths.append(th)
        # Wait for every worker to drain the queue before reporting.
        for th in ths:
            th.join()
        return done
def list_numeric_order(data):
    """ Flatten balance output into results ordered by segment position.

    ``data`` nests results as nodes -> threads -> [position, result]
    pairs; the positions, taken together, index the original command list.

    Example::

        [
            [ [ [1, X1], [3, X3] ], [ [0, X0], [5, X5] ] ],
            [ [ [2, X2], [7, X7] ], [ [4, X4], [6, X6] ] ],
        ]
        -> [X0, X1, X2, X3, X4, X5, X6, X7]

    :param data: 4-dimensional list produced by the balanced `run`.
    :return: flat list of results in numeric segment order.
    """
    by_position = {}
    for node_results in data:
        for thread_results in node_results:
            by_position.update(thread_results)
    return [result for _, result in sorted(by_position.items())]
def run(nodes, args, balance=False):
    """ Connect to clients using multiple processes.

    Args:
        nodes: List of (host, port) addresses of worker nodes.
        args: List of commands to execute remotely.
        balance: When truthy, the number of worker threads per node;
            commands are then pulled from a shared queue (work stealing)
            instead of being pre-partitioned.
    Returns:
        When balancing (and KEEP_BALANCE_ORDER is false) a flat list of
        results in original command order; otherwise one result list per
        node.
    """
    nnodes = len(nodes)
    pool = multiprocessing.dummy.Pool(nnodes)
    if balance:
        args = [CommandInfo(i, x) for i, x in enumerate(args)]
        queue = multiprocessing.Queue(maxsize=len(args))
        for el in args:
            queue.put(el, block=True)
        parts = [balance for _ in range(nnodes)]
        out = pool.map(star(LoadBalancer(queue).run), zip(nodes, parts))
        if not KEEP_BALANCE_ORDER:
            out = list_numeric_order(out)
    else:
        # Partition args into nnodes contiguous chunks, spreading the
        # remainder over the first chunks. The previous slicing
        # (`n = len(args) // nnodes` over `range(0, n * nnodes, n)`)
        # silently dropped the last `len(args) % nnodes` commands.
        n, extra = divmod(len(args), nnodes)
        parts = []
        start = 0
        for i in range(nnodes):
            size = n + 1 if i < extra else n
            parts.append(args[start:start + size])
            start += size
        out = pool.map(star(load_client), zip(nodes, parts))
    return out
| jbvsmo/discoder | discoder/distributed/server.py | Python | gpl-3.0 | 3,939 |
# -*- coding: utf-8 -*-
"""
    flask.testsuite.blueprints
~~~~~~~~~~~~~~~~~~~~~~~~~~
Blueprints (and currently mod_auth)
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
import warnings
from flask.testsuite import FlaskTestCase, emits_module_deprecation_warning
from flask._compat import text_type
from werkzeug.exceptions import NotFound
from werkzeug.http import parse_cache_control_header
from jinja2 import TemplateNotFound
# import moduleapp here because it uses deprecated features and we don't
# want to see the warnings
warnings.simplefilter('ignore', DeprecationWarning)
from moduleapp import app as moduleapp
warnings.simplefilter('default', DeprecationWarning)
class ModuleTestCase(FlaskTestCase):
    """Tests for the deprecated ``flask.Module`` API.

    Modules were the pre-Blueprint mechanism for splitting an app into
    components; most tests are wrapped so the expected DeprecationWarning
    is asserted instead of leaking out.
    """
    @emits_module_deprecation_warning
    def test_basic_module(self):
        """Routes registered on a module are served under its url_prefix."""
        app = flask.Flask(__name__)
        admin = flask.Module(__name__, 'admin', url_prefix='/admin')
        @admin.route('/')
        def admin_index():
            return 'admin index'
        @admin.route('/login')
        def admin_login():
            return 'admin login'
        @admin.route('/logout')
        def admin_logout():
            return 'admin logout'
        @app.route('/')
        def index():
            return 'the index'
        app.register_module(admin)
        c = app.test_client()
        self.assert_equal(c.get('/').data, b'the index')
        self.assert_equal(c.get('/admin/').data, b'admin index')
        self.assert_equal(c.get('/admin/login').data, b'admin login')
        self.assert_equal(c.get('/admin/logout').data, b'admin logout')
    @emits_module_deprecation_warning
    def test_default_endpoint_name(self):
        """add_url_rule without an endpoint falls back to the view name."""
        app = flask.Flask(__name__)
        mod = flask.Module(__name__, 'frontend')
        def index():
            return 'Awesome'
        mod.add_url_rule('/', view_func=index)
        app.register_module(mod)
        rv = app.test_client().get('/')
        self.assert_equal(rv.data, b'Awesome')
        with app.test_request_context():
            self.assert_equal(flask.url_for('frontend.index'), '/')
    @emits_module_deprecation_warning
    def test_request_processing(self):
        """Module before/after handlers nest inside the app-level ones."""
        catched = []
        app = flask.Flask(__name__)
        admin = flask.Module(__name__, 'admin', url_prefix='/admin')
        @admin.before_request
        def before_admin_request():
            catched.append('before-admin')
        @admin.after_request
        def after_admin_request(response):
            catched.append('after-admin')
            return response
        @admin.route('/')
        def admin_index():
            return 'the admin'
        @app.before_request
        def before_request():
            catched.append('before-app')
        @app.after_request
        def after_request(response):
            catched.append('after-app')
            return response
        @app.route('/')
        def index():
            return 'the index'
        app.register_module(admin)
        c = app.test_client()
        # A plain app route only triggers the app-level handlers.
        self.assert_equal(c.get('/').data, b'the index')
        self.assert_equal(catched, ['before-app', 'after-app'])
        del catched[:]
        # A module route triggers both, module handlers innermost.
        self.assert_equal(c.get('/admin/').data, b'the admin')
        self.assert_equal(catched, ['before-app', 'before-admin',
                                    'after-admin', 'after-app'])
    @emits_module_deprecation_warning
    def test_context_processors(self):
        """App, module-local and app-wide module context processors combine."""
        app = flask.Flask(__name__)
        admin = flask.Module(__name__, 'admin', url_prefix='/admin')
        @app.context_processor
        def inject_all_regular():
            return {'a': 1}
        @admin.context_processor
        def inject_admin():
            return {'b': 2}
        @admin.app_context_processor
        def inject_all_module():
            return {'c': 3}
        @app.route('/')
        def index():
            return flask.render_template_string('{{ a }}{{ b }}{{ c }}')
        @admin.route('/')
        def admin_index():
            return flask.render_template_string('{{ a }}{{ b }}{{ c }}')
        app.register_module(admin)
        c = app.test_client()
        # 'b' is module-local, so the app view only sees 'a' and 'c'.
        self.assert_equal(c.get('/').data, b'13')
        self.assert_equal(c.get('/admin/').data, b'123')
    @emits_module_deprecation_warning
    def test_late_binding(self):
        """url_prefix may be supplied at register_module time."""
        app = flask.Flask(__name__)
        admin = flask.Module(__name__, 'admin')
        @admin.route('/')
        def index():
            return '42'
        app.register_module(admin, url_prefix='/admin')
        self.assert_equal(app.test_client().get('/admin/').data, b'42')
    @emits_module_deprecation_warning
    def test_error_handling(self):
        """app_errorhandler on a module installs app-wide error handlers."""
        app = flask.Flask(__name__)
        admin = flask.Module(__name__, 'admin')
        @admin.app_errorhandler(404)
        def not_found(e):
            return 'not found', 404
        @admin.app_errorhandler(500)
        def internal_server_error(e):
            return 'internal server error', 500
        @admin.route('/')
        def index():
            flask.abort(404)
        @admin.route('/error')
        def error():
            1 // 0
        app.register_module(admin)
        c = app.test_client()
        rv = c.get('/')
        self.assert_equal(rv.status_code, 404)
        self.assert_equal(rv.data, b'not found')
        rv = c.get('/error')
        self.assert_equal(rv.status_code, 500)
        self.assert_equal(b'internal server error', rv.data)
    def test_templates_and_static(self):
        """Module templates and static files resolve relative to the module."""
        app = moduleapp
        app.testing = True
        c = app.test_client()
        rv = c.get('/')
        self.assert_equal(rv.data, b'Hello from the Frontend')
        rv = c.get('/admin/')
        self.assert_equal(rv.data, b'Hello from the Admin')
        rv = c.get('/admin/index2')
        self.assert_equal(rv.data, b'Hello from the Admin')
        rv = c.get('/admin/static/test.txt')
        self.assert_equal(rv.data.strip(), b'Admin File')
        rv.close()
        rv = c.get('/admin/static/css/test.css')
        self.assert_equal(rv.data.strip(), b'/* nested file */')
        rv.close()
        with app.test_request_context():
            self.assert_equal(flask.url_for('admin.static', filename='test.txt'),
                              '/admin/static/test.txt')
        with app.test_request_context():
            try:
                flask.render_template('missing.html')
            except TemplateNotFound as e:
                self.assert_equal(e.name, 'missing.html')
            else:
                self.assert_true(0, 'expected exception')
        with flask.Flask(__name__).test_request_context():
            self.assert_equal(flask.render_template('nested/nested.txt'), 'I\'m nested')
    def test_safe_access(self):
        """The module static view rejects path traversal attempts."""
        app = moduleapp
        with app.test_request_context():
            f = app.view_functions['admin.static']
            try:
                f('/etc/passwd')
            except NotFound:
                pass
            else:
                self.assert_true(0, 'expected exception')
            try:
                f('../__init__.py')
            except NotFound:
                pass
            else:
                self.assert_true(0, 'expected exception')
            # testcase for a security issue that may exist on windows systems
            import os
            import ntpath
            old_path = os.path
            os.path = ntpath
            try:
                try:
                    f('..\\__init__.py')
                except NotFound:
                    pass
                else:
                    self.assert_true(0, 'expected exception')
            finally:
                os.path = old_path
    @emits_module_deprecation_warning
    def test_endpoint_decorator(self):
        """Module.endpoint attaches view functions to pre-registered rules."""
        from werkzeug.routing import Submount, Rule
        from flask import Module
        app = flask.Flask(__name__)
        app.testing = True
        app.url_map.add(Submount('/foo', [
            Rule('/bar', endpoint='bar'),
            Rule('/', endpoint='index')
        ]))
        module = Module(__name__, __name__)
        @module.endpoint('bar')
        def bar():
            return 'bar'
        @module.endpoint('index')
        def index():
            return 'index'
        app.register_module(module)
        c = app.test_client()
        self.assert_equal(c.get('/foo/').data, b'index')
        self.assert_equal(c.get('/foo/bar').data, b'bar')
class BlueprintTestCase(FlaskTestCase):
def test_blueprint_specific_error_handling(self):
frontend = flask.Blueprint('frontend', __name__)
backend = flask.Blueprint('backend', __name__)
sideend = flask.Blueprint('sideend', __name__)
@frontend.errorhandler(403)
def frontend_forbidden(e):
return 'frontend says no', 403
@frontend.route('/frontend-no')
def frontend_no():
flask.abort(403)
@backend.errorhandler(403)
def backend_forbidden(e):
return 'backend says no', 403
@backend.route('/backend-no')
def backend_no():
flask.abort(403)
@sideend.route('/what-is-a-sideend')
def sideend_no():
flask.abort(403)
app = flask.Flask(__name__)
app.register_blueprint(frontend)
app.register_blueprint(backend)
app.register_blueprint(sideend)
@app.errorhandler(403)
def app_forbidden(e):
return 'application itself says no', 403
c = app.test_client()
self.assert_equal(c.get('/frontend-no').data, b'frontend says no')
self.assert_equal(c.get('/backend-no').data, b'backend says no')
self.assert_equal(c.get('/what-is-a-sideend').data, b'application itself says no')
def test_blueprint_url_definitions(self):
bp = flask.Blueprint('test', __name__)
@bp.route('/foo', defaults={'baz': 42})
def foo(bar, baz):
return '%s/%d' % (bar, baz)
@bp.route('/bar')
def bar(bar):
return text_type(bar)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/1', url_defaults={'bar': 23})
app.register_blueprint(bp, url_prefix='/2', url_defaults={'bar': 19})
c = app.test_client()
self.assert_equal(c.get('/1/foo').data, b'23/42')
self.assert_equal(c.get('/2/foo').data, b'19/42')
self.assert_equal(c.get('/1/bar').data, b'23')
self.assert_equal(c.get('/2/bar').data, b'19')
def test_blueprint_url_processors(self):
bp = flask.Blueprint('frontend', __name__, url_prefix='/<lang_code>')
@bp.url_defaults
def add_language_code(endpoint, values):
values.setdefault('lang_code', flask.g.lang_code)
@bp.url_value_preprocessor
def pull_lang_code(endpoint, values):
flask.g.lang_code = values.pop('lang_code')
@bp.route('/')
def index():
return flask.url_for('.about')
@bp.route('/about')
def about():
return flask.url_for('.index')
app = flask.Flask(__name__)
app.register_blueprint(bp)
c = app.test_client()
self.assert_equal(c.get('/de/').data, b'/de/about')
self.assert_equal(c.get('/de/about').data, b'/de/')
def test_templates_and_static(self):
from blueprintapp import app
c = app.test_client()
rv = c.get('/')
self.assert_equal(rv.data, b'Hello from the Frontend')
rv = c.get('/admin/')
self.assert_equal(rv.data, b'Hello from the Admin')
rv = c.get('/admin/index2')
self.assert_equal(rv.data, b'Hello from the Admin')
rv = c.get('/admin/static/test.txt')
self.assert_equal(rv.data.strip(), b'Admin File')
rv.close()
rv = c.get('/admin/static/css/test.css')
self.assert_equal(rv.data.strip(), b'/* nested file */')
rv.close()
# try/finally, in case other tests use this app for Blueprint tests.
max_age_default = app.config['SEND_FILE_MAX_AGE_DEFAULT']
try:
expected_max_age = 3600
if app.config['SEND_FILE_MAX_AGE_DEFAULT'] == expected_max_age:
expected_max_age = 7200
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = expected_max_age
rv = c.get('/admin/static/css/test.css')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, expected_max_age)
rv.close()
finally:
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = max_age_default
with app.test_request_context():
self.assert_equal(flask.url_for('admin.static', filename='test.txt'),
'/admin/static/test.txt')
with app.test_request_context():
try:
flask.render_template('missing.html')
except TemplateNotFound as e:
self.assert_equal(e.name, 'missing.html')
else:
self.assert_true(0, 'expected exception')
with flask.Flask(__name__).test_request_context():
self.assert_equal(flask.render_template('nested/nested.txt'), 'I\'m nested')
def test_default_static_cache_timeout(self):
app = flask.Flask(__name__)
class MyBlueprint(flask.Blueprint):
def get_send_file_max_age(self, filename):
return 100
blueprint = MyBlueprint('blueprint', __name__, static_folder='static')
app.register_blueprint(blueprint)
# try/finally, in case other tests use this app for Blueprint tests.
max_age_default = app.config['SEND_FILE_MAX_AGE_DEFAULT']
try:
with app.test_request_context():
unexpected_max_age = 3600
if app.config['SEND_FILE_MAX_AGE_DEFAULT'] == unexpected_max_age:
unexpected_max_age = 7200
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = unexpected_max_age
rv = blueprint.send_static_file('index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 100)
rv.close()
finally:
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = max_age_default
def test_templates_list(self):
from blueprintapp import app
templates = sorted(app.jinja_env.list_templates())
self.assert_equal(templates, ['admin/index.html',
'frontend/index.html'])
def test_dotted_names(self):
frontend = flask.Blueprint('myapp.frontend', __name__)
backend = flask.Blueprint('myapp.backend', __name__)
@frontend.route('/fe')
def frontend_index():
return flask.url_for('myapp.backend.backend_index')
@frontend.route('/fe2')
def frontend_page2():
return flask.url_for('.frontend_index')
@backend.route('/be')
def backend_index():
return flask.url_for('myapp.frontend.frontend_index')
app = flask.Flask(__name__)
app.register_blueprint(frontend)
app.register_blueprint(backend)
c = app.test_client()
self.assert_equal(c.get('/fe').data.strip(), b'/be')
self.assert_equal(c.get('/fe2').data.strip(), b'/fe')
self.assert_equal(c.get('/be').data.strip(), b'/fe')
def test_dotted_names_from_app(self):
app = flask.Flask(__name__)
app.testing = True
test = flask.Blueprint('test', __name__)
@app.route('/')
def app_index():
return flask.url_for('test.index')
@test.route('/test/')
def index():
return flask.url_for('app_index')
app.register_blueprint(test)
with app.test_client() as c:
rv = c.get('/')
self.assert_equal(rv.data, b'/test/')
def test_empty_url_defaults(self):
bp = flask.Blueprint('bp', __name__)
@bp.route('/', defaults={'page': 1})
@bp.route('/page/<int:page>')
def something(page):
return str(page)
app = flask.Flask(__name__)
app.register_blueprint(bp)
c = app.test_client()
self.assert_equal(c.get('/').data, b'1')
self.assert_equal(c.get('/page/2').data, b'2')
def test_route_decorator_custom_endpoint(self):
bp = flask.Blueprint('bp', __name__)
@bp.route('/foo')
def foo():
return flask.request.endpoint
@bp.route('/bar', endpoint='bar')
def foo_bar():
return flask.request.endpoint
@bp.route('/bar/123', endpoint='123')
def foo_bar_foo():
return flask.request.endpoint
@bp.route('/bar/foo')
def bar_foo():
return flask.request.endpoint
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.request.endpoint
c = app.test_client()
self.assertEqual(c.get('/').data, b'index')
self.assertEqual(c.get('/py/foo').data, b'bp.foo')
self.assertEqual(c.get('/py/bar').data, b'bp.bar')
self.assertEqual(c.get('/py/bar/123').data, b'bp.123')
self.assertEqual(c.get('/py/bar/foo').data, b'bp.bar_foo')
def test_route_decorator_custom_endpoint_with_dots(self):
bp = flask.Blueprint('bp', __name__)
@bp.route('/foo')
def foo():
return flask.request.endpoint
try:
@bp.route('/bar', endpoint='bar.bar')
def foo_bar():
return flask.request.endpoint
except AssertionError:
pass
else:
raise AssertionError('expected AssertionError not raised')
try:
@bp.route('/bar/123', endpoint='bar.123')
def foo_bar_foo():
return flask.request.endpoint
except AssertionError:
pass
else:
raise AssertionError('expected AssertionError not raised')
def foo_foo_foo():
pass
self.assertRaises(
AssertionError,
lambda: bp.add_url_rule(
'/bar/123', endpoint='bar.123', view_func=foo_foo_foo
)
)
self.assertRaises(
AssertionError,
bp.route('/bar/123', endpoint='bar.123'),
lambda: None
)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
c = app.test_client()
self.assertEqual(c.get('/py/foo').data, b'bp.foo')
# The rule's didn't actually made it through
rv = c.get('/py/bar')
assert rv.status_code == 404
rv = c.get('/py/bar/123')
assert rv.status_code == 404
def test_template_filter(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter()
def my_reverse(s):
return s[::-1]
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('my_reverse', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['my_reverse'], my_reverse)
self.assert_equal(app.jinja_env.filters['my_reverse']('abcd'), 'dcba')
def test_add_template_filter(self):
bp = flask.Blueprint('bp', __name__)
def my_reverse(s):
return s[::-1]
bp.add_app_template_filter(my_reverse)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('my_reverse', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['my_reverse'], my_reverse)
self.assert_equal(app.jinja_env.filters['my_reverse']('abcd'), 'dcba')
def test_template_filter_with_name(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter('strrev')
def my_reverse(s):
return s[::-1]
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('strrev', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['strrev'], my_reverse)
self.assert_equal(app.jinja_env.filters['strrev']('abcd'), 'dcba')
def test_add_template_filter_with_name(self):
bp = flask.Blueprint('bp', __name__)
def my_reverse(s):
return s[::-1]
bp.add_app_template_filter(my_reverse, 'strrev')
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
self.assert_in('strrev', app.jinja_env.filters.keys())
self.assert_equal(app.jinja_env.filters['strrev'], my_reverse)
self.assert_equal(app.jinja_env.filters['strrev']('abcd'), 'dcba')
def test_template_filter_with_template(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter()
def super_reverse(s):
return s[::-1]
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_template_filter_after_route_with_template(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter()
def super_reverse(s):
return s[::-1]
app.register_blueprint(bp, url_prefix='/py')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_add_template_filter_with_template(self):
bp = flask.Blueprint('bp', __name__)
def super_reverse(s):
return s[::-1]
bp.add_app_template_filter(super_reverse)
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
def test_template_filter_with_name_and_template(self):
bp = flask.Blueprint('bp', __name__)
@bp.app_template_filter('super_reverse')
def my_reverse(s):
return s[::-1]
app = flask.Flask(__name__)
app.register_blueprint(bp, url_prefix='/py')
@app.route('/')
def index():
return flask.render_template('template_filter.html', value='abcd')
rv = app.test_client().get('/')
self.assert_equal(rv.data, b'dcba')
    def test_add_template_filter_with_name_and_template(self):
        """add_app_template_filter() with an explicit name works end-to-end
        through template rendering."""
        bp = flask.Blueprint('bp', __name__)
        def my_reverse(s):
            return s[::-1]
        bp.add_app_template_filter(my_reverse, 'super_reverse')
        app = flask.Flask(__name__)
        app.register_blueprint(bp, url_prefix='/py')
        @app.route('/')
        def index():
            return flask.render_template('template_filter.html', value='abcd')
        rv = app.test_client().get('/')
        self.assert_equal(rv.data, b'dcba')
    def test_template_test(self):
        """@bp.app_template_test() registers the test under the function's
        name in app.jinja_env.tests."""
        bp = flask.Blueprint('bp', __name__)
        @bp.app_template_test()
        def is_boolean(value):
            return isinstance(value, bool)
        app = flask.Flask(__name__)
        app.register_blueprint(bp, url_prefix='/py')
        self.assert_in('is_boolean', app.jinja_env.tests.keys())
        self.assert_equal(app.jinja_env.tests['is_boolean'], is_boolean)
        self.assert_true(app.jinja_env.tests['is_boolean'](False))
    def test_add_template_test(self):
        """add_app_template_test() (default name) registers the test in
        app.jinja_env.tests."""
        bp = flask.Blueprint('bp', __name__)
        def is_boolean(value):
            return isinstance(value, bool)
        bp.add_app_template_test(is_boolean)
        app = flask.Flask(__name__)
        app.register_blueprint(bp, url_prefix='/py')
        self.assert_in('is_boolean', app.jinja_env.tests.keys())
        self.assert_equal(app.jinja_env.tests['is_boolean'], is_boolean)
        self.assert_true(app.jinja_env.tests['is_boolean'](False))
    def test_template_test_with_name(self):
        """@bp.app_template_test('boolean') registers the test under the
        explicit name rather than the function's name."""
        bp = flask.Blueprint('bp', __name__)
        @bp.app_template_test('boolean')
        def is_boolean(value):
            return isinstance(value, bool)
        app = flask.Flask(__name__)
        app.register_blueprint(bp, url_prefix='/py')
        self.assert_in('boolean', app.jinja_env.tests.keys())
        self.assert_equal(app.jinja_env.tests['boolean'], is_boolean)
        self.assert_true(app.jinja_env.tests['boolean'](False))
    def test_add_template_test_with_name(self):
        """add_app_template_test() with an explicit name registers the test
        under that name in app.jinja_env.tests."""
        bp = flask.Blueprint('bp', __name__)
        def is_boolean(value):
            return isinstance(value, bool)
        bp.add_app_template_test(is_boolean, 'boolean')
        app = flask.Flask(__name__)
        app.register_blueprint(bp, url_prefix='/py')
        self.assert_in('boolean', app.jinja_env.tests.keys())
        self.assert_equal(app.jinja_env.tests['boolean'], is_boolean)
        self.assert_true(app.jinja_env.tests['boolean'](False))
    def test_template_test_with_template(self):
        """A test registered via @bp.app_template_test() is usable from a
        rendered template."""
        bp = flask.Blueprint('bp', __name__)
        @bp.app_template_test()
        def boolean(value):
            return isinstance(value, bool)
        app = flask.Flask(__name__)
        app.register_blueprint(bp, url_prefix='/py')
        @app.route('/')
        def index():
            return flask.render_template('template_test.html', value=False)
        rv = app.test_client().get('/')
        self.assert_in(b'Success!', rv.data)
    def test_template_test_after_route_with_template(self):
        """Registering the blueprint after the app's route exists still makes
        the blueprint's template test available to templates."""
        app = flask.Flask(__name__)
        @app.route('/')
        def index():
            return flask.render_template('template_test.html', value=False)
        bp = flask.Blueprint('bp', __name__)
        @bp.app_template_test()
        def boolean(value):
            return isinstance(value, bool)
        app.register_blueprint(bp, url_prefix='/py')
        rv = app.test_client().get('/')
        self.assert_in(b'Success!', rv.data)
    def test_add_template_test_with_template(self):
        """add_app_template_test() (default name) works end-to-end through
        template rendering."""
        bp = flask.Blueprint('bp', __name__)
        def boolean(value):
            return isinstance(value, bool)
        bp.add_app_template_test(boolean)
        app = flask.Flask(__name__)
        app.register_blueprint(bp, url_prefix='/py')
        @app.route('/')
        def index():
            return flask.render_template('template_test.html', value=False)
        rv = app.test_client().get('/')
        self.assert_in(b'Success!', rv.data)
    def test_template_test_with_name_and_template(self):
        """@bp.app_template_test('boolean') with an explicit name works
        end-to-end through template rendering."""
        bp = flask.Blueprint('bp', __name__)
        @bp.app_template_test('boolean')
        def is_boolean(value):
            return isinstance(value, bool)
        app = flask.Flask(__name__)
        app.register_blueprint(bp, url_prefix='/py')
        @app.route('/')
        def index():
            return flask.render_template('template_test.html', value=False)
        rv = app.test_client().get('/')
        self.assert_in(b'Success!', rv.data)
    def test_add_template_test_with_name_and_template(self):
        """add_app_template_test() with an explicit name works end-to-end
        through template rendering."""
        bp = flask.Blueprint('bp', __name__)
        def is_boolean(value):
            return isinstance(value, bool)
        bp.add_app_template_test(is_boolean, 'boolean')
        app = flask.Flask(__name__)
        app.register_blueprint(bp, url_prefix='/py')
        @app.route('/')
        def index():
            return flask.render_template('template_test.html', value=False)
        rv = app.test_client().get('/')
        self.assert_in(b'Success!', rv.data)
def suite():
    """Collect the blueprint and module test cases into a single test suite."""
    result = unittest.TestSuite()
    for case in (BlueprintTestCase, ModuleTestCase):
        result.addTest(unittest.makeSuite(case))
    return result
| ncdesouza/bookworm | env/lib/python2.7/site-packages/flask/testsuite/blueprints.py | Python | gpl-3.0 | 28,088 |
# ! /usr/bin/env python2.7
# _*_ coding:utf-8 _*_
"""
@author = lucas.wang
@create_time = 2018-01-12
"""
import optparse
import os
import sys
import getpass
import json
import hashlib
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.header import Header
from datetime import date, time, datetime, timedelta
# Config file location: ~/.ipa_build_py/ipaBuildPyConfigFile.json
commendPath = "/Users/" + getpass.getuser() + "/"
commendFinderName = ".ipa_build_py"
commendFullPath = commendPath + commendFinderName
configFileName = "ipaBuildPyConfigFile.json"
commendFilePath = commendFullPath + "/" + configFileName
# Xcode target name
targetName = None
# name of the temporary checkout folder (md5 hex digest, see setParameter)
tempFinder = None
# git repository URL
gitPath = None
# parent folder that holds the checkout
target_path = commendPath + "Documents"
# full path of the checkout (Documents/<tempFinder>)
mainPath = None
# code-signing certificate/identity name
certificateName = None
# mail settings used by sendEmail
emailFromUser = None
emailToUser = None
emailPassword = None
emailHost = None
# True when the checkout contains an .xcworkspace bundle
isWorkSpace = False
# tag/branch to build (overridden by -t/--tag)
tag = "dev"
# keychain unlock settings (so codesign runs without a UI prompt)
keychainPath = "~/Library/Keychains/login.keychain"
keychainPassword = ""
# 显示已有的参数
def showParameter():
    """Print every configured value (passwords included) to stdout.

    Fix: the original put each string on the line after its ``print``, which
    in Python 2 prints a blank line and silently discards the value; the
    prints are rejoined (parenthesized single-argument form works on both
    Python 2 and 3).
    """
    print("targetName :%s" % targetName)
    print("gitPath :%s" % gitPath)
    print("certificateName :%s" % certificateName)
    print("emailFromUser :%s" % emailFromUser)
    print("emailToUser :%s" % emailToUser)
    print("emailPassword :%s" % emailPassword)
    print("emailHost :%s" % emailHost)
    print("keychainPassword(Optional) :%s" % keychainPassword)
# 设置参数
def setParameter():
    """Interactively prompt for every configuration value and persist them.

    The temporary checkout folder name is the md5 hex digest of the fixed
    string 'BossZP', placed under ~/Documents.
    """
    global targetName
    global tempFinder
    global mainPath
    global gitPath
    global certificateName
    global emailFromUser
    global emailToUser
    global emailPassword
    global emailHost
    global keychainPassword
    targetName = raw_input("input targetName:")
    if not isNone(targetName):
        m = hashlib.md5()
        m.update('BossZP')
        tempFinder = m.hexdigest()
        mainPath = commendPath + 'Documents' + '/' + tempFinder
    gitPath = raw_input("input gitPath:")
    certificateName = raw_input("input certificateName:")
    emailFromUser = raw_input("input emailFromUser:")
    emailToUser = raw_input("input emailToUser:")
    emailPassword = raw_input("input emailPassword:")
    emailHost = raw_input("input emailHost:")
    keychainPassword = raw_input("input keychainPassword:")
    # persist to the local JSON config file
    writeJsonFile()
# 判断字符串是否为空
def isNone(para):
    """Return True when *para* is None or an empty sequence/string."""
    return para is None or len(para) == 0
# 是否需要设置参数
def isNeedSetParameter():
    """Return True when any mandatory configuration value is still missing
    (keychainPassword is optional and deliberately not checked)."""
    if isNone(targetName) or isNone(gitPath) or isNone(certificateName) or isNone(
            emailFromUser) or isNone(emailToUser) or isNone(emailPassword) or isNone(emailHost):
        return True
    else:
        return False
# 参数设置
def setOptparse():
    """Define and parse the command-line interface.

    -c/--config    interactively (re)configure and exit
    -s/--showTags  list the git tags of the checkout and exit
    -t/--tag       tag/branch to build (default "master")
    """
    p = optparse.OptionParser()
    # configuration command
    p.add_option("--config", "-c", action="store_true", default=None, help="config User's data")
    # list all tags
    p.add_option("--showTags", "-s", action="store_true", default=None, help="show all tags")
    # select the version/tag to build
    p.add_option('--tag', '-t', default="master", help="app's tag")
    options, arguments = p.parse_args()
    global tag
    tag = options.tag
    # interactive configuration (exits inside configMethod)
    if options.config == True and len(arguments) == 0:
        configMethod()
    # show all tags (exits inside gitShowTags)
    if options.showTags == True and len(arguments) == 0:
        gitShowTags()
# 配置信息
def configMethod():
    """Show the stored parameters, prompt for replacements, then exit.

    Fix: rejoined the ``print`` statements that had their argument on the
    following line (which printed nothing in Python 2).
    """
    os.system("clear")
    readJsonFile()
    print("您的参数如下:")
    print("************************************")
    showParameter()
    print("************************************")
    setParameter()
    sys.exit()
# 设置配置文件路径
def createFinder():
    """Ensure the hidden config folder and JSON config file exist in $HOME."""
    # create the folder when missing
    if not os.path.exists(commendPath + commendFinderName):
        os.system("cd %s;mkdir %s" % (commendPath, commendFinderName))
    # create the file when missing and seed it with null values
    if not os.path.isfile(commendFilePath):
        os.system("cd %s;touch %s" % (commendFullPath, configFileName))
        initJsonFile()
    return
# 初始化json配置文件
def initJsonFile():
    """Write a fresh config JSON containing every key with a null value."""
    fout = open(commendFilePath, 'w')
    js = {}
    js["targetName"] = None
    js["gitPath"] = None
    js["certificateName"] = None
    js["emailFromUser"] = None
    js["emailToUser"] = None
    js["emailPassword"] = None
    js["emailHost"] = None
    js["tempFinder"] = None
    js["mainPath"] = None
    js["keychainPassword"] = None
    outStr = json.dumps(js, ensure_ascii=False)
    fout.write(outStr.strip().encode('utf-8') + '\n')
    fout.close()
# 读取json文件
def readJsonFile():
    """Load the saved configuration JSON into the module-level globals.

    Fixes: the two ``print`` statements in the error path had their argument
    on the next line (so nothing was printed); rejoined.  ``except ... , e``
    updated to the ``as`` form (valid on Python 2.6+ and Python 3).
    """
    fin = open(commendFilePath, 'r')
    for eachLine in fin:
        line = eachLine.strip().decode('utf-8')
        line = line.strip(',')
        js = None
        try:
            js = json.loads(line)
            global targetName
            global tempFinder
            global mainPath
            global gitPath
            global certificateName
            global emailFromUser
            global emailToUser
            global emailPassword
            global emailHost
            global keychainPassword
            targetName = js["targetName"]
            gitPath = js["gitPath"]
            certificateName = js["certificateName"]
            emailFromUser = js["emailFromUser"]
            emailToUser = js["emailToUser"]
            emailPassword = js["emailPassword"]
            emailHost = js["emailHost"]
            tempFinder = js["tempFinder"]
            mainPath = js["mainPath"]
            keychainPassword = js["keychainPassword"]
        except Exception as e:
            # skip malformed lines but report why they failed to parse
            print(Exception)
            print(e)
            continue
    fin.close()
# 写json文件
def writeJsonFile():
    """Echo the current parameters and persist them to the config JSON.

    Fixes: rejoined the broken ``print``/argument pairs in the error path and
    switched to ``except ... as e`` (valid on Python 2.6+ and Python 3).
    """
    showParameter()
    try:
        fout = open(commendFilePath, 'w')
        js = {}
        js["targetName"] = targetName
        js["gitPath"] = gitPath
        js["certificateName"] = certificateName
        js["emailFromUser"] = emailFromUser
        js["emailToUser"] = emailToUser
        js["emailPassword"] = emailPassword
        js["emailHost"] = emailHost
        js["tempFinder"] = tempFinder
        js["mainPath"] = mainPath
        js["keychainPassword"] = keychainPassword
        outStr = json.dumps(js, ensure_ascii=False)
        fout.write(outStr.strip().encode('utf-8') + '\n')
        fout.close()
    except Exception as e:
        print(Exception)
        print(e)
# 删除文件夹
def rmoveFinder():
    """Delete the checkout folder (name keeps the original 'rmove' typo)."""
    os.system("rm -r -f %s" % mainPath)
    return
# 创建文件夹
def createFileFinder():
    """Create the checkout folder at mainPath."""
    os.system("mkdir %s" % mainPath)
    return
# 对文件夹授权
def allowFinder():
    """Recursively chmod 777 the checkout so every build step can write it."""
    os.system("chmod -R 777 %s" % mainPath)
    return
# 查找文件
def scan_files(directory, postfix):
    """Walk *directory* and return the sub-directory paths whose name ends
    with *postfix*.

    NOTE: despite the name this matches directories, not plain files -- the
    caller looks for ``*.xcodeproj`` bundles, which are directories.
    """
    matches = []
    for root, sub_dirs, files in os.walk(directory):
        matches.extend(os.path.join(root, name)
                       for name in sub_dirs if name.endswith(postfix))
    return matches
# 判断文件夹是否存在
def isFinderExists():
    """Return True when the checkout folder (mainPath) already exists."""
    return os.path.exists(mainPath)
# clone工程
def gitClone():
    """Shallow-clone (depth 1) the configured repository into mainPath."""
    os.system('git clone %s %s --depth 1' % (gitPath, mainPath))
    return
# 显示所有版本
def gitShowTags():
    """List every git tag of the cached checkout, then exit.

    Fix: rejoined the ``print`` statements that had their argument on the
    following line (which printed nothing in Python 2).
    """
    os.system("clear")
    readJsonFile()
    print("所有的版本")
    print(mainPath)
    print("************************************")
    os.system('cd %s;git tag' % mainPath)
    print("************************************")
    sys.exit()
# pull工程
def gitPull():
    """Discard local modifications and pull the latest revision in mainPath."""
    command = "cd %s;git reset --hard;git pull" % mainPath
    os.system(command)
    return
# 设置版本
def setGitVersion(version):
    """Hard-reset the checkout and switch it to *version* (tag or branch).

    A falsy/empty *version* is a no-op.
    """
    if len(version) > 0:
        os.system("cd %s;git reset --hard;git checkout %s" % (mainPath, version))
    return
# 回到主版本
def setGitVersionMaster():
    """Switch the checkout back to the master branch."""
    setGitVersion("master")
# clean工程
def cleanPro():
    """Run ``xcodebuild clean`` on the checkout, workspace-aware."""
    global isWorkSpace
    if isWorkSpace:
        os.system('cd %s;xcodebuild -workspace %s.xcworkspace -scheme %s clean' % (mainPath, targetName, targetName))
    else:
        os.system('cd %s;xcodebuild -target %s clean' % (mainPath, targetName))
    return
# 清理pbxproj文件
def clearPbxproj():
    """Strip PROVISIONING_PROFILE settings from project.pbxproj so the build
    uses the signing identity supplied on the command line.

    Fix: the original removed items from ``all_the_text`` while iterating it,
    which skips the element following every removal and could leave adjacent
    PROVISIONING_PROFILE lines behind.  A filtering comprehension avoids the
    mutation-during-iteration bug.
    """
    global all_the_text
    path = "%s/%s.xcodeproj/project.pbxproj" % (mainPath, targetName)
    file_object = open(path)
    try:
        all_the_text = [text for text in file_object.readlines()
                        if 'PROVISIONING_PROFILE' not in text]
    finally:
        file_object.close()
    file_object = open(path, 'w')
    try:
        file_object.writelines(all_the_text)
    finally:
        file_object.close()
    return
def allowKeychain():
    """Unlock the login keychain so codesign can run unattended.

    Without this, headless builds fail with "User interaction is not allowed".
    """
    # User interaction is not allowed
    os.system("security unlock-keychain -p '%s' %s" % (keychainPassword, keychainPath))
    return
# 编译获取.app文件和dsym
def buildApp():
    """Build the app (.app + dSYM) with the configured signing identity.

    Any extra .xcodeproj bundles found in the checkout (sub-projects /
    dependencies) are built first; the main target is built last, through the
    workspace when one exists.
    """
    global isWorkSpace
    files_list = scan_files(mainPath, postfix=".xcodeproj")
    temp = -1
    # locate and drop the main project from the dependency list
    for k in range(len(files_list)):
        if files_list[k] == mainPath + "/" + targetName + ".xcodeproj":
            temp = k
    if temp >= 0:
        files_list.pop(temp)
    # build each dependency project in its own directory
    for target in files_list:
        target = target.replace(".xcodeproj", "")
        tmpList = target.split('/')
        name = tmpList[len(tmpList) - 1]
        path = target.replace(name, "")
        path = path[0:len(path) - 1]
        os.system("cd %s;xcodebuild -target %s CODE_SIGN_IDENTITY='%s'" % (path, name, certificateName))
    if isWorkSpace:
        os.system(
            "cd %s;xcodebuild -workspace %s.xcworkspace -scheme %s CODE_SIGN_IDENTITY='%s' -derivedDataPath build/" % (
                mainPath, targetName, targetName, certificateName))
    else:
        os.system("cd %s;xcodebuild -target %s CODE_SIGN_IDENTITY='%s'" % (mainPath, targetName, certificateName))
    return
# 创建ipa
def cerateIPA():
    """Package the built .app into an .ipa (name keeps the original typo;
    callers depend on it).  Any previous .ipa is removed first."""
    os.system("cd %s;rm -r -f %s.ipa" % (mainPath, targetName))
    os.system(
        "cd %s;xcrun -sdk iphoneos PackageApplication -v %s/build/Build/Products/Debug-iphoneos/%s.app -o %s/%s.ipa CODE_SIGN_IDENTITY='%s'" % (
            mainPath, mainPath, targetName, mainPath, targetName, certificateName))
    return
# 发邮件给测试不带附件
def sendEmail(text):
    """Mail the download address *text* to the testers (plain text, no
    attachment).  Does nothing when no .ipa was produced.

    Fixes: rejoined the ``print`` statements whose argument sat on the next
    line (so nothing was printed) and switched to ``except ... as e``.
    """
    if not os.path.exists("%s/%s.ipa" % (mainPath, targetName)):
        print("没有找到ipa文件")
        return
    msg = MIMEText('地址:%s' % text, 'plain', 'utf-8')
    msg['to'] = emailToUser
    msg['from'] = emailFromUser
    msg['subject'] = '新的测试包已经上传'
    try:
        server = smtplib.SMTP()
        server.connect(emailHost)
        server.login(emailFromUser, emailPassword)
        server.sendmail(msg['from'], msg['to'], msg.as_string())
        server.quit()
        print('发送成功')
    except Exception as e:
        print(str(e))
    return
# 定时任务
def runTask(func, day=0, hour=0, min=0, second=0):
    """Run *func* periodically forever; the first run is one period from now.

    Fixes: rejoined the ``print`` statements whose argument sat on the next
    line (nothing was printed), and added a short sleep -- the original spun
    at 100% CPU.  Matching is still done on the formatted second, which a
    0.1 s poll cannot miss.
    """
    import time as _time  # module-level ``time`` is datetime.time, not the module
    now = datetime.now()
    strnow = now.strftime('%Y-%m-%d %H:%M:%S')
    print("now: " + strnow)
    # first scheduled run time
    period = timedelta(days=day, hours=hour, minutes=min, seconds=second)
    next_time = now + period
    strnext_time = next_time.strftime('%Y-%m-%d %H:%M:%S')
    print("next run: " + strnext_time)
    while True:
        iter_now = datetime.now()
        iter_now_time = iter_now.strftime('%Y-%m-%d %H:%M:%S')
        if str(iter_now_time) == str(strnext_time):
            print("start work: %s" % iter_now_time)
            func()
            print("task done.")
            # schedule the next iteration
            iter_time = iter_now + period
            strnext_time = iter_time.strftime('%Y-%m-%d %H:%M:%S')
            continue
        _time.sleep(0.1)
def setVersion():
    """Check out whatever tag was selected on the command line (global tag)."""
    global tag
    setGitVersion(tag)
# 判断是否是workspace
def checkWorkSpace():
    """Set the module flag isWorkSpace: True when an .xcworkspace bundle
    exists next to the project."""
    global isWorkSpace
    workspace_bundle = "%s/%s.xcworkspace" % (mainPath, targetName)
    isWorkSpace = os.path.exists(workspace_bundle)
    return
# 主函数
def main():
    """Entry point: load config, fetch sources, build, package and notify.

    Fix: rejoined the ``print`` statements whose argument sat on the next
    line (nothing was printed in Python 2).
    """
    # ensure the config folder/file exist
    createFinder()
    # parse command-line options (may exit for -c / -s)
    setOptparse()
    # load the saved configuration
    readJsonFile()
    # bail out when mandatory settings are missing
    if isNeedSetParameter():
        print("您需要设置参数,您的参数如下(使用 --config 或者 -c):")
        showParameter()
        sys.exit()
    # fetch the latest sources
    if not isFinderExists():
        createFileFinder()
        gitClone()
    else:
        gitPull()
    # workspace detection must precede the build commands
    checkWorkSpace()
    # switch to the requested tag/branch
    setVersion()
    # permissions and keychain so codesign can run unattended
    allowFinder()
    allowKeychain()
    # strip provisioning-profile settings, clean, build, package, notify
    clearPbxproj()
    cleanPro()
    buildApp()
    cerateIPA()
    sendEmail("Test address")
    return
main() | Lucas-Wong/ToolsProject | IOS/ipa.py | Python | gpl-3.0 | 13,459 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-08-26 08:49
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 1.11.13): add ``Profile.uploaded``, an indexed
    integer counter defaulting to 0, to the accounts app."""
    dependencies = [("accounts", "0001_squashed_0037_auto_20180416_1406")]
    operations = [
        migrations.AddField(
            model_name="profile",
            name="uploaded",
            field=models.IntegerField(db_index=True, default=0),
        )
    ]
| dontnod/weblate | weblate/accounts/migrations/0002_profile_uploaded.py | Python | gpl-3.0 | 466 |
#!/usr/bin/python
#
# original code from adafruit, see below copyrights
# hacked version for bioreactor one
#
#--------------------------------------------------------------------------------------------
# -- 031616 -- converting temp to fahrenhight, and logging
# -- 031816 -- adding in the led libraries to do a simple test, hopefully will add scrolling display
# -- 031816 -- well, attempt at creating a font library for the 8x8 screen,
# since they aint one.
# -- 031916 -- (b) reverse the current characters
# -- (g) open the log, and get the temp in f and humidity in h, with dec
# -- then build the logic to display the right character.
# -- (s) implement the rest of the characters
# -- 032116 -- just received the max7219, branching to max2719_scroller,
# -- attempting to intgrate the new display and add time
# -- (s) add feature for weather, stocks, and headlines.
# -- also removed the adafruit webIDE, checking it didn't catasrophically
# -- fuck up anything else with it.. also upgrading and updating... fingers crossed...
#---------------------------------------------------------------------------------------------------
#
# .=-.-. _,.---._ ,---.
# _..---. /==/_ /,-.' , - `. _.-. .--.' \ _..---.
# .' .'.-. \|==|, |/==/_, , - \ .-,.'| \==\-/\ \ .' .'.-. \
# /==/- '=' /|==| |==| .=. |==|, | /==/-|_\ | /==/- '=' /
# |==|-, ' |==|- |==|_ : ;=: - |==|- | \==\, - \ |==|-, '
# |==| .=. \|==| ,|==| , '=' |==|, | /==/ - ,| |==| .=. \
# /==/- '=' ,|==|- |\==\ - ,_ /|==|- `-._/==/- /\ - \ /==/- '=' ,|
# |==| - //==/. / '.='. - .' /==/ - , ,|==\ _.\=\.-'|==| - /
# `-._`.___,' `--`-` `--`--'' `--`-----' `--` `-._`.___,'
# _,.---._ .-._ ,----.
# ,-.' , - `. /==/ \ .-._ ,-.--` , \
# /==/_, , - \|==|, \/ /, /==|- _.-`
# |==| .=. |==|- \| ||==| `.-.
# |==|_ : ;=: - |==| , | -/==/_ , /
# |==| , '=' |==| - _ |==| .-'
# \==\ - ,_ /|==| /\ , |==|_ ,`-._
# '.='. - .' /==/, | |- /==/ , /
# `--`--'' `--`./ `--`--`-----``
#
# ,--.--------. ,--.--------. ,--.--------.
# /==/, - , -\ /==/, - , -\ /==/, - , -\
# \==\.-. - ,-./ \==\.-. - ,-./ \==\.-. - ,-./
# `--`--------` `--`--------` `--`--------`
#
# ,----. ,---.--. ,-.--, ,---.--.
# _.-. ,-.--` , \ _,..---._ / -_ \==\.--.-. /=/, .'/ -_ \==\
# .-,.'| |==|- _.-`/==/, - \ |` / \/==/\==\ -\/=/- / |` / \/==/
# |==|, | |==| `.-.|==| _ _\ \ \ /==/ \==\ `-' ,/ \ \ /==/
# |==|- | /==/_ , /|==| .=. | / \==/ |==|, - | / \==/
# |==|, | |==| .-' |==|,| | -| /. / \==\ /==/ , \ /. / \==\
# |==|- `-._|==|_ ,`-._|==| '=' / | _ \_/\==\/==/, .--, - \| _ \_/\==\
# /==/ - , ,/==/ , /|==|-, _`/ \ . - /==/\==\- \/=/ , /\ . - /==/
# `--`-----'`--`-----`` `-.`.____.' '----`--` `--`-' `--` '----`--`
#
#--------------------------------------------------------------------------------
# Copyleft (c) 2016 Alchemy Computing
# Copyright (c) 2014 Adafruit Industries
# Hacker: Justin Knox
# legal shit---------------------------------------------------------------------
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# end of legal shit---------------------------------------------------------------
#---------------------------------------------------
# .__ __
# |__| _____ ______ ____________/ |_ ______
# | |/ \\____ \ / _ \_ __ \ __\/ ___/
# | | Y Y \ |_> > <_> ) | \/| | \___ \
# |__|__|_| / __/ \____/|__| |__| /____ >
# \/|__| \/
#
#---------------------------------------------------
import time
import datetime
import max7219.led as led
import time
from max7219.font import proportional, SINCLAIR_FONT, TINY_FONT, CP437_FONT
from random import randrange
from collections import deque
#----------------------------------------------------
# .__
# _____ _____ |__| ____
# / \\__ \ | |/ \
# | Y Y \/ __ \| | | \
# |__|_| (____ /__|___| /
# \/ \/ \/
#----------------------------------------------------
# ===========================================================================
# 8x8 Pixel Example
#
# ===========================================================================
toggle = True      # kept from earlier experiments; not read below
sleepCount = 0     # passes since a reading last changed; throttles polling
print "-----=-=-=-------=- bioreactor-one - montior-one -=---------=-=-=--------"
print "      ....      testing .... pixels ... LEDS ....................        "
print "-------=---------=---------------------------------=-----------=----------"
print "Press CTRL+Z to exit"
print "--------------------------------------------------------------------------"
# single cascaded MAX7219 8x8 matrix, rotated to match the enclosure
device = led.matrix(cascaded=1)
device.orientation(180)
#device.show_message("-----=-=-=-------=- bioreactor-one - montior-one -=---------=-=-=--------\
#                                                                                              \
#                          ....      testing .... pixels ... LEDS ....................         \
#                     ", font=proportional(CP437_FONT))
#
# open the log for reading values
#----------------------------------------------------------------------------------
print "-----=-=-=-------=- bioreactor-one - montior-one -=---------=-=-=--------"
# curr_F.log / curr_H.log are written by the logger process (simple_logger.py)
tempValFlog = open("/home/pi/curr_F.log", "r");
print "Opening log... .", tempValFlog.name, " ...in..", tempValFlog.mode, "..access mode."
tempValHlog = open("/home/pi/curr_H.log", "r");
print "Opening log... .", tempValHlog.name, " ...in..", tempValHlog.mode, "..access mode."
print "-------=---------=---------------------------------=-----------=----------"
print "                        press CTL-Z to exit                               "
print "--------------------------------------------------------------------------"
# last displayed readings, used to detect rises/drops
prevFloatFval = 0
prevFloatHval = 0
scrollon = True
while(scrollon):
print "-----=-=-=-------=- bioreactor-one - montior-one -=---------=-=-=--------"
tempValFlog = open("/home/pi/curr_F.log", "r");
print "Opening log... .", tempValFlog.name, " ...in..", tempValFlog.mode, "..access mode."
tempValHlog = open("/home/pi/curr_H.log", "r");
print "Opening log... .", tempValHlog.name, " ...in..", tempValHlog.mode, "..access mode."
#----------------------------------------------------------------------------------
# 032116 -- new strategy is to open the log, check for a new value,
# -- if there's no new value, close the log, and wait half a second
# -- if there is a new value, display the new value, and close the log!
#----------------------------------------------------------------------------------
currentF = tempValFlog.read(5)
currentH = tempValHlog.read(5)
print "Got values..."
print "....current from log F: ", currentF
print "....current fom log H: ", currentH
print "--------------------"
#--------------------------------------------------------
# closing the log just in case simple_logger.py needs it
#--------------------------------------------------------
print "Closing log...", tempValFlog.name
print "Closing log...", tempValHlog.name
tempValFlog.close()
tempValHlog.close()
print " ", tempValFlog.closed
print " ", tempValHlog.closed
print "--------------------------=-=-=-=-=-=------------------------------------------"
# 032216 -- converting back in the old code that grabbed the decimal values
#first we have to isolate the 100's place, in this case 0
#if the 100's is 0, then we'll display a space
#then lets grab the 10's, 1's and the decimal portion.
#also, we gotta typecast the shit out of this cause of pythons implicit typing...
hundyPlaceFval = int(float(currentF) / 100)
tensPlaceFval = int(float(currentF) / 10)
onesPlaceFval = int( float(currentF) - (hundyPlaceFval*100 + tensPlaceFval*10) )
decimalPlaceFval = int((float(currentF) - ( hundyPlaceFval + tensPlaceFval + onesPlaceFval )) * 10)
decimalPlaceFval /= 100
#lets see what we got
print 'F hundy', int(hundyPlaceFval)
print 'F tens', int(tensPlaceFval)
print 'F ones', int(onesPlaceFval)
print 'F deci', int(decimalPlaceFval)
#now lets do the Humidity's
hundyPlaceHval = int(float(currentH) / 100)
tensPlaceHval = int(float(currentH) / 10)
onesPlaceHval = int( float(currentH) - (hundyPlaceHval*100 + tensPlaceHval*10) )
decimalPlaceHval = int((float(currentH) - ( hundyPlaceHval + tensPlaceHval + onesPlaceHval )) * 10)
decimalPlaceHval /= 100
#lets see what we got
print '\n'
print 'H hundy', int(hundyPlaceHval)
print 'H tens', int(tensPlaceHval)
print 'H ones', int(onesPlaceHval)
print 'H deci', int(decimalPlaceHval)
floatFval = float(hundyPlaceFval*100 + tensPlaceFval*10 + onesPlaceFval + decimalPlaceFval/10)
floatHval = float(hundyPlaceHval*100 + tensPlaceHval*10 + onesPlaceHval + decimalPlaceHval/10)
#-------------- always display the values --------------------------------------------
device.show_message( " F: " )
device.show_message( str( floatFval ) )
device.letter(0, 248)
device.show_message( " H: " )
device.show_message( str( floatHval ) )
device.show_message("%")
device.show_message( time.strftime("%c"))
#------------------------------------------code below only shows when temp changes
#-----------------------------------------------------------------------show fahrenheit
#device.show_message( "Fahrenheit = " )
if( floatFval > (0.1 + prevFloatFval ) ):
#device.letter(0, 176);time.sleep(1)
#device.letter(0, 177);time.sleep(1)
#device.letter(0, 219);time.sleep(1)
#device.letter(0, 219);time.sleep(1)
#device.letter(0, 177);time.sleep(1)
#device.letter(0, 176);time.sleep(1)
device.show_message( " +F: " )
device.show_message( str( floatFval ) )
device.letter(0, 248)
sleepCount = 0
if( floatFval < (0.1 - prevFloatFval) ):
#device.letter(0, 176);time.sleep(1)
#device.letter(0, 177);time.sleep(1)
#device.letter(0, 219);time.sleep(1)
#device.letter(0, 219);time.sleep(1)
#device.letter(0, 177);time.sleep(1)
#device.letter(0, 176);time.sleep(1)
device.show_message( " -F: " )
device.show_message( str( floatFval ) )
device.letter(0, 248)
sleepCount = 0
if( floatFval == ( prevFloatFval ) ):
if(sleepCount<6):
device.show_message( " - - - " )
sleepCount+=1
#-----------------------------------------------------------------------one by one display
#if(hundyPlaceFval!=0):
# device.show_message( str(hundyPlaceFval ) )
#device.show_message( str(tensPlaceFval ) )
#device.show_message( str(onesPlaceFval ) )
#device.show_message(".")
#device.show_message( str(decimalPlaceFval ) )
#------------------------------------------------------------------------show humidity
#device.show_message( "Humidity = " )
if( floatHval > (0.1 + prevFloatHval) ):
#device.letter(0, 176);time.sleep(1)
#device.letter(0, 177);time.sleep(1)
#device.letter(0, 219);time.sleep(1)
#device.letter(0, 219);time.sleep(1)
#device.letter(0, 177);time.sleep(1)
#device.letter(0, 176);time.sleep(1)
device.show_message( " +H: " )
device.show_message( str( floatHval ) )
device.show_message("%")
sleepCount = 0
if( floatHval < (0.1 - prevFloatHval) ):
#device.letter(0, 176);time.sleep(1)
#device.letter(0, 177);time.sleep(1)
#device.letter(0, 219);time.sleep(1)
#device.letter(0, 219);time.sleep(1)
#device.letter(0, 177);time.sleep(1)
#device.letter(0, 176);time.sleep(1)
device.show_message( " -H: " )
device.show_message( str( floatHval ) )
device.show_message("%")
sleepCount = 0
if( floatHval == ( prevFloatHval ) ):
if(sleepCount<4):
device.show_message( "- - - " )
sleepCount+=1
#------------------------------------------------------------------------go to sleep
if(sleepCount > 3):
sleepCount-=1
print "Sleeping ... ", sleepCount
time.sleep(3)
#------------------------------------------------------------------------single message method
#-----------------------------------------------------------------------one by one display
#if(hundyPlaceHval!=0):
# device.show_message( str(hundyPlaceHval ) )
#device.show_message( str(tensPlaceHval ) )
#device.show_message( str(onesPlaceHval ) )
#device.show_message(".")
#device.show_message( str(decimalPlaceHval ) )
prevFloatFval = floatFval
prevFloatHval = floatHval
#time.sleep(3)
#device.show_message( "Current Time: ")
#device.show_message( time.strftime("%c"))
#tempRSSfeed = open("/home/pi/feeds/feeds.db", "r")
#feedData = tempRSSfeed.read(end)
#device.show_message( feedData)
| alchemycomputing/raspberrypi-bioreactorproject | maxscroller.py | Python | gpl-3.0 | 13,796 |
########################################################################
# $HeadURL$
# File : InstallTools.py
# Author : Ricardo Graciani
########################################################################
"""
Collection of Tools for installation of DIRAC components:
MySQL, DB's, Services's, Agents
It only makes use of defaults in LocalInstallation Section in dirac.cfg
The Following Options are used::
/DIRAC/Setup: Setup to be used for any operation
/LocalInstallation/InstanceName: Name of the Instance for the current Setup (default /DIRAC/Setup)
/LocalInstallation/LogLevel: LogLevel set in "run" script for all components installed
/LocalInstallation/RootPath: Used instead of rootPath in "run" script if defined (if links are used to named versions)
/LocalInstallation/InstancePath: Location where runit and startup directories are created (default rootPath)
/LocalInstallation/UseVersionsDir: DIRAC is installed under versions/<Versioned Directory> with a link from pro
(This option overwrites RootPath and InstancePath)
/LocalInstallation/Host: Used when build the URL to be published for the installed service (default: socket.getfqdn())
/LocalInstallation/RunitDir: Location where runit directory is created (default InstancePath/runit)
/LocalInstallation/StartupDir: Location where startup directory is created (default InstancePath/startup)
/LocalInstallation/MySQLDir: Location where mysql databases are created (default InstancePath/mysql)
/LocalInstallation/Database/User: (default Dirac)
/LocalInstallation/Database/Password: (must be set for SystemAdministrator Service to work)
/LocalInstallation/Database/RootPwd: (must be set for SystemAdministrator Service to work)
/LocalInstallation/Database/Host: (must be set for SystemAdministrator Service to work)
/LocalInstallation/Database/MySQLSmallMem: Configure a MySQL with small memory requirements for testing purposes innodb_buffer_pool_size=200MB
/LocalInstallation/Database/MySQLLargeMem: Configure a MySQL with high memory requirements for production purposes innodb_buffer_pool_size=10000MB
The setupSite method (used by the dirac-setup-site command) will use the following info::
/LocalInstallation/Systems: List of Systems to be defined for this instance in the CS (default: Configuration, Framework)
/LocalInstallation/Databases: List of Databases to be installed and configured
/LocalInstallation/Services: List of System/ServiceName to be setup
/LocalInstallation/Agents: List of System/AgentName to be setup
/LocalInstallation/WebPortal: Boolean to setup the Web Portal (default no)
/LocalInstallation/ConfigurationMaster: Boolean, requires Configuration/Server to be given in the list of Services (default: no)
/LocalInstallation/PrivateConfiguration: Boolean, requires Configuration/Server to be given in the list of Services (default: no)
If a Master Configuration Server is being installed the following Options can be used::
/LocalInstallation/ConfigurationName: Name of the Configuration (default: Setup )
/LocalInstallation/AdminUserName: Name of the Admin user (default: None )
/LocalInstallation/AdminUserDN: DN of the Admin user certificate (default: None )
/LocalInstallation/AdminUserEmail: Email of the Admin user (default: None )
/LocalInstallation/AdminGroupName: Name of the Admin group (default: dirac_admin )
/LocalInstallation/HostDN: DN of the host certificate (default: None )
/LocalInstallation/VirtualOrganization: Name of the main Virtual Organization (default: None)
"""
__RCSID__ = "$Id$"
#
import os, re, glob, stat, time, shutil, socket
gDefaultPerms = stat.S_IWUSR | stat.S_IRUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH
import DIRAC
from DIRAC import rootPath
from DIRAC import gLogger
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Utilities.CFG import CFG
from DIRAC.Core.Utilities.Version import getVersion
from DIRAC.Core.Utilities.Subprocess import systemCall
from DIRAC.ConfigurationSystem.Client.CSAPI import CSAPI
from DIRAC.ConfigurationSystem.Client.Helpers import cfgPath, cfgPathToList, cfgInstallPath, \
cfgInstallSection, ResourcesDefaults, CSGlobals
from DIRAC.Core.Security.Properties import ALARMS_MANAGEMENT, SERVICE_ADMINISTRATOR, \
CS_ADMINISTRATOR, JOB_ADMINISTRATOR, \
FULL_DELEGATION, PROXY_MANAGEMENT, OPERATOR, \
NORMAL_USER, TRUSTED_HOST
from DIRAC.ConfigurationSystem.Client import PathFinder
from DIRAC.Core.Base.private.ModuleLoader import ModuleLoader
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.Core.Base.ExecutorModule import ExecutorModule
from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC.Core.Utilities.PrettyPrint import printTable
from DIRAC.Core.Utilities.Platform import getPlatformString
# On command line tools this can be set to True to abort after the first error.
exitOnError = False
# First some global defaults
gLogger.debug( 'DIRAC Root Path =', rootPath )
def loadDiracCfg( verbose = False ):
  """
  (Re)load the installation defaults from etc/dirac.cfg.

  All resulting values are published as module-level globals so the rest of
  this module (and legacy callers) can read them directly.

  :param verbose: if True, log the origin of each non-default value
  """
  global localCfg, cfgFile, setup, instance, logLevel, linkedRootPath, host
  global basePath, instancePath, runitDir, startDir
  global db, mysqlDir, mysqlDbDir, mysqlLogDir, mysqlMyOrg, mysqlMyCnf, mysqlStartupScript
  global mysqlRootPwd, mysqlUser, mysqlPassword, mysqlHost, mysqlMode
  global mysqlSmallMem, mysqlLargeMem, mysqlPort, mysqlRootUser

  from DIRAC.Core.Utilities.Network import getFQDN

  localCfg = CFG()
  cfgFile = os.path.join( rootPath, 'etc', 'dirac.cfg' )
  try:
    localCfg.loadFromFile( cfgFile )
  except Exception:
    # A missing dirac.cfg is acceptable during the initial site setup
    gLogger.always( "Can't load ", cfgFile )
    gLogger.always( "Might be OK if setting up the site" )

  setup = localCfg.getOption( cfgPath( 'DIRAC', 'Setup' ), '' )
  instance = localCfg.getOption( cfgInstallPath( 'InstanceName' ), setup )
  logLevel = localCfg.getOption( cfgInstallPath( 'LogLevel' ), 'INFO' )
  linkedRootPath = localCfg.getOption( cfgInstallPath( 'RootPath' ), rootPath )
  useVersionsDir = localCfg.getOption( cfgInstallPath( 'UseVersionsDir' ), False )
  host = localCfg.getOption( cfgInstallPath( 'Host' ), getFQDN() )

  basePath = os.path.dirname( rootPath )
  instancePath = localCfg.getOption( cfgInstallPath( 'InstancePath' ), rootPath )
  if useVersionsDir:
    # This option takes precedence
    instancePath = os.path.dirname( os.path.dirname( rootPath ) )
    linkedRootPath = os.path.join( instancePath, 'pro' )
  if verbose:
    gLogger.notice( 'Using Instance Base Dir at', instancePath )

  runitDir = os.path.join( instancePath, 'runit' )
  runitDir = localCfg.getOption( cfgInstallPath( 'RunitDir' ), runitDir )
  if verbose:
    gLogger.notice( 'Using Runit Dir at', runitDir )

  startDir = os.path.join( instancePath, 'startup' )
  startDir = localCfg.getOption( cfgInstallPath( 'StartupDir' ), startDir )
  if verbose:
    gLogger.notice( 'Using Startup Dir at', startDir )

  # Now some MySQL default values
  db = {}

  mysqlDir = os.path.join( instancePath, 'mysql' )
  mysqlDir = localCfg.getOption( cfgInstallPath( 'MySQLDir' ), mysqlDir )
  if verbose:
    gLogger.notice( 'Using MySQL Dir at', mysqlDir )

  mysqlDbDir = os.path.join( mysqlDir, 'db' )
  mysqlLogDir = os.path.join( mysqlDir, 'log' )
  mysqlMyOrg = os.path.join( rootPath, 'mysql', 'etc', 'my.cnf' )
  mysqlMyCnf = os.path.join( mysqlDir, '.my.cnf' )

  mysqlStartupScript = os.path.join( rootPath, 'mysql', 'share', 'mysql', 'mysql.server' )

  # NOTE: the current global value is used as the default here, so these
  # survive a reload when not present in dirac.cfg
  mysqlRootPwd = localCfg.getOption( cfgInstallPath( 'Database', 'RootPwd' ), mysqlRootPwd )
  if verbose and mysqlRootPwd:
    gLogger.notice( 'Reading Root MySQL Password from local configuration' )

  mysqlUser = localCfg.getOption( cfgInstallPath( 'Database', 'User' ), '' )
  if mysqlUser:
    if verbose:
      gLogger.notice( 'Reading MySQL User from local configuration' )
  else:
    mysqlUser = 'Dirac'

  mysqlPassword = localCfg.getOption( cfgInstallPath( 'Database', 'Password' ), mysqlPassword )
  if verbose and mysqlPassword:
    gLogger.notice( 'Reading %s MySQL Password from local configuration ' % mysqlUser )

  mysqlHost = localCfg.getOption( cfgInstallPath( 'Database', 'Host' ), '' )
  if mysqlHost:
    if verbose:
      gLogger.notice( 'Using MySQL Host from local configuration', mysqlHost )
  else:
    # if it is not defined use the same as for dirac services
    mysqlHost = host

  mysqlPort = localCfg.getOption( cfgInstallPath( 'Database', 'Port' ), 0 )
  if mysqlPort:
    if verbose:
      gLogger.notice( 'Using MySQL Port from local configuration ', mysqlPort )
  else:
    # if it is not defined use the MySQL default port
    mysqlPort = 3306

  mysqlRootUser = localCfg.getOption( cfgInstallPath( 'Database', 'RootUser' ), '' )
  if mysqlRootUser:
    if verbose:
      gLogger.notice( 'Using MySQL root user from local configuration ', mysqlRootUser )
  else:
    # if it is not defined use root
    mysqlRootUser = 'root'

  mysqlMode = localCfg.getOption( cfgInstallPath( 'Database', 'MySQLMode' ), '' )
  if verbose and mysqlMode:
    gLogger.notice( 'Configuring MySQL server as %s' % mysqlMode )

  # BUGFIX: log messages previously read "uasge"
  mysqlSmallMem = localCfg.getOption( cfgInstallPath( 'Database', 'MySQLSmallMem' ), False )
  if verbose and mysqlSmallMem:
    gLogger.notice( 'Configuring MySQL server for Low Memory usage' )

  mysqlLargeMem = localCfg.getOption( cfgInstallPath( 'Database', 'MySQLLargeMem' ), False )
  if verbose and mysqlLargeMem:
    gLogger.notice( 'Configuring MySQL server for Large Memory usage' )
# FIXME: we probably need a better way to do this
# Module-level defaults for the globals populated by loadDiracCfg().
# They must exist before the first loadDiracCfg() call below, because some
# of them (mysqlRootPwd, mysqlPassword) are read there as fall-back values.
mysqlRootPwd = ''
mysqlPassword = ''
mysqlMode = ''
localCfg = None
cfgFile = ''
setup = ''
instance = ''
logLevel = ''
linkedRootPath = ''
host = ''
basePath = ''
instancePath = ''
runitDir = ''
startDir = ''
db = {}
mysqlDir = ''
mysqlDbDir = ''
mysqlLogDir = ''
mysqlMyOrg = ''
mysqlMyCnf = ''
mysqlStartupScript = ''
mysqlUser = ''
mysqlHost = ''
mysqlPort = ''
mysqlRootUser = ''
mysqlSmallMem = ''
mysqlLargeMem = ''
# Populate all the globals above from etc/dirac.cfg at import time
loadDiracCfg()
def getInfo( extensions ):
  """
  Return the DIRAC version information dictionary with the current Setup
  name added ('Unknown' when no setup is configured).

  NOTE(review): the *extensions* argument is currently unused; it is kept
  for interface compatibility with callers.
  """
  versionResult = getVersion()
  if not versionResult['OK']:
    return versionResult
  infoDict = versionResult['Value']
  infoDict['Setup'] = setup if setup else 'Unknown'
  return S_OK( infoDict )
def getExtensions():
  """
  Get the list of installed extensions.

  Looks for '*DIRAC' packages next to the base DIRAC package; the base
  package itself is removed from the returned list.

  :return: S_OK( list of extension names ) or S_ERROR when the base DIRAC
           package is missing (broken installation)
  """
  initList = glob.glob( os.path.join( rootPath, '*DIRAC', '__init__.py' ) )
  extensions = [ os.path.basename( os.path.dirname( k ) ) for k in initList ]
  try:
    extensions.remove( 'DIRAC' )
  except ValueError:
    # list.remove only raises ValueError: base DIRAC package not found
    error = 'DIRAC is not properly installed'
    gLogger.exception( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  return S_OK( extensions )
def _addCfgToDiracCfg( cfg, verbose = False ):
  """
  Merge *cfg* into the existing dirac.cfg file on disk and reload the
  module-level defaults from the merged result.

  :return: result of writing the merged configuration to disk
  """
  global localCfg
  mergedCfg = localCfg.mergeWith( cfg ) if str( localCfg ) else cfg
  writeResult = mergedCfg.writeToFile( cfgFile )
  if not writeResult:
    return writeResult
  loadDiracCfg( verbose )
  return writeResult
def _addCfgToCS( cfg ):
  """
  Merge *cfg* into the central Configuration Service and commit it.
  Stops at the first failing step and returns its error.
  """
  csClient = CSAPI()
  result = csClient.downloadCSData()
  if result['OK']:
    result = csClient.mergeFromCFG( cfg )
  if result['OK']:
    result = csClient.commit()
  return result
def _addCfgToLocalCS( cfg ):
  """
  Merge *cfg* into the local Configuration Service file
  ( etc/<ConfigurationName>.cfg ).
  """
  csName = localCfg.getOption( cfgPath( 'DIRAC', 'Configuration', 'Name' ) , '' )
  if not csName:
    error = 'Missing %s' % cfgPath( 'DIRAC', 'Configuration', 'Name' )
    if exitOnError:
      gLogger.error( error )
      DIRAC.exit( -1 )
    return S_ERROR( error )

  csFile = os.path.join( rootPath, 'etc', '%s.cfg' % csName )
  csCfg = CFG()
  if os.path.exists( csFile ):
    csCfg.loadFromFile( csFile )
  mergedCfg = csCfg.mergeWith( cfg ) if str( csCfg ) else cfg
  return mergedCfg.writeToFile( csFile )
def _getCentralCfg( installCfg ):
  """
  Create the skeleton of central Cfg for an initial Master CS.

  Builds a CFG combining sections copied from *installCfg* with the
  Registry (admin user/group/host), Operations and Website entries derived
  from the /LocalInstallation options of the local dirac.cfg.

  :param installCfg: CFG with the installation configuration
  :return: the assembled central CFG object
  """
  # First copy over from installation cfg
  centralCfg = CFG()
  # DIRAC/Extensions
  extensions = localCfg.getOption( cfgInstallPath( 'Extensions' ), [] )
  # 'Web' is handled separately and must not appear in DIRAC/Extensions
  while 'Web' in list( extensions ):
    extensions.remove( 'Web' )
  centralCfg.createNewSection( 'DIRAC', '' )
  if extensions:
    centralCfg['DIRAC'].addKey( 'Extensions', ','.join( extensions ), '' )
  vo = localCfg.getOption( cfgInstallPath( 'VirtualOrganization' ), '' )
  if vo:
    centralCfg['DIRAC'].addKey( 'VirtualOrganization', vo, '' )
  # Copy over whole sections that are present in the installation cfg
  for section in [ 'Systems', 'Resources',
                   'Resources/Sites', 'Resources/Domains',
                   'Operations', 'Website', 'Registry' ]:
    if installCfg.isSection( section ):
      centralCfg.createNewSection( section, contents = installCfg[section] )
  # Now try to add things from the Installation section
  # Registry
  adminUserName = localCfg.getOption( cfgInstallPath( 'AdminUserName' ), '' )
  adminUserDN = localCfg.getOption( cfgInstallPath( 'AdminUserDN' ), '' )
  adminUserEmail = localCfg.getOption( cfgInstallPath( 'AdminUserEmail' ), '' )
  adminGroupName = localCfg.getOption( cfgInstallPath( 'AdminGroupName' ), 'dirac_admin' )
  hostDN = localCfg.getOption( cfgInstallPath( 'HostDN' ), '' )
  defaultGroupName = 'user'
  adminGroupProperties = [ ALARMS_MANAGEMENT, SERVICE_ADMINISTRATOR,
                           CS_ADMINISTRATOR, JOB_ADMINISTRATOR,
                           FULL_DELEGATION, PROXY_MANAGEMENT, OPERATOR ]
  defaultGroupProperties = [ NORMAL_USER ]
  defaultHostProperties = [ TRUSTED_HOST, CS_ADMINISTRATOR,
                            JOB_ADMINISTRATOR, FULL_DELEGATION,
                            PROXY_MANAGEMENT, OPERATOR ]
  # Make sure the basic Registry sections exist
  for section in ( cfgPath( 'Registry' ),
                   cfgPath( 'Registry', 'Users' ),
                   cfgPath( 'Registry', 'Groups' ),
                   cfgPath( 'Registry', 'Hosts' ) ):
    if not centralCfg.isSection( section ):
      centralCfg.createNewSection( section )
  if adminUserName:
    if not ( adminUserDN and adminUserEmail ):
      gLogger.error( 'AdminUserName is given but DN or Mail is missing it will not be configured' )
    else:
      for section in [ cfgPath( 'Registry', 'Users', adminUserName ),
                       cfgPath( 'Registry', 'Groups', defaultGroupName ),
                       cfgPath( 'Registry', 'Groups', adminGroupName ) ]:
        if not centralCfg.isSection( section ):
          centralCfg.createNewSection( section )
      # Replace any pre-existing values with the installation ones
      if centralCfg['Registry'].existsKey( 'DefaultGroup' ):
        centralCfg['Registry'].deleteKey( 'DefaultGroup' )
      centralCfg['Registry'].addKey( 'DefaultGroup', defaultGroupName, '' )
      if centralCfg['Registry']['Users'][adminUserName].existsKey( 'DN' ):
        centralCfg['Registry']['Users'][adminUserName].deleteKey( 'DN' )
      centralCfg['Registry']['Users'][adminUserName].addKey( 'DN', adminUserDN, '' )
      if centralCfg['Registry']['Users'][adminUserName].existsKey( 'Email' ):
        centralCfg['Registry']['Users'][adminUserName].deleteKey( 'Email' )
      centralCfg['Registry']['Users'][adminUserName].addKey( 'Email' , adminUserEmail, '' )
      # Add Admin User to Admin Group and default group
      for group in [adminGroupName, defaultGroupName]:
        if not centralCfg['Registry']['Groups'][group].isOption( 'Users' ):
          centralCfg['Registry']['Groups'][group].addKey( 'Users', '', '' )
        users = centralCfg['Registry']['Groups'][group].getOption( 'Users', [] )
        if adminUserName not in users:
          centralCfg['Registry']['Groups'][group].appendToOption( 'Users', ', %s' % adminUserName )
        if not centralCfg['Registry']['Groups'][group].isOption( 'Properties' ):
          centralCfg['Registry']['Groups'][group].addKey( 'Properties', '', '' )
      # Append only the properties that are not already present
      properties = centralCfg['Registry']['Groups'][adminGroupName].getOption( 'Properties', [] )
      for prop in adminGroupProperties:
        if prop not in properties:
          properties.append( prop )
          centralCfg['Registry']['Groups'][adminGroupName].appendToOption( 'Properties', ', %s' % prop )
      properties = centralCfg['Registry']['Groups'][defaultGroupName].getOption( 'Properties', [] )
      for prop in defaultGroupProperties:
        if prop not in properties:
          properties.append( prop )
          centralCfg['Registry']['Groups'][defaultGroupName].appendToOption( 'Properties', ', %s' % prop )
  # Add the master Host description
  if hostDN:
    hostSection = cfgPath( 'Registry', 'Hosts', host )
    if not centralCfg.isSection( hostSection ):
      centralCfg.createNewSection( hostSection )
    if centralCfg['Registry']['Hosts'][host].existsKey( 'DN' ):
      centralCfg['Registry']['Hosts'][host].deleteKey( 'DN' )
    centralCfg['Registry']['Hosts'][host].addKey( 'DN', hostDN, '' )
    if not centralCfg['Registry']['Hosts'][host].isOption( 'Properties' ):
      centralCfg['Registry']['Hosts'][host].addKey( 'Properties', '', '' )
    properties = centralCfg['Registry']['Hosts'][host].getOption( 'Properties', [] )
    for prop in defaultHostProperties:
      if prop not in properties:
        properties.append( prop )
        centralCfg['Registry']['Hosts'][host].appendToOption( 'Properties', ', %s' % prop )
  # Operations: notification e-mails default to the admin address
  if adminUserEmail:
    operationsCfg = __getCfg( cfgPath( 'Operations', 'Defaults', 'EMail' ), 'Production', adminUserEmail )
    centralCfg = centralCfg.mergeWith( operationsCfg )
    operationsCfg = __getCfg( cfgPath( 'Operations', 'Defaults', 'EMail' ), 'Logging', adminUserEmail )
    centralCfg = centralCfg.mergeWith( operationsCfg )
  # Website: default groups and authorization rules for the CS pages
  websiteCfg = __getCfg( cfgPath( 'Website', 'Authorization',
                                  'systems', 'configuration' ), 'Default', 'all' )
  websiteCfg['Website'].addKey( 'DefaultGroups',
                                ', '.join( ['visitor', defaultGroupName, adminGroupName] ), '' )
  websiteCfg['Website'].addKey( 'DefaultSetup', setup, '' )
  websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'showHistory' ,
                                                                             'CSAdministrator' , '' )
  websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'commitConfiguration' ,
                                                                             'CSAdministrator' , '' )
  websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'showCurrentDiff' ,
                                                                             'CSAdministrator' , '' )
  websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'showDiff' ,
                                                                             'CSAdministrator' , '' )
  websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'rollbackToVersion' ,
                                                                             'CSAdministrator' , '' )
  websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'manageRemoteConfig' ,
                                                                             'CSAdministrator' , '' )
  websiteCfg['Website']['Authorization']['systems']['configuration'].appendToOption( 'manageRemoteConfig' ,
                                                                                     ', ServiceAdministrator' )
  centralCfg = centralCfg.mergeWith( websiteCfg )
  return centralCfg
def __getCfg( section, option = '', value = '' ):
  """
  Build a CFG object containing the given section path (creating every
  intermediate section) and, optionally, one option set to *value*.

  :return: the new CFG, or None when no valid section can be derived
  """
  if not section:
    return None
  cfg = CFG()
  pathParts = [ part for part in cfgPathToList( section ) if part ]
  if not pathParts:
    return None
  # Create each section of the path, outermost first
  for depth in range( 1, len( pathParts ) + 1 ):
    cfg.createNewSection( cfgPath( *pathParts[:depth] ) )
  if option and value:
    cfg.setOption( cfgPath( *( pathParts + [ option ] ) ), value )
  return cfg
def addOptionToDiracCfg( option, value ):
  """
  Add a single option (given as a cfg path) with *value* to dirac.cfg.
  """
  pathElements = cfgPathToList( option )
  cfg = __getCfg( cfgPath( *pathElements[:-1] ), pathElements[-1], value )
  if not cfg:
    return S_ERROR( 'Wrong option: %s = %s' % ( option, value ) )
  if _addCfgToDiracCfg( cfg ):
    return S_OK()
  return S_ERROR( 'Could not merge %s=%s with local configuration' % ( option, value ) )
def addDefaultOptionsToCS( gConfig, componentType, systemName,
                           component, extensions, mySetup = setup,
                           specialOptions = {}, overwrite = False,
                           addDefaultOptions = True ):
  """
  Add the section with the component options to the CS.

  :param gConfig: configuration client; if None the local dirac.cfg is used
  :param componentType: 'service', 'agent' or 'executor'
  :param systemName: system name, with or without the 'System' suffix
  :param component: component name
  :param extensions: list of installed extensions
  :param mySetup: setup to register the options for
                  (NOTE: the default is bound at import time)
  :param specialOptions: extra options set on the component section
  :param overwrite: if True, write options even when already present
  :param addDefaultOptions: if True, start from the ConfigTemplate defaults
  """
  system = systemName.replace( 'System', '' )
  instanceOption = cfgPath( 'DIRAC', 'Setups', mySetup, system )
  if gConfig:
    compInstance = gConfig.getValue( instanceOption, '' )
  else:
    compInstance = localCfg.getOption( instanceOption, '' )
  if not compInstance:
    return S_ERROR( '%s not defined in %s' % ( instanceOption, cfgFile ) )

  sectionName = "Agents"
  if componentType == 'service':
    sectionName = "Services"
  elif componentType == 'executor':
    sectionName = "Executors"

  # Check if the component CS options exist
  addOptions = True
  componentSection = cfgPath( 'Systems', system, compInstance, sectionName, component )
  if not overwrite:
    if gConfig:
      result = gConfig.getOptions( componentSection )
      if result['OK']:
        addOptions = False
  if not addOptions:
    return S_OK( 'Component options already exist' )

  # Add the component options now
  result = getComponentCfg( componentType, system, component, compInstance, extensions, specialOptions, addDefaultOptions )
  if not result['OK']:
    return result
  compCfg = result['Value']

  gLogger.notice( 'Adding to CS', '%s %s/%s' % ( componentType, system, component ) )
  resultAddToCFG = _addCfgToCS( compCfg )
  if componentType == 'executor':
    # Is it a container ?
    execList = compCfg.getOption( '%s/Load' % componentSection, [] )
    for element in execList:
      # BUGFIX: propagate the requested setup ( mySetup ) to the recursive
      # call; the original passed the module-level default 'setup'.
      result = addDefaultOptionsToCS( gConfig, componentType, systemName, element, extensions, mySetup,
                                      {}, overwrite )
      resultAddToCFG.setdefault( 'Modules', {} )
      resultAddToCFG['Modules'][element] = result['OK']
  return resultAddToCFG
def addDefaultOptionsToComponentCfg( componentType, systemName, component, extensions ):
  """
  Write the default options of a component into its local cfg file
  ( etc/<System>_<Component>.cfg ).
  """
  system = systemName.replace( 'System', '' )
  instanceOption = cfgPath( 'DIRAC', 'Setups', setup, system )
  compInstance = localCfg.getOption( instanceOption, '' )
  if not compInstance:
    return S_ERROR( '%s not defined in %s' % ( instanceOption, cfgFile ) )

  # Build the default component configuration and dump it to disk
  result = getComponentCfg( componentType, system, component, compInstance, extensions )
  if not result['OK']:
    return result
  targetFile = os.path.join( rootPath, 'etc', '%s_%s.cfg' % ( system, component ) )
  return result['Value'].writeToFile( targetFile )
def addCfgToComponentCfg( componentType, systemName, component, cfg ):
  """
  Add some extra configuration *cfg* to the local component cfg file.

  NOTE(review): the existing file content is loaded but never merged into
  the newly written configuration — the file is overwritten; confirm this
  is intended.
  """
  if not cfg:
    return S_OK()
  sectionName = 'Agents' if componentType == 'agent' else 'Services'
  system = systemName.replace( 'System', '' )
  instanceOption = cfgPath( 'DIRAC', 'Setups', setup, system )
  compInstance = localCfg.getOption( instanceOption, '' )
  if not compInstance:
    return S_ERROR( '%s not defined in %s' % ( instanceOption, cfgFile ) )

  compCfgFile = os.path.join( rootPath, 'etc', '%s_%s.cfg' % ( system, component ) )
  compCfg = CFG()
  if os.path.exists( compCfgFile ):
    compCfg.loadFromFile( compCfgFile )
  sectionPath = cfgPath( 'Systems', system, compInstance, sectionName )
  newCfg = __getCfg( sectionPath )
  newCfg.createNewSection( cfgPath( sectionPath, component ), 'Added by InstallTools', cfg )
  if newCfg.writeToFile( compCfgFile ):
    return S_OK( compCfgFile )
  error = 'Can not write %s' % compCfgFile
  gLogger.error( error )
  return S_ERROR( error )
def getComponentCfg( componentType, system, component, compInstance, extensions,
                     specialOptions = {}, addDefaultOptions = True ):
  """
  Get the CFG object of the component configuration.

  When *addDefaultOptions* is True, the defaults are collected by merging
  the ConfigTemplate.cfg of every installed extension on top of the base
  DIRAC one, then the component's own section is extracted from the merge.

  :param componentType: 'service', 'agent' or 'executor'
  :param specialOptions: extra options to set on the component section;
                         a 'Module' key overrides the template lookup name
  :return: S_OK( CFG ) with the Systems/<system>/<instance> section filled,
           or S_ERROR when the component is not found in any template
  """
  sectionName = 'Services'
  if componentType == 'agent':
    sectionName = 'Agents'
  if componentType == 'executor':
    sectionName = 'Executors'
  componentModule = component
  if "Module" in specialOptions:
    componentModule = specialOptions['Module']
  compCfg = CFG()
  if addDefaultOptions:
    extensionsDIRAC = [ x + 'DIRAC' for x in extensions ] + extensions
    for ext in extensionsDIRAC + ['DIRAC']:
      cfgTemplatePath = os.path.join( rootPath, ext, '%sSystem' % system, 'ConfigTemplate.cfg' )
      if os.path.exists( cfgTemplatePath ):
        gLogger.notice( 'Loading configuration template', cfgTemplatePath )
        # Look up the component in this template
        loadCfg = CFG()
        loadCfg.loadFromFile( cfgTemplatePath )
        # Earlier (extension) templates take precedence over later ones
        compCfg = loadCfg.mergeWith( compCfg )
    compPath = cfgPath( sectionName, componentModule )
    if not compCfg.isSection( compPath ):
      error = 'Can not find %s in template' % compPath
      gLogger.error( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )
    compCfg = compCfg[sectionName][componentModule]
    # Delete Dependencies section if any
    compCfg.deleteKey( 'Dependencies' )
  sectionPath = cfgPath( 'Systems', system, compInstance, sectionName )
  cfg = __getCfg( sectionPath )
  cfg.createNewSection( cfgPath( sectionPath, component ), '', compCfg )
  # specialOptions override whatever came from the templates
  for option, value in specialOptions.items():
    cfg.setOption( cfgPath( sectionPath, component, option ), value )
  # Add the service URL (only services listen on a port)
  if componentType == "service":
    port = compCfg.getOption( 'Port' , 0 )
    if port and host:
      urlsPath = cfgPath( 'Systems', system, compInstance, 'URLs' )
      cfg.createNewSection( urlsPath )
      cfg.setOption( cfgPath( urlsPath, component ),
                     'dips://%s:%d/%s/%s' % ( host, port, system, component ) )
  return S_OK( cfg )
def addDatabaseOptionsToCS( gConfig, systemName, dbName, mySetup = setup, overwrite = False ):
  """
  Add the section with the database options to the CS.

  :param gConfig: configuration client; if None the local dirac.cfg is used
                  to resolve the instance and existing options are not checked
  :param systemName: system name, with or without the 'System' suffix
  :param dbName: database name
  :param mySetup: setup to register the options for
                  (NOTE: the default is bound at import time)
  :param overwrite: if True, write options even when already present
  """
  system = systemName.replace( 'System', '' )
  instanceOption = cfgPath( 'DIRAC', 'Setups', mySetup, system )
  if gConfig:
    compInstance = gConfig.getValue( instanceOption, '' )
  else:
    compInstance = localCfg.getOption( instanceOption, '' )
  if not compInstance:
    return S_ERROR( '%s not defined in %s' % ( instanceOption, cfgFile ) )

  # Check if the database CS options exist
  addOptions = True
  if not overwrite:
    databasePath = cfgPath( 'Systems', system, compInstance, 'Databases', dbName )
    # BUGFIX: guard against gConfig being None, as addDefaultOptionsToCS does;
    # the original dereferenced gConfig unconditionally here
    if gConfig:
      result = gConfig.getOptions( databasePath )
      if result['OK']:
        addOptions = False
  if not addOptions:
    return S_OK( 'Database options already exist' )

  # Add the component options now
  result = getDatabaseCfg( system, dbName, compInstance )
  if not result['OK']:
    return result
  databaseCfg = result['Value']
  gLogger.notice( 'Adding to CS', '%s/%s' % ( system, dbName ) )
  return _addCfgToCS( databaseCfg )
def getDatabaseCfg( system, dbName, compInstance ):
  """
  Build the CFG object describing a database: DBName, Host and Port
  (the latter two taken from the module-level MySQL defaults).
  """
  databasePath = cfgPath( 'Systems', system, compInstance, 'Databases', dbName )
  dbCfg = __getCfg( databasePath, 'DBName', dbName )
  for optionName, optionValue in ( ( 'Host', mysqlHost ), ( 'Port', mysqlPort ) ):
    dbCfg.setOption( cfgPath( databasePath, optionName ), optionValue )
  return S_OK( dbCfg )
def addSystemInstance( systemName, compInstance, mySetup = setup, myCfg = False ):
  """
  Register a new system instance for the given setup in the CS and,
  when *myCfg* is True, also in the local dirac.cfg.
  """
  system = systemName.replace( 'System', '' )
  gLogger.notice( 'Adding %s system as %s instance for %s setup to dirac.cfg and CS' % ( system, compInstance, mySetup ) )

  instanceCfg = __getCfg( cfgPath( 'DIRAC', 'Setups', mySetup ), system, compInstance )
  if myCfg and not _addCfgToDiracCfg( instanceCfg ):
    return S_ERROR( 'Failed to add system instance to dirac.cfg' )
  return _addCfgToCS( instanceCfg )
def printStartupStatus( rDict ):
"""
Print in nice format the return dictionary from getStartupComponentStatus
(also returned by runsvctrlComponent)
"""
fields = ['Name','Runit','Uptime','PID']
records = []
try:
for comp in rDict:
records.append( [comp,
rDict[comp]['RunitStatus'],
rDict[comp]['Timeup'],
str( rDict[comp]['PID'] ) ] )
printTable( fields, records )
except Exception, x:
print "Exception while gathering data for printing: %s" % str( x )
return S_OK()
def printOverallStatus( rDict ):
  """
  Print in nice format the return dictionary from getOverallStatus.

  Expects rDict[compType][system][component] entries carrying the keys
  'Setup', 'Installed', 'RunitStatus', 'Timeup' and 'PID'.
  """
  fields = ['System','Name','Type','Setup','Installed','Runit','Uptime','PID']
  records = []
  try:
    for compType in rDict:
      for system in rDict[compType]:
        for component in rDict[compType][system]:
          # compType is plural ('Services'); strip the trailing 's'
          record = [ system, component, compType.lower()[:-1] ]
          if rDict[compType][system][component]['Setup']:
            record.append( 'SetUp' )
          else:
            record.append( 'NotSetUp' )
          if rDict[compType][system][component]['Installed']:
            record.append( 'Installed' )
          else:
            record.append( 'NotInstalled' )
          record.append( str( rDict[compType][system][component]['RunitStatus'] ) )
          record.append( str( rDict[compType][system][component]['Timeup'] ) )
          record.append( str( rDict[compType][system][component]['PID'] ) )
          records.append( record )
    printTable( fields, records )
  except Exception, x:
    print "Exception while gathering data for printing: %s" % str( x )
  return S_OK()
def getAvailableSystems( extensions ):
  """
  Get the list of all systems (in all given extensions) locally available,
  preserving discovery order and without duplicates.
  """
  systems = []
  for extension in extensions:
    for systemDir in glob.glob( os.path.join( DIRAC.rootPath, extension, '*System' ) ):
      systemName = os.path.basename( systemDir ).split( 'System' )[0]
      if systemName not in systems:
        systems.append( systemName )
  return systems
def getSoftwareComponents( extensions ):
  """
  Get the list of all the components ( services, agents and executors ) for
  which the software is installed on the system.

  Each component type is detected by scanning the Agent/Service/Executor
  directories of every installed *System package and grepping the source
  for the corresponding base-class name.

  :return: S_OK( { 'Services': {...}, 'Agents': {...}, 'Executors': {...} } )
           mapping system name -> list of component names
  """
  # The Gateway does not need a handler
  services = { 'Framework' : ['Gateway'] }
  agents = {}
  executors = {}
  for extension in ['DIRAC'] + [ x + 'DIRAC' for x in extensions]:
    if not os.path.exists( os.path.join( rootPath, extension ) ):
      # Not all the extensions are necessarily installed in this instance
      continue
    systemList = os.listdir( os.path.join( rootPath, extension ) )
    for sys in systemList:
      system = sys.replace( 'System', '' )
      # Agents: any .py file whose source mentions AgentModule/OptimizerModule
      try:
        agentDir = os.path.join( rootPath, extension, sys, 'Agent' )
        agentList = os.listdir( agentDir )
        for agent in agentList:
          if agent[-3:] == ".py":
            agentFile = os.path.join( agentDir, agent )
            afile = open( agentFile, 'r' )
            body = afile.read()
            afile.close()
            if body.find( 'AgentModule' ) != -1 or body.find( 'OptimizerModule' ) != -1:
              if not agents.has_key( system ):
                agents[system] = []
              agents[system].append( agent.replace( '.py', '' ) )
      except OSError:
        # No Agent directory for this system
        pass
      # Services: any *Handler.py file
      try:
        serviceDir = os.path.join( rootPath, extension, sys, 'Service' )
        serviceList = os.listdir( serviceDir )
        for service in serviceList:
          if service.find( 'Handler' ) != -1 and service[-3:] == '.py':
            if not services.has_key( system ):
              services[system] = []
            # The Configuration service is published as 'Server'
            if system == 'Configuration' and service == 'ConfigurationHandler.py':
              service = 'ServerHandler.py'
            services[system].append( service.replace( '.py', '' ).replace( 'Handler', '' ) )
      except OSError:
        # No Service directory for this system
        pass
      # Executors: any .py file whose source mentions OptimizerExecutor
      try:
        executorDir = os.path.join( rootPath, extension, sys, 'Executor' )
        executorList = os.listdir( executorDir )
        for executor in executorList:
          if executor[-3:] == ".py":
            executorFile = os.path.join( executorDir, executor )
            afile = open( executorFile, 'r' )
            body = afile.read()
            afile.close()
            if body.find( 'OptimizerExecutor' ) != -1:
              if not executors.has_key( system ):
                executors[system] = []
              executors[system].append( executor.replace( '.py', '' ) )
      except OSError:
        # No Executor directory for this system
        pass
  resultDict = {}
  resultDict['Services'] = services
  resultDict['Agents'] = agents
  resultDict['Executors'] = executors
  return S_OK( resultDict )
def getInstalledComponents():
  """
  Get the list of all the components ( services, agents and executors )
  installed on the system in the runit directory.

  A component is classified by the dirac-service / dirac-agent /
  dirac-executor command found in its runit 'run' script.
  """
  services = {}
  agents = {}
  executors = {}
  for system in os.listdir( runitDir ):
    systemDir = os.path.join( runitDir, system )
    for component in os.listdir( systemDir ):
      try:
        runFd = open( os.path.join( systemDir, component, 'run' ), 'r' )
        body = runFd.read()
        runFd.close()
      except IOError:
        # No readable 'run' script: not a runit component
        continue
      if 'dirac-service' in body:
        services.setdefault( system, [] ).append( component )
      elif 'dirac-agent' in body:
        agents.setdefault( system, [] ).append( component )
      elif 'dirac-executor' in body:
        executors.setdefault( system, [] ).append( component )
  return S_OK( { 'Services' : services, 'Agents' : agents, 'Executors' : executors } )
def getSetupComponents():
  """
  Get the list of all the components ( services, agents and executors )
  set up for running with runsvdir in the startup directory.

  Startup entries are named <System>_<Component>; the component type is
  detected from the dirac-service / dirac-agent / dirac-executor command in
  the 'run' script.
  """
  services = {}
  agents = {}
  executors = {}
  if not os.path.isdir( startDir ):
    # BUGFIX: error message previously read "does not exit"
    return S_ERROR( 'Startup Directory does not exist: %s' % startDir )
  componentList = os.listdir( startDir )
  for component in componentList:
    try:
      runFile = os.path.join( startDir, component, 'run' )
      rfile = open( runFile, 'r' )
      body = rfile.read()
      rfile.close()
      if body.find( 'dirac-service' ) != -1:
        system, service = component.split( '_' )[0:2]
        if not services.has_key( system ):
          services[system] = []
        services[system].append( service )
      elif body.find( 'dirac-agent' ) != -1:
        system, agent = component.split( '_' )[0:2]
        if not agents.has_key( system ):
          agents[system] = []
        agents[system].append( agent )
      elif body.find( 'dirac-executor' ) != -1:
        system, executor = component.split( '_' )[0:2]
        if not executors.has_key( system ):
          executors[system] = []
        executors[system].append( executor )
    except IOError:
      # No readable 'run' script: skip this entry
      pass
  resultDict = {}
  resultDict['Services'] = services
  resultDict['Agents'] = agents
  resultDict['Executors'] = executors
  return S_OK( resultDict )
def getStartupComponentStatus( componentTupleList ):
  """
  Get the runtime status of the components set up for running with runsvdir
  in the startup directory, by parsing the output of 'runsvstat'.

  :param componentTupleList: list of (system, component) tuples to restrict
                             the query; an empty list means all components
  :return: S_OK( { '<system>_<component>' : { 'RunitStatus', 'Timeup',
                                              'PID' } } )
  """
  try:
    if componentTupleList:
      cList = []
      for componentTuple in componentTupleList:
        cList.extend( glob.glob( os.path.join( startDir, '_'.join( componentTuple ) ) ) )
    else:
      cList = glob.glob( os.path.join( startDir, '*' ) )
  except Exception:
    error = 'Failed to parse List of Components'
    gLogger.exception( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )

  result = execCommand( 0, ['runsvstat'] + cList )
  if not result['OK']:
    return result
  output = result['Value'][1].strip().split( '\n' )

  componentDict = {}
  for line in output:
    if not line:
      continue
    # runsvstat lines look like "<path>: run (pid NNN) MMM seconds"
    cname, routput = line.split( ':' )
    cname = cname.replace( '%s/' % startDir, '' )
    run = False
    reResult = re.search( '^ run', routput )
    if reResult:
      run = True
    down = False
    reResult = re.search( '^ down', routput )
    if reResult:
      down = True
    reResult = re.search( '([0-9]+) seconds', routput )
    timeup = 0
    if reResult:
      timeup = reResult.group( 1 )
    reResult = re.search( 'pid ([0-9]+)', routput )
    pid = 0
    if reResult:
      pid = reResult.group( 1 )
    # runsv itself is considered alive when it reports run or down
    runsv = "Not running"
    if run or down:
      runsv = "Running"
    reResult = re.search( 'runsv not running', routput )
    if reResult:
      runsv = "Not running"

    runDict = {}
    runDict['Timeup'] = timeup
    runDict['PID'] = pid
    runDict['RunitStatus'] = "Unknown"
    if run:
      runDict['RunitStatus'] = "Run"
    if down:
      runDict['RunitStatus'] = "Down"
    if runsv == "Not running":
      runDict['RunitStatus'] = "NoRunitControl"
    componentDict[cname] = runDict

  return S_OK( componentDict )
def getComponentModule( gConfig, system, component, compType ):
  """
  Resolve the software module implementing a component: the 'Module' CS
  option when set, otherwise the component name itself.
  """
  mySetup = CSGlobals.getSetup()
  instanceName = gConfig.getValue( cfgPath( 'DIRAC', 'Setups', mySetup, system ), '' )
  if not instanceName:
    return S_OK( component )
  modulePath = cfgPath( 'Systems', system, instanceName, compType, component, 'Module' )
  return S_OK( gConfig.getValue( modulePath, '' ) or component )
def getOverallStatus( extensions ):
  """
  Get the overall status of all components: software availability, runit
  setup, installation and runtime status, merged into one nested dict
  { 'Services'|'Agents'|'Executors' : { system : { component : info } } }
  where info carries 'Setup', 'Installed', 'RunitStatus', 'Timeup', 'PID'.
  """
  result = getSoftwareComponents( extensions )
  if not result['OK']:
    return result
  softDict = result['Value']

  result = getSetupComponents()
  if not result['OK']:
    return result
  setupDict = result['Value']

  result = getInstalledComponents()
  if not result['OK']:
    return result
  installedDict = result['Value']

  result = getStartupComponentStatus( [] )
  if not result['OK']:
    return result
  runitDict = result['Value']

  # Collect the info now
  resultDict = { 'Services' : {}, 'Agents' : {}, 'Executors' : {} }
  for compType in [ 'Services', 'Agents', 'Executors' ]:
    # BUGFIX: the original tested the literal key 'Services' here, which was
    # harmless only because getSoftwareComponents always returns all three
    # keys; test the current compType instead.
    if compType in softDict:
      for system in softDict[compType]:
        resultDict[compType][system] = {}
        for component in softDict[compType][system]:
          if system == 'Configuration' and component == 'Configuration':
            # Fix to avoid missing CS due to different between Service name and Handler name
            component = 'Server'
          resultDict[compType][system][component] = {}
          resultDict[compType][system][component]['Setup'] = False
          resultDict[compType][system][component]['Installed'] = False
          resultDict[compType][system][component]['RunitStatus'] = 'Unknown'
          resultDict[compType][system][component]['Timeup'] = 0
          resultDict[compType][system][component]['PID'] = 0
          # setupDict may lack this compType/system key
          try:
            if component in setupDict[compType][system]:
              resultDict[compType][system][component]['Setup'] = True
          except KeyError:
            pass
          try:
            if component in installedDict[compType][system]:
              resultDict[compType][system][component]['Installed'] = True
          except KeyError:
            pass
          compDir = system + '_' + component
          if compDir in runitDict:
            resultDict[compType][system][component]['RunitStatus'] = runitDict[compDir]['RunitStatus']
            resultDict[compType][system][component]['Timeup'] = runitDict[compDir]['Timeup']
            resultDict[compType][system][component]['PID'] = runitDict[compDir]['PID']
    # Installed components can be not the same as in the software list
    # BUGFIX: same literal-'Services' test as above, fixed to compType
    if compType in installedDict:
      for system in installedDict[compType]:
        for component in installedDict[compType][system]:
          if compType in resultDict:
            if system in resultDict[compType]:
              if component in resultDict[compType][system]:
                continue
          # BUGFIX: make sure the system section exists before assigning,
          # otherwise a component of a system absent from the software list
          # raised KeyError
          resultDict[compType].setdefault( system, {} )
          resultDict[compType][system][component] = {}
          resultDict[compType][system][component]['Setup'] = False
          resultDict[compType][system][component]['Installed'] = True
          resultDict[compType][system][component]['RunitStatus'] = 'Unknown'
          resultDict[compType][system][component]['Timeup'] = 0
          resultDict[compType][system][component]['PID'] = 0
          try:
            if component in setupDict[compType][system]:
              resultDict[compType][system][component]['Setup'] = True
          except KeyError:
            pass
          compDir = system + '_' + component
          if compDir in runitDict:
            resultDict[compType][system][component]['RunitStatus'] = runitDict[compDir]['RunitStatus']
            resultDict[compType][system][component]['Timeup'] = runitDict[compDir]['Timeup']
            resultDict[compType][system][component]['PID'] = runitDict[compDir]['PID']

  return S_OK( resultDict )
def checkComponentModule( componentType, system, module ):
  """
  Check existence of the given module
  and if it inherits from the proper class

  :param componentType: 'agent', 'service' or 'executor'
  :param system: DIRAC system name
  :param module: module name to load
  :return: result of ModuleLoader.loadModule, or S_ERROR for an unknown type
  """
  # One loader factory per known component type
  loaderFactories = {
    'agent': lambda: ModuleLoader( "Agent", PathFinder.getAgentSection, AgentModule ),
    'service': lambda: ModuleLoader( "Service", PathFinder.getServiceSection,
                                     RequestHandler, moduleSuffix = "Handler" ),
    'executor': lambda: ModuleLoader( "Executor", PathFinder.getExecutorSection, ExecutorModule ),
  }
  factory = loaderFactories.get( componentType )
  if factory is None:
    return S_ERROR( 'Unknown component type %s' % componentType )
  return factory().loadModule( "%s/%s" % ( system, module ) )
def checkComponentSoftware( componentType, system, component, extensions ):
  """
  Check that the software for the given component is available.

  :param componentType: 'service', 'agent' or 'executor'
  :param system: DIRAC system name
  :param component: component name
  :param extensions: list of DIRAC extensions to look the software up in
  :return: S_OK() if found, S_ERROR otherwise
  """
  result = getSoftwareComponents( extensions )
  if not result['OK']:
    return result
  # Map the component type to its section of the software dictionary.
  # 'executor' is now supported as well, for consistency with
  # checkComponentModule(); services and agents behave exactly as before.
  typeToSection = { 'service': 'Services', 'agent': 'Agents', 'executor': 'Executors' }
  if componentType not in typeToSection:
    return S_ERROR( 'Unknown component type %s' % componentType )
  softDict = result['Value'][typeToSection[componentType]]
  if system in softDict and component in softDict[system]:
    return S_OK()
  return S_ERROR( 'Unknown Component %s/%s' % ( system, component ) )
def runsvctrlComponent( system, component, mode ):
  """
  Execute runsvctrl and check status of the specified component.

  :param system: DIRAC system name ( '*' acts on all systems )
  :param component: component name ( '*' acts on all components )
  :param mode: single-letter runsvctrl command ( 'u' up, 'd' down, 't' term, ... )
  :return: result of getStartupComponentStatus for the affected component( s )
  """
  # Only the single-letter commands understood by runsvctrl are accepted
  if not mode in ['u', 'd', 'o', 'p', 'c', 'h', 'a', 'i', 'q', '1', '2', 't', 'k', 'x', 'e']:
    return S_ERROR( 'Unknown runsvctrl mode "%s"' % mode )

  startCompDirs = glob.glob( os.path.join( startDir, '%s_%s' % ( system, component ) ) )
  # Make sure that the Configuration server restarts first and the SystemAdmin restarts last
  tmpList = list( startCompDirs )
  for comp in tmpList:
    if "Framework_SystemAdministrator" in comp:
      # move to the end of the list
      startCompDirs.append( startCompDirs.pop( startCompDirs.index( comp ) ) )
    if "Configuration_Server" in comp:
      # move to the front of the list
      startCompDirs.insert( 0, startCompDirs.pop( startCompDirs.index( comp ) ) )
  startCompList = [ [k] for k in startCompDirs]
  for startComp in startCompList:
    # act on one component directory at a time, pausing between them
    result = execCommand( 0, ['runsvctrl', mode] + startComp )
    if not result['OK']:
      return result
    time.sleep( 1 )

  # Check the runsv status
  if system == '*' or component == '*':
    # give runsv extra time when acting on many components at once
    time.sleep( 5 )

  # Final check
  result = getStartupComponentStatus( [( system, component )] )
  if not result['OK']:
    return S_ERROR( 'Failed to start the component' )

  return result
def getLogTail( system, component, length = 100 ):
  """
  Get the tail of the component log file.

  :param system: DIRAC system name ( globs allowed )
  :param component: component name ( globs allowed )
  :param length: maximum number of log lines returned per component
  :return: S_OK( dict ) mapping component directory name to its log tail text
  """
  tailDict = {}
  pattern = os.path.join( startDir, '%s_%s' % ( system, component ) )
  for compDir in glob.glob( pattern ):
    compName = os.path.basename( compDir )
    currentLog = os.path.join( compDir, 'log', 'current' )
    if not os.path.exists( currentLog ):
      tailDict[compName] = 'No log file found'
      continue
    logFile = open( currentLog, 'r' )
    try:
      logLines = [ logLine.strip() for logLine in logFile.readlines() ]
    finally:
      logFile.close()
    # a negative-bound slice returns the whole list when it is shorter
    tailDict[compName] = '\n'.join( logLines[-length:] )
  return S_OK( tailDict )
def setupSite( scriptCfg, cfg = None ):
  """
  Setup a new site using the options defined.

  Orchestrates the full installation: merges an optional cfg file into
  dirac.cfg, prepares directories and runsvdir, optionally installs the
  Master Configuration Server, MySQL and databases, then sets up all the
  requested services, agents, executors and the Web portal.

  :param scriptCfg: script configuration object ( provides enableCS() )
  :param cfg: optional path to a cfg file to merge into dirac.cfg
  :return: S_OK() on success, S_ERROR on failure ( may call DIRAC.exit
           first when exitOnError is set )
  """
  # First we need to find out what needs to be installed
  # by default use dirac.cfg, but if a cfg is given use it and
  # merge it into the dirac.cfg
  diracCfg = CFG()
  installCfg = None
  if cfg:
    try:
      installCfg = CFG()
      installCfg.loadFromFile( cfg )
      for section in ['DIRAC', 'LocalSite', cfgInstallSection]:
        if installCfg.isSection( section ):
          diracCfg.createNewSection( section, contents = installCfg[section] )
      if instancePath != basePath:
        if not diracCfg.isSection( 'LocalSite' ):
          diracCfg.createNewSection( 'LocalSite' )
        diracCfg.setOption( cfgPath( 'LocalSite', 'InstancePath' ), instancePath )
      _addCfgToDiracCfg( diracCfg, verbose = True )
    except Exception:
      error = 'Failed to load %s' % cfg
      gLogger.exception( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )

  # Now get the necessary info from localCfg
  setupSystems = localCfg.getOption( cfgInstallPath( 'Systems' ), ['Configuration', 'Framework'] )
  installMySQLFlag = localCfg.getOption( cfgInstallPath( 'InstallMySQL' ), False )
  setupDatabases = localCfg.getOption( cfgInstallPath( 'Databases' ), [] )
  setupServices = [ k.split( '/' ) for k in localCfg.getOption( cfgInstallPath( 'Services' ), [] ) ]
  setupAgents = [ k.split( '/' ) for k in localCfg.getOption( cfgInstallPath( 'Agents' ), [] ) ]
  setupExecutors = [ k.split( '/' ) for k in localCfg.getOption( cfgInstallPath( 'Executors' ), [] ) ]
  setupWeb = localCfg.getOption( cfgInstallPath( 'WebPortal' ), False )
  setupWebApp = localCfg.getOption( cfgInstallPath( 'WebApp' ), False )
  setupConfigurationMaster = localCfg.getOption( cfgInstallPath( 'ConfigurationMaster' ), False )
  setupPrivateConfiguration = localCfg.getOption( cfgInstallPath( 'PrivateConfiguration' ), False )
  setupConfigurationName = localCfg.getOption( cfgInstallPath( 'ConfigurationName' ), setup )
  setupAddConfiguration = localCfg.getOption( cfgInstallPath( 'AddConfiguration' ), True )

  # Validate the "system/component" specifications and collect the systems
  # they imply, so every referenced system gets an instance set up
  for serviceTuple in setupServices:
    error = ''
    if len( serviceTuple ) != 2:
      error = 'Wrong service specification: system/service'
    # elif serviceTuple[0] not in setupSystems:
    #   error = 'System %s not available' % serviceTuple[0]
    if error:
      if exitOnError:
        gLogger.error( error )
        DIRAC.exit( -1 )
      return S_ERROR( error )
    serviceSysInstance = serviceTuple[0]
    if not serviceSysInstance in setupSystems:
      setupSystems.append( serviceSysInstance )

  for agentTuple in setupAgents:
    error = ''
    if len( agentTuple ) != 2:
      error = 'Wrong agent specification: system/agent'
    # elif agentTuple[0] not in setupSystems:
    #   error = 'System %s not available' % agentTuple[0]
    if error:
      if exitOnError:
        gLogger.error( error )
        DIRAC.exit( -1 )
      return S_ERROR( error )
    agentSysInstance = agentTuple[0]
    if not agentSysInstance in setupSystems:
      setupSystems.append( agentSysInstance )

  for executorTuple in setupExecutors:
    error = ''
    if len( executorTuple ) != 2:
      error = 'Wrong executor specification: system/executor'
    if error:
      if exitOnError:
        gLogger.error( error )
        DIRAC.exit( -1 )
      return S_ERROR( error )
    executorSysInstance = executorTuple[0]
    if not executorSysInstance in setupSystems:
      setupSystems.append( executorSysInstance )

  # And to find out the available extensions
  result = getExtensions()
  if not result['OK']:
    return result
  extensions = [ k.replace( 'DIRAC', '' ) for k in result['Value']]

  # Make sure the necessary directories are there
  if basePath != instancePath:
    if not os.path.exists( instancePath ):
      try:
        os.makedirs( instancePath )
      except Exception:
        error = 'Can not create directory for instance %s' % instancePath
        if exitOnError:
          gLogger.exception( error )
          DIRAC.exit( -1 )
        return S_ERROR( error )
    if not os.path.isdir( instancePath ):
      error = 'Instance directory %s is not valid' % instancePath
      if exitOnError:
        gLogger.error( error )
        DIRAC.exit( -1 )
      return S_ERROR( error )

    # the instance 'etc' must be a symlink to the DIRAC 'etc'
    instanceEtcDir = os.path.join( instancePath, 'etc' )
    etcDir = os.path.dirname( cfgFile )
    if not os.path.exists( instanceEtcDir ):
      try:
        os.symlink( etcDir, instanceEtcDir )
      except Exception:
        error = 'Can not create link to configuration %s' % instanceEtcDir
        if exitOnError:
          gLogger.exception( error )
          DIRAC.exit( -1 )
        return S_ERROR( error )

    if os.path.realpath( instanceEtcDir ) != os.path.realpath( etcDir ):
      error = 'Instance etc (%s) is not the same as DIRAC etc (%s)' % ( instanceEtcDir, etcDir )
      if exitOnError:
        gLogger.error( error )
        DIRAC.exit( -1 )
      return S_ERROR( error )

  # if any server or agent needs to be install we need the startup directory and runsvdir running
  if setupServices or setupAgents or setupExecutors or setupWeb:
    if not os.path.exists( startDir ):
      try:
        os.makedirs( startDir )
      except Exception:
        error = 'Can not create %s' % startDir
        if exitOnError:
          gLogger.exception( error )
          DIRAC.exit( -1 )
        return S_ERROR( error )
    # And need to make sure runsvdir is running
    result = execCommand( 0, ['ps', '-ef'] )
    if not result['OK']:
      if exitOnError:
        gLogger.error( 'Failed to verify runsvdir running', result['Message'] )
        DIRAC.exit( -1 )
      return S_ERROR( result['Message'] )
    processList = result['Value'][1].split( '\n' )
    cmd = 'runsvdir %s' % startDir
    cmdFound = False
    for process in processList:
      if process.find( cmd ) != -1:
        cmdFound = True
    if not cmdFound:
      gLogger.notice( 'Starting runsvdir ...' )
      os.system( "runsvdir %s 'log:  DIRAC runsv' &" % startDir )

  if ['Configuration', 'Server'] in setupServices and setupConfigurationMaster:
    # This server hosts the Master of the CS
    from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData
    gLogger.notice( 'Installing Master Configuration Server' )
    cfg = __getCfg( cfgPath( 'DIRAC', 'Setups', setup ), 'Configuration', instance )
    _addCfgToDiracCfg( cfg )
    cfg = __getCfg( cfgPath( 'DIRAC', 'Configuration' ), 'Master' , 'yes' )
    cfg.setOption( cfgPath( 'DIRAC', 'Configuration', 'Name' ) , setupConfigurationName )

    serversCfgPath = cfgPath( 'DIRAC', 'Configuration', 'Servers' )
    if not localCfg.getOption( serversCfgPath , [] ):
      serverUrl = 'dips://%s:9135/Configuration/Server' % host
      cfg.setOption( serversCfgPath, serverUrl )
      gConfigurationData.setOptionInCFG( serversCfgPath, serverUrl )
    instanceOptionPath = cfgPath( 'DIRAC', 'Setups', setup )
    instanceCfg = __getCfg( instanceOptionPath, 'Configuration', instance )
    cfg = cfg.mergeWith( instanceCfg )
    _addCfgToDiracCfg( cfg )

    result = getComponentCfg( 'service', 'Configuration', 'Server', instance, extensions, addDefaultOptions = True )
    if not result['OK']:
      if exitOnError:
        DIRAC.exit( -1 )
      else:
        return result
    compCfg = result['Value']
    cfg = cfg.mergeWith( compCfg )
    gConfigurationData.mergeWithLocal( cfg )

    addDefaultOptionsToComponentCfg( 'service', 'Configuration', 'Server', [] )
    if installCfg:
      centralCfg = _getCentralCfg( installCfg )
    else:
      centralCfg = _getCentralCfg( localCfg )
    _addCfgToLocalCS( centralCfg )
    setupComponent( 'service', 'Configuration', 'Server', [], checkModule = False )
    runsvctrlComponent( 'Configuration', 'Server', 't' )

    # remove the CS from the list of services still to be set up
    while ['Configuration', 'Server'] in setupServices:
      setupServices.remove( ['Configuration', 'Server'] )

  time.sleep( 5 )

  # Now need to check if there is valid CS to register the info
  result = scriptCfg.enableCS()
  if not result['OK']:
    if exitOnError:
      DIRAC.exit( -1 )
    return result

  cfgClient = CSAPI()
  if not cfgClient.initialize():
    error = 'Configuration Server not defined'
    if exitOnError:
      gLogger.error( error )
      DIRAC.exit( -1 )
    return S_ERROR( error )

  # We need to make sure components are connecting to the Master CS, that is the only one being update
  from DIRAC import gConfig
  localServers = localCfg.getOption( cfgPath( 'DIRAC', 'Configuration', 'Servers' ) )
  masterServer = gConfig.getValue( cfgPath( 'DIRAC', 'Configuration', 'MasterServer' ), '' )
  initialCfg = __getCfg( cfgPath( 'DIRAC', 'Configuration' ), 'Servers' , localServers )
  masterCfg = __getCfg( cfgPath( 'DIRAC', 'Configuration' ), 'Servers' , masterServer )
  _addCfgToDiracCfg( masterCfg )

  # 1.- Setup the instances in the CS
  # If the Configuration Server used is not the Master, it can take some time for this
  # info to be propagated, this may cause the later setup to fail
  if setupAddConfiguration:
    gLogger.notice( 'Registering System instances' )
    for system in setupSystems:
      addSystemInstance( system, instance, setup, True )
    for system, service in setupServices:
      if not addDefaultOptionsToCS( None, 'service', system, service, extensions, overwrite = True )['OK']:
        # If we are not allowed to write to the central CS, add the configuration to the local file
        addDefaultOptionsToComponentCfg( 'service', system, service, extensions )
    for system, agent in setupAgents:
      if not addDefaultOptionsToCS( None, 'agent', system, agent, extensions, overwrite = True )['OK']:
        # If we are not allowed to write to the central CS, add the configuration to the local file
        addDefaultOptionsToComponentCfg( 'agent', system, agent, extensions )
    for system, executor in setupExecutors:
      if not addDefaultOptionsToCS( None, 'executor', system, executor, extensions, overwrite = True )['OK']:
        # If we are not allowed to write to the central CS, add the configuration to the local file
        addDefaultOptionsToComponentCfg( 'executor', system, executor, extensions )
  else:
    gLogger.warn( 'Configuration parameters definition is not requested' )

  if ['Configuration', 'Server'] in setupServices and setupPrivateConfiguration:
    cfg = __getCfg( cfgPath( 'DIRAC', 'Configuration' ), 'AutoPublish' , 'no' )
    _addCfgToDiracCfg( cfg )

  # 2.- Check if MySQL is to be installed
  if installMySQLFlag:
    gLogger.notice( 'Installing MySQL' )
    getMySQLPasswords()
    installMySQL()

  # 3.- Install requested Databases
  # if MySQL is not installed locally, we assume a host is given
  if setupDatabases:
    result = getDatabases()
    if not result['OK']:
      if exitOnError:
        gLogger.error( 'Failed to get databases', result['Message'] )
        DIRAC.exit( -1 )
      return result
    installedDatabases = result['Value']
    for dbName in setupDatabases:
      if dbName not in installedDatabases:
        extension, system = installDatabase( dbName )['Value']
        gLogger.notice( 'Database %s from %s/%s installed' % ( dbName, extension, system ) )
        result = addDatabaseOptionsToCS( None, system, dbName, overwrite = True )
        if not result['OK']:
          gLogger.error( 'Database %s CS registration failed: %s' % ( dbName, result['Message'] ) )
      else:
        gLogger.notice( 'Database %s already installed' % dbName )

  if mysqlPassword:
    if not _addMySQLToDiracCfg():
      error = 'Failed to add MySQL user password to local configuration'
      if exitOnError:
        gLogger.error( error )
        DIRAC.exit( -1 )
      return S_ERROR( error )

  # 4.- Then installed requested services
  for system, service in setupServices:
    setupComponent( 'service', system, service, extensions )

  # 5.- Now the agents
  for system, agent in setupAgents:
    setupComponent( 'agent', system, agent, extensions )

  # 6.- Now the executors
  for system, executor in setupExecutors:
    setupComponent( 'executor', system, executor, extensions )

  # 7.- And finally the Portal
  if setupWeb:
    if setupWebApp:
      setupNewPortal()
    else:
      setupPortal()

  # restore the original list of CS servers and restart everything so the
  # components pick up the final configuration
  if localServers != masterServer:
    _addCfgToDiracCfg( initialCfg )
    for system, service in setupServices:
      runsvctrlComponent( system, service, 't' )
    for system, agent in setupAgents:
      runsvctrlComponent( system, agent, 't' )
    for system, executor in setupExecutors:
      runsvctrlComponent( system, executor, 't' )

  return S_OK()
def _createRunitLog( runitCompDir ):
  """
  Create the log directory and the svlogd configuration and run script
  for a runit-controlled component.

  :param runitCompDir: runit directory of the component ( created as needed )
  """
  logDir = os.path.join( runitCompDir, 'log' )
  os.makedirs( logDir )

  # svlogd config: rotate at 10 MB ( s10000000 ), keep up to 20 files ( n20 )
  logConfigFile = os.path.join( logDir, 'config' )
  fd = open( logConfigFile, 'w' )
  fd.write(
"""s10000000
n20
""" )
  fd.close()

  logRunFile = os.path.join( logDir, 'run' )
  fd = open( logRunFile, 'w' )
  fd.write(
"""#!/bin/bash
#
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec svlogd .
""" % { 'bashrc' : os.path.join( instancePath, 'bashrc' ) } )
  fd.close()

  os.chmod( logRunFile, gDefaultPerms )
def installComponent( componentType, system, component, extensions, componentModule = '', checkModule = True ):
  """
  Install runit directory for the specified component.

  :param componentType: 'service', 'agent' or 'executor'
  :param system: DIRAC system name
  :param component: component name
  :param extensions: list of DIRAC extensions to look the software up in
  :param componentModule: module to load when different from the component name
  :param checkModule: if True, verify the module can actually be loaded
  :return: S_OK( runit directory of the component )
  """
  # Check if the component is already installed
  runitCompDir = os.path.join( runitDir, system, component )
  if os.path.exists( runitCompDir ):
    msg = "%s %s_%s already installed" % ( componentType, system, component )
    gLogger.notice( msg )
    return S_OK( runitCompDir )

  # Check that the software for the component is installed
  # Any "Load" or "Module" option in the configuration defining what modules the given "component"
  # needs to load will be taken care of by checkComponentModule.
  if checkModule:
    cModule = componentModule
    if not cModule:
      cModule = component
    result = checkComponentModule( componentType, system, cModule )
    if not result['OK']:
      # executors are excused: their modules may live outside the software tree
      if not checkComponentSoftware( componentType, system, cModule, extensions )['OK'] and componentType != 'executor':
        error = 'Software for %s %s/%s is not installed' % ( componentType, system, component )
        if exitOnError:
          gLogger.error( error )
          DIRAC.exit( -1 )
        return S_ERROR( error )

  gLogger.notice( 'Installing %s %s/%s' % ( componentType, system, component ) )

  # Now do the actual installation
  try:
    componentCfg = os.path.join( linkedRootPath, 'etc', '%s_%s.cfg' % ( system, component ) )
    if not os.path.exists( componentCfg ):
      # touch an empty per-component configuration file
      fd = open( componentCfg, 'w' )
      fd.close()

    # creates runitCompDir/log as a side effect ( makedirs with parents )
    _createRunitLog( runitCompDir )

    runFile = os.path.join( runitCompDir, 'run' )
    fd = open( runFile, 'w' )
    fd.write(
"""#!/bin/bash
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec 2>&1
#
[ "%(componentType)s" = "agent" ] && renice 20 -p $$
#
exec python $DIRAC/DIRAC/Core/scripts/dirac-%(componentType)s.py %(system)s/%(component)s %(componentCfg)s < /dev/null
""" % {'bashrc': os.path.join( instancePath, 'bashrc' ),
       'componentType': componentType,
       'system' : system,
       'component': component,
       'componentCfg': componentCfg } )
    fd.close()

    os.chmod( runFile, gDefaultPerms )

  except Exception:
    error = 'Failed to prepare setup for %s %s/%s' % ( componentType, system, component )
    gLogger.exception( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )

  # give the run script a quick 5-second trial execution
  result = execCommand( 5, [runFile] )

  gLogger.notice( result['Value'][1] )

  return S_OK( runitCompDir )
def setupComponent( componentType, system, component, extensions, componentModule = '', checkModule = True ):
  """
  Install and create link in startup.

  :param componentType: 'service', 'agent' or 'executor'
  :param system: DIRAC system name
  :param component: component name
  :param extensions: list of DIRAC extensions to look the software up in
  :param componentModule: module to load when different from the component name
  :param checkModule: if True, verify the module can actually be loaded
  :return: S_OK( { 'ComponentType', 'RunitStatus' } )
  """
  result = installComponent( componentType, system, component, extensions, componentModule, checkModule )
  if not result['OK']:
    return result

  # Create the startup entry now
  runitCompDir = result['Value']
  startCompDir = os.path.join( startDir, '%s_%s' % ( system, component ) )
  if not os.path.exists( startDir ):
    os.makedirs( startDir )
  if not os.path.lexists( startCompDir ):
    gLogger.notice( 'Creating startup link at', startCompDir )
    os.symlink( runitCompDir, startCompDir )
    # give runsvdir time to notice the new link and spawn runsv
    time.sleep( 10 )

  # Check the runsv status: poll for up to ~20 seconds until it reports "Run"
  start = time.time()
  while ( time.time() - 20 ) < start:
    result = getStartupComponentStatus( [ ( system, component )] )
    if not result['OK']:
      continue
    if result['Value'] and result['Value']['%s_%s' % ( system, component )]['RunitStatus'] == "Run":
      break
    time.sleep( 1 )

  # Final check
  result = getStartupComponentStatus( [( system, component )] )
  if not result['OK']:
    return S_ERROR( 'Failed to start the component %s_%s' % ( system, component ) )

  resDict = {}
  resDict['ComponentType'] = componentType
  resDict['RunitStatus'] = result['Value']['%s_%s' % ( system, component )]['RunitStatus']

  return S_OK( resDict )
def unsetupComponent( system, component ):
  """
  Remove link from startup.

  :param system: DIRAC system name ( globs allowed )
  :param component: component name ( globs allowed )
  :return: S_OK() always ( failures are only logged )
  """
  linkPattern = os.path.join( startDir, '%s_%s' % ( system, component ) )
  for startupLink in glob.glob( linkPattern ):
    try:
      os.unlink( startupLink )
    except Exception:
      # best effort: log and carry on with the remaining links
      gLogger.exception()
  return S_OK()
def uninstallComponent( system, component ):
  """
  Remove startup and runit directories.

  :param system: DIRAC system name ( globs allowed )
  :param component: component name ( globs allowed )
  :return: S_OK() always ( failures are only logged )
  """
  result = runsvctrlComponent( system, component, 'd' )
  if not result['OK']:
    # best effort: the component may already be stopped or never started
    pass
  result = unsetupComponent( system, component )

  for runitCompDir in glob.glob( os.path.join( runitDir, system, component ) ):
    try:
      shutil.rmtree( runitCompDir )
    except Exception:
      gLogger.exception()

  return S_OK()
def installPortal():
  """
  Install runit directories for the Web Portal ( old lighttpd + paster flavour ).

  :return: S_OK( [ httpd runit dir, paster runit dir ] )
  """
  # Check that the software for the Web Portal is installed
  error = ''
  webDir = os.path.join( linkedRootPath, 'Web' )
  if not os.path.exists( webDir ):
    error = 'Web extension not installed at %s' % webDir
    if exitOnError:
      gLogger.error( error )
      DIRAC.exit( -1 )
    return S_ERROR( error )

  # First the lighthttpd server

  # Check if the component is already installed
  runitHttpdDir = os.path.join( runitDir, 'Web', 'httpd' )
  runitPasterDir = os.path.join( runitDir, 'Web', 'paster' )

  if os.path.exists( runitHttpdDir ):
    msg = "lighthttpd already installed"
    gLogger.notice( msg )
  else:
    gLogger.notice( 'Installing Lighttpd' )
    # Now do the actual installation
    try:
      _createRunitLog( runitHttpdDir )
      runFile = os.path.join( runitHttpdDir, 'run' )
      fd = open( runFile, 'w' )
      fd.write(
"""#!/bin/bash
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec 2>&1
#
exec lighttpdSvc.sh < /dev/null
""" % {'bashrc': os.path.join( instancePath, 'bashrc' ), } )
      fd.close()

      os.chmod( runFile, gDefaultPerms )
    except Exception:
      error = 'Failed to prepare setup for lighttpd'
      gLogger.exception( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )

    # trial execution of the run script
    result = execCommand( 5, [runFile] )
    gLogger.notice( result['Value'][1] )

  # Second the Web portal

  # Check if the component is already installed
  if os.path.exists( runitPasterDir ):
    msg = "Web Portal already installed"
    gLogger.notice( msg )
  else:
    gLogger.notice( 'Installing Web Portal' )
    # Now do the actual installation
    try:
      _createRunitLog( runitPasterDir )
      runFile = os.path.join( runitPasterDir, 'run' )
      fd = open( runFile, 'w' )
      fd.write(
"""#!/bin/bash
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec 2>&1
#
cd %(DIRAC)s/Web
exec paster serve --reload production.ini < /dev/null
""" % {'bashrc': os.path.join( instancePath, 'bashrc' ),
       'DIRAC': linkedRootPath} )
      fd.close()

      os.chmod( runFile, gDefaultPerms )
    except Exception:
      error = 'Failed to prepare setup for Web Portal'
      gLogger.exception( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )

    result = execCommand( 5, [runFile] )
    gLogger.notice( result['Value'][1] )

  return S_OK( [runitHttpdDir, runitPasterDir] )
def setupPortal():
  """
  Install the old Web Portal ( lighttpd + paster ) and create its startup links.

  :return: result of the final getStartupComponentStatus check
  """
  result = installPortal()
  if not result['OK']:
    return result

  # Create the startup entries now
  runitCompDir = result['Value']
  startCompDir = [ os.path.join( startDir, 'Web_httpd' ),
                   os.path.join( startDir, 'Web_paster' ) ]

  if not os.path.exists( startDir ):
    os.makedirs( startDir )

  # link both httpd and paster runit directories into the startup directory
  for i in range( 2 ):
    if not os.path.lexists( startCompDir[i] ):
      gLogger.notice( 'Creating startup link at', startCompDir[i] )
      os.symlink( runitCompDir[i], startCompDir[i] )
      time.sleep( 1 )
  time.sleep( 5 )

  # Check the runsv status: poll for up to ~10 seconds until both run
  start = time.time()
  while ( time.time() - 10 ) < start:
    result = getStartupComponentStatus( [ ( 'Web', 'httpd' ), ( 'Web', 'paster' ) ] )
    if not result['OK']:
      return S_ERROR( 'Failed to start the Portal' )
    if result['Value'] and \
       result['Value']['%s_%s' % ( 'Web', 'httpd' )]['RunitStatus'] == "Run" and \
       result['Value']['%s_%s' % ( 'Web', 'paster' )]['RunitStatus'] == "Run" :
      break
    time.sleep( 1 )

  # Final check
  return getStartupComponentStatus( [ ( 'Web', 'httpd' ), ( 'Web', 'paster' ) ] )
def setupNewPortal():
  """
  Install the new ( WebApp ) Web Portal and create its startup link.

  :return: result of the final getStartupComponentStatus check
  """
  result = installNewPortal()
  if not result['OK']:
    return result

  # Create the startup entries now
  runitCompDir = result['Value']
  startCompDir = os.path.join( startDir, 'Web_WebApp' )

  if not os.path.exists( startDir ):
    os.makedirs( startDir )

  if not os.path.lexists( startCompDir ):
    gLogger.notice( 'Creating startup link at', startCompDir )
    os.symlink( runitCompDir, startCompDir )

  time.sleep( 5 )

  # Check the runsv status: poll for up to ~10 seconds until it runs
  start = time.time()
  while ( time.time() - 10 ) < start:
    result = getStartupComponentStatus( [( 'Web', 'WebApp' )] )
    if not result['OK']:
      return S_ERROR( 'Failed to start the Portal' )
    if result['Value'] and \
       result['Value']['%s_%s' % ( 'Web', 'WebApp' )]['RunitStatus'] == "Run":
      break
    time.sleep( 1 )

  # Final check
  return getStartupComponentStatus( [ ('Web', 'WebApp') ] )
def installNewPortal():
  """
  Install runit directories for the new ( WebApp ) Web Portal.

  Installs tornado via pip, checks the WebAppDIRAC extension, compiles the
  JavaScript code when possible, then prepares the runit directory.

  :return: S_OK( runit directory of the WebApp )
  """
  result = execCommand( False, ["pip", "install", "tornado"] )
  if not result['OK']:
    error = "Tornado can not be installed:%s" % result['Value']
    gLogger.error( error )
    DIRAC.exit(-1)
    # BUGFIX: used to 'return error' ( a bare string ), which would break
    # callers that index result['OK']
    return S_ERROR( error )
  else:
    gLogger.notice("Tornado is installed successfully!")

  # Check that the software for the Web Portal is installed
  error = ''
  webDir = os.path.join( linkedRootPath, 'WebAppDIRAC' )
  if not os.path.exists( webDir ):
    error = 'WebApp extension not installed at %s' % webDir
    if exitOnError:
      gLogger.error( error )
      DIRAC.exit( -1 )
    return S_ERROR( error )

  # compile the JS code; on success the portal is run in production mode
  prodMode = ""
  webappCompileScript = os.path.join( linkedRootPath, "WebAppDIRAC/scripts", "dirac-webapp-compile.py" )
  if os.path.isfile( webappCompileScript ):
    os.chmod( webappCompileScript , gDefaultPerms )
    gLogger.notice( "Executing %s..." % webappCompileScript )
    if os.system( "python '%s' > '%s.out' 2> '%s.err'" % ( webappCompileScript,
                                                           webappCompileScript,
                                                           webappCompileScript ) ):
      gLogger.error( "Compile script %s failed. Check %s.err" % ( webappCompileScript,
                                                                  webappCompileScript ) )
    else:
      prodMode = "-p"

  # Check if the component is already installed
  runitWebAppDir = os.path.join( runitDir, 'Web', 'WebApp' )

  # Check if the component is already installed
  if os.path.exists( runitWebAppDir ):
    msg = "Web Portal already installed"
    gLogger.notice( msg )
  else:
    gLogger.notice( 'Installing Web Portal' )
    # Now do the actual installation
    try:
      _createRunitLog( runitWebAppDir )
      runFile = os.path.join( runitWebAppDir, 'run' )
      fd = open( runFile, 'w' )
      fd.write(
"""#!/bin/bash
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec 2>&1
#
exec python %(DIRAC)s/WebAppDIRAC/scripts/dirac-webapp-run.py %(prodMode)s < /dev/null
""" % {'bashrc': os.path.join( instancePath, 'bashrc' ),
       'DIRAC': linkedRootPath,
       'prodMode':prodMode} )
      fd.close()

      os.chmod( runFile, gDefaultPerms )
    except Exception:
      error = 'Failed to prepare setup for Web Portal'
      gLogger.exception( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )

    result = execCommand( 5, [runFile] )
    gLogger.notice( result['Value'][1] )

  return S_OK( runitWebAppDir )
def fixMySQLScripts( startupScript = mysqlStartupScript ):
  """
  Edit MySQL scripts to point to desired locations for db and my.cnf.

  :param startupScript: path of the MySQL startup script to rewrite in place
  :return: S_OK() on success, S_ERROR if the script could not be updated
  """
  gLogger.verbose( 'Updating:', startupScript )
  try:
    fd = open( startupScript, 'r' )
    orgLines = fd.readlines()
    fd.close()
    fd = open( startupScript, 'w' )
    for line in orgLines:
      # drop any pre-existing HOME export; a fresh one is appended below
      if line.find( 'export HOME' ) == 0:
        continue
      if line.find( 'datadir=' ) == 0:
        line = 'datadir=%s\n' % mysqlDbDir
        gLogger.debug( line )
        # HOME must point to the MySQL dir so my.cnf is picked up from there
        line += 'export HOME=%s\n' % mysqlDir
      if line.find( 'basedir=' ) == 0:
        platform = getPlatformString()
        line = 'basedir=%s\n' % os.path.join( rootPath, platform )
      if line.find( 'extra_args=' ) == 0:
        line = 'extra_args="-n"\n'
      if line.find( '$bindir/mysqld_safe --' ) >= 0 and not ' --no-defaults ' in line:
        line = line.replace( 'mysqld_safe', 'mysqld_safe --no-defaults' )
      fd.write( line )
    fd.close()
  except Exception:
    error = 'Failed to Update MySQL startup script'
    gLogger.exception( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  return S_OK()
def mysqlInstalled( doNotExit = False ):
  """
  Check if MySQL is already installed.

  The installation is considered present when either the database directory
  or the log directory exists.

  :param doNotExit: if True, return a silent S_ERROR instead of logging
                    ( and possibly exiting ) on a missing installation
  :return: S_OK() when installed, S_ERROR otherwise
  """
  installed = os.path.exists( mysqlDbDir ) or os.path.exists( mysqlLogDir )
  if installed:
    return S_OK()
  if doNotExit:
    return S_ERROR()
  errMsg = 'MySQL not properly Installed'
  gLogger.error( errMsg )
  if exitOnError:
    DIRAC.exit( -1 )
  return S_ERROR( errMsg )
def getMySQLPasswords():
  """
  Get MySQL passwords from local configuration or prompt.

  Fills the module-level mysqlRootPwd and mysqlPassword, asking
  interactively only for the values that are not already known.
  """
  import getpass
  global mysqlRootPwd, mysqlPassword
  mysqlRootPwd = mysqlRootPwd or getpass.getpass( 'MySQL root password: ' )
  if not mysqlPassword:
    # Prefer the value already present in the local configuration
    mysqlPassword = localCfg.getOption( '/Systems/Databases/Password', '' ) \
                    or getpass.getpass( 'MySQL Dirac password: ' )
  return S_OK()
def setMySQLPasswords( root = '', dirac = '' ):
  """
  Set MySQL passwords.

  Empty arguments leave the corresponding cached password untouched.

  :param root: MySQL root password
  :param dirac: MySQL password of the Dirac user
  """
  global mysqlRootPwd, mysqlPassword
  mysqlRootPwd = root or mysqlRootPwd
  mysqlPassword = dirac or mysqlPassword
  return S_OK()
def startMySQL():
  """
  Start MySQL server.

  :return: result of the startup script, or S_ERROR when MySQL is not installed
  """
  installed = mysqlInstalled()
  if not installed['OK']:
    return installed
  return execCommand( 0, [mysqlStartupScript, 'start'] )
def stopMySQL():
  """
  Stop MySQL server.

  :return: result of the startup script, or S_ERROR when MySQL is not installed
  """
  installed = mysqlInstalled()
  if not installed['OK']:
    return installed
  return execCommand( 0, [mysqlStartupScript, 'stop'] )
def installMySQL():
  """
  Attempt an installation of MySQL.

  Uses the module-level mysqlMode to decide the replication role:
    - Master
    - Slave
    - None ( standalone )

  Prepares the data/log directories, generates my.cnf from the packaged
  template, initializes and starts the server, and sets the root password.

  :return: S_OK() on success, S_ERROR on failure ( may call DIRAC.exit
           first when exitOnError is set )
  """
  fixMySQLScripts()

  if mysqlInstalled( doNotExit = True )['OK']:
    gLogger.notice( 'MySQL already installed' )
    return S_OK()

  if mysqlMode.lower() not in [ '', 'master', 'slave' ]:
    error = 'Unknown MySQL server Mode'
    if exitOnError:
      gLogger.fatal( error, mysqlMode )
      DIRAC.exit( -1 )
    gLogger.error( error, mysqlMode )
    return S_ERROR( error )

  if mysqlHost:
    gLogger.notice( 'Installing MySQL server at', mysqlHost )

  if mysqlMode:
    gLogger.notice( 'This is a MySQl %s server' % mysqlMode )

  try:
    os.makedirs( mysqlDbDir )
    os.makedirs( mysqlLogDir )
  except Exception:
    error = 'Can not create MySQL dirs'
    gLogger.exception( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )

  # Generate my.cnf from the packaged template, adapting it line by line
  try:
    fd = open( mysqlMyOrg, 'r' )
    myOrg = fd.readlines()
    fd.close()

    fd = open( mysqlMyCnf, 'w' )
    for line in myOrg:
      if line.find( '[mysqld]' ) == 0:
        # one table space per table right after the [mysqld] section header
        line += '\n'.join( [ 'innodb_file_per_table', '' ] )
      elif line.find( 'innodb_log_arch_dir' ) == 0:
        # obsolete option: drop it
        line = ''
      elif line.find( 'innodb_data_file_path' ) == 0:
        line = line.replace( '2000M', '200M' )
      elif line.find( 'server-id' ) == 0 and mysqlMode.lower() == 'master':
        # MySQL Configuration for Master Server
        line = '\n'.join( ['server-id = 1',
                           '# DIRAC Master-Server',
                           'sync-binlog = 1',
                           'replicate-ignore-table = mysql.MonitorData',
                           '# replicate-ignore-db=db_name',
                           'log-bin = mysql-bin',
                           'log-slave-updates', '' ] )
      elif line.find( 'server-id' ) == 0 and mysqlMode.lower() == 'slave':
        # MySQL Configuration for Slave Server
        # the timestamp makes the slave server-id ( sufficiently ) unique
        line = '\n'.join( ['server-id = %s' % int( time.time() ),
                           '# DIRAC Slave-Server',
                           'sync-binlog = 1',
                           'replicate-ignore-table = mysql.MonitorData',
                           '# replicate-ignore-db=db_name',
                           'log-bin = mysql-bin',
                           'log-slave-updates', '' ] )
      elif line.find( '/opt/dirac/mysql' ) > -1:
        line = line.replace( '/opt/dirac/mysql', mysqlDir )

      # tune the InnoDB buffer pool to the requested memory footprint
      if mysqlSmallMem:
        if line.find( 'innodb_buffer_pool_size' ) == 0:
          line = 'innodb_buffer_pool_size = 200M\n'
      elif mysqlLargeMem:
        if line.find( 'innodb_buffer_pool_size' ) == 0:
          line = 'innodb_buffer_pool_size = 10G\n'

      fd.write( line )
    fd.close()
  except Exception:
    error = 'Can not create my.cnf'
    gLogger.exception( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )

  gLogger.notice( 'Initializing MySQL...' )
  result = execCommand( 0, ['mysql_install_db',
                            '--defaults-file=%s' % mysqlMyCnf,
                            '--datadir=%s' % mysqlDbDir ] )
  if not result['OK']:
    return result

  gLogger.notice( 'Starting MySQL...' )
  result = startMySQL()
  if not result['OK']:
    return result

  gLogger.notice( 'Setting MySQL root password' )
  result = execCommand( 0, ['mysqladmin', '-u', mysqlRootUser, 'password', mysqlRootPwd] )
  if not result['OK']:
    return result

  # MySQL tends to define root@host user rather than [email protected]
  hostName = mysqlHost.split('.')[0]
  result = execMySQL( "UPDATE user SET Host='%s' WHERE Host='%s'" % (mysqlHost,hostName),
                      localhost=True )
  if not result['OK']:
    return result
  result = execMySQL( "FLUSH PRIVILEGES" )
  if not result['OK']:
    return result

  # also set the password for remote connections when not bound to localhost
  if mysqlHost and socket.gethostbyname( mysqlHost ) != '127.0.0.1' :
    result = execCommand( 0, ['mysqladmin', '-u', mysqlRootUser, '-h', mysqlHost, 'password', mysqlRootPwd] )
    if not result['OK']:
      return result

  # drop password-less accounts; best effort, result deliberately unchecked
  result = execMySQL( "DELETE from user WHERE Password=''", localhost=True )

  if not _addMySQLToDiracCfg():
    return S_ERROR( 'Failed to add MySQL user password to local configuration' )

  return S_OK()
def getMySQLStatus():
  """
  Get the status of the MySQL database installation

  Runs ``mysqladmin status`` and parses its single-line, colon-separated
  output into a dictionary.

  :return: S_OK( dict ) with keys UpTime, NumberOfThreads, NumberOfQuestions,
           NumberOfSlowQueries, NumberOfOpens, FlushTables, OpenTables and
           QueriesPerSecond, or the S_ERROR from execCommand on failure
  """
  result = execCommand( 0, ['mysqladmin', 'status' ] )
  if not result['OK']:
    return result
  output = result['Value'][1]
  # mysqladmin prints one line of "label: value" pairs; the unpack below
  # assumes exactly nine colon-separated fields (the first is discarded)
  _d1, uptime, nthreads, nquestions, nslow, nopens, nflash, nopen, nqpersec = output.split( ':' )
  resDict = {}
  # each field looks like " 1272 Threads", so take the first token
  resDict['UpTime'] = uptime.strip().split()[0]
  resDict['NumberOfThreads'] = nthreads.strip().split()[0]
  resDict['NumberOfQuestions'] = nquestions.strip().split()[0]
  resDict['NumberOfSlowQueries'] = nslow.strip().split()[0]
  resDict['NumberOfOpens'] = nopens.strip().split()[0]
  resDict['OpenTables'] = nopen.strip().split()[0]
  resDict['FlushTables'] = nflash.strip().split()[0]
  resDict['QueriesPerSecond'] = nqpersec.strip().split()[0]
  return S_OK( resDict )
def getAvailableDatabases( extensions ):
  """
  Discover the database schema files shipped with the installed extensions.

  Scans <ext>DIRAC/*/DB/*.sql for every extension (the bare 'DIRAC' tree is
  scanned last, so an extension's entry wins over vanilla DIRAC's).

  :param list extensions: extension name prefixes, e.g. [ 'LHCb' ]
  :return: S_OK( { dbName : { 'Extension' : ..., 'System' : ... } } )
  """
  dbDict = {}
  for extension in extensions + ['']:
    pattern = os.path.join( rootPath, '%sDIRAC' % extension, '*', 'DB', '*.sql' )
    for sqlFile in glob.glob( pattern ):
      dbName = os.path.basename( sqlFile ).replace( '.sql', '' )
      dbDict[dbName] = { 'Extension' : extension,
                         'System' : sqlFile.split( '/' )[-3].replace( 'System', '' ) }
  return S_OK( dbDict )
def getDatabases():
  """
  Get the list of installed databases

  Queries the MySQL server and filters out its own bookkeeping schemas.

  :return: S_OK( list of database names ) or the S_ERROR from execMySQL
  """
  result = execMySQL( 'SHOW DATABASES' )
  if not result['OK']:
    return result
  # header row and server-internal schemas are not DIRAC databases
  systemSchemas = ( 'Database', 'information_schema', 'mysql', 'test' )
  dbList = [ row[0] for row in result['Value'] if row[0] not in systemSchemas ]
  return S_OK( dbList )
def installDatabase( dbName ):
  """
  Install requested DB in MySQL server

  Locates the <dbName>.sql schema shipped with the installation, creates the
  database, grants the DIRAC MySQL user the required privileges and replays
  the schema statements one at a time.

  :param str dbName: name of the database to install
  :return: S_OK( [ system, dbName ] path components ) or S_ERROR
  """
  global mysqlRootPwd, mysqlPassword
  if not mysqlRootPwd:
    rootPwdPath = cfgInstallPath( 'Database', 'RootPwd' )
    return S_ERROR( 'Missing %s in %s' % ( rootPwdPath, cfgFile ) )
  if not mysqlPassword:
    mysqlPassword = localCfg.getOption( cfgPath( 'Systems', 'Databases', 'Password' ), mysqlPassword )
  if not mysqlPassword:
    mysqlPwdPath = cfgPath( 'Systems', 'Databases', 'Password' )
    return S_ERROR( 'Missing %s in %s' % ( mysqlPwdPath, cfgFile ) )
  gLogger.notice( 'Installing', dbName )
  dbFile = glob.glob( os.path.join( rootPath, '*', '*', 'DB', '%s.sql' % dbName ) )
  if not dbFile:
    error = 'Database %s not found' % dbName
    gLogger.error( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  dbFile = dbFile[0]
  # just check that the server is reachable
  result = execMySQL( 'SHOW STATUS' )
  if not result['OK']:
    error = 'Could not connect to MySQL server'
    gLogger.error( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  # now creating the Database
  result = execMySQL( 'CREATE DATABASE `%s`' % dbName )
  if not result['OK']:
    gLogger.error( 'Failed to create databases', result['Message'] )
    if exitOnError:
      DIRAC.exit( -1 )
    return result
  perms = "SELECT,INSERT,LOCK TABLES,UPDATE,DELETE,CREATE,DROP,ALTER,CREATE VIEW, SHOW VIEW"
  # grant access from localhost, from the configured host and from anywhere
  for cmd in ["GRANT %s ON `%s`.* TO '%s'@'localhost' IDENTIFIED BY '%s'" % ( perms, dbName, mysqlUser,
                                                                              mysqlPassword ),
              "GRANT %s ON `%s`.* TO '%s'@'%s' IDENTIFIED BY '%s'" % ( perms, dbName, mysqlUser,
                                                                       mysqlHost, mysqlPassword ),
              "GRANT %s ON `%s`.* TO '%s'@'%%' IDENTIFIED BY '%s'" % ( perms, dbName, mysqlUser,
                                                                       mysqlPassword ) ]:
    result = execMySQL( cmd )
    if not result['OK']:
      error = "Error executing '%s'" % cmd
      gLogger.error( error, result['Message'] )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )
  result = execMySQL( 'FLUSH PRIVILEGES' )
  if not result['OK']:
    # fixed log-message typo: was 'provileges'
    gLogger.error( 'Failed to flush privileges', result['Message'] )
    if exitOnError:
      # fixed: use DIRAC.exit for consistency (was the builtin exit())
      DIRAC.exit( -1 )
    return result
  # first getting the lines to be executed, and then execute them
  try:
    cmdLines = _createMySQLCMDLines( dbFile )
    # We need to run one SQL cmd at once, mysql is much happier that way.
    # Create a string of commands, ignoring comment lines
    sqlString = '\n'.join( x for x in cmdLines if not x.startswith( "--" ) )
    # Now run each command (They are seperated by ;)
    # Ignore any empty ones
    cmds = [ x.strip() for x in sqlString.split( ";" ) if x.strip() ]
    for cmd in cmds:
      result = execMySQL( cmd, dbName )
      if not result['OK']:
        error = 'Failed to initialize Database'
        gLogger.notice( cmd )
        gLogger.error( error, result['Message'] )
        if exitOnError:
          DIRAC.exit( -1 )
        return S_ERROR( error )
  except Exception as e:
    gLogger.error( str( e ) )
    if exitOnError:
      DIRAC.exit( -1 )
    # fixed: the original returned S_ERROR( error ) here, but ``error`` is
    # not defined on this path and raised a NameError
    return S_ERROR( str( e ) )
  return S_OK( dbFile.split( '/' )[-4:-2] )
def _createMySQLCMDLines( dbFile ):
  """ Creates a list of MYSQL commands to be executed, inspecting the dbFile(s)
  """
  def _readLines( path ):
    # read a schema file completely, always closing the handle
    handle = open( path )
    try:
      return handle.readlines()
    finally:
      handle.close()
  cmdLines = []
  for line in _readLines( dbFile ):
    # Should we first source an SQL file (is this sql file an extension)?
    if line.lower().startswith( 'source' ):
      sourcedDBbFileName = line.split( ' ' )[1].replace( '\n', '' )
      gLogger.info( "Found file to source: %s" % sourcedDBbFileName )
      # inline the statements of the sourced file, skipping blank lines
      for lineSourced in _readLines( os.path.join( rootPath, sourcedDBbFileName ) ):
        if lineSourced.strip():
          cmdLines.append( lineSourced.strip() )
    else:
      # Creating/adding cmdLines
      if line.strip():
        cmdLines.append( line.strip() )
  return cmdLines
def execMySQL( cmd, dbName = 'mysql', localhost=False ):
  """
  Execute MySQL Command

  :param str cmd: SQL statement to run
  :param str dbName: database to connect to; connections are cached per
                     database name in the module-level ``db`` dict
  :param bool localhost: if True connect to 'localhost' instead of mysqlHost
  :return: S_OK with the query result, or S_ERROR when the root password is
           unset or the connection fails
  """
  global db
  from DIRAC.Core.Utilities.MySQL import MySQL
  if not mysqlRootPwd:
    return S_ERROR( 'MySQL root password is not defined' )
  if dbName not in db:
    dbHost = mysqlHost
    if localhost:
      dbHost = 'localhost'
    # cache the connection so subsequent calls for this DB reuse it
    db[dbName] = MySQL( dbHost, mysqlRootUser, mysqlRootPwd, dbName, mysqlPort )
  if not db[dbName]._connected:
    error = 'Could not connect to MySQL server'
    gLogger.error( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  return db[dbName]._query( cmd )
def _addMySQLToDiracCfg():
  """
  Add the database access info to the local configuration

  Writes the MySQL user name and password under /Systems/Databases in the
  local dirac.cfg.

  :return: result of _addCfgToDiracCfg, or S_ERROR when no password is set
  """
  if not mysqlPassword:
    return S_ERROR( 'Missing %s in %s' % ( cfgInstallPath( 'Database', 'Password' ), cfgFile ) )
  sectionPath = cfgPath( 'Systems', 'Databases' )
  cfg = __getCfg( sectionPath, 'User', mysqlUser )
  cfg.setOption( cfgPath( sectionPath, 'Password' ), mysqlPassword )
  return _addCfgToDiracCfg( cfg )
def configureCE( ceName = '', ceType = '', cfg = None, currentSectionPath = '' ):
  """
  Produce new dirac.cfg including configuration for new CE

  Merges the CE defaults with the given options, replaces the matching
  sections under /LocalSite, verifies each CE can be instantiated and
  finally saves the updated local configuration file.

  :param str ceName: name of the Computing Element
  :param str ceType: type of the Computing Element
  :param cfg: CFG object with additional CE options
  :param str currentSectionPath: CFG section path holding the defaults
  :return: S_OK( list of configured CE names ) or S_ERROR
  """
  from DIRAC.Resources.Computing.ComputingElementFactory import ComputingElementFactory
  from DIRAC import gConfig
  cesCfg = ResourcesDefaults.getComputingElementDefaults( ceName, ceType, cfg, currentSectionPath )
  ceNameList = cesCfg.listSections()
  if not ceNameList:
    error = 'No CE Name provided'
    gLogger.error( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  # every CE section must declare its type before we try to instantiate it
  for ceName in ceNameList:
    if 'CEType' not in cesCfg[ceName]:
      error = 'Missing Type for CE "%s"' % ceName
      gLogger.error( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )
  localsiteCfg = localCfg['LocalSite']
  # Replace Configuration under LocalSite with new Configuration
  for ceName in ceNameList:
    if localsiteCfg.existsKey( ceName ):
      gLogger.notice( ' Removing existing CE:', ceName )
      localsiteCfg.deleteKey( ceName )
    gLogger.notice( 'Configuring CE:', ceName )
    localsiteCfg.createNewSection( ceName, contents = cesCfg[ceName] )
  # Apply configuration and try to instantiate the CEs
  gConfig.loadCFG( localCfg )
  for ceName in ceNameList:
    ceFactory = ComputingElementFactory()
    try:
      ceInstance = ceFactory.getCE( ceType, ceName )
    except Exception:
      error = 'Fail to instantiate CE'
      gLogger.exception( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )
    if not ceInstance['OK']:
      error = 'Fail to instantiate CE: %s' % ceInstance['Message']
      gLogger.error( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )
  # Everything is OK, we can save the new cfg
  localCfg.writeToFile( cfgFile )
  # fixed log-message typo: was 'uptdated'
  gLogger.always( 'LocalSite section in %s has been updated with new configuration:' % os.path.basename( cfgFile ) )
  gLogger.always( str( localCfg['LocalSite'] ) )
  return S_OK( ceNameList )
def configureLocalDirector( ceNameList = '' ):
  """
  Install a Local DIRAC TaskQueueDirector, basically write the proper configuration file

  :param ceNameList: names of the Computing Elements the director may submit
                     to; when empty only the component is (re)started
  :return: S_OK / S_ERROR from the last performed step
  """
  if ceNameList:
    result = setupComponent( 'agent', 'WorkloadManagement', 'TaskQueueDirector', [] )
    if not result['OK']:
      return result
    # Now write a local Configuration for the Director
    directorCfg = CFG()
    directorCfg.addKey( 'SubmitPools', 'DIRAC', 'Added by InstallTools' )
    directorCfg.addKey( 'DefaultSubmitPools', 'DIRAC', 'Added by InstallTools' )
    directorCfg.addKey( 'ComputingElements', ', '.join( ceNameList ), 'Added by InstallTools' )
    result = addCfgToComponentCfg( 'agent', 'WorkloadManagement', 'TaskQueueDirector', directorCfg )
    if not result['OK']:
      return result
  # (re)start the director through runsv
  return runsvctrlComponent( 'WorkloadManagement', 'TaskQueueDirector', 't' )
def execCommand( timeout, cmd ):
  """
  Execute command tuple and handle Error cases

  :param int timeout: timeout in seconds passed to systemCall
  :param list cmd: command and its arguments as a list
  :return: result of systemCall; on a non-zero exit status an S_ERROR whose
           'Value' entry carries the original ( status, stdout, stderr )
  """
  result = systemCall( timeout, cmd )
  if not result['OK']:
    # timeouts are handed back to the caller untouched
    if timeout and result['Message'].find( 'Timeout' ) == 0:
      return result
    gLogger.error( 'Failed to execute', '%s: %s' % ( cmd[0], result['Message'] ) )
    if exitOnError:
      DIRAC.exit( -1 )
    return result
  if result['Value'][0]:
    # the command ran but returned a non-zero exit code
    error = 'Failed to execute'
    gLogger.error( error, cmd[0] )
    gLogger.error( 'Exit code:' , ( '%s\n' % result['Value'][0] ) + '\n'.join( result['Value'][1:] ) )
    if exitOnError:
      DIRAC.exit( -1 )
    error = S_ERROR( error )
    # attach the raw output so callers can still inspect it
    error['Value'] = result['Value']
    return error
  gLogger.verbose( result['Value'][1] )
  return result
| Sbalbp/DIRAC | Core/Utilities/InstallTools.py | Python | gpl-3.0 | 88,169 |
#!/usr/bin/env python
# encoding:utf-8
# __author__: huxianglin
# date: 2016-09-17
# blog: http://huxianglin.cnblogs.com/ http://xianglinhu.blog.51cto.com/
import os
from module import actions
from module import db_handler
from conf import settings
# Directory holding one file per registered card (the card "database")
ATM_AUTH_DIR = settings.DATABASE["path"]
# Known card ids, derived from the file names in the database directory
ATM_CARD_LIST=os.listdir(ATM_AUTH_DIR)
# Process-wide authentication state shared via the atm_auth_log decorator
atm_auth_flag = False   # True once a card has successfully logged in
atm_auth_admin = False  # True when the logged-in card has admin privilege
atm_auth_card_id = ""   # card id of the authenticated user
def atm_auth_log(atm_log):
    """Decorator factory adding card/password authentication with logging.

    ``atm_log`` is the logger used to record every attempt.  The returned
    decorator wraps ``func`` so that it only runs after a successful login
    (at most three attempts); after the third failure the card is frozen.
    Authentication state lives in module-level globals, so a process only
    has to log in once.

    Fix: removed the two unreachable ``break`` statements that followed
    ``return func(...)`` in the original (dead code).
    """
    def atm_auth(func):
        def wrapper(*args, **kwargs):
            global ATM_CARD_LIST, atm_auth_flag, atm_auth_admin, atm_auth_card_id
            if atm_auth_flag:
                # already authenticated in this session: call through directly
                return func(*args, **kwargs)
            print("欢迎登陆华夏银行".center(50, "*"))
            auth_count = 0
            auth_id = ""
            while auth_count < 3:
                auth_id = input("请输入卡号:").strip()
                auth_passwd = input("请输入密码:").strip()
                atm_log.info("Card:%s try %s login!" % (auth_id, auth_count + 1))
                if auth_id in ATM_CARD_LIST and actions.encry_passwd(auth_passwd) == db_handler.read_data(auth_id)["password"]:
                    auth_user_data = db_handler.read_data(auth_id)
                    atm_log.info("Card:%s login auth successful!" % auth_id)
                    if auth_user_data["freeze"]:
                        # frozen card: refuse the login and stop prompting
                        print("抱歉,您的信用卡:%s已被冻结,请联系管理人员解除冻结..." % auth_id)
                        atm_log.warning("Card:%s is freezed!login fail" % auth_id)
                        break
                    elif auth_user_data["privilege"] == "admin":
                        atm_auth_flag = True
                        atm_auth_admin = True
                        atm_auth_card_id = auth_user_data["card_id"]
                        atm_log.info("Card:%s is administrator!" % auth_id)
                        return func(*args, **kwargs)
                    else:
                        atm_auth_flag = True
                        atm_auth_card_id = auth_user_data["card_id"]
                        atm_log.info("Card:%s is user!" % auth_id)
                        return func(*args, **kwargs)
                else:
                    print("抱歉,您输入的用户名或密码错误,请重新输入...")
                    atm_log.warning("Card:%s try %s login failed!" % (auth_id, auth_count + 1))
                    auth_count += 1
            else:
                # three consecutive failures: freeze the card (if it exists)
                print("抱歉,您已连续三次输入错误,您的账户将会被冻结...")
                if auth_id in ATM_CARD_LIST:
                    auth_user_data = db_handler.read_data(auth_id)
                    auth_user_data["freeze"] = True
                    db_handler.write_data(auth_id, auth_user_data)
                    atm_log.warning("Card:%s will be freeze!" % auth_id)
        return wrapper
    return atm_auth
"""This is part of the Mouse Tracks Python application.
Source: https://github.com/Peter92/MouseTracks
"""
#Import the local scipy if possible, otherwise fallback to the installed one
from __future__ import absolute_import
from ...utils.numpy import process_numpy_array
try:
from .gaussian import gaussian_filter
from .zoom import zoom
except ImportError:
from scipy.ndimage.filters import gaussian_filter
from scipy.ndimage.interpolation import zoom
@process_numpy_array
def blur(array, size):
    """Gaussian-blur ``array``; ``size`` is the filter's sigma."""
    return gaussian_filter(array, sigma=size)
@process_numpy_array
def upscale(array, factor):
    """Nearest-neighbour zoom of ``array`` by the per-axis ``factor`` pair.

    A (1, 1) factor is a no-op and returns the input unchanged.
    """
    if factor[0] == factor[1] == 1:
        return array
    return zoom(array, factor, order=0)
from DIRAC import S_OK
from DIRAC.AccountingSystem.Client.Types.Pilot import Pilot
from DIRAC.AccountingSystem.private.Plotters.BaseReporter import BaseReporter
class PilotPlotter(BaseReporter):
    """Accounting reporter/plotter for the 'Pilot' accounting type.

    Each ``_reportX`` method extracts (bucketed) data for the requested time
    span and grouping; the matching ``_plotX`` method renders that data.
    ``reportRequest`` is the dict the BaseReporter machinery passes in
    (startTime, endTime, condDict, grouping, groupingFields).
    """
    _typeName = "Pilot"
    # grouping keys are taken from the Pilot accounting type definition
    _typeKeyFields = [dF[0] for dF in Pilot().definitionKeyFields]

    def _reportCumulativeNumberOfJobs(self, reportRequest):
        """Accumulated number of jobs executed by pilots over time."""
        selectFields = (
            self._getSelectStringForGrouping(reportRequest["groupingFields"]) + ", %s, %s, SUM(%s)",
            reportRequest["groupingFields"][1] + ["startTime", "bucketLength", "Jobs"],
        )
        retVal = self._getTimedData(
            reportRequest["startTime"],
            reportRequest["endTime"],
            selectFields,
            reportRequest["condDict"],
            reportRequest["groupingFields"],
            {},
        )
        if not retVal["OK"]:
            return retVal
        dataDict, granularity = retVal["Value"]
        self.stripDataField(dataDict, 0)
        dataDict = self._fillWithZero(granularity, reportRequest["startTime"], reportRequest["endTime"], dataDict)
        dataDict = self._accumulate(granularity, reportRequest["startTime"], reportRequest["endTime"], dataDict)
        baseDataDict, graphDataDict, maxValue, unitName = self._findSuitableUnit(
            dataDict, self._getAccumulationMaxValue(dataDict), "jobs"
        )
        return S_OK(
            {"data": baseDataDict, "graphDataDict": graphDataDict, "granularity": granularity, "unit": unitName}
        )

    def _plotCumulativeNumberOfJobs(self, reportRequest, plotInfo, filename):
        """Render the cumulative jobs report as a cumulative line plot."""
        metadata = {
            "title": "Cumulative Jobs by %s" % reportRequest["grouping"],
            "starttime": reportRequest["startTime"],
            "endtime": reportRequest["endTime"],
            "span": plotInfo["granularity"],
            "ylabel": plotInfo["unit"],
            "sort_labels": "last_value",
        }
        return self._generateCumulativePlot(filename, plotInfo["graphDataDict"], metadata)

    def _reportNumberOfJobs(self, reportRequest):
        """Rate of jobs executed by pilots (per time bucket)."""
        selectFields = (
            self._getSelectStringForGrouping(reportRequest["groupingFields"]) + ", %s, %s, SUM(%s)",
            reportRequest["groupingFields"][1] + ["startTime", "bucketLength", "Jobs"],
        )
        retVal = self._getTimedData(
            reportRequest["startTime"],
            reportRequest["endTime"],
            selectFields,
            reportRequest["condDict"],
            reportRequest["groupingFields"],
            {},
        )
        if not retVal["OK"]:
            return retVal
        dataDict, granularity = retVal["Value"]
        self.stripDataField(dataDict, 0)
        # convert bucket sums into rates before zero-filling
        dataDict, maxValue = self._divideByFactor(dataDict, granularity)
        dataDict = self._fillWithZero(granularity, reportRequest["startTime"], reportRequest["endTime"], dataDict)
        baseDataDict, graphDataDict, maxValue, unitName = self._findSuitableRateUnit(
            dataDict, self._getAccumulationMaxValue(dataDict), "jobs"
        )
        return S_OK(
            {"data": baseDataDict, "graphDataDict": graphDataDict, "granularity": granularity, "unit": unitName}
        )

    def _plotNumberOfJobs(self, reportRequest, plotInfo, filename):
        """Render the job rate report as a stacked bar plot."""
        metadata = {
            "title": "Jobs by %s" % reportRequest["grouping"],
            "starttime": reportRequest["startTime"],
            "endtime": reportRequest["endTime"],
            "span": plotInfo["granularity"],
            "ylabel": plotInfo["unit"],
        }
        return self._generateTimedStackedBarPlot(filename, plotInfo["graphDataDict"], metadata)

    def _reportCumulativeNumberOfPilots(self, reportRequest):
        """Accumulated number of pilot records over time."""
        selectFields = (
            self._getSelectStringForGrouping(reportRequest["groupingFields"]) + ", %s, %s, SUM(%s)",
            reportRequest["groupingFields"][1] + ["startTime", "bucketLength", "entriesInBucket"],
        )
        retVal = self._getTimedData(
            reportRequest["startTime"],
            reportRequest["endTime"],
            selectFields,
            reportRequest["condDict"],
            reportRequest["groupingFields"],
            {},
        )
        if not retVal["OK"]:
            return retVal
        dataDict, granularity = retVal["Value"]
        self.stripDataField(dataDict, 0)
        dataDict = self._fillWithZero(granularity, reportRequest["startTime"], reportRequest["endTime"], dataDict)
        dataDict = self._accumulate(granularity, reportRequest["startTime"], reportRequest["endTime"], dataDict)
        baseDataDict, graphDataDict, maxValue, unitName = self._findSuitableUnit(
            dataDict, self._getAccumulationMaxValue(dataDict), "jobs"
        )
        return S_OK(
            {"data": baseDataDict, "graphDataDict": graphDataDict, "granularity": granularity, "unit": unitName}
        )

    def _plotCumulativeNumberOfPilots(self, reportRequest, plotInfo, filename):
        """Render the cumulative pilots report as a cumulative line plot."""
        metadata = {
            "title": "Cumulative Pilots by %s" % reportRequest["grouping"],
            "starttime": reportRequest["startTime"],
            "endtime": reportRequest["endTime"],
            "span": plotInfo["granularity"],
            # units were computed with the "jobs" label; relabel for pilots
            "ylabel": plotInfo["unit"].replace("job", "pilot"),
            "sort_labels": "last_value",
        }
        return self._generateCumulativePlot(filename, plotInfo["graphDataDict"], metadata)

    def _reportNumberOfPilots(self, reportRequest):
        """Rate of pilot records (per time bucket)."""
        selectFields = (
            self._getSelectStringForGrouping(reportRequest["groupingFields"]) + ", %s, %s, SUM(%s)",
            reportRequest["groupingFields"][1] + ["startTime", "bucketLength", "entriesInBucket"],
        )
        retVal = self._getTimedData(
            reportRequest["startTime"],
            reportRequest["endTime"],
            selectFields,
            reportRequest["condDict"],
            reportRequest["groupingFields"],
            {},
        )
        if not retVal["OK"]:
            return retVal
        dataDict, granularity = retVal["Value"]
        self.stripDataField(dataDict, 0)
        dataDict, maxValue = self._divideByFactor(dataDict, granularity)
        dataDict = self._fillWithZero(granularity, reportRequest["startTime"], reportRequest["endTime"], dataDict)
        baseDataDict, graphDataDict, maxValue, unitName = self._findSuitableRateUnit(
            dataDict, self._getAccumulationMaxValue(dataDict), "jobs"
        )
        return S_OK(
            {"data": baseDataDict, "graphDataDict": graphDataDict, "granularity": granularity, "unit": unitName}
        )

    def _plotNumberOfPilots(self, reportRequest, plotInfo, filename):
        """Render the pilot rate report as a stacked bar plot."""
        metadata = {
            "title": "Pilots by %s" % reportRequest["grouping"],
            "starttime": reportRequest["startTime"],
            "endtime": reportRequest["endTime"],
            "span": plotInfo["granularity"],
            # units were computed with the "jobs" label; relabel for pilots
            "ylabel": plotInfo["unit"].replace("job", "pilot"),
        }
        return self._generateTimedStackedBarPlot(filename, plotInfo["graphDataDict"], metadata)

    def _reportJobsPerPilot(self, reportRequest):
        """Average number of jobs executed per pilot, per time bucket."""
        selectFields = (
            self._getSelectStringForGrouping(reportRequest["groupingFields"]) + ", %s, %s, SUM(%s), SUM(%s)",
            reportRequest["groupingFields"][1] + ["startTime", "bucketLength", "Jobs", "entriesInBucket"],
        )
        retVal = self._getTimedData(
            reportRequest["startTime"],
            reportRequest["endTime"],
            selectFields,
            reportRequest["condDict"],
            reportRequest["groupingFields"],
            {
                "checkNone": True,
                "convertToGranularity": "sum",
                "calculateProportionalGauges": False,
                "consolidationFunction": self._averageConsolidation,
            },
        )
        if not retVal["OK"]:
            return retVal
        dataDict, granularity = retVal["Value"]
        self.stripDataField(dataDict, 0)
        dataDict = self._fillWithZero(granularity, reportRequest["startTime"], reportRequest["endTime"], dataDict)
        return S_OK({"data": dataDict, "granularity": granularity})

    def _plotJobsPerPilot(self, reportRequest, plotInfo, filename):
        """Render the jobs-per-pilot ratio as a quality (heatmap-style) plot."""
        metadata = {
            "title": "Jobs per pilot by %s" % reportRequest["grouping"],
            "starttime": reportRequest["startTime"],
            "endtime": reportRequest["endTime"],
            "span": plotInfo["granularity"],
            "ylabel": "jobs/pilot",
            # scale colours to the largest observed ratio
            "normalization": max(x for y in plotInfo["data"].values() for x in y.values()),
        }
        return self._generateQualityPlot(filename, plotInfo["data"], metadata)

    def _reportTotalNumberOfPilots(self, reportRequest):
        """Total pilot count per grouping value over the whole time span."""
        selectFields = (
            self._getSelectStringForGrouping(reportRequest["groupingFields"]) + ", SUM(%s)",
            reportRequest["groupingFields"][1] + ["entriesInBucket"],
        )
        retVal = self._getSummaryData(
            reportRequest["startTime"],
            reportRequest["endTime"],
            selectFields,
            reportRequest["condDict"],
            reportRequest["groupingFields"],
            {},
        )
        if not retVal["OK"]:
            return retVal
        dataDict = retVal["Value"]
        return S_OK({"data": dataDict})

    def _plotTotalNumberOfPilots(self, reportRequest, plotInfo, filename):
        """Render the total pilot counts as a pie chart."""
        metadata = {
            "title": "Total Number of Pilots by %s" % reportRequest["grouping"],
            "ylabel": "Pilots",
            "starttime": reportRequest["startTime"],
            "endtime": reportRequest["endTime"],
        }
        return self._generatePiePlot(filename, plotInfo["data"], metadata)
| DIRACGrid/DIRAC | src/DIRAC/AccountingSystem/private/Plotters/PilotPlotter.py | Python | gpl-3.0 | 9,695 |
# -*- coding: utf-8 -*-
from gettext import gettext as _
# Each EXPn entry is a 4-element list describing one map exploration:
#   [ translated title, <list 2>, <list 3>, <list 4> ]
# NOTE(review): from the data alone, list 2 looks like the map layers to
# draw, while the meaning of lists 3 and 4 (possibly layers with hidden
# labels and layers being quizzed) is not visible here -- confirm against
# the code that consumes EXPLORATIONS.
EXP1 = [
    _('Regions'),
    ['lineasDepto'],
    [],
    ['deptos']
]
EXP2 = [
    _('Regional capitals'),
    ['lineasDepto', 'capitales'],
    [],
    ['capitales']
]
EXP3 = [
    _('Cities'),
    ['lineasDepto', 'capitales', 'ciudades'],
    [],
    ['capitales', 'ciudades']
]
EXP4 = [
    _('Waterways'),
    ['rios'],
    [],
    ['rios']
]
EXP5 = [
    _('Routes'),
    ['rutas', 'capitales'],
    ['capitales'],
    ['rutas']
]
# all explorations, in presentation order
EXPLORATIONS = [EXP1, EXP2, EXP3, EXP4, EXP5]
| AlanJAS/iknowAmerica | recursos/0guyana/datos/explorations.py | Python | gpl-3.0 | 550 |
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
import unittest
from mantid.simpleapi import mtd
from testhelpers import run_algorithm
from mantid.api import WorkspaceGroup, MatrixWorkspace
from mantid import config
class IndirectILLReductionFWS(unittest.TestCase):
    """System tests for the IndirectILLReductionFWS workflow algorithm.

    Runs the reduction on mixed EFWS+IFWS IN16B data (one- and two-wing
    acquisition modes) and checks the shape and metadata of the produced
    workspace groups.

    Fix: the deprecated ``assertEquals`` alias is replaced by
    ``assertEqual`` throughout.
    """
    # cache the def instrument and data search dirs
    _def_fac = config['default.facility']
    _def_inst = config['default.instrument']
    _data_dirs = config['datasearch.directories']
    # EFWS+IFWS, two wing
    _run_two_wing_mixed = '170299:170304'
    # EFWS+IFWS, one wing
    _run_one_wing_mixed = '083072:083077'

    def setUp(self):
        # set instrument and append datasearch directory
        config['default.facility'] = 'ILL'
        config['default.instrument'] = 'IN16B'
        config.appendDataSearchSubDir('ILL/IN16B/')

    def tearDown(self):
        # restore cached facility and datasearch directory
        config['default.facility'] = self._def_fac
        config['default.instrument'] = self._def_inst
        config['datasearch.directories'] = self._data_dirs

    def test_two_wing(self):
        """Two-wing data must reduce to 2 workspaces of 18 spectra x 3 bins."""
        args = {'Run': self._run_two_wing_mixed,
                'OutputWorkspace': 'out'}
        alg_test = run_algorithm('IndirectILLReductionFWS', **args)
        self.assertTrue(alg_test.isExecuted(), "IndirectILLReductionFWS not executed")
        self._check_workspace_group(mtd['out_red'], 2, 18, 3)
        runs_log1 = mtd['out_red'].getItem(0).getRun().getLogData('ReducedRunsList').value
        runs_log2 = mtd['out_red'].getItem(1).getRun().getLogData('ReducedRunsList').value
        self.assertEqual(runs_log1, '170299,170301,170303', "Reduced runs list mismatch.")
        self.assertEqual(runs_log2, '170300,170302,170304', "Reduced runs list mismatch.")

    def test_one_wing(self):
        """One-wing data must reduce to 3 workspaces of 18 spectra x 2 bins."""
        args = {'Run': self._run_one_wing_mixed,
                'OutputWorkspace': 'out'}
        alg_test = run_algorithm('IndirectILLReductionFWS', **args)
        self.assertTrue(alg_test.isExecuted(), "IndirectILLReductionFWS not executed")
        self._check_workspace_group(mtd['out_red'], 3, 18, 2)

    def _check_workspace_group(self, wsgroup, nentries, nspectra, nbins):
        """Assert group size and the shape/metadata of its first workspace."""
        self.assertTrue(isinstance(wsgroup, WorkspaceGroup),
                        "{0} should be a group workspace".format(wsgroup.getName()))
        self.assertEqual(wsgroup.getNumberOfEntries(), nentries,
                         "{0} should contain {1} workspaces".format(wsgroup.getName(), nentries))
        item = wsgroup.getItem(0)
        name = item.getName()
        self.assertTrue(isinstance(item, MatrixWorkspace),
                        "{0} should be a matrix workspace".format(name))
        self.assertEqual(item.getNumberHistograms(), nspectra,
                         "{0} should contain {1} spectra".format(name, nspectra))
        self.assertEqual(item.blocksize(), nbins,
                         "{0} should contain {1} bins".format(name, nbins))
        self.assertTrue(item.getSampleDetails(),
                        "{0} should have sample logs".format(name))
        self.assertTrue(item.getHistory().lastAlgorithm(),
                        "{0} should have history".format(name))
if __name__ == "__main__":
    unittest.main()  # allow running this test file directly
| mganeva/mantid | Framework/PythonInterface/test/python/plugins/algorithms/WorkflowAlgorithms/IndirectILLReductionFWSTest.py | Python | gpl-3.0 | 3,560 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Pambudi Satria (<https://github.com/pambudisatria>).
# @author Pambudi Satria <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import fields, models, api
class account_invoice_line(models.Model):
    """Extend ``account.invoice.line`` with a JNE shipment number field."""
    _inherit = "account.invoice.line"
    # Tracking number for the JNE courier service.
    # NOTE(review): semantics inferred from the field label -- confirm.
    jne_number = fields.Char('JNE #')
# vim: ts=8:sts=8:sw=8:noexpandtab
#
# This file is part of ReText
# Copyright: 2017-2021 Dmitry Shachnev
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from os.path import exists
import time
from PyQt5.QtCore import QDir, QUrl
from PyQt5.QtGui import QDesktopServices, QTextCursor, QTextDocument
from PyQt5.QtWidgets import QTextBrowser
from ReText import globalSettings
class ReTextPreview(QTextBrowser):
	"""Read-only HTML preview pane attached to a ReText editor tab."""
	def __init__(self, tab):
		QTextBrowser.__init__(self)
		self.tab = tab
		# if set to True, links to other files will unsuccessfully be opened as anchors
		self.setOpenLinks(False)
		self.anchorClicked.connect(self.openInternal)
		# timestamp of the last render start; used by updateScrollPosition
		self.lastRenderTime = 0
		self.distToBottom = None
		self.verticalScrollBar().rangeChanged.connect(self.updateScrollPosition)
	def disconnectExternalSignals(self):
		# nothing to disconnect for this preview implementation
		pass
	def openInternal(self, link):
		"""Handle a clicked link: scroll to anchors, open local files in
		ReText (or as rendered HTML), hand everything else to the desktop."""
		url = link.url()
		if url.startswith('#'):
			self.scrollToAnchor(url[1:])
			return
		elif link.isRelative():
			# resolve relative links against the current working directory
			fileToOpen = QDir.current().filePath(url)
		else:
			fileToOpen = link.toLocalFile() if link.isLocalFile() else None
		if fileToOpen is not None:
			if exists(fileToOpen):
				link = QUrl.fromLocalFile(fileToOpen)
				if globalSettings.handleWebLinks and fileToOpen.endswith('.html'):
					self.setSource(link)
					return
			# This is outside the "if exists" block because we can prompt for
			# creating the file
			if self.tab.openSourceFile(fileToOpen):
				return
		QDesktopServices.openUrl(link)
	def findText(self, text, flags, wrap=False):
		"""Search the preview document for *text*; wraps around once.

		Returns True when a match was found and selected."""
		cursor = self.textCursor()
		if wrap and flags & QTextDocument.FindFlag.FindBackward:
			cursor.movePosition(QTextCursor.MoveOperation.End)
		elif wrap:
			cursor.movePosition(QTextCursor.MoveOperation.Start)
		newCursor = self.document().find(text, cursor, flags)
		if not newCursor.isNull():
			self.setTextCursor(newCursor)
			return True
		if not wrap:
			# retry once from the opposite end of the document
			return self.findText(text, flags, wrap=True)
		return False
	def updateScrollPosition(self, minimum, maximum):
		"""Called when vertical scroll bar range changes.

		If this happened during preview rendering (less than 0.5s since it
		was started), set the position such that distance to bottom is the
		same as before refresh.
		"""
		timeSinceRender = time.time() - self.lastRenderTime
		if timeSinceRender < 0.5 and self.distToBottom is not None and maximum:
			newValue = maximum - self.distToBottom
			if newValue >= minimum:
				self.verticalScrollBar().setValue(newValue)
	def setFont(self, font):
		# apply the font to the document so rendered HTML uses it by default
		self.document().setDefaultFont(font)
| retext-project/retext | ReText/preview.py | Python | gpl-3.0 | 3,107 |
#!/usr/bin/python
'''
Argument parser for infile/outfile for converters
'''
import argparse
import sys
import os.path
class Parser:
        """Command-line parser/validator for the converter script (Python 2).

        Arguments are parsed immediately on construction and verified;
        verify() exits the process on a missing input file or a declined
        overwrite of an existing output file.
        """
        def __init__(self):
                self.args = self.parse()
                self.verify()
        def parse(self):
                """Parse argv into an argparse Namespace (infile, outfile, force)."""
                p = argparse.ArgumentParser(description="Convert SLAB6 VOX files to DCMMO .vox json")
                p.add_argument('infile')
                p.add_argument('outfile')
                p.add_argument('-f', '--force', action='store_true')
                args = p.parse_args()
                return args
        def get_overwrite_command(self):
                """Prompt the user for overwrite confirmation (lower-cased)."""
                return raw_input('Overwrite? [Y/n]').lower()
        def verify(self):
                """Validate the parsed paths; sys.exit() on any failure."""
                p = self.args
                err = False
                if not os.path.exists(p.infile):
                        print '%s file does not exist' % (p.infile,)
                        err = True
                if not p.force:
                        # without --force, an existing output file needs confirmation
                        if os.path.exists(p.outfile):
                                print '%s file exists' % (p.outfile,)
                                # keep prompting until a recognised answer is given
                                while True:
                                        over = self.get_overwrite_command()
                                        if over in ['y', 'n']:
                                                break
                                if over == 'n':
                                        print 'Aborting'
                                        err = True
                                else:
                                        print 'Overwriting %s' % (p.outfile,)
                if err:
                        sys.exit()
| Gnomescroll/Gnomescroll | tools/vox_lib/converters/converter_args.py | Python | gpl-3.0 | 1,296 |
import psycopg2 as dbapi2
import datetime
class Favorite:
    """Data-access layer for the FAVORITES table (PostgreSQL via psycopg2).

    Every method opens a fresh connection using the DSN stored in the
    application configuration (``app.config['dsn']``).

    Fix: ``cursor.execute`` parameters in List_Favorites / List_FavoritesID
    were passed as a bare string -- ``(user_ids)`` is not a tuple -- while
    psycopg2 requires a sequence or mapping; they are now one-tuples.
    """

    def __init__(self, app):
        # keep the application object to reach its config later
        self.app = app

    def initialize_Favorite(self):
        """Create the FAVORITES table if it does not already exist."""
        with dbapi2.connect(self.app.config['dsn']) as connection:
            try:
                cursor = connection.cursor()
                cursor.execute(""" CREATE TABLE IF NOT EXISTS FAVORITES(
                                ID SERIAL PRIMARY KEY,
                                HYPE_ID INTEGER NOT NULL REFERENCES HYPES (HYPE_ID) ON DELETE CASCADE,
                                USER_ID INTEGER NOT NULL REFERENCES USERS (USER_ID) ON DELETE CASCADE,
                                DATE DATE NOT NULL,
                                RATE INTEGER NOT NULL,
                                UNIQUE(HYPE_ID,USER_ID)
                                )""")
                connection.commit()
            except dbapi2.DatabaseError:
                connection.rollback()
            finally:
                connection.commit()

    def drop_Favorite(self):
        """Drop the FAVORITES table if present."""
        with dbapi2.connect(self.app.config['dsn']) as connection:
            try:
                cursor = connection.cursor()
                query = """DROP TABLE IF EXISTS FAVORITES"""
                cursor.execute(query)
            except dbapi2.DatabaseError:
                connection.rollback()
            finally:
                connection.commit()

    def List_Favorites(self, user_ids):
        """Return all favorite rows of one user, oldest first."""
        with dbapi2.connect(self.app.config['dsn']) as connection:
            user_ids = str(user_ids)
            cursor = connection.cursor()
            query = """ SELECT * FROM FAVORITES WHERE USER_ID = %s ORDER BY DATE ASC"""
            # parameters must be a sequence: one-tuple, not a bare string
            cursor.execute(query, (user_ids,))
            favorites = cursor.fetchall()
            return favorites

    def List_FavoritesID(self, favorite_id):
        """Return the favorite row(s) with the given primary key."""
        with dbapi2.connect(self.app.config['dsn']) as connection:
            cursor = connection.cursor()
            query = """ SELECT * FROM FAVORITES WHERE ID = %s ORDER BY DATE ASC"""
            # parameters must be a sequence: one-tuple, not a bare value
            cursor.execute(query, (favorite_id,))
            favorites = cursor.fetchall()
            return favorites

    def Delete_Favorite(self, favorite_id):
        """Delete the favorite row with the given primary key."""
        with dbapi2.connect(self.app.config['dsn']) as connection:
            try:
                cursor = connection.cursor()
                query = """DELETE FROM FAVORITES WHERE (ID = %s)"""
                cursor.execute(query, (favorite_id,))
                connection.commit()
                cursor.close()
            except dbapi2.DatabaseError:
                connection.rollback()
            finally:
                connection.commit()

    def Add_Favorite(self, user_ids, hype_id):
        """Add a favorite for (user, hype), dated today with rate 1."""
        with dbapi2.connect(self.app.config['dsn']) as connection:
            try:
                date = datetime.date.today()
                rate = 1
                cursor = connection.cursor()
                query = """INSERT INTO FAVORITES(HYPE_ID, USER_ID, DATE, RATE) VALUES (%s, %s, %s, %s)"""
                cursor.execute(query, (hype_id, user_ids, date, rate))
                connection.commit()
                cursor.close()
            except dbapi2.DatabaseError:
                connection.rollback()
            finally:
                connection.commit()

    def Update_Favorite(self, favorite_id, rate):
        """Set the rate of an existing favorite row."""
        with dbapi2.connect(self.app.config['dsn']) as connection:
            try:
                cursor = connection.cursor()
                query = """UPDATE FAVORITES
                            SET RATE = %s
                            WHERE (ID = %s)"""
                cursor.execute(query, (rate, favorite_id))
                connection.commit()
                cursor.close()
            except dbapi2.DatabaseError:
                connection.rollback()
            finally:
                connection.commit()
| itucsdb1618/itucsdb1618 | favorite.py | Python | gpl-3.0 | 3,818 |
#!/usr/bin/python
import logging
import argparse
import os
import os.path
import re
from datetime import datetime
from datetime import timedelta
BasePOSIXTime = datetime(1970, 1, 1)

def GetPOSIXTimestamp(dateTimeObj):
    """Return *dateTimeObj* (naive, epoch-relative) as integer seconds
    since the Unix epoch."""
    elapsed = dateTimeObj - BasePOSIXTime
    return int(elapsed.total_seconds())
def ListPhotos():
    # TODO: placeholder -- the Flickr photo-listing REST call is not implemented yet.
    return
def main():
    """Parse command-line arguments, configure logging, and dispatch the command."""
    parser = argparse.ArgumentParser(description = 'Flickr RESTful APIs Client')
    parser.add_argument('option', nargs='?', default='list', choices=['list'])
    parser.add_argument('-v', '--verbose', help='verbose messages', action='store_true', dest='verbose')
    args = parser.parse_args()

    # -v raises verbosity from INFO to DEBUG.
    log_level = logging.DEBUG if args.verbose else logging.INFO
    logging.basicConfig(level=log_level, datefmt='%Y.%m.%d %H:%M:%S', format='%(asctime)s %(message)s')
    logging.debug(args)

    # Local (not UTC) time is used deliberately here.
    now = datetime.now()
    logging.info('Start working ... Now={}[{}]'.format(now.isoformat(), GetPOSIXTimestamp(now)))

    if args.option.lower() == 'list':
        ListPhotos()
    else:
        parser.print_help()
    return

if __name__ == '__main__':
    main()
| WesleyLight/FlickrREST | python/flickr.py | Python | gpl-3.0 | 1,192 |
import os
import shutil
import pytest
import __builtin__
from libturpial.config import *
from libturpial.exceptions import EmptyOAuthCredentials
from tests.helpers import DummyFileHandler
class DummyConfigParser:
    # Test double standing in for ConfigParser: every mutating call is a
    # no-op and queries return empty/True defaults so config code can run
    # without touching the filesystem.
    def read(self, value):
        pass
    def sections(self):
        return []
    def options(self):
        return []
    def add_section(self, value):
        pass
    def set(self, x, y, z):
        pass
    def write(self, value):
        pass
    def has_section(self, value):
        return True
class DummyGenerator:
    """Iterable test double that simply replays a pre-built sequence."""

    def __init__(self, array):
        self.array = array

    def __iter__(self):
        # Delegate iteration to the wrapped sequence.
        return iter(self.array)
class TestConfigBase:
    """Unit tests for libturpial's ConfigBase, with file and ConfigParser
    access monkeypatched out so no real I/O happens."""
    @classmethod
    @pytest.fixture(autouse=True)
    def setup_class(self, monkeypatch):
        # Minimal two-section default config used by most tests below.
        self.default = {
            'foo': {
                'bar': 987,
            },
            'bla': {
                'ble': 'on',
                'bli': 'off',
            }
        }
        self.config_base = ConfigBase(self.default)
        # Neutralize all filesystem/ConfigParser side effects.
        monkeypatch.setattr(__builtin__, 'open', lambda x, y: DummyFileHandler())
        monkeypatch.setattr(self.config_base.cfg, 'add_section', lambda x: None)
        monkeypatch.setattr(self.config_base.cfg, 'set', lambda x, y, z: None)
        monkeypatch.setattr(self.config_base.cfg, 'write', lambda x: None)
        self.config_base.configpath = '/tmp/foo'
    def test_default_values(self):
        assert 'General' in APP_CFG
        assert 'update-interval' in APP_CFG['General']
        assert 'queue-interval' in APP_CFG['General']
        assert 'minimize-on-close' in APP_CFG['General']
        assert 'statuses' in APP_CFG['General']
        assert 'Columns' in APP_CFG
        assert 'Services' in APP_CFG
        assert 'shorten-url' in APP_CFG['Services']
        assert 'upload-pic' in APP_CFG['Services']
        assert 'Proxy' in APP_CFG
        assert 'username' in APP_CFG['Proxy']
        assert 'password' in APP_CFG['Proxy']
        assert 'server' in APP_CFG['Proxy']
        assert 'port' in APP_CFG['Proxy']
        assert 'protocol' in APP_CFG['Proxy']
        assert 'Advanced' in APP_CFG
        assert 'socket-timeout' in APP_CFG['Advanced']
        assert 'show-user-avatars' in APP_CFG['Advanced']
        assert 'Window' in APP_CFG
        assert 'size' in APP_CFG['Window']
        assert 'Notifications' in APP_CFG
        assert 'on-updates' in APP_CFG['Notifications']
        assert 'on-actions' in APP_CFG['Notifications']
        assert 'Sounds' in APP_CFG
        assert 'on-login' in APP_CFG['Sounds']
        assert 'on-updates' in APP_CFG['Sounds']
        assert 'Browser' in APP_CFG
        assert 'cmd' in APP_CFG['Browser']
        assert 'OAuth' in ACCOUNT_CFG
        assert 'key' in ACCOUNT_CFG['OAuth']
        assert 'secret' in ACCOUNT_CFG['OAuth']
        assert 'Login' in ACCOUNT_CFG
        assert 'username' in ACCOUNT_CFG['Login']
        assert 'protocol' in ACCOUNT_CFG['Login']
    def test_init_config_base(self):
        config_base = ConfigBase(self.default)
        assert config_base.filepath == ''
        assert config_base.extra_sections == {}
        assert 'foo' in config_base.default
        config_base = ConfigBase(None)
        assert 'Advanced' in config_base.default
    def test_register_extra_option(self, monkeypatch):
        monkeypatch.setattr(self.config_base, 'write', lambda x, y, z: None)
        self.config_base.register_extra_option('foo', 'baz', 000)
        assert 'baz' in self.config_base.extra_sections['foo']
        assert self.config_base.extra_sections['foo']['baz'] == 000
    def test_create(self, monkeypatch):
        self.config_base.create()
        # _ConfigBase__config is the name-mangled private __config attribute.
        assert self.config_base._ConfigBase__config['foo']['bar'] == 987
    def test_load(self, monkeypatch):
        default = {
            'foo': {
                'bar': 987,
            },
            'bla': {
                'ble': 'on',
                'bli': 'off',
            },
            'dummy': {},
        }
        monkeypatch.setattr(self.config_base.cfg, 'read', lambda x: None)
        monkeypatch.setattr(self.config_base, 'default', default)
        monkeypatch.setattr(self.config_base, 'save', lambda: None)
        # TODO: How to test this?
        assert self.config_base.load() == None
    def test_load_failsafe(self):
        config_base = ConfigBase(self.default)
        config_base.load_failsafe()
        assert config_base._ConfigBase__config == self.default
    def test_save(self, monkeypatch):
        self.config_base.save({'foo2': {'bar2': 2}})
        assert self.config_base._ConfigBase__config['foo2']['bar2'] == 2
    def test_write(self, monkeypatch):
        # Exercise both the section-missing and section-present code paths.
        monkeypatch.setattr(self.config_base.cfg, 'has_section', lambda x: False)
        self.config_base.write('foo', 'qux', -1)
        assert self.config_base._ConfigBase__config['foo']['qux'] == -1
        monkeypatch.setattr(self.config_base.cfg, 'has_section', lambda x: True)
        self.config_base.write('foo', 'qux', 99)
        assert self.config_base._ConfigBase__config['foo']['qux'] == 99
    def test_write_section(self, monkeypatch):
        monkeypatch.setattr(self.config_base.cfg, 'remove_section', lambda x: None)
        monkeypatch.setattr(self.config_base.cfg, 'has_section', lambda x: False)
        self.config_base.write_section('foo', {'ble': 2})
        assert len(self.config_base._ConfigBase__config['foo']) == 1
        assert self.config_base._ConfigBase__config['foo']['ble'] == 2
        monkeypatch.setattr(self.config_base.cfg, 'has_section', lambda x: True)
        self.config_base.write_section('foo', {'ble': 2})
        assert len(self.config_base._ConfigBase__config['foo']) == 1
        assert self.config_base._ConfigBase__config['foo']['ble'] == 2
    def test_read(self):
        self.config_base.create()
        value = self.config_base.read('foo', 'bar')
        assert value == 987
        value = self.config_base.read('bla', 'ble', True)
        assert value == True
        value = self.config_base.read('bla', 'bli', False)
        assert value == 'off'
        value = self.config_base.read('bla', 'bli', True)
        assert value == False
        value = self.config_base.read('dummy', 'var')
        assert value == None
        value = self.config_base.read('foo', 'bar', True)
        assert value == 987
    def test_read_section(self):
        self.config_base.create()
        section = self.config_base.read_section('foo')
        assert section == self.default['foo']
        section = self.config_base.read_section('faa')
        assert section is None
    def test_read_all(self, monkeypatch):
        self.config_base.create()
        assert self.config_base.read_all() == self.default
        monkeypatch.delattr(self.config_base, '_ConfigBase__config')
        assert self.config_base.read_all() == None
class TestAppConfig:
    """Unit tests for libturpial's AppConfig (filters, friends, accounts,
    columns, proxy) with all filesystem access monkeypatched out."""
    @classmethod
    @pytest.fixture(autouse=True)
    def setup_class(self, monkeypatch):
        self.default = {
            'foo': {
                'bar': 987,
            },
            'bla': {
                'ble': 'on',
                'bli': 'off',
            }
        }
        # Avoid creating real directories/files or a real ConfigParser.
        monkeypatch.setattr(os, 'makedirs', lambda x: None)
        monkeypatch.setattr(__builtin__, 'open', lambda x, y: DummyFileHandler())
        monkeypatch.setattr(ConfigParser, 'ConfigParser', lambda: DummyConfigParser())
        self.app_config = AppConfig('/tmp/user', self.default)
    def test_init(self):
        assert self.app_config.configpath == '/tmp/user/config'
        assert self.app_config.filterpath == '/tmp/user/filtered'
        assert self.app_config.friendspath == '/tmp/user/friends'
    def test_load_filters(self, monkeypatch):
        # Blank lines in the filters file must be dropped.
        monkeypatch.setattr(__builtin__, 'open', lambda x, y: DummyFileHandler(['@foo', 'bar', "\n"]))
        filters = self.app_config.load_filters()
        assert filters[0] == '@foo'
        assert len(filters) == 2
    # TODO: How to test that this works? Return 0 maybe?
    def test_save_filters(self):
        assert self.app_config.save_filters(['foo', 'bar']) == ['foo', 'bar']
    # TODO: How to test that this works? Return 0 maybe?
    def test_append_filter(self, monkeypatch):
        assert self.app_config.append_filter('@dummy') == None
        monkeypatch.setattr(self.app_config, 'load_filters', lambda: ['@dummy'])
        with pytest.raises(ExpressionAlreadyFiltered):
            self.app_config.append_filter('@dummy')
    # TODO: How to test that this works? Return 0 maybe?
    def test_remove_filter(self, monkeypatch):
        monkeypatch.setattr(self.app_config, 'load_filters', lambda: ['@foo', 'bar', '@dummy'])
        monkeypatch.setattr(self.app_config, 'save_filters', lambda x: None)
        assert self.app_config.remove_filter('bar') == None
    def test_load_friends(self, monkeypatch):
        monkeypatch.setattr(__builtin__, 'open', lambda x, y: DummyFileHandler(['foo', 'bar\n', "\n"]))
        friends = self.app_config.load_friends()
        assert friends == ['foo', 'bar']
    # TODO: How to test that this works? Return 0 maybe?
    def test_save_friends(self):
        assert self.app_config.save_friends(['foo', 'bar']) == ['foo', 'bar']
    def test_get_stored_accounts(self, monkeypatch):
        # Accounts are the directory names found under the user dir.
        monkeypatch.setattr(os, 'walk', lambda x: DummyGenerator([('foopath', ['dirpath1', 'dirpath2'], ['filename1'])]))
        monkeypatch.setattr(os.path, 'isfile', lambda x: True)
        accounts = self.app_config.get_stored_accounts()
        assert accounts == ['dirpath1', 'dirpath2']
    def test_get_stored_columns(self, monkeypatch):
        # Columns come back sorted by their columnN key, not insertion order.
        temp = {'column3': 'foo-twitter-timeline', 'column1': 'foo-twitter-directs', 'column2': 'foo-twitter-sent'}
        monkeypatch.setattr(self.app_config, 'read_section', lambda x: temp)
        columns = self.app_config.get_stored_columns()
        assert columns[0] == 'foo-twitter-directs'
        assert columns[1] == 'foo-twitter-sent'
        assert columns[2] == 'foo-twitter-timeline'
        temp = {'column1': '', 'column2': ''}
        monkeypatch.setattr(self.app_config, 'read_section', lambda x: temp)
        columns = self.app_config.get_stored_columns()
        assert len(columns) == 0
    def test_get_proxy(self, monkeypatch):
        proxy_temp = {'server': '127.0.0.1', 'port': 80, 'protocol': 'http', 'username': '', 'password': ''}
        monkeypatch.setattr(self.app_config, 'read_section', lambda x: proxy_temp)
        proxy = self.app_config.get_proxy()
        assert proxy.secure == False
        assert proxy.host == '127.0.0.1'
        assert proxy.port == 80
        assert proxy.username == ''
        assert proxy.password == ''
        proxy_temp['protocol'] = 'https'
        proxy = self.app_config.get_proxy()
        assert proxy.secure == True
    def test_get_socket_timeout(self, monkeypatch):
        monkeypatch.setattr(self.app_config, 'read', lambda x, y: 9999)
        assert self.app_config.get_socket_timeout() == 9999
    # TODO: How to test that this works? Return 0 maybe?
    def test_delete(self, monkeypatch):
        monkeypatch.setattr(os, 'remove', lambda x: None)
        assert self.app_config.delete() == True
class TestAccountConfig:
    """Unit tests for libturpial's AccountConfig (OAuth credential storage,
    obfuscation and cache handling) with filesystem access stubbed out."""
    @classmethod
    @pytest.fixture(autouse=True)
    def setup_class(self, monkeypatch):
        monkeypatch.setattr(os, 'makedirs', lambda x: None)
        monkeypatch.setattr(os.path, 'isdir', lambda x: False)
        monkeypatch.setattr(os, 'remove', lambda x: None)
        monkeypatch.setattr(__builtin__, 'open', lambda x, y: DummyFileHandler())
        # Patch class methods by dotted path so the constructor cannot write.
        monkeypatch.setattr('libturpial.config.AccountConfig.write', lambda w, x, y, z: None)
        monkeypatch.setattr('libturpial.config.AccountConfig.exists', lambda x, y: False)
        monkeypatch.setattr('libturpial.config.AccountConfig.create', lambda x: None)
        self.account_config = AccountConfig('foo-twitter')
    def test_init(self, monkeypatch):
        assert isinstance(self.account_config, AccountConfig)
    def test_save_oauth_credentials(self, monkeypatch):
        monkeypatch.setattr(self.account_config, 'write', lambda x, y, z: None)
        assert self.account_config.save_oauth_credentials('123', '456', '789') == None
    def test_load_oauth_credentials(self, monkeypatch):
        monkeypatch.setattr(self.account_config, 'read', lambda x, y: 'dummy')
        key, secret = self.account_config.load_oauth_credentials()
        assert (key == 'dummy' and secret == 'dummy')
        monkeypatch.setattr(self.account_config, 'read', lambda x, y: None)
        with pytest.raises(EmptyOAuthCredentials):
            self.account_config.load_oauth_credentials()
    def test_forget_oauth_credentials(self, monkeypatch):
        monkeypatch.setattr(self.account_config, 'write', lambda x, y, z: None)
        assert self.account_config.forget_oauth_credentials() == None
    def test_transform(self):
        # transform/revert are a reversible obfuscation keyed by the account.
        assert self.account_config.transform('123', 'foo') == 'm1TP9YzVa10RTpVTDRlWZ10b'
    def test_revert(self):
        assert self.account_config.revert('m1TP9YzVa10RTpVTDRlWZ10b', 'foo') == '123'
        assert self.account_config.revert('', 'foo') == None
    def test_dismiss(self, monkeypatch):
        monkeypatch.setattr(os.path, 'isdir', lambda x: True)
        monkeypatch.setattr(os.path, 'isfile', lambda x: True)
        monkeypatch.setattr(shutil, 'rmtree', lambda x: None)
        # TODO: How to test this?
        assert self.account_config.dismiss() == None
    def test_delete_cache(self, monkeypatch):
        monkeypatch.setattr(os, 'walk', lambda x: [('/tmp', ['my_dir'], ['file1', 'file2'])])
        # TODO: How to test this?
        assert self.account_config.delete_cache() == list()
    def test_calculate_cache_size(self, monkeypatch):
        monkeypatch.setattr(os, 'walk', lambda x: [('/tmp', ['my_dir'], ['file1', 'file2'])])
        monkeypatch.setattr(os.path, 'getsize', lambda x: 10)
        assert self.account_config.calculate_cache_size() == 20
| satanas/libturpial | tests/test_config.py | Python | gpl-3.0 | 14,032 |
from django import forms
from .models import Client, Contact
class ClientForm(forms.ModelForm):
    """ModelForm for creating and editing Client records."""
    class Meta:
        model = Client
        # Only these Client fields are user-editable via the form.
        fields = ('first_name', 'last_name', 'city', 'email',
            'phone_number', 'comment',
        )
class ContactForm(forms.ModelForm):
    """ModelForm for recording a contact (interaction) with a client."""
    class Meta:
        model = Contact
        fields = ('way', 'date', 'comment')
| rklimcza/not-yet-crm | crm/forms.py | Python | gpl-3.0 | 407 |
"""
"""
from __future__ import absolute_import
import logging
import json
from datetime import datetime
from flask import render_template
from flask import request
from flask import make_response
from . import app
from ..utils.slack import Slack
from ..models.breakfast import Breakfast
def datetime_handler(x):
    """``json.dumps`` default hook: serialize datetimes as ISO-8601 strings.

    Raises TypeError for any non-datetime value, per the json protocol.
    """
    if not isinstance(x, datetime):
        raise TypeError("Unknown type")
    return x.isoformat()
def addBreakfast(userid, channelid, channelname):
    """Register the next breakfast slot for a Slack user in a channel.

    Resolves the user's profile (and the real name of private groups),
    stores a Breakfast entity on the next available date, and announces
    it in the channel.

    :param userid: Slack user id of the volunteer
    :param channelid: Slack channel id the command came from
    :param channelname: Slack channel name ('directmessage'/'privategroup'
        are special pseudo-names)
    :return: message string echoed back to the slash-command caller
    """
    slack = Slack(token=app.config['SLACK_APP_TOKEN'])
    userresult = slack.getUserInfos(userid=userid)
    if userresult.status_code != 200:
        return 'Cant\' retrieve user infos'
    if channelname == 'directmessage':
        return 'Breakfast command need to be triggered in a public or private channel !'
    elif channelname == 'privategroup':
        # Private groups report the pseudo-name 'privategroup'; fetch the
        # real group name from the API.
        groupinfo = slack.getGroupInfos(channelid=channelid)
        # BUG fix: this used to re-check userresult.status_code (already
        # known to be 200) instead of the group-info response.
        if groupinfo.status_code != 200:
            return 'Cant\' retrieve user infos'
        group = json.loads(groupinfo.content)
        channelname = group['group']['name']
    user = json.loads(userresult.content)
    fullname = user['user']['real_name']
    avatar = user['user']['profile']['image_512']
    username = user['user']['name']
    date = Breakfast.getNextAvailableDate(channelid)
    bt = Breakfast(
        username=username,
        date=date,
        userid=userid,
        fullname=fullname,
        avatar=avatar,
        channelid=channelid,
        channelname=channelname
    )
    bt.put()
    # Announce the booked date in the channel (French message).
    text = '@' + username + ' merci pour le petit dej, le ' + date.strftime('%A %d %B %Y')
    slack.postMessage(channelid, text)
    return 'Merci pour ce moment !'
def listNextBreakfasts(channel=None):
    """Build a JSON response listing upcoming breakfasts.

    :param channel: Slack channel id to filter on; None lists every channel
        and appends the channel name to each line
    :return: Flask response with Content-Type application/json and a
        ``text`` field containing the planning
    """
    nextBreakfasts = Breakfast.getNextBreakfasts(channel)
    text = 'Breakfast planning : \n'
    for b in nextBreakfasts:
        text += b['date'].strftime('%d/%m/%Y') + ' : ' + b['fullname']
        # Only mention the channel in the cross-channel ("all") listing.
        # BUG fix: dict.has_key() was removed in Python 3 -- use `in`.
        text += ' pour #' + b['channelname'] if channel is None and 'channelname' in b and b['channelname'] is not None else ''
        text += '\n'
    resp = make_response(json.dumps({
        'text': text
    }))
    resp.headers['Content-Type'] = 'application/json'
    return resp
@app.route('/breakfast-cmd', methods=['POST'])
def breakfast():
    """Slack slash-command endpoint.

    Dispatches on the ``command`` form field:
      /bt         -> book a breakfast slot in the current channel
      /breakfast  -> 'list' shows this channel's planning, 'all' shows every
                     channel's planning
    Anything else returns 'Bad command'.
    """
    text = request.form.get('text', type=str)
    userid = request.form.get('user_id', type=str)
    command = request.form.get('command', type=str)
    channelid = request.form.get('channel_id', type=str)
    channelname = request.form.get('channel_name', type=str)
    if command == '/bt':
        return addBreakfast(userid, channelid, channelname)
    elif command == '/breakfast':
        if text == 'list':
            return listNextBreakfasts(channel=channelid)
        elif text == 'all':
            return listNextBreakfasts()
    return 'Bad command'
| jeremlb/breakfast-tracker | server/controllers/index.py | Python | gpl-3.0 | 2,838 |
# Dictionary data structure in python
# Dictionary literal with two key/value pairs
sample_dict = {"Roll": 50, "Name": "Nityan"}
# Looping over a dict yields its keys; index back in for the values
for key in sample_dict:
    print(key, sample_dict[key])
# List of keys
keys = list(sample_dict.keys())
print("Keys = ", keys)
# List of values
values = list(sample_dict.values())
print("values = ", values)
name_dict = dict()
name_list = list()
# Read names from stdin into the list; typing 'done' stops the loop
while True:
    a = input("Enter a name: ")
    if a == "done":
        break
    else:
        name_list.append(a)
print("Names in the list = ", name_list)
# Build a frequency dictionary from the list:
# each name maps to how many times it was entered, starting from 1
for name in name_list:
    if name not in name_dict:
        name_dict[name] = 1
    else:
        name_dict[name] += 1
print("Name Dictionary = ", name_dict)
# NOTE(review): the two variables below are never used -- likely leftovers.
name_dict2 = dict()
name_list2 = list()
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-13 20:05
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Artist, Album and Song
    # models (Song ordered by 'order') and wires Album -> Artist afterwards
    # to break the creation-order dependency. Do not hand-edit once applied.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Album',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Artist',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=80, unique=True)),
            ],
        ),
        migrations.CreateModel(
            name='Song',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('order', models.IntegerField()),
                ('name', models.CharField(max_length=100)),
                ('duration', models.IntegerField()),
                ('album', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='songs', to='songs.Album')),
            ],
            options={
                'ordering': ['order'],
            },
        ),
        migrations.AddField(
            model_name='album',
            name='artist',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='albums', to='songs.Artist'),
        ),
    ]
| migglu/song-streamer | songstreamer/songs/migrations/0001_initial.py | Python | gpl-3.0 | 1,687 |
from calfbox._cbox2 import *
from io import BytesIO
import struct
import sys
import traceback
import calfbox.metadata as metadata #local file metadata.py
# When True, _create_unmarshaller prints diagnostics while wrapper classes
# are being generated.
type_wrapper_debug = False
# The module still supports Python 2; branch on the running interpreter.
is_python3 = not sys.version.startswith("2")
###############################################################################
# Ugly internals. Please skip this section for your own sanity.
###############################################################################
class GetUUID:
    """An object that calls a C layer command, receives a /uuid callback from it
    and stores the passed UUID in its uuid attribute.
    Example use: GetUUID('/command', arg1, arg2...).uuid
    """
    def __init__(self, cmd, *cmd_args):
        def callback(cmd, fb, args):
            if cmd == "/uuid" and len(args) == 1:
                self.uuid = args[0]
            else:
                # BUG fix: 'ValueException' does not exist (raising it would
                # itself be a NameError); ValueError was clearly intended.
                raise ValueError("Unexpected callback: %s" % cmd)
        self.callback = callback
        self.uuid = None
        # do_cmd delivers callbacks to self via __call__ below.
        do_cmd(cmd, self, list(cmd_args))

    def __call__(self, *args):
        self.callback(*args)
class GetThings:
    """A generic callback object that receives various forms of information from
    C layer and converts then into object's Python attributes.
    This is an obsolete interface, to be replaced by GetUUID or metaclass
    based type-safe autoconverter. However, there are still some cases that
    aren't (yet) handled by either.
    """
    @staticmethod
    def by_uuid(uuid, cmd, anames, args):
        # Convenience wrapper: address the command to a specific object UUID.
        return GetThings(Document.uuid_cmd(uuid, cmd), anames, args)
    def __init__(self, cmd, anames, args):
        # Attribute-name prefixes select how callbacks populate the object:
        #   "*name" -> list (one append per callback)
        #   "%name" -> dict keyed by the callback's first argument
        #   "?name" -> bool
        #   plain   -> scalar (last value wins)
        for i in anames:
            if i.startswith("*"):
                setattr(self, i[1:], [])
            elif i.startswith("%"):
                setattr(self, i[1:], {})
            else:
                setattr(self, i, None)
        anames = set(anames)
        # Raw (cmd, fb, args) callback log, kept for debugging/__str__.
        self.seq = []
        def update_callback(cmd, fb, args):
            self.seq.append((cmd, fb, args))
            cmd = cmd[1:]
            # Single-argument callbacks store the bare value; multi-argument
            # callbacks store the argument list/tuple.
            if cmd in anames:
                if len(args) == 1:
                    setattr(self, cmd, args[0])
                else:
                    setattr(self, cmd, args)
            elif "*" + cmd in anames:
                if len(args) == 1:
                    getattr(self, cmd).append(args[0])
                else:
                    getattr(self, cmd).append(args)
            elif "%" + cmd in anames:
                if len(args) == 2:
                    getattr(self, cmd)[args[0]] = args[1]
                else:
                    getattr(self, cmd)[args[0]] = args[1:]
            elif "?" + cmd in anames:
                setattr(self, cmd, bool(args[0]))
            elif len(args) == 1:
                # Unknown single-argument callbacks still land as attributes.
                setattr(self, cmd, args[0])
        do_cmd(cmd, update_callback, args)
    def __str__(self):
        return str(self.seq)
class PropertyDecorator(object):
    """Abstract property decorator: wraps a property's declared type and can
    remap the callback command path derived from the property name."""

    def __init__(self, base):
        self.base = base

    def get_base(self):
        """Return the wrapped (decorated) property type."""
        return self.base

    def map_cmd(self, cmd):
        """Map the default command path to a decorator-specific one; the base
        implementation is the identity."""
        return cmd
class AltPropName(PropertyDecorator):
    """Command-name-changing property decorator. Binds a property to the
    specified /path, different from the default one, which based on property name,
    with -s and -es suffix removed for lists and dicts."""

    def __init__(self, alt_name, base):
        PropertyDecorator.__init__(self, base)
        self.alt_name = alt_name

    def map_cmd(self, cmd):
        # Ignore the name-derived command; always use the explicit path.
        return self.alt_name

    def execute(self, property, proptype, klass):
        # A pure renaming decorator needs no class-level patching.
        pass
class SettableProperty(PropertyDecorator):
    """Decorator that creates a setter method for the property."""
    def execute(self, property, proptype, klass):
        # Adds a set_<property> method to the wrapper class, converting the
        # value according to the declared property type before sending it.
        if type(proptype) is dict:
            # Dict properties are set one entry at a time: (key, value).
            setattr(klass, 'set_' + property, lambda self, key, value: self.cmd('/' + property, None, key, value))
        elif type(proptype) is bool:
            # Booleans are sent to the C layer as 0/1 integers.
            setattr(klass, 'set_' + property, lambda self, value: self.cmd('/' + property, None, 1 if value else 0))
        elif issubclass(proptype, DocObj):
            # Document objects are passed by UUID.
            setattr(klass, 'set_' + property, lambda self, value: self.cmd('/' + property, None, value.uuid))
        else:
            # Plain types coerce the value via the type's constructor.
            setattr(klass, 'set_' + property, lambda self, value: self.cmd('/' + property, None, proptype(value)))
def new_get_things(obj, cmd, settermap, args):
    """Call C command with arguments 'args', populating a return object obj
    using settermap to interpret callback commands and initialise the return
    object."""
    def update_callback(cmd2, fb, args2):
        try:
            # Route each callback to the setter registered for its command.
            if cmd2 in settermap:
                settermap[cmd2](obj, args2)
            elif cmd2 != '/uuid': # Ignore UUID as it's usually safe to do so
                print ("Unexpected command: %s" % cmd2)
        except Exception as error:
            # Print the traceback here because the C layer may swallow it.
            traceback.print_exc()
            raise
    # Set initial values for the properties (None or empty dict/list)
    for setterobj in settermap.values():
        setattr(obj, setterobj.property, setterobj.init_value())
    # Call command and apply callback commands via setters to the object
    do_cmd(cmd, update_callback, args)
    return obj
def _error_arg_mismatch(required, passed):
raise ValueError("Types required: %s, values passed: %s" % (repr(required), repr(passed)))
def _handle_object_wrapping(t):
    """Return a converter for type *t*: DocObj subclasses are resolved through
    the Document UUID registry; any other type acts as its own constructor."""
    if not issubclass(t, DocObj):
        return t
    return lambda uuid: Document.map_uuid_and_check(uuid, t)
def _make_args_to_type_lambda(t):
    """Build a decoder turning a callback argument list into one value of
    type *t* (after DocObj wrapping resolution)."""
    converter = _handle_object_wrapping(t)
    def decode(args):
        return converter(*args)
    return decode
def _make_args_to_tuple_of_types_lambda(ts):
    """Build a decoder that converts a callback argument list element-wise
    through the matching converters in *ts*, or raises (via
    _error_arg_mismatch) when the argument count differs."""
    ts = list(map(_handle_object_wrapping, ts))
    # NOTE(review): max(len(ts), len(args)) only executes when the lengths are
    # equal (the mismatch branch raises first), so it is effectively len(ts).
    return lambda args: tuple([ts[i](args[i]) for i in range(max(len(ts), len(args)))]) if len(ts) == len(args) else _error_arg_mismatch(ts, args)
def _make_args_decoder(t):
    """Pick the right argument decoder for a declared type: a tuple of types
    decodes positionally, anything else decodes as a single typed value."""
    if type(t) is tuple:
        return _make_args_to_tuple_of_types_lambda(t)
    return _make_args_to_type_lambda(t)
def get_thing(cmd, fieldcmd, datatype, *args):
    """Execute C-layer command *cmd* and collect values delivered via
    *fieldcmd* callbacks, decoded according to *datatype*:

    - [T]      -> list of T, one element per callback
    - {K: V}   -> dict; first callback argument is the key, the rest the value
    - plain T  -> single value ("pull" mode: the last callback wins)
    """
    pull = False
    if type(datatype) is list:
        assert (len(datatype) == 1)
        decoder = _make_args_decoder(datatype[0])
        value = []
        def adder(data):
            value.append(decoder(data))
    elif type(datatype) is dict:
        assert (len(datatype) == 1)
        key_type, value_type = list(datatype.items())[0]
        key_decoder = _make_args_decoder(key_type)
        value_decoder = _make_args_decoder(value_type)
        value = {}
        def adder(data):
            value[key_decoder([data[0]])] = value_decoder(data[1:])
    else:
        decoder = _make_args_decoder(datatype)
        # Single-slot list lets the closure overwrite the scalar result.
        def adder(data):
            value[0] = decoder(data)
        value = [None]
        pull = True
    def callback(cmd2, fb, args2):
        if cmd2 == fieldcmd:
            adder(args2)
        else:
            print ("Unexpected command %s" % cmd2)
    do_cmd(cmd, callback, list(args))
    if pull:
        return value[0]
    else:
        return value
class SetterWithConversion:
    """A setter object class that sets a specific property to a typed value or a tuple of typed value."""

    def __init__(self, property, extractor):
        self.property = property
        self.extractor = extractor

    def init_value(self):
        """Scalar properties start out unset."""
        return None

    def __call__(self, obj, args):
        # print ("Setting attr %s on object %s" % (self.property, obj))
        converted = self.extractor(args)
        setattr(obj, self.property, converted)
class ListAdderWithConversion:
    """A setter object class that adds a tuple filled with type-converted
    arguments of the callback to a list. E.g. ListAdderWithConversion('foo', conv)(obj, [1,2])
    appends conv([1,2]) to the list obj.foo"""

    def __init__(self, property, extractor):
        self.property = property
        self.extractor = extractor

    def init_value(self):
        """List properties start out empty."""
        return []

    def __call__(self, obj, args):
        destination = getattr(obj, self.property)
        destination.append(self.extractor(args))
class DictAdderWithConversion:
    """A setter object class that adds a tuple filled with type-converted
    arguments of the callback to a dictionary under a key passed as first argument
    i.e. DictAdderWithConversion('foo', str, conv)(obj, ['bar',1,2]) stores
    conv([1,2]) under key 'bar' in obj.foo"""

    def __init__(self, property, keytype, valueextractor):
        self.property = property
        self.keytype = keytype
        self.valueextractor = valueextractor

    def init_value(self):
        """Dict properties start out empty."""
        return {}

    def __call__(self, obj, args):
        key = self.keytype(args[0])
        converted = self.valueextractor(args[1:])
        getattr(obj, self.property)[key] = converted
def _type_properties(base_type):
return {prop: getattr(base_type, prop) for prop in dir(base_type) if not prop.startswith("__")}
def _create_setter(prop, t):
    """Choose the setter object for property *prop* from its declared type *t*:
    plain types/tuples/DocObj subclasses store a converted scalar, a dict
    declaration stores keyed entries, a list declaration appends elements."""
    if type(t) in [type, tuple] or issubclass(type(t), DocObj):
        if type_wrapper_debug:
            print ("%s is type %s" % (prop, repr(t)))
        return SetterWithConversion(prop, _make_args_decoder(t))
    elif type(t) is dict:
        # Declarations like {keytype: valuetype} must have exactly one entry.
        assert(len(t) == 1)
        tkey, tvalue = list(t.items())[0]
        if type_wrapper_debug:
            print ("%s is type: %s -> %s" % (prop, repr(tkey), repr(tvalue)))
        return DictAdderWithConversion(prop, tkey, _make_args_decoder(tvalue))
    elif type(t) is list:
        # Declarations like [elementtype] must have exactly one entry.
        assert(len(t) == 1)
        if type_wrapper_debug:
            print ("%s is array of %s" % (prop, repr(t[0])))
        return ListAdderWithConversion(prop, _make_args_decoder(t[0]))
    else:
        raise ValueError("Don't know what to do with property '%s' of type %s" % (prop, repr(t)))
def _create_unmarshaller(name, base_type, object_wrapper = False, property_grabber = _type_properties):
    """Build an unmarshaller for *base_type*'s declared properties.

    Scans the type's (non-dunder) attributes as property->type declarations,
    derives a callback command for each (stripping -s/-es suffixes for
    list/dict properties), and wires up setter objects and decorators.

    Returns either a plain ``cmd, *args -> populated object`` function, or --
    when object_wrapper is True -- a (class-finaliser, command-wrapper) pair
    used by NonDocObj.__new__ to patch wrapper classes.
    """
    all_decorators = {}
    prop_types = {}
    settermap = {}
    if type_wrapper_debug:
        print ("Wrapping type: %s" % name)
        print ("-----")
    for prop, proptype in property_grabber(base_type).items():
        decorators = []
        propcmd = '/' + prop
        # List/dict property names are plural by convention; the per-element
        # callback command is the singular form.
        if type(proptype) in [list, dict]:
            if propcmd.endswith('s'):
                if propcmd.endswith('es'):
                    propcmd = propcmd[:-2]
                else:
                    propcmd = propcmd[:-1]
        # Unwrap decorator layers, letting each remap the command path.
        while isinstance(proptype, PropertyDecorator):
            decorators.append(proptype)
            propcmd = proptype.map_cmd(propcmd)
            proptype = proptype.get_base()
        settermap[propcmd] = _create_setter(prop, proptype)
        all_decorators[prop] = decorators
        prop_types[prop] = proptype
    # Human-readable dump of all unmarshalled properties.
    base_type.__str__ = lambda self: (str(name) + ":" + " ".join(["%s=%s" % (v.property, str(getattr(self, v.property))) for v in settermap.values()]))
    if type_wrapper_debug:
        print ("")
    def exec_cmds(o):
        # Apply each decorator's class-level patching (e.g. set_X methods).
        for propname, decorators in all_decorators.items():
            for decorator in decorators:
                decorator.execute(propname, prop_types[propname], o)
    if object_wrapper:
        return exec_cmds, lambda cmd: (lambda self, *args: new_get_things(base_type(), self.path + cmd, settermap, list(args)))
    else:
        return lambda cmd, *args: new_get_things(base_type(), cmd, settermap, list(args))
class NonDocObj(object):
    """Root class for all wrapper classes that wrap objects that don't have
    their own identity/UUID.
    This covers various singletons and inner objects (e.g. engine in instruments)."""
    class Status:
        # Subclasses override Status with property->type declarations that
        # drive the auto-generated status() unmarshaller.
        pass
    def __init__(self, path):
        self.path = path
    def __new__(classObj, *args, **kwargs):
        # Python 3 ignores extra args to object.__new__, so __init__ is
        # invoked explicitly; Python 2 forwards them.
        if is_python3:
            result = object.__new__(classObj)
            result.__init__(*args, **kwargs)
        else:
            result = object.__new__(classObj, *args, **kwargs)
        name = classObj.__name__
        # Lazily wrap each concrete subclass exactly once: generate the
        # Status unmarshaller and attach the status() method to the class.
        if getattr(classObj, 'wrapped_class', None) != name:
            classfinaliser, cmdwrapper = _create_unmarshaller(name, classObj.Status, object_wrapper = True)
            classfinaliser(classObj)
            classObj.status = cmdwrapper('/status')
            classObj.wrapped_class = name
        return result
    def cmd(self, cmd, fb = None, *args):
        # Fire a command rooted at this object's path; fb receives callbacks.
        do_cmd(self.path + cmd, fb, list(args))
    def cmd_makeobj(self, cmd, *args):
        # Run a UUID-returning command and wrap the result as a document object.
        return Document.map_uuid(GetUUID(self.path + cmd, *args).uuid)
    def get_things(self, cmd, fields, *args):
        return GetThings(self.path + cmd, fields, list(args))
    def get_thing(self, cmd, fieldcmd, type, *args):
        return get_thing(self.path + cmd, fieldcmd, type, *args)
    def make_path(self, path):
        return self.path + path
    def __str__(self):
        return "%s<%s>" % (self.__class__.__name__, self.path)
class DocObj(NonDocObj):
    """Root class for all wrapper classes that wrap first-class document objects."""
    class Status:
        # Overridden by subclasses with property->type declarations.
        pass
    def __init__(self, uuid):
        # The command path of a document object is derived from its UUID.
        NonDocObj.__init__(self, Document.uuid_cmd(uuid, ''))
        self.uuid = uuid
    def delete(self):
        """Delete the underlying document object in the C layer."""
        self.cmd("/delete")
    def __str__(self):
        return "%s<%s>" % (self.__class__.__name__, self.uuid)
class VarPath:
    """Builder for parametrized command paths: accumulates a path string plus
    a list of leading arguments, then fires the command with ``set``."""
    def __init__(self, path, args = None):
        """:param path: command path prefix
        :param args: leading argument list (defaults to empty)"""
        self.path = path
        # BUG fix: avoid the shared-mutable-default pitfall of ``args = []``.
        self.args = [] if args is None else args
    def plus(self, subpath, *args):
        """Return a new VarPath with *subpath* appended (None keeps the path
        unchanged) and *args* appended to the argument list."""
        return VarPath(self.path if subpath is None else self.path + "/" + subpath, self.args + list(args))
    def set(self, *values):
        """Send the accumulated path and args followed by *values* to the C layer."""
        do_cmd(self.path, None, self.args + list(values))
###############################################################################
# And those are the proper user-accessible objects.
###############################################################################
class Config:
    class KeysUnmarshaller:
        # Declares the shape of /config/keys callbacks: a list of strings.
        keys = [str]
    keys_unmarshaller = _create_unmarshaller('Config.keys()', KeysUnmarshaller)
    # NOTE(review): the string below is not the class docstring (it is not
    # the first statement in the class body); left in place unchanged.
    """INI file manipulation class."""
    @staticmethod
    def sections(prefix = ""):
        """Return a list of configuration sections."""
        return [CfgSection(name) for name in get_thing('/config/sections', '/section', [str], prefix)]
    @staticmethod
    def keys(section, prefix = ""):
        """Return a list of configuration keys in a section, with optional prefix filtering."""
        return Config.keys_unmarshaller('/config/keys', str(section), str(prefix)).keys
    @staticmethod
    def get(section, key):
        """Return a string value of a given key."""
        return get_thing('/config/get', '/value', str, str(section), str(key))
    @staticmethod
    def set(section, key, value):
        """Set a string value for a given key."""
        do_cmd('/config/set', None, [str(section), str(key), str(value)])
    @staticmethod
    def delete(section, key):
        """Delete a given key."""
        do_cmd('/config/delete', None, [str(section), str(key)])
    @staticmethod
    def save(filename = None):
        """Save config, either into current INI file or some other file."""
        if filename is None:
            do_cmd('/config/save', None, [])
        else:
            do_cmd('/config/save', None, [str(filename)])
    @staticmethod
    def add_section(section, content):
        """Populate a config section based on a string with key=value lists.
        This is a toy/debug function, it doesn't handle any edge cases."""
        for line in content.splitlines():
            line = line.strip()
            if line == '' or line.startswith('#'):
                continue
            try:
                # BUG fix: split on the first '=' only so values may contain
                # '='; maxsplit=2 made any 'a=b=c' line raise a parse error.
                key, value = line.split("=", 1)
            except ValueError as err:
                raise ValueError("Cannot parse config line '%s'" % line)
            Config.set(section, key.strip(), value.strip())
class Transport:
    """Static wrappers around the /master/* transport commands of the C layer:
    playback control, tempo/time-signature changes and position conversion."""
    @staticmethod
    def seek_ppqn(ppqn):
        # Seek to a song position expressed in PPQN ticks.
        do_cmd('/master/seek_ppqn', None, [int(ppqn)])
    @staticmethod
    def seek_samples(samples):
        # Seek to a song position expressed in audio samples.
        do_cmd('/master/seek_samples', None, [int(samples)])
    @staticmethod
    def set_tempo(tempo):
        do_cmd('/master/set_tempo', None, [float(tempo)])
    @staticmethod
    def set_timesig(nom, denom):
        do_cmd('/master/set_timesig', None, [int(nom), int(denom)])
    @staticmethod
    def set_ppqn_factor(factor):
        do_cmd('/master/set_ppqn_factor', None, [int(factor)])
    @staticmethod
    def play():
        do_cmd('/master/play', None, [])
    @staticmethod
    def stop():
        do_cmd('/master/stop', None, [])
    @staticmethod
    def panic():
        # All-notes-off style emergency stop.
        do_cmd('/master/panic', None, [])
    @staticmethod
    def status():
        """Return full transport status (position, tempo, timesig, ...)."""
        return GetThings("/master/status", ['pos', 'pos_ppqn', 'tempo', 'timesig', 'sample_rate', 'playing', 'ppqn_factor'], [])
    @staticmethod
    def tell():
        """Return just the current position and playing state."""
        return GetThings("/master/tell", ['pos', 'pos_ppqn', 'playing'], [])
    @staticmethod
    def ppqn_to_samples(pos_ppqn):
        return get_thing("/master/ppqn_to_samples", '/value', int, pos_ppqn)
    @staticmethod
    def samples_to_ppqn(pos_samples):
        return get_thing("/master/samples_to_ppqn", '/value', int, pos_samples)
# Currently responsible for both JACK and USB I/O - not all functionality is
# supported by both.
class JackIO:
    """Static facade over the '/io' commands.

    Currently responsible for both JACK and USB I/O - not all functionality
    is supported by both backends.
    """
    AUDIO_TYPE = "32 bit float mono audio"
    MIDI_TYPE = "8 bit raw midi"
    # JACK port flag bits, usable in get_ports()'s flag_mask argument.
    PORT_IS_SINK = 0x1
    PORT_IS_SOURCE = 0x2
    PORT_IS_PHYSICAL = 0x4
    PORT_CAN_MONITOR = 0x8
    PORT_IS_TERMINAL = 0x10
    metadata.get_thing = get_thing #avoid circular dependency and redundant code
    Metadata = metadata.Metadata #use with cbox.JackIO.Metadata.get_all_properties()
    @staticmethod
    def status():
        # Some of these only make sense for JACK
        return GetThings("/io/status", ['client_type', 'client_name',
            'audio_inputs', 'audio_outputs', 'buffer_size', '*midi_output',
            '*midi_input', 'sample_rate', 'output_resolution',
            '*usb_midi_input', '*usb_midi_output', '?external_tempo'], [])
    @staticmethod
    def jack_transport_position():
        # Some of these only make sense for JACK
        return GetThings("/io/jack_transport_position", ['state', 'unique_lo',
            'unique_hi', 'usecs_lo', 'usecs_hi', 'frame_rate', 'frame', 'bar',
            'beat', 'tick', 'bar_start_tick', 'bbt_frame_offset', 'beats_per_bar',
            'beat_type', 'ticks_per_beat', 'beats_per_minute', 'is_master'], [])
    @staticmethod
    def jack_transport_locate(pos):
        do_cmd("/io/jack_transport_locate", None, [pos])
    @staticmethod
    def transport_mode(master = True, conditional = False):
        # NOTE(review): presumably 1 = conditional master, 2 = unconditional
        # master, 0 = slave - confirm against the engine implementation.
        if master:
            do_cmd("/io/transport_mode", None, [1 if conditional else 2])
        else:
            do_cmd("/io/transport_mode", None, [0])
    @staticmethod
    def create_midi_input(name, autoconnect_spec = None):
        uuid = GetUUID("/io/create_midi_input", name).uuid
        if autoconnect_spec is not None and autoconnect_spec != '':
            JackIO.autoconnect(uuid, autoconnect_spec)
        return uuid
    @staticmethod
    def create_midi_output(name, autoconnect_spec = None):
        uuid = GetUUID("/io/create_midi_output", name).uuid
        if autoconnect_spec is not None and autoconnect_spec != '':
            JackIO.autoconnect(uuid, autoconnect_spec)
        return uuid
    @staticmethod
    def autoconnect(uuid, autoconnect_spec = None):
        if autoconnect_spec is not None:
            do_cmd("/io/autoconnect", None, [uuid, autoconnect_spec])
        else:
            # Empty spec clears the autoconnect setting.
            do_cmd("/io/autoconnect", None, [uuid, ''])
    # One engine command serves all port kinds.
    autoconnect_midi_input = autoconnect
    autoconnect_midi_output = autoconnect
    autoconnect_audio_output = autoconnect
    @staticmethod
    def rename_midi_output(uuid, new_name):
        do_cmd("/io/rename_midi_port", None, [uuid, new_name])
    rename_midi_input = rename_midi_output
    @staticmethod
    def disconnect_midi_port(uuid):
        do_cmd("/io/disconnect_midi_port", None, [uuid])
    @staticmethod
    def disconnect_midi_output(uuid):
        do_cmd("/io/disconnect_midi_output", None, [uuid])
    @staticmethod
    def disconnect_midi_input(uuid):
        do_cmd("/io/disconnect_midi_input", None, [uuid])
    @staticmethod
    def delete_midi_input(uuid):
        do_cmd("/io/delete_midi_input", None, [uuid])
    @staticmethod
    def delete_midi_output(uuid):
        do_cmd("/io/delete_midi_output", None, [uuid])
    @staticmethod
    def route_midi_input(input_uuid, scene_uuid):
        do_cmd("/io/route_midi_input", None, [input_uuid, scene_uuid])
    @staticmethod
    def set_appsink_for_midi_input(input_uuid, enabled):
        do_cmd("/io/set_appsink_for_midi_input", None, [input_uuid, 1 if enabled else 0])
    @staticmethod
    def get_new_events(input_uuid):
        # Drain queued events for an appsink-enabled input; returns a list
        # of (cmd, fb, args) tuples.
        seq = []
        do_cmd("/io/get_new_events", (lambda cmd, fb, args: seq.append((cmd, fb, args))), [input_uuid])
        return seq
    @staticmethod
    def create_audio_output(name, autoconnect_spec = None):
        uuid = GetUUID("/io/create_audio_output", name).uuid
        if autoconnect_spec is not None and autoconnect_spec != '':
            JackIO.autoconnect(uuid, autoconnect_spec)
        return uuid
    @staticmethod
    def create_audio_output_router(uuid_left, uuid_right):
        # NOTE(review): the result type is given as DocRecorder, which looks
        # copy-pasted - confirm the actual class of the returned object.
        return get_thing("/io/create_audio_output_router", "/uuid", DocRecorder, uuid_left, uuid_right)
    @staticmethod
    def delete_audio_output(uuid):
        do_cmd("/io/delete_audio_output", None, [uuid])
    @staticmethod
    def rename_audio_output(uuid, new_name):
        do_cmd("/io/rename_audio_port", None, [uuid, new_name])
    @staticmethod
    def disconnect_audio_output(uuid):
        do_cmd("/io/disconnect_audio_output", None, [uuid])
    @staticmethod
    def port_connect(pfrom, pto):
        do_cmd("/io/port_connect", None, [pfrom, pto])
    @staticmethod
    def port_disconnect(pfrom, pto):
        do_cmd("/io/port_disconnect", None, [pfrom, pto])
    @staticmethod
    def get_ports(name_mask = ".*", type_mask = ".*", flag_mask = 0):
        return get_thing("/io/get_ports", '/port', [str], name_mask, type_mask, int(flag_mask))
    @staticmethod
    def get_connected_ports(port):
        return get_thing("/io/get_connected_ports", '/port', [str], port)
    @staticmethod
    def external_tempo(enable):
        """Enable reacting to JACK transport tempo"""
        do_cmd('/io/external_tempo', None, [1 if enable else 0])
def call_on_idle(callback = None):
    """Process pending engine callbacks once, on the caller's thread."""
    do_cmd("/on_idle", callback, [])
def get_new_events():
    """Collect pending engine events into a list of (cmd, fb, args) tuples."""
    seq = []
    do_cmd("/on_idle", (lambda cmd, fb, args: seq.append((cmd, fb, args))), [])
    return seq
def send_midi_event(*data, **kwargs):
    # The 'output' keyword selects a specific output port; '' = default.
    output = kwargs.get('output', None)
    do_cmd('/send_event_to', None, [output if output is not None else ''] + list(data))
def send_sysex(data, output = None):
    # SysEx payload is sent as a bytearray blob.
    do_cmd('/send_sysex_to', None, [output if output is not None else '', bytearray(data)])
def flush_rt():
    """Flush the real-time command queue."""
    do_cmd('/rt/flush', None, [])
class CfgSection:
    """Dict-like view of a single config section; delegates to Config."""
    def __init__(self, name):
        self.name = name
    def __getitem__(self, key):
        return Config.get(self.name, key)
    def __setitem__(self, key, value):
        Config.set(self.name, key, value)
    def __delitem__(self, key):
        Config.delete(self.name, key)
    def keys(self, prefix = ""):
        # NOTE(review): relies on Config.keys(), which is not defined in the
        # visible part of this module - confirm it exists.
        return Config.keys(self.name, prefix)
class Pattern:
    """Access to the current global pattern blob."""
    @staticmethod
    def get_pattern():
        """Return (events, length) for the current pattern, or None.

        The blob is a sequence of 8-byte records with struct layout "iBBbb":
        time, event-length byte, then up to 3 data bytes.  The length byte is
        dropped from each decoded tuple.
        """
        pat_data = get_thing("/get_pattern", '/pattern', (bytes, int))
        if pat_data is not None:
            pat_blob, length = pat_data
            pat_data = []
            ofs = 0
            while ofs < len(pat_blob):
                data = list(struct.unpack_from("iBBbb", pat_blob, ofs))
                # data[1] is the event-length byte - remove it.
                data[1:2] = []
                pat_data.append(tuple(data))
                ofs += 8
            return pat_data, length
        return None
    @staticmethod
    def serialize_event(time, *data):
        """Pack one event (1..3 data bytes) into the wire format above."""
        if len(data) >= 1 and len(data) <= 3:
            # Slice the format so only len(data) payload bytes are packed.
            return struct.pack("iBBbb"[0:2 + len(data)], int(time), len(data), *[int(v) for v in data])
        raise ValueError("Invalid length of an event (%d)" % len(data))
class Document:
    """Document singleton."""
    # Maps C-side class names (e.g. 'cbox_song') to Python proxy classes.
    classmap = {}
    # Cache uuid -> proxy, so each C object has exactly one Python proxy.
    objmap = {}
    @staticmethod
    def dump():
        """Print all objects in the documents to stdout. Only used for debugging."""
        do_cmd("/doc/dump", None, [])
    @staticmethod
    def uuid_cmd(uuid, cmd):
        """Internal: execute a given request on an object with specific UUID."""
        return "/doc/uuid/%s%s" % (uuid, cmd)
    @staticmethod
    def get_uuid(path):
        """Internal: retrieve an UUID of an object that has specified path."""
        return GetUUID('%s/get_uuid' % path).uuid
    @staticmethod
    def map_path(path, *args):
        """Internal: return an object corresponding to a path"""
        # NOTE(review): *args is accepted but never used - confirm callers.
        return Document.map_uuid(Document.get_uuid(path))
    @staticmethod
    def cmd_makeobj(cmd, *args):
        """Internal: create an object from the UUID result of a command"""
        return Document.map_uuid(GetUUID(cmd, *args).uuid)
    @staticmethod
    def get_obj_class(uuid):
        """Internal: retrieve an internal class type of an object that has specified path."""
        return get_thing(Document.uuid_cmd(uuid, "/get_class_name"), '/class_name', str)
    @staticmethod
    def get_song():
        """Retrieve the current song object of a given document. Each document can
        only have one current song."""
        return Document.map_path("/song")
    @staticmethod
    def get_scene():
        """Retrieve the first scene object of a default engine. This function
        is considered obsolete-ish, because of multiple scene support."""
        return Document.map_path("/scene")
    @staticmethod
    def get_engine():
        """Retrieve the current RT engine object of a given document. Each document can
        only have one current RT engine."""
        return Document.map_path("/rt/engine")
    @staticmethod
    def get_rt():
        """Retrieve the RT singleton. RT is an object used to communicate between
        realtime and user thread, and is currently also used to access the audio
        engine."""
        return Document.map_path("/rt")
    @staticmethod
    def new_engine(srate, bufsize):
        """Create a new off-line engine object. This new engine object cannot be used for
        audio playback - that's only allowed for default engine."""
        return Document.cmd_makeobj('/new_engine', int(srate), int(bufsize))
    @staticmethod
    def map_uuid(uuid):
        """Create or retrieve a Python-side accessor proxy for a C-side object."""
        if uuid is None:
            return None
        if uuid in Document.objmap:
            return Document.objmap[uuid]
        try:
            oclass = Document.get_obj_class(uuid)
        except Exception as e:
            print ("Note: Cannot get class for " + uuid)
            Document.dump()
            raise
        o = Document.classmap[oclass](uuid)
        Document.objmap[uuid] = o
        if hasattr(o, 'init_object'):
            # Let proxies do post-construction setup (sub-objects etc.).
            o.init_object()
        return o
    @staticmethod
    def map_uuid_and_check(uuid, t):
        """Like map_uuid(), but raise TypeError unless the proxy has type t."""
        o = Document.map_uuid(uuid)
        if not isinstance(o, t):
            raise TypeError("UUID %s is of type %s, expected %s" % (uuid, o.__class__, t))
        return o
class DocPattern(DocObj):
    """Proxy for a C-side MIDI pattern object."""
    class Status:
        # Field name -> type of the corresponding /status value.
        event_count = int
        loop_end = int
        name = str
    def __init__(self, uuid):
        DocObj.__init__(self, uuid)
    def set_name(self, name):
        self.cmd("/name", None, name)
Document.classmap['cbox_midi_pattern'] = DocPattern
class ClipItem:
    """One clip entry in a track's status: a pattern placed on the timeline."""
    def __init__(self, pos, offset, length, pattern, clip):
        self.pos = pos
        self.offset = offset
        self.length = length
        self.pattern = Document.map_uuid(pattern)
        self.clip = Document.map_uuid(clip)
    def __str__(self):
        return "pos=%d offset=%d length=%d pattern=%s clip=%s" % (self.pos, self.offset, self.length, self.pattern.uuid, self.clip.uuid)
    def __eq__(self, other):
        # Compare via the string form; it covers all fields above.
        return str(self) == str(other)
class DocTrackClip(DocObj):
    """Proxy for one clip object ('cbox_track_item') on a track."""
    class Status:
        pos = SettableProperty(int)
        offset = SettableProperty(int)
        length = SettableProperty(int)
        pattern = SettableProperty(DocPattern)
    def __init__(self, uuid):
        DocObj.__init__(self, uuid)
Document.classmap['cbox_track_item'] = DocTrackClip
class DocTrack(DocObj):
    """Proxy for a song track; clips place patterns on the timeline."""
    class Status:
        clips = [ClipItem]
        name = SettableProperty(str)
        external_output = SettableProperty(str)
        mute = SettableProperty(int)
    def add_clip(self, pos, offset, length, pattern):
        return self.cmd_makeobj("/add_clip", int(pos), int(offset), int(length), pattern.uuid)
    def clear_clips(self):
        return self.cmd_makeobj("/clear_clips")
Document.classmap['cbox_track'] = DocTrack
class TrackItem:
    """Track entry as reported in DocSong.Status.tracks."""
    def __init__(self, name, count, track):
        self.name = name
        self.count = count
        self.track = Document.map_uuid(track)
class PatternItem:
    """Pattern entry as reported in DocSong.Status.patterns."""
    def __init__(self, name, length, pattern):
        self.name = name
        self.length = length
        self.pattern = Document.map_uuid(pattern)
class MtiItem:
    """Master track item: tempo/time-signature change at a song position."""
    def __init__(self, pos, tempo, timesig_num, timesig_denom):
        self.pos = pos
        self.tempo = tempo
        # Original misspelling
        self.timesig_num = timesig_num
        self.timesig_denom = timesig_denom
    def __getattr__(self, name):
        # Backwards-compatible alias: reading 'timesig_nom' yields timesig_num.
        if name == 'timesig_nom':
            return self.timesig_num
        raise AttributeError(name)
    def __setattr__(self, name, value):
        # Mirror of __getattr__: writes to the alias update the real field.
        if name == 'timesig_nom':
            self.timesig_num = value
        else:
            self.__dict__[name] = value
    def __eq__(self, o):
        return self.pos == o.pos and self.tempo == o.tempo and self.timesig_num == o.timesig_num and self.timesig_denom == o.timesig_denom
    def __repr__(self):
        return ("pos: {}, bpm: {}, timesig: {}/{}".format(self.pos, self.tempo, self.timesig_num, self.timesig_denom))
class DocSongStatus:
    # NOTE(review): appears to be an unused legacy placeholder - no
    # references in the visible code; verify before removing.
    tracks = None
    patterns = None
class DocSong(DocObj):
    """Proxy for the song: tracks, patterns and master-track (tempo) items."""
    class Status:
        tracks = [TrackItem]
        patterns = [PatternItem]
        mtis = [MtiItem]
        loop_start = int
        loop_end = int
    def clear(self):
        return self.cmd("/clear", None)
    def set_loop(self, ls, le):
        return self.cmd("/set_loop", None, int(ls), int(le))
    def set_mti(self, pos, tempo = None, timesig_num = None, timesig_denom = None, timesig_nom = None):
        # timesig_nom is a backwards-compatible alias for timesig_num;
        # -1/-1.0 are the protocol's "leave unchanged" sentinels.
        if timesig_nom is not None:
            timesig_num = timesig_nom
        self.cmd("/set_mti", None, int(pos), float(tempo) if tempo is not None else -1.0, int(timesig_num) if timesig_num is not None else -1, int(timesig_denom) if timesig_denom else -1)
    def delete_mti(self, pos):
        """Deleting works only if we set everything to exactly 0. Not None, not -1"""
        self.set_mti(pos, tempo = 0, timesig_num = 0, timesig_denom = 0, timesig_nom = 0)
    def add_track(self):
        return self.cmd_makeobj("/add_track")
    def load_drum_pattern(self, name):
        return self.cmd_makeobj("/load_pattern", name, 1)
    def load_drum_track(self, name):
        return self.cmd_makeobj("/load_track", name, 1)
    def pattern_from_blob(self, blob, length):
        # See Pattern.serialize_event for the blob's wire format.
        return self.cmd_makeobj("/load_blob", bytearray(blob), int(length))
    def loop_single_pattern(self, loader):
        """Clear the song and loop one pattern produced by calling loader()."""
        self.clear()
        track = self.add_track()
        pat = loader()
        length = pat.status().loop_end
        track.add_clip(0, 0, length, pat)
        self.set_loop(0, length)
        self.update_playback()
    def update_playback(self):
        # XXXKF Maybe make it a song-level API instead of global
        do_cmd("/update_playback", None, [])
Document.classmap['cbox_song'] = DocSong
class UnknownModule(NonDocObj):
    """Catch-all wrapper for effect/module engines without a dedicated class."""
    class Status:
        pass
class DocRecorder(DocObj):
    """Proxy for a stream recorder writing to a file."""
    class Status:
        filename = str
        gain = SettableProperty(float)
Document.classmap['cbox_recorder'] = DocRecorder
class RecSource(NonDocObj):
    """A recordable signal tap; recorders can be attached to or detached from it."""
    class Status:
        handler = [DocRecorder]
    def attach(self, recorder):
        self.cmd('/attach', None, recorder.uuid)
    def detach(self, recorder):
        self.cmd('/detach', None, recorder.uuid)
class EffectSlot(NonDocObj):
    """An insert-effect slot holding a preset/engine, with a bypass switch."""
    class Status:
        insert_preset = SettableProperty(str)
        insert_engine = SettableProperty(str)
        bypass = SettableProperty(bool)
    def init_object(self):
        # XXXKF add wrapper classes for effect engines
        self.engine = UnknownModule(self.path + "/engine")
class InstrumentOutput(EffectSlot):
    """Per-output effect slot of an instrument, plus dry/wet record taps."""
    class Status(EffectSlot.Status):
        gain_linear = float
        gain = float
        output = SettableProperty(int)
    def init_object(self):
        EffectSlot.init_object(self)
        self.rec_dry = RecSource(self.make_path('/rec_dry'))
        self.rec_wet = RecSource(self.make_path('/rec_wet'))
class DocInstrument(DocObj):
    """Proxy for an instrument; wraps its engine and its output slots."""
    class Status:
        name = str
        outputs = int
        aux_offset = int
        engine = str
    def init_object(self):
        s = self.status()
        engine = s.engine
        if engine in engine_classes:
            self.engine = engine_classes[engine]("/doc/uuid/" + self.uuid + "/engine")
        else:
            raise ValueError("Unknown engine %s" % engine)
        self.output_slots = []
        for i in range(s.outputs):
            # Engine-side output paths are 1-based.
            io = InstrumentOutput(self.make_path('/output/%d' % (i + 1)))
            io.init_object()
            self.output_slots.append(io)
    def move_to(self, target_scene, pos = 0):
        # Positions are zero-based here, 1-based in the protocol.
        return self.cmd_makeobj("/move_to", target_scene.uuid, pos + 1)
    def get_output_slot(self, slot):
        return self.output_slots[slot]
Document.classmap['cbox_instrument'] = DocInstrument
class DocLayer(DocObj):
    """Proxy for a scene layer: event routing/filtering into an instrument."""
    class Status:
        name = str
        instrument_name = str
        instrument = AltPropName('/instrument_uuid', DocInstrument)
        enable = SettableProperty(bool)
        low_note = SettableProperty(int)
        high_note = SettableProperty(int)
        fixed_note = SettableProperty(int)
        in_channel = SettableProperty(int)
        out_channel = SettableProperty(int)
        disable_aftertouch = SettableProperty(bool)
        invert_sustain = SettableProperty(bool)
        consume = SettableProperty(bool)
        ignore_scene_transpose = SettableProperty(bool)
        ignore_program_changes = SettableProperty(bool)
        transpose = SettableProperty(int)
        external_output = SettableProperty(str)
    def get_instrument(self):
        return self.status().instrument
Document.classmap['cbox_layer'] = DocLayer
class SamplerEngine(NonDocObj):
    """Wrapper for the built-in SFZ sampler engine."""
    class Status(object):
        """Maximum number of voices playing at the same time."""
        polyphony = int
        """Current number of voices playing."""
        active_voices = int
        """Current number of delayed-startup voices waiting to be played."""
        active_prevoices = int
        """Current number of disk streams."""
        active_pipes = int
        """GM volume (14-bit) per MIDI channel."""
        volume = {int:int}
        """GM pan (14-bit) per MIDI channel."""
        pan = {int:int}
        """Output offset per MIDI channel."""
        output = {int:int}
        """Current number of voices playing per MIDI channel."""
        channel_voices = AltPropName('/channel_voices', {int:int})
        """Current number of voices waiting to be played per MIDI channel."""
        channel_prevoices = AltPropName('/channel_prevoices', {int:int})
        """MIDI channel -> (program number, program name)"""
        patches = {int:(int, str)}
    def load_patch_from_cfg(self, patch_no, cfg_section, display_name):
        """Load a sampler program from an 'spgm:' config section."""
        return self.cmd_makeobj("/load_patch", int(patch_no), cfg_section, display_name)
    def load_patch_from_string(self, patch_no, sample_dir, sfz_data, display_name):
        """Load a sampler program from a string, using given filesystem path for sample directory."""
        return self.cmd_makeobj("/load_patch_from_string", int(patch_no), sample_dir, sfz_data, display_name)
    def load_patch_from_file(self, patch_no, sfz_name, display_name):
        """Load a sampler program from a filesystem file."""
        return self.cmd_makeobj("/load_patch_from_file", int(patch_no), sfz_name, display_name)
    def load_patch_from_tar(self, patch_no, tar_name, sfz_name, display_name):
        """Load a sampler program from a tar file.

        Uses the same engine command as load_patch_from_file, with an
        'sbtar:' pseudo-path addressing the SFZ inside the tarball.
        """
        return self.cmd_makeobj("/load_patch_from_file", int(patch_no), "sbtar:%s;%s" % (tar_name, sfz_name), display_name)
    def set_patch(self, channel, patch_no):
        """Select patch identified by patch_no in a specified MIDI channel."""
        self.cmd("/set_patch", None, int(channel), int(patch_no))
    def set_output(self, channel, output):
        """Set output offset value in a specified MIDI channel."""
        self.cmd("/set_output", None, int(channel), int(output))
    def get_unused_program(self):
        """Returns first program number that has no program associated with it."""
        return self.get_thing("/get_unused_program", '/program_no', int)
    def set_polyphony(self, polyphony):
        """Set a maximum number of voices that can be played at a given time."""
        self.cmd("/polyphony", None, int(polyphony))
    def get_patches(self):
        """Return a map of program identifiers to program objects."""
        return self.get_thing("/patches", '/patch', {int : (str, SamplerProgram, int)})
    def get_keyswitch_state(self, channel, group):
        """Return the last keyswitch key (int) engaged for the given
        keyswitch group on a MIDI channel."""
        return self.get_thing("/keyswitch_state", '/last_key', int, channel, group)
class FluidsynthEngine(NonDocObj):
    """Wrapper for the FluidSynth (SoundFont) engine."""
    class Status:
        polyphony = int
        soundfont = str
        patch = {int: (int, str)}
    def load_soundfont(self, filename):
        return self.cmd_makeobj("/load_soundfont", filename)
    def set_patch(self, channel, patch_no):
        self.cmd("/set_patch", None, int(channel), int(patch_no))
    def set_polyphony(self, polyphony):
        self.cmd("/polyphony", None, int(polyphony))
    def get_patches(self):
        return self.get_thing("/patches", '/patch', {int: str})
class StreamPlayerEngine(NonDocObj):
    """Wrapper for the audio-file stream player."""
    class Status:
        filename = str
        pos = int
        length = int
        playing = int
    def play(self):
        self.cmd('/play')
    def stop(self):
        self.cmd('/stop')
    def seek(self, place):
        self.cmd('/seek', None, int(place))
    def load(self, filename, loop_start = -1):
        # loop_start = -1 presumably disables looping - confirm engine-side.
        self.cmd('/load', None, filename, int(loop_start))
    def unload(self):
        self.cmd('/unload')
class TonewheelOrganEngine(NonDocObj):
    """Wrapper for the tonewheel organ engine; control is via properties."""
    class Status:
        upper_drawbar = SettableProperty({int: int})
        lower_drawbar = SettableProperty({int: int})
        pedal_drawbar = SettableProperty({int: int})
        upper_vibrato = SettableProperty(bool)
        lower_vibrato = SettableProperty(bool)
        vibrato_mode = SettableProperty(int)
        vibrato_chorus = SettableProperty(int)
        percussion_enable = SettableProperty(bool)
        percussion_3rd = SettableProperty(bool)
class JackInputEngine(NonDocObj):
    """Wrapper for the JACK audio input pseudo-instrument."""
    class Status:
        inputs = (int, int)
# Engine name (as reported in instrument status) -> Python wrapper class.
engine_classes = {
    'sampler' : SamplerEngine,
    'fluidsynth' : FluidsynthEngine,
    'stream_player' : StreamPlayerEngine,
    'tonewheel_organ' : TonewheelOrganEngine,
    'jack_input' : JackInputEngine,
}
class DocAuxBus(DocObj):
    """Proxy for an aux (send) bus; owns one effect slot."""
    class Status:
        name = str
    def init_object(self):
        self.slot = EffectSlot("/doc/uuid/" + self.uuid + "/slot")
        self.slot.init_object()
Document.classmap['cbox_aux_bus'] = DocAuxBus
class DocScene(DocObj):
    """Proxy for a scene: a set of layers feeding instruments and aux buses."""
    class Status:
        name = str
        title = str
        transpose = int
        layers = [DocLayer]
        instruments = {str: (str, DocInstrument)}
        auxes = {str: DocAuxBus}
        enable_default_song_input = SettableProperty(bool)
        enable_default_external_input = SettableProperty(bool)
    def clear(self):
        self.cmd("/clear", None)
    def load(self, name):
        self.cmd("/load", None, name)
    def load_aux(self, aux):
        return self.cmd_makeobj("/load_aux", aux)
    def delete_aux(self, aux):
        return self.cmd("/delete_aux", None, aux)
    def delete_layer(self, pos):
        # Zero-based here; protocol positions are 1-based.
        self.cmd("/delete_layer", None, int(1 + pos))
    def move_layer(self, old_pos, new_pos):
        self.cmd("/move_layer", None, int(old_pos + 1), int(new_pos + 1))
    #Layer positions are 0 for "append" and other positions are 1...n which need to be unique
    def add_layer(self, aux, pos = None):
        if pos is None:
            return self.cmd_makeobj("/add_layer", 0, aux)
        else:
            # Note: The positions in high-level API are zero-based.
            return self.cmd_makeobj("/add_layer", int(1 + pos), aux)
    def add_instrument_layer(self, name, pos = None):
        if pos is None:
            return self.cmd_makeobj("/add_instrument_layer", 0, name)
        else:
            return self.cmd_makeobj("/add_instrument_layer", int(1 + pos), name)
    def add_new_instrument_layer(self, name, engine, pos = None):
        if pos is None:
            return self.cmd_makeobj("/add_new_instrument_layer", 0, name, engine)
        else:
            return self.cmd_makeobj("/add_new_instrument_layer", int(1 + pos), name, engine)
    def add_new_midi_layer(self, ext_output_uuid, pos = None):
        if pos is None:
            return self.cmd_makeobj("/add_midi_layer", 0, ext_output_uuid)
        else:
            return self.cmd_makeobj("/add_midi_layer", int(1 + pos), ext_output_uuid)
    def send_midi_event(self, *data):
        self.cmd('/send_event', None, *data)
    def play_pattern(self, pattern, tempo, id = 0):
        self.cmd('/play_pattern', None, pattern.uuid, float(tempo), int(id))
Document.classmap['cbox_scene'] = DocScene
class DocRt(DocObj):
    """Proxy for the real-time (RT) subsystem object."""
    class Status:
        audio_channels = (int, int)
        state = (int, str)
Document.classmap['cbox_rt'] = DocRt
class DocModule(DocObj):
    """Generic proxy for a module object."""
    class Status:
        pass
Document.classmap['cbox_module'] = DocModule
class DocEngine(DocObj):
    """Proxy for an engine: scenes, master effect, recorders, rendering."""
    class Status:
        scenes = AltPropName('/scene', [DocScene])
    def init_object(self):
        self.master_effect = EffectSlot(self.path + "/master_effect")
        self.master_effect.init_object()
    def new_scene(self):
        return self.cmd_makeobj('/new_scene')
    def new_recorder(self, filename):
        return self.cmd_makeobj("/new_recorder", filename)
    def render_stereo(self, samples):
        # Off-line rendering; returns the rendered audio data as bytes.
        return self.get_thing("/render_stereo", '/data', bytes, samples)
Document.classmap['cbox_engine'] = DocEngine
class SamplerProgram(DocObj):
    """Proxy for one sampler program (an SFZ instrument)."""
    class Status:
        name = str
        sample_dir = str
        source_file = str
        program_no = int
        in_use = int
    def get_regions(self):
        # Flat list of <region> layers; see get_hierarchy() for structure.
        return self.get_thing("/regions", '/region', [SamplerLayer])
    def get_global(self):
        return self.cmd_makeobj("/global")
    def get_hierarchy(self):
        """see SamplerLayer.get_hierarchy"""
        return {self.get_global() : self.get_global().get_hierarchy()}
    def get_control_inits(self):
        return self.get_thing("/control_inits", '/control_init', [(int, int)])
    def get_control_labels(self):
        return self.get_thing("/control_labels", '/control_label', {int : str})
    def get_key_labels(self):
        return self.get_thing("/key_labels", '/key_label', {int : str})
    def get_keyswitch_groups(self):
        return self.get_thing("/keyswitch_groups", '/key_range', [(int, int)])
    def new_group(self):
        # Obsolete
        return self.cmd_makeobj("/new_group")
    def add_control_init(self, controller, value):
        return self.cmd("/add_control_init", None, controller, value)
    def add_control_label(self, controller, label):
        return self.cmd("/add_control_label", None, controller, label)
    # which = -1 -> remove all controllers with that number from the list
    def delete_control_init(self, controller, which = 0):
        return self.cmd("/delete_control_init", None, controller, which)
    def load_file(self, filename, max_size = -1):
        """Return an in-memory file corresponding to a given file inside sfbank.
        This can be used for things like scripts, images, descriptions etc."""
        data = self.get_thing("/load_file", '/data', bytes, filename, max_size)
        if data is None:
            return data
        return BytesIO(data)
    def clone_to(self, dest_module, prog_index):
        return self.cmd_makeobj('/clone_to', dest_module.uuid, int(prog_index))
Document.classmap['sampler_program'] = SamplerProgram
class SamplerLayer(DocObj):
    """Proxy for one node of the SFZ layer hierarchy
    (global-master-group-region)."""
    class Status:
        parent_program = SamplerProgram
        parent = DocObj
        level = str
    def get_children(self):
        """Return all children SamplerLayer.
        The hierarchy is always global-master-group-region
        Will be empty if this is
        an sfz <region>, which has no further children.
        """
        return self.get_thing("/get_children", '/child', [SamplerLayer])
    def get_hierarchy(self):
        """Returns either a level of hierarchy, e.g. <global> or <group>
        or None, if this is a childless layer, such as a <region>.
        The hierarchy is always global-master-group-region.
        Regions are always on the fourth level. But not all levels might have regions.
        Hint: Print with pprint during development."""
        children = self.get_children()
        if children:
            result = {}
            for childLayer in children:
                result[childLayer] = childLayer.get_hierarchy()
        else:
            result = None
        return result
    def as_dict(self):
        """Returns a dictionary of parameters set at this level of the
        layer hierarchy."""
        return self.get_thing("/as_list", '/value', {str: str})
    def as_dict_full(self):
        """Returns a dictionary of parameters set either at this level of the
        layer hierarchy or at one of the ancestors."""
        return self.get_thing("/as_list_full", '/value', {str: str})
    def as_string(self):
        """A space separated string of all sampler values at this level
        in the hierarchy, for example ampeg_decay.
        This only includes non-default values, e.g. from the sfz file"""
        return self.get_thing("/as_string", '/value', str)
    def as_string_full(self):
        """A space separated string of all sampler values at this level
        in the hierarchy, for example ampeg_decay.
        This includes all default values.
        To access the values as dict with number data types use
        get_params_full().
        '_oncc1' will be converted to '_cc1'
        """
        return self.get_thing("/as_string_full", '/value', str)
    def set_param(self, key, value):
        self.cmd("/set_param", None, key, str(value))
    def unset_param(self, key):
        self.cmd("/unset_param", None, key)
    def new_child(self):
        return self.cmd_makeobj("/new_child")
Document.classmap['sampler_layer'] = SamplerLayer
| kfoltman/calfbox | py/cbox.py | Python | gpl-3.0 | 47,278 |
# (c) Copyright 2014, University of Manchester
#
# This file is part of Pynsim.
#
# Pynsim is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pynsim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pynsim. If not, see <http://www.gnu.org/licenses/>.
from pynsim import Engine
from PyModel import OptimisationModel
from pyomo.environ import *
class PyomoAllocation(Engine):
    """Pynsim engine (Python 2): solves a whole-network water allocation
    with Pyomo for the current timestep and feeds storages forward."""
    name = """A pyomo-based engine which allocates water throughout a whole
    network in a single time-step."""
    target = None
    # NOTE(review): class-level mutable dict shared by all instances;
    # storages accumulate across timesteps - confirm this is intended.
    storage = {}
    def run(self):
        """
        Calling Pyomo model
        """
        print "========================= Timestep: %s =======================" % self.target.current_timestep
        allocation = "_____________ Flows _____________"
        storage = "_____________ Storage _____________"
        alpha = "_____________ Demand satisfaction ratio _____________"
        for n in self.target.nodes:
            if n.type == 'agricultural' or n.type == 'urban':
                print "%s target demand is %s" % (n.name, n.target_demand)
        print "======== calling Pyomo =============="
        optimisation = OptimisationModel(self.target)
        results = optimisation.run()
        # Collect solved variable values: S = storages, X = flows,
        # alpha = demand satisfaction ratios.
        for var in results.active_components(Var):
            if var == "S":
                s_var = getattr(results, var)
                for vv in s_var:
                    name = ''.join(map(str, vv))
                    self.storage[name] = s_var[vv].value
                    storage += '\n' + name + ": " + str(s_var[vv].value)
            elif var == "X":
                x_var = getattr(results, var)
                for xx in x_var:
                    name = "(" + ', '.join(map(str, xx)) + ")"
                    allocation += '\n' + name + ": " + str(x_var[xx].value)
            elif var == "alpha":
                alpha_var = getattr(results, var)
                for aa in alpha_var:
                    name = ''.join(map(str, aa))
                    alpha += '\n' + name + ": " + str(alpha_var[aa].value)
        print allocation
        print storage
        print alpha
        # Feed solved storages back as the next timestep's initial storage.
        self.target.set_initial_storage(self.storage)
| UMWRG/demos | WaterAllocationDemo/model/pynsim/engines/allocation.py | Python | gpl-3.0 | 2,662 |
import sys,time
from . import argparser
if sys.version < '3':
    # Python 2 has no threading.Barrier; emulate a single-use barrier with
    # two semaphores (a mutex guarding the count plus a turnstile).
    from threading import Semaphore
    class Barrier:
        def __init__(self, n):
            self.n = n
            self.count = 0
            self.mutex = Semaphore(1)
            self.barrier = Semaphore(0)
        def wait(self):
            self.mutex.acquire()
            self.count = self.count + 1
            self.mutex.release()
            if self.count == self.n: self.barrier.release()
            # Turnstile: each thread passes once the last one arrives.
            self.barrier.acquire()
            self.barrier.release()
else:
    from threading import Barrier
def print_verbose(*args):
    """Print *args* only when the --verbose flag is set."""
    # NOTE(review): prints the args tuple itself (e.g. "('a', 'b')");
    # py2-compatible code cannot use print(*args) - confirm this is intended.
    if argparser.args.verbose:
        print(args)
def die(msg=None):
    """Print an optional message, then terminate the process via sys.exit()."""
    if msg:
        print(msg)
    sys.exit()
def print_service(svc):
    """Pretty-print a discovered service record (dict of SDP-style fields)."""
    print("Service Name: %s" % svc["name"])
    print("    Host:        %s" % svc["host"])
    print("    Description: %s" % svc["description"])
    print("    Provided By: %s" % svc["provider"])
    print("    Protocol:    %s" % svc["protocol"])
    print("    channel/PSM: %s" % svc["port"])
    print("    svc classes: %s "% svc["service-classes"])
    print("    profiles:    %s "% svc["profiles"])
    print("    service id:  %s "% svc["service-id"])
def inc_last_octet(addr):
    """Return *addr* (colon-separated MAC string) with the last octet
    incremented modulo 256.

    The previous implementation dropped the leading zero for results below
    0x10 (e.g. ...:05 -> ...:6) and for the 0xFF -> 0x00 wrap-around,
    producing malformed addresses; the octet is now always rendered as two
    uppercase hex digits.
    """
    prefix, last = addr.rsplit(':', 1)
    return "%s:%02X" % (prefix, (int(last, 16) + 1) & 0xff)
def RateLimited(maxPerSecond):
    """
    Decorator for rate limiting a function.

    Successive calls to the wrapped function are delayed with time.sleep()
    so that at most *maxPerSecond* calls per second are performed.
    """
    minInterval = 1.0 / float(maxPerSecond)
    # time.clock() was removed in Python 3.8; prefer the monotonic
    # perf_counter when available and keep clock() for Python 2.
    try:
        _now = time.perf_counter
    except AttributeError:
        _now = time.clock
    def decorate(func):
        lastTimeCalled = [0.0]
        def rateLimitedFunction(*args,**kargs):
            elapsed = _now() - lastTimeCalled[0]
            leftToWait = minInterval - elapsed
            if leftToWait>0:
                time.sleep(leftToWait)
            ret = func(*args,**kargs)
            lastTimeCalled[0] = _now()
            return ret
        return rateLimitedFunction
    return decorate
| intfrr/btproxy | libbtproxy/utils.py | Python | gpl-3.0 | 1,903 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-10 14:02
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: creates the UserExtra model holding
    # an email-confirmation flag, an activation key and a one-to-one link
    # to accounts.User.  Do not edit by hand except to resolve conflicts.
    dependencies = [
        ('accounts', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='UserExtra',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email_confirmed', models.BooleanField(default=False)),
                ('activation_key', models.CharField(blank=True, max_length=40, verbose_name='activation key')),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='userextra', to='accounts.User', verbose_name='user')),
            ],
        ),
    ]
| ravigadila/seemyhack | seemyhack/accounts/migrations/0002_userextra.py | Python | gpl-3.0 | 889 |
"""Power Supply Diag App."""
from ... import csdev as _csdev
from ...namesys import SiriusPVName as _PVName
from ...search import PSSearch as _PSSearch
class ETypes(_csdev.ETypes):
    """Local enumerate types."""
    # Diagnostic status label tuples per accelerator section; tuple order
    # matters (presumably one label per status bit - confirm with the
    # DiagStatus-Mon producers).
    DIAG_STATUS_LABELS_AS = (
        'PS Disconnected/Comm. Broken',
        'PwrState-Sts Off',
        'Current-(SP|Mon) are different',
        'Interlocks',
        'Alarms',
        'OpMode-(Sel|Sts) are different',
        'Reserved')
    # LI variant: no alarm/opmode entries.
    DIAG_STATUS_LABELS_LI = (
        'PS Disconnected/Comm. Broken',
        'PwrState-Sts Off',
        'Current-(SP|Mon) are different',
        'Interlocks',
        'Reserved',
        'Reserved',
        'Reserved')
    # BO variant: adds the waveform-tolerance entry.
    DIAG_STATUS_LABELS_BO = (
        'PS Disconnected/Comm. Broken',
        'PwrState-Sts Off',
        'Current-(SP|Mon) are different',
        'Interlocks',
        'Alarms',
        'OpMode-(Sel|Sts) are different',
        'Wfm error exceeded tolerance')
_et = ETypes  # syntactic sugar
def get_ps_diag_status_labels(psname):
    """Return the diagnostics status label tuple for a power supply name.

    Labels are selected by the accelerator section of *psname*: 'BO' and
    'LI' have dedicated tuples; every other section uses the AS tuple.
    """
    labels_by_section = {
        'BO': _et.DIAG_STATUS_LABELS_BO,
        'LI': _et.DIAG_STATUS_LABELS_LI,
    }
    section = _PVName(psname).sec
    return labels_by_section.get(section, _et.DIAG_STATUS_LABELS_AS)
def get_ps_diag_propty_database(psname):
    """Return property database of diagnostics for power supplies."""
    # Current tolerance (DTOL_CUR) comes from the setpoint limits declared
    # for this power-supply type.
    pstype = _PSSearch.conv_psname_2_pstype(psname)
    splims = _PSSearch.conv_pstype_2_splims(pstype)
    dtol = splims['DTOL_CUR']
    enums = get_ps_diag_status_labels(psname)
    dbase = {
        'DiagVersion-Cte': {'type': 'str', 'value': 'UNDEF'},
        # Alarm limits are symmetric around zero: any |diff| >= dtol alarms.
        'DiagCurrentDiff-Mon': {'type': 'float', 'value': 0.0,
                                'hilim': dtol, 'hihi': dtol, 'high': dtol,
                                'low': -dtol, 'lolo': -dtol, 'lolim': -dtol},
        # Bit field; any non-zero value is an alarm condition.
        'DiagStatus-Mon': {'type': 'int', 'value': 0,
                           'hilim': 1, 'hihi': 1, 'high': 1,
                           'low': -1, 'lolo': -1, 'lolim': -1
                           },
        # One label per DiagStatus-Mon bit, section-dependent.
        'DiagStatusLabels-Cte': {'type': 'string', 'count': len(enums),
                                 'value': enums}
    }
    dbase = _csdev.add_pvslist_cte(dbase, 'Diag')
    return dbase
| lnls-sirius/dev-packages | siriuspy/siriuspy/diagsys/psdiag/csdev.py | Python | gpl-3.0 | 2,251 |
#!/usr/bin/env python
# coding=utf-8
#
# Copyright 2017 Ilya Zhivetiev <[email protected]>
#
# This file is part of tec-suite.
#
# tec-suite is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tec-suite is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tec-suite. If not, see <http://www.gnu.org/licenses/>.
"""
File: n.py
Description: GNSS RINEX observation data reader ver2.n
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import math
import re
from tecs.rinex.basic import ObservationData
from tecs.rinex.basic import RinexError
from tecs.rinex.common import validate_epoch
from tecs.rinex.header import RinexVersionType, ApproxPositionXYX, Interval, \
TimeOfFirstObs
NAME = 'tecs.rinex.v2.o'
LOGGER = logging.getLogger(NAME)
class Obs2(ObservationData):
    """Obs2(f_obj, filename, settings=None) -> instance
    GPS observation data file ver.2.0
    Parameters
    ----------
    f_obj : file
        file-like object
    filename : str
        a name of the file; we should use it because f_obj.name contains not
        filename.
    """
    VERSION = 2.0
    # One observation field is 16 columns wide: F14.3 value + I1 LLI
    # + I1 signal strength (fixed-width RINEX v2 layout).
    REC_LEN = 16
    OBS_TYPES = re.compile(r'\s{4}([LCPDT][12])')
    RE_TOBS = re.compile(r'(.*)# / TYPES OF OBSERV')
    RE_END_HEADER = re.compile(r'(.*)END OF HEADER')
    RE_COMMENT = re.compile(r'(.*)COMMENT')
    END_OF_HEADER = 'END OF HEADER'
    def _get_val(self, rec, i, rlen):
        """get_val(rec, i, rlen) -> val, lli, sig_strength
        parse the record rec to retrieve observation values: val, LLI
        and signal strength.
        Parameters
        ----------
        rec : str
            the whole record to parse
        i : int
            start of an observation data substring
        rlen : int
            length of the substring
        """
        val = rec[i:i + rlen]
        digs = '0123456789'
        spaces = ' ' * len(val)
        # An all-blank (or empty) field means "no observation".
        if val and val != spaces:
            # observation: first 14 columns hold the F14.3 value
            try:
                obs = val[:14]
                obs = float(obs)
            except ValueError:
                err = "wrong data string:\n%s" % rec
                raise RinexError(self.filename, err)
            val = val[14:]
            # LLI (loss-of-lock indicator); blank or missing -> 0
            try:
                lli = val[0]
                if lli in digs:
                    lli = int(lli)
                else:
                    lli = 0
            except IndexError:
                lli = 0
            # Signal strength; blank or missing -> 0
            try:
                sig_strength = val[1]
                if sig_strength in digs:
                    sig_strength = int(sig_strength)
                else:
                    sig_strength = 0
            except IndexError:
                sig_strength = 0
        else:
            obs = None
            lli = 0
            sig_strength = 0
        return obs, lli, sig_strength
    def _get_prn(self, rec, i, cur_epoch):
        """get_prn(rec, i) -> prn

        Extract a 3-character satellite id (system letter + 2-digit
        number) starting at column *i* of the epoch record.
        """
        val = rec[i:i + 3]
        if not val:
            err = "can't extract satellite:\n%s <%s>" % (cur_epoch, rec)
            raise RinexError(self.filename, err)
        # system identifier; a blank system letter defaults to GPS ('G')
        cur_sys = val[0].upper()
        if cur_sys == ' ':
            cur_sys = 'G'
        # satellite number
        cur_sat = None
        try:
            cur_sat = val[1:]
            cur_sat = int(cur_sat)
        except ValueError:
            err = 'wrong PRN (%s) in epoch record:\n<%s>' % (
                cur_sat, cur_epoch)
            raise RinexError(self.filename, err)
        cur_sat = "%02d" % cur_sat
        # PRN
        cur_prn = cur_sys + cur_sat
        return cur_prn
    def get_interval(self, n):
        """get_interval(n) -> interval
        Get an observation interval using first n epochs of an observation file.
        Parameters
        ----------
        n : int
            amount of epochs to read
        Returns
        -------
        interval : float
            observation interval, seconds
        """
        epoch = None
        epoch_count = 0
        deltas = []
        dt = None
        records = self.read_records()
        try:
            while epoch_count < n:
                rec = next(records)
                if not epoch:
                    epoch = rec[0]
                    continue
                elif epoch == rec[0]:
                    continue
                dt = rec[0] - epoch
                if dt:
                    deltas.append(dt.total_seconds())
                    epoch_count += 1
                epoch = rec[0]
        except RinexError as err:
            msg = ("Can't find out obs interval: %s" % str(err))
            raise RinexError(self.filename, msg)
        except StopIteration as err:
            # Short file: acceptable as long as we saw at least one delta.
            if dt is None:
                msg = ("Can't find out obs interval: %s" % str(err))
                raise RinexError(self.filename, msg)
            else:
                pass
        records.close()
        del records
        if len(set(deltas)) == 1:
            interval = deltas[0]
        else:
            # Mixed deltas: pick the most frequent one (the mode).
            dt_dict = {}
            for dt in deltas:
                if dt not in dt_dict:
                    dt_dict[dt] = 1
                else:
                    dt_dict[dt] += 1
            tmp = list(dt_dict.keys())
            tmp.sort(key=lambda k: dt_dict[k], reverse=True)
            interval = tmp[0]
        # Rewind and skip past the header again so subsequent
        # read_records() calls start at the first data record.
        self._fobj.seek(0)
        for rec in self._fobj:
            rec = rec[60:].rstrip()
            if rec == self.END_OF_HEADER:
                break
        return interval
    def set_obs_num_types(self, header):
        """set_obs_num_types(header) -> None
        Parameters
        ----------
        header : list
            check and set value of the self.properties['obs types']
        """
        obs_types = []
        def get_o_types(r, n, l):
            """get_o_types(r, n, l) -> obs_types
            Parameters
            ----------
            r : str
                substring
            n : int
                amount of observations
            l : int
                len of the record
            Returns
            -------
            obs_types : list
                list of the observation types
            """
            o_types = []
            for i in range(0, n * l, l):
                cur_o_type = r[i:i + l]
                match = re.match(self.OBS_TYPES, cur_o_type)
                if not match:
                    # Unknown type is kept anyway (last two chars), only
                    # logged as a warning.
                    msg = "Unknown observation type: '%s'\n%s" % (
                        cur_o_type, r.rstrip())
                    self._logger.warning(msg)
                cur_o_type = cur_o_type[-2:]
                o_types.append(cur_o_type)
            return o_types
        for (idx, rec) in enumerate(header):
            if not self.RE_TOBS.match(rec):
                continue
            try:
                obs_num = rec[:6]
                obs_num = int(obs_num)
            except (IndexError, ValueError):
                err = ('Can\'t read the number of the observation types:\n'
                       ' <%s>') % rec.rstrip()
                raise RinexError(self.filename, err)
            # More than 9 types: the list continues on following header
            # lines, 9 types per line.
            if obs_num > 9:
                obs_types = []
                num_compl_lines = obs_num // 9
                for cur_l in range(num_compl_lines):
                    obs_types_line = header[idx + cur_l]
                    obs_types += get_o_types(obs_types_line[6:60], 9, 6)
                if obs_num % 9:
                    rest_num = obs_num - num_compl_lines * 9
                    obs_types_line = header[idx + num_compl_lines]
                    obs_types += get_o_types(obs_types_line[6:6 + rest_num * 6],
                                             rest_num,
                                             6)
            else:
                obs_types = get_o_types(rec[6:6 + obs_num * 6], obs_num, 6)
            if obs_num != len(obs_types):
                err = """Can't extract some observation types from:
                '%s'""" % rec.rstrip()
                raise RinexError(self.filename, err)
            break
        self.properties['obs types'] = tuple(obs_types)
    def set_logger(self):
        """ set_logger()
        """
        self._logger = logging.getLogger(NAME + '.Obs2')
    def read_epoch(self, epoch):
        """
        read_epoch(epoch) -> epoch_components
        parse epoch record.
        Parameters
        ----------
        epoch : str
            epoch record
        Returns
        -------
        epoch_components : tuple
            (datetime, epoch-flag, num-of-satellites, rcvr-clock-offset, prns)
        """
        # assume that the first element is an epoch
        # 1. epoch flag
        try:
            epoch_flag = int(epoch[26:29])
        except ValueError:
            err = 'wrong epoch flag in epoch record\n%s' % epoch
            raise RinexError(self.filename, err)
        # set sat_num & return, need no date
        if epoch_flag > 1:
            msg = "epoch flag >1 (%s: %s)" % (self.filename, epoch)
            self._logger.info(msg)
            try:
                sat_num = int(epoch[29:32])
                return None, epoch_flag, sat_num, None, None
            except ValueError:
                err = "wrong event flag:\n%s" % epoch
                raise RinexError(self.filename, err)
        # 2. date (yy mm dd hh mi fields, 3 columns each, then seconds)
        try:
            d = []
            for i in range(0, 13, 3):
                val = epoch[i:i + 3]
                val = int(val)
                d.append(val)
            sec = float(epoch[15:26])
            microsec = (sec - int(sec)) * 1e+6
            microsec = float("%.5f" % microsec)
            d.append(int(sec))
            d.append(int(microsec))
            cur_epoch = validate_epoch(d)
        except (IndexError, ValueError) as err:
            msg = "wrong date in epoch record '%s': %s" % (epoch, str(err))
            raise RinexError(self.filename, msg)
        # optional receiver clock offset; missing/blank -> 0.0
        try:
            receiver_offset = epoch[68:]
            receiver_offset = float(receiver_offset)
        except (IndexError, ValueError):
            receiver_offset = 0.0
        # 3. num of satellites
        try:
            sat_num = epoch[29:32]
            sat_num = int(sat_num)
        except ValueError:
            err = 'wrong satellite number in epoch record:\n%s' % epoch
            raise RinexError(self.filename, err)
        # 4. list of PRNs (sat.num + sys identifier)
        prev_epoch_line = ''
        prns = []
        # > 12: PRN list continues on extra lines, 12 per line
        if sat_num > 12:
            num_compl_lines = sat_num // 12
            rest_sat_num = sat_num - num_compl_lines * 12
            # read strings which contain 12 satellites
            # - current row
            for i in range(32, 66, 3):
                cur_prn = self._get_prn(epoch, i, cur_epoch)
                prns.append(cur_prn)
            num_compl_lines -= 1
            # - next rows (12 sat per row)
            while num_compl_lines:
                num_compl_lines -= 1
                epoch = self._next_rec(self._fobj)
                for i in range(32, 66, 3):
                    cur_prn = self._get_prn(epoch, i, cur_epoch)
                    prns.append(cur_prn)
            # - the last one (if any)
            if rest_sat_num:
                epoch = self._next_rec(self._fobj)
                r_stop = 32 + rest_sat_num * 3 - 2
                for i in range(32, r_stop, 3):
                    cur_prn = self._get_prn(epoch, i, cur_epoch)
                    prns.append(cur_prn)
        # < 12
        else:
            for i in range(32, 32 + 3 * sat_num - 2, 3):
                cur_prn = self._get_prn(epoch, i, cur_epoch)
                prns.append(cur_prn)
        if sat_num != len(prns):
            err = "can't extract all PRNs from epoch line:\n%s\n%s" % (
                prev_epoch_line, epoch)
            raise RinexError(self.filename, err)
        return cur_epoch, epoch_flag, sat_num, receiver_offset, prns
    def read_records(self):
        """read_records() -> generator
        Returns
        -------
        dataset : tuple
            (epoch, sat, data) with
            data = { obs_1: val, obs_2: val, ... }
        """
        for line in self._fobj:
            (cur_epoch, epoch_flag,
             sat_num, receiver_offset, prns) = self.read_epoch(line.rstrip())
            # if the flag != 0
            # 1. Power failure between previous and current epoch
            if epoch_flag == 1:
                msg = ('%s - power failure between previous and current '
                       'epoch %s.') % (self.filename, cur_epoch)
                self._logger.info(msg)
            # 3. New site occupation
            elif epoch_flag == 3:
                msg = "New site occupation: {} - {}."
                msg = msg.format(cur_epoch, self.filename)
                self._logger.info(msg)
                # sat_num here is the number of header lines that follow
                header_slice = []
                while sat_num > 0:
                    h_str = self._next_rec(self._fobj)
                    header_slice.append(h_str)
                    sat_num -= 1
                self._parse_header(header_slice)
                continue
            # 4. Header information
            elif epoch_flag == 4:
                header_slice = []
                while sat_num > 0:
                    sat_num -= 1
                    h_str = self._next_rec(self._fobj)
                    header_slice.append(h_str)
                    msg = "%s: %s." % (self.filename, h_str)
                    self._logger.debug(msg)
                self._parse_header(header_slice)
                continue
            # n. Some other
            elif epoch_flag > 1:
                msg = 'epoch flag = %s; %s record(s) to follow: %s - %s.' % (
                    epoch_flag, sat_num, cur_epoch, self.filename)
                self._logger.debug(msg)
                while sat_num > 0:
                    msg = self._next_rec(self._fobj)
                    self._logger.debug(msg)
                    sat_num -= 1
                continue
            # FIXME should I?
            if receiver_offset:
                pass
            # read the records
            for cur_prn in prns:
                data = []
                # lines_per_rec holds how many 16-char fields sit on each
                # continuation line of one satellite's record.
                for n in self.lines_per_rec:
                    rec = self._next_rec(self._fobj)
                    rend = n * self.REC_LEN - (self.REC_LEN - 1)
                    rstep = self.REC_LEN
                    for i in range(0, rend, rstep):
                        (val, lli, sig_strength) = (
                            self._get_val(rec, i, self.REC_LEN))
                        data.append((val, lli, sig_strength))
                types_n_vals = {}
                for i in range(len(data)):
                    o_type = self.properties['obs types'][i]
                    val = data[i]
                    types_n_vals[o_type] = val
                # it could be epoch duplicate: just skip
                if cur_epoch == self.preceding_epoch:
                    msg = "%s - duplicate dates: %s" % (
                        self.filename, str(cur_epoch))
                    self._logger.info(msg)
                    continue
                yield (cur_epoch, cur_prn, types_n_vals)
            self.preceding_epoch = cur_epoch
    def __init__(self, f_obj, filename):
        """Read the file header, determine the observation types and the
        per-record line layout, and validate the header's interval value
        against the one measured from the first epochs."""
        super(Obs2, self).__init__(f_obj, filename)
        self._logger = None
        self.set_logger()
        # previous epoch to find duplicates
        self.preceding_epoch = None
        self.properties = {
            'obs types': (None,),
        }
        self.lines_per_rec = []
        file_header = []
        for rec in self._fobj:
            if self.RE_END_HEADER.match(rec):
                break
            file_header.append(rec)
        self.set_obs_num_types(file_header)
        self._det_lines_per_rec()
        # header labels
        self.ver_type = RinexVersionType(self.VERSION)
        self.tofo = TimeOfFirstObs(self.VERSION)
        self.xyz = ApproxPositionXYX(self.VERSION)
        self.interval = Interval(self.VERSION)
        self._parse_header(file_header)
        # Trust the measured interval over the header's declared one.
        dt = self.get_interval(10)
        if self.interval.value != dt:
            msg_wi = 'Wrong interval value in the header of {}: {}; ' \
                     'using {} instead.'
            self._logger.warning(msg_wi.format(self.filename,
                                               self.interval.value,
                                               dt))
            self.interval.value = '{:10.3f}'.format(dt)
    def _det_lines_per_rec(self):
        """_det_lines_per_rec()
        determine amount of the lines per record.
        """
        # RINEX v2 stores at most 5 observations per line; compute how
        # many fields land on each line of one satellite's record.
        obs_num = len(self.properties['obs types'])
        s2flw = obs_num / 5.
        s2flw = math.ceil(s2flw)
        s2flw = int(s2flw)
        lines_per_rec = []
        n = obs_num
        for s in range(s2flw):
            n -= 5
            if n < 0:
                lines_per_rec.append(n + 5)
            elif n >= 0:
                lines_per_rec.append(5)
        self.lines_per_rec = lines_per_rec
        del lines_per_rec
        del obs_num
    def __del__(self):
        # Best-effort close of the underlying file object.
        self._fobj.close()
class Obs21(Obs2):
    """RINEX obs v2.1"""
    VERSION = 2.1
    # (F14.3,I1,I1)
    REC_LEN = 16
    # v2.1 adds the 'S' (raw signal strength) observation type.
    OBS_TYPES = re.compile(r'\s{4}([LCPDTS][12])')
    def set_logger(self):
        self._logger = logging.getLogger(NAME + '.Obs21')
class Obs211(Obs21):
    """RINEX obs v2.11"""
    VERSION = 2.11
    # (F14.3,I1,I1)
    REC_LEN = 16
    # v2.11 widens the allowed frequency digits (1,2,5,6,7,8).
    OBS_TYPES = re.compile(r'\s{4}([LCPDS][125678])')
    def set_logger(self):
        self._logger = logging.getLogger(NAME + '.Obs211')
| gnss-lab/tec-suite | tecs/rinex/v2/o.py | Python | gpl-3.0 | 18,274 |
#!/usr/bin/env python
"""
Quick and dirty IRC notification script.
Any '{var}'-formatted environment variables names will be expanded
along with git "pretty" format placeholders (like "%H" for commit hash,
"%s" for commit message subject, and so on). Use commas to delineate
multiple messages.
Example:
    python scripts/irc-notify.py chat.freenode.net:6697/#gridsync \[{branch}:%h\] {color}3$(python scripts/sha256sum.py dist/Gridsync.AppImage),:\)
"""
import os, random, socket, ssl, subprocess, sys, time
from subprocess import check_output as _co
# mIRC color control code; "{color}" in a message expands to this.
color = "\x03"
branch = _co(["git", "rev-parse", "--abbrev-ref", "HEAD"]).decode().strip()
def _pf(s):
    # Expand git "pretty" placeholders (%h, %s, ...) via `git log -1`.
    # Strings without '%' are returned untouched to avoid a git call.
    if "%" not in s:
        return s
    return _co(["git", "log", "-1", "--pretty={}".format(s)]).decode().strip()
# Expose every environment variable (lowercased) for str.format expansion,
# without clobbering the names defined above.
protected_vars = vars().keys()
for key, value in os.environ.items():
    if key.lower() not in protected_vars:
        vars()[key.lower()] = value
messages = []
for msg in " ".join(sys.argv[2:]).split(","):
    messages.append(_pf(msg.format(**vars())).strip())
# argv[1] looks like "host:port/#channel".
_addr = sys.argv[1].split("/")[0]
_dest = sys.argv[1].split("/")[1]
_host = _addr.split(":")[0]
_port = _addr.split(":")[1]
_user = socket.gethostname().replace(".", "_")
try:
    # NOTE(review): ssl.wrap_socket is deprecated (removed in Python 3.12);
    # modern code should use ssl.SSLContext().wrap_socket -- confirm the
    # targeted Python version before changing.
    s = ssl.wrap_socket(socket.socket(socket.AF_INET, socket.SOCK_STREAM))
    s.connect((socket.gethostbyname(_host), int(_port)))
    s.send("NICK {0}\r\nUSER {0} * 0 :{0}\r\n".format(_user).encode())
    f = s.makefile()
    while f:
        line = f.readline()
        print(line.rstrip())
        w = line.split()
        if w[0] == "PING":
            # Keep the connection alive while waiting for registration.
            s.send("PONG {}\r\n".format(w[1]).encode())
        elif w[1] == "433":
            # Nickname already in use: retry with a random suffix.
            s.send(
                "NICK {}-{}\r\n".format(
                    _user, str(random.randint(1, 9999))
                ).encode()
            )
        elif w[1] == "001":
            # 001 = registration complete; deliver the notices and exit.
            time.sleep(5)
            for msg in messages:
                print("NOTICE {} :{}".format(_dest, msg))
                s.send("NOTICE {} :{}\r\n".format(_dest, msg).encode())
            time.sleep(5)
            sys.exit()
except Exception as exc:
    print("Error: {}".format(str(exc)))
    sys.exit()
| gridsync/gridsync | scripts/irc-notify.py | Python | gpl-3.0 | 2,150 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import gtk
import sqlalchemy
VOCABULARY_DB = "sqlite:///data/vocabulary.db"
class EntryDialog(gtk.MessageDialog):
    """A gtk.MessageDialog with a single text-entry widget appended."""
    def __init__(self, *args, **kwargs):
        '''
        Creates a new EntryDialog. Takes all the arguments of the usual
        MessageDialog constructor plus one optional named argument
        "default_value" to specify the initial contents of the entry.
        '''
        # Pop our extra kwarg before delegating to MessageDialog.
        if 'default_value' in kwargs:
            default_value = kwargs['default_value']
            del kwargs['default_value']
        else:
            default_value = ''
        super(EntryDialog, self).__init__(*args, **kwargs)
        entry = gtk.Entry()
        entry.set_text(str(default_value))
        # Pressing Enter in the entry behaves like clicking OK.
        entry.connect("activate",
                      lambda ent, dlg, resp: dlg.response(resp),
                      self, gtk.RESPONSE_OK)
        self.vbox.pack_end(entry, True, True, 0)
        self.vbox.show_all()
        self.entry = entry
    def set_value(self, text):
        # Replace the entry's current contents.
        self.entry.set_text(text)
    def run(self):
        # Run the dialog; return the entered text on OK, None otherwise.
        result = super(EntryDialog, self).run()
        if result == gtk.RESPONSE_OK:
            text = self.entry.get_text()
        else:
            text = None
        return text
class VocabularyWidget(gtk.VBox):
    """Top-level widget: lesson selector toolbar + vocabulary table."""
    def __init__(self):
        gtk.VBox.__init__(self)
        # Setup DB
        self.db = sqlalchemy.create_engine(VOCABULARY_DB)
        self.tb_les = sqlalchemy.Table('lessons',
                                       sqlalchemy.MetaData(self.db),
                                       autoload=True)
        self.tb_vocab = sqlalchemy.Table('vocabulary',
                                         sqlalchemy.MetaData(self.db),
                                         autoload=True)
        # create toolbar
        toolbar = gtk.Toolbar()
        label = gtk.Label("Lesson: ")
        toolbar.append_element(gtk.TOOLBAR_CHILD_WIDGET, label, None, None,
                               None, None, lambda : None, None)
        # model columns: lesson id, lesson name
        self.lessons = gtk.ListStore(int, str)
        self.cmb_lessons = gtk.ComboBox(self.lessons)
        cell = gtk.CellRendererText()
        self.cmb_lessons.pack_start(cell, True)
        self.cmb_lessons.add_attribute(cell, 'text', 1)
        self.cmb_lessons.connect("changed", self._on_lesson_changed)
        toolbar.append_element(gtk.TOOLBAR_CHILD_WIDGET, self.cmb_lessons,
                               "Lesson", None, None, None, lambda : None, None)
        icon = gtk.Image()
        icon.set_from_stock(gtk.STOCK_ADD, 4)
        toolbar.append_element(gtk.TOOLBAR_CHILD_BUTTON, None, None,
                               "Add a new lesson", None, icon,
                               self._on_add_clicked, None)
        icon = gtk.Image()
        icon.set_from_stock(gtk.STOCK_DELETE, 4)
        toolbar.append_element(gtk.TOOLBAR_CHILD_BUTTON, None, None,
                               "Delete current lesson", None, icon,
                               self._on_delete_clicked, None)
        toolbar.append_element(gtk.TOOLBAR_CHILD_SPACE, None, None, None,
                               None, None, lambda : None, None)
        self.pack_start(toolbar, expand=False, fill=False)
        # create vocabulary table
        self.table = VocabularyTable(self.db)
        self.pack_start(self.table)
        # load data from database
        self._load_lessons()
    def _load_lessons(self):
        # Refill the combo model; id -1 is the synthetic "All" entry.
        res = self.tb_les.select().execute()
        self.lessons.clear()
        self.lessons.append([-1, "All"])
        for r in res:
            self.lessons.append([r[0], r[1]])
        self.cmb_lessons.set_active(0)
    def _on_add_clicked(self, widget):
        # Ask for a lesson name and insert it.
        dialog = EntryDialog(None, gtk.DIALOG_MODAL,
                             gtk.MESSAGE_INFO, gtk.BUTTONS_OK_CANCEL,
                             "Enter the name of the new lesson")
        response = dialog.run()
        dialog.destroy()
        if response != None and response != '':
            self.db.execute(self.tb_les.insert().values(name=response))
            self._load_lessons()
    def _on_delete_clicked(self, widget):
        # Confirm, then delete the lesson and all its vocabulary rows.
        row = self.cmb_lessons.get_model()[self.cmb_lessons.get_active()]
        dialog = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_INFO,
                                   gtk.BUTTONS_YES_NO,
                                   "Are you sure you want to delete the '" + row[1] + "' lesson?"
                                   + "\nAll the lesson's vocabulary will be deleted too.")
        response = dialog.run()
        dialog.destroy()
        if response == gtk.RESPONSE_YES:
            q = self.tb_vocab.delete().where(self.tb_vocab.c.lesson == row[0])
            self.db.execute(q)
            q = self.tb_les.delete().where(self.tb_les.c.id == row[0])
            self.db.execute(q)
            self.lessons.remove(self.cmb_lessons.get_active_iter())
            self.cmb_lessons.set_active(len(self.lessons)-1)
    def _on_lesson_changed(self, widget):
        # Reload the table for the newly selected lesson (-1 = all).
        it = widget.get_active_iter()
        if it != None:
            lesson_id = widget.get_model().get(it, 0)[0]
            self.table.load(lesson_id)
class VocabularyTable(gtk.TreeView):
    """Editable grid of vocabulary rows backed by the 'vocabulary' table."""
    def __init__(self, db):
        gtk.TreeView.__init__(self)
        # Setup DB
        self.db = db
        self.tb_vocab = sqlalchemy.Table('vocabulary',
                                         sqlalchemy.MetaData(self.db),
                                         autoload=True)
        # model columns mirror the DB row:
        # id, simplified, traditional, reading, translation, lesson
        self.model = gtk.ListStore(int, str, str, str, str, int)
        self.set_model(self.model)
        self.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
        self.connect("key_press_event", self._on_key_press)
        self.connect("button_press_event", self._on_click)
        self.col_chars = gtk.TreeViewColumn('Characters')
        self.col_reading = gtk.TreeViewColumn('Pinyin')
        self.col_trans = gtk.TreeViewColumn('Translation')
        self.append_column(self.col_chars)
        self.append_column(self.col_reading)
        self.append_column(self.col_trans)
        self.cel_chars = gtk.CellRendererText()
        self.cel_chars.set_property('editable', True)
        self.cel_chars.connect("edited", self._on_cell_edited, 1)
        self.cel_reading = gtk.CellRendererText()
        self.cel_reading.set_property('editable', True)
        self.cel_reading.connect("edited", self._on_cell_edited, 3)
        self.cel_trans = gtk.CellRendererText()
        self.cel_trans.set_property('editable', True)
        self.cel_trans.connect("edited", self._on_cell_edited, 4)
        self.col_chars.pack_start(self.cel_chars, False)
        self.col_reading.pack_start(self.cel_reading, False)
        self.col_trans.pack_start(self.cel_trans, False)
        self.col_chars.set_attributes(self.cel_chars, text=1)
        self.col_reading.set_attributes(self.cel_reading, text=3)
        self.col_trans.set_attributes(self.cel_trans, text=4)
    def load(self, lesson):
        # Fill the model with one lesson's rows; -1 loads everything.
        self.lesson = lesson
        if lesson == -1:
            query = self.tb_vocab.select()
        else:
            query = self.tb_vocab.select(self.tb_vocab.c.lesson == lesson)
        res = query.execute()
        self.model.clear()
        for r in res:
            self.model.append(r)
    def _on_key_press(self, widget, event):
        # Delete key removes all selected rows after confirmation.
        if event.keyval == gtk.keysyms.Delete:
            dialog = gtk.MessageDialog(None, gtk.DIALOG_MODAL,
                                       gtk.MESSAGE_INFO, gtk.BUTTONS_YES_NO,
                                       "Are you sure you want to delete selected words?")
            response = dialog.run()
            dialog.destroy()
            if response == gtk.RESPONSE_YES:
                sel = self.get_selection()
                model, pathlist = sel.get_selected_rows()
                for path in pathlist:
                    self._delete_row(model.get_iter(path))
                self._delete_commit()
    def _on_click(self, widget, event):
        # Right click: show a context menu on the row under the cursor.
        if event.button == 3:
            x = int(event.x)
            y = int(event.y)
            time = event.time
            pthinfo = widget.get_path_at_pos(x, y)
            if pthinfo is not None:
                path, col, cellx, celly = pthinfo
                widget.grab_focus()
                widget.set_cursor( path, col, 0)
                pmenu = gtk.Menu()
                item = gtk.MenuItem("New")
                item.connect("activate", self._on_popup_new_clicked)
                pmenu.append(item)
                item = gtk.MenuItem("Delete")
                # NOTE(review): this "Delete" item has no "activate"
                # handler connected, so it does nothing -- confirm intent.
                pmenu.append(item)
                pmenu.show_all()
                pmenu.popup( None, None, None, event.button, time)
            return True
    def _on_popup_new_clicked(self, widget):
        # Insert an empty row for the current lesson and mirror it in the
        # model with the id the database assigned.
        ins = self.tb_vocab.insert()
        new = ins.values(simplified='', traditional='', reading='',
                         translation='', lesson=self.lesson)
        res = self.db.execute(new)
        newid = res.last_inserted_ids()[0]
        self.model.append([newid, '', '', '', '', self.lesson])
    def _on_cell_edited(self, cell, path, new_text, col_id):
        # Persist an in-place cell edit.
        it = self.model.get_iter(path)
        self.model[it][col_id] = new_text
        self._update_row(it)
    def _update_row(self, it):
        # Write one model row back to the database.
        row = self.model[it]
        update = self.tb_vocab.update().where(
            self.tb_vocab.c.id==row[0])
        update_v = update.values(simplified=unicode(row[1]),
                                 traditional=unicode(row[2]),
                                 reading=unicode(row[3]),
                                 translation=unicode(row[4]),
                                 lesson=self.lesson)
        self.db.execute(update_v)
    def _delete_row(self, it):
        # Delete from the DB immediately; mark the model row with id -2 so
        # _delete_commit() can drop it from the view afterwards.
        i = self.model[it][0]
        self.db.execute(self.tb_vocab.delete().where(self.tb_vocab.c.id == i))
        self.model[it][0] = -2
    def _delete_commit(self):
        # NOTE(review): removing rows while iterating the ListStore may
        # skip neighbours of removed rows -- verify with multi-row deletes.
        for it in self.model:
            if it[0] == -2:
                self.model.remove(it.iter)
| tomas-mazak/taipan | taipan/vocabulary.py | Python | gpl-3.0 | 9,999 |
import logging
from ..exports import ObjectCollectorMixin, ListCollector
#: Event name used to signal members to perform one-time initialization
INITIALIZE = 'initialize'
class Hooks(ObjectCollectorMixin, ListCollector):
    """Collector for event hooks that must be installed while the
    supervisor starts up."""

    export_key = 'hooks'

    def install_member(self, hook):
        """Register one collected *hook* object by its ``hook`` attribute."""
        name = getattr(hook, 'hook', None)
        if not name:
            logging.error('Missing hook name for {}'.format(hook))
            return
        if name != INITIALIZE:
            self.events.subscribe(name, hook)
        else:
            # Initialize hook must be executed in-place, as soon as the
            # component loads. Currently it's hard-wired here, because
            # anywhere else seemed already too late to fire it, though a
            # better alternative is welcome.
            hook(self.supervisor)
| Outernet-Project/librarian | librarian/core/collectors/hooks.py | Python | gpl-3.0 | 927 |
"""Sis_legajos URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from Alumnos import views
from django.contrib.auth.views import login, logout
#from Alumnos.views import (
# ListaUsuarios,
# DetalleUsuario,
# CrearUsuario,
# ActualizarUsuario,
# EliminarUsuario,
# ListaMovimientos,
#)
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^home/', views.home, name='Home'),
    url(r'^home2/', views.home2, name='Home2'),
    # Search routes (by national id, surname, record number).
    url(r'^busqueda_dni/$', views.busqueda_dni),
    url(r'^busqueda_apellido/$', views.busqueda_apellido),
    url(r'^busqueda_legajo/$', views.busqueda_legajo),
    url(r'^legajo/([0-9]+)/$', views.alumno, name='vista_alumno'),
    url(r'^nuevo_alumno/$', views.nuevo_alumno),
    url(r'^editar_alumno/([0-9]+)$', views.editar_alumno),
    url(r'^borrar_alumno/([0-9]+)/$', views.borrar_alumno,name="borrar_alumno"),
    #url(r'^almacenar/$', views.almacenar),
    url(r'^login/$',login,{'template_name':'login.html'}),
    url(r'^logout/$',logout,{'template_name':'logout.html'}),
    # Select a student so their record (legajo) can be moved.
    url(r'^seleccionar_alumno/$', views.seleccionar_alumno),
    url(r'^mover_legajo_lugar/([0-9]+)/$', views.mover_legajo_lugar,name="mover_legajo_lugar"),
    url(r'^mover_legajo_archivo/([0-9]+)/$', views.mover_legajo_archivo),
    url(r'^mover_legajo_cajon/([0-9]+)/$', views.mover_legajo_cajon),
    url(r'^confirmar_mover/([0-9]+)/([0-9]+)/([0-9]+)/$', views.confirmar_mover, name="confirmar_mover"),
    # URLs for "Lugar" (storage place) management.
    url(r'^nuevo_lugar/$', views.nuevo_lugar),
    url(r'^lugar_borrar/([0-9]+)/$', views.borrar_lugar,name="borrar_lugar"),
    # URLs for "Archivo" (file cabinet) management.
    url(r'^nuevo_archivo/$', views.nuevo_archivo),
    url(r'^archivo_borrar/([0-9]+)/$', views.borrar_archivo,name="borrar_archivo"),
    # URLs for storing a record (legajo).
    url(r'^almacenar_legajo_lugar/([0-9]+)/$', views.almacenar_legajo_lugar,name="almacenar_legajo_lugar"),
    url(r'^almacenar_legajo_archivo/([0-9]+)/$', views.almacenar_legajo_archivo),
    url(r'^almacenar_legajo_cajon/([0-9]+)/$', views.almacenar_legajo_cajon),
    url(r'^confirmar_almacenar/([0-9]+)/([0-9]+)/$', views.confirmar_almacenar, name="confirmar_almacenar"),
    #url(r'^ver_movimientos/$', views.ver_movimientos),
    # User routes (disabled; kept for history).
    # url(r'^usuarios/$', ListaUsuarios.as_view(), name='lista'),
    # url(r'^usuarios/(?P<pk>\d+)$', DetalleUsuario.as_view(), name='detalle'),
    # url(r'^usuarios/nuevo$', CrearUsuario.as_view(), name='nuevo'),
    # url(r'^usuarios/editar/(?P<pk>\d+)$', ActualizarUsuario.as_view(),name='editar'),
    # url(r'^usuarios/borrar/(?P<pk>\d+)$', EliminarUsuario.as_view(),name='borrar')
    # Users fix -- second attempt (also disabled).
    #url(r'^$', ListaUsuarios.as_view(), name='lista'),
    #url(r'^(?P<pk>\d+)$', DetalleUsuario.as_view(), name='detalle'),
    #url(r'^nuevo$', CrearUsuario.as_view(), name='nuevo'),
    #url(r'^editar/(?P<pk>\d+)$', ActualizarUsuario.as_view(),name='editar'),
    #url(r'^borrar/(?P<pk>\d+)$', EliminarUsuario.as_view(),name='borrar')
    # Third attempt (disabled include of a users app).
    #url(r'^usuarios/', include('usuarios.urls', namespace='usuarios'))
    url(r'^movimientos/$', views.ver_movimientos),
    # old movements view: url(r'ver_movimientos/^$', ListaMovimientos.as_view(), name='listamovimientos')
    url(r'^cursos/', include('courses.urls', namespace='courses'))
]
| Informatorio/Sis_LegajosV2 | Sis_legajos/urls.py | Python | gpl-3.0 | 3,985 |
# coding: utf-8
import os
import re
import hashlib
import time
from string import strip
from datetime import datetime, timedelta, date
from dateutil.relativedelta import relativedelta
from flask import current_app, request, flash, render_template, session, redirect, url_for
from lac.data_modelz import *
from lac.helperz import *
from lac.formz import SizeQuotaForm, InodeQuotaForm
__author__ = "Nicolas CHATELAIN"
__copyright__ = "Copyright 2014, Nicolas CHATELAIN @ CINES"
__license__ = "GPL"
class Engine(object):
def __init__(self, app=None):
if app is not None:
self.app = app
self.init_app(self.app)
else:
self.app = None
    def init_app(self, app):
        """Register this engine on *app* and grab the sibling extensions
        (cache, ldap, converter, form manager) plus LDAP settings."""
        if not hasattr(app, 'extensions'):
            app.extensions = {}
        app.extensions['engine'] = self
        # Sibling extensions must already be registered on the app.
        self.cache = app.extensions['cache']
        self.ldap = app.extensions['ldap']
        self.converter = app.extensions['converter']
        self.fm = app.extensions['form_manager']
        self.ldap_search_base = app.config['LDAP_SEARCH_BASE']
        self.ldap_admin = app.config['LDAP_DEFAULT_ADMIN']
def is_ccc_group(self, member):
return 'ccc' == self.cache.get_group_from_member_uid(member)
def is_cines_group(self, uid):
return 'cines' == self.cache.get_group_from_member_uid(uid)
def is_principal_group(self, member, group):
return self.cache.get_posix_group_cn_by_gid(member['gidNumber'][0]) == group
    def disable_account(self, user):
        """Lock the LDAP account of *user* and, for 'ccc' members, also
        expire its shadow entry; flashes and logs a French status message."""
        user_attr = user.get_attributes()
        # pwdAccountLockedTime set to the epoch sentinel => locked.
        mod_attr = [('pwdAccountLockedTime', "000001010000Z")]
        user_uid = user_attr['uid'][0]
        if self.cache.get_group_from_member_uid(user_uid) == 'ccc':
            # Expire yesterday so the account is already invalid.
            new_shadow_expire_datetime = datetime.now() - timedelta(days=1)
            new_shadow_expire = str(
                self.converter.datetime_to_days_number(
                    new_shadow_expire_datetime)
            )
            mod_attr.append(('shadowExpire', new_shadow_expire))
        self.ldap.update_uid_attribute(
            user_uid,
            mod_attr
        )
        flash(u'Compte {0} désactivé'.format(user_uid))
        print(u'{0} : Compte {1} désactivé'.format(
            session['uid'],
            user_uid).encode('utf-8')
        )
    def enable_account(self, user):
        """Unlock the LDAP account of *user*; for 'ccc' members the shadow
        expiry is pushed SHADOW_DURATION months past the renew date."""
        user_uid = user.get_attributes()['uid'][0]
        if self.cache.get_group_from_member_uid(user_uid) == 'ccc':
            new_shadow_expire_datetime = self.converter.days_number_to_datetime(
                user.get_attributes()['cinesdaterenew'][0]
            ) + relativedelta(
                months = +self.app.config['SHADOW_DURATION']
            )
            new_shadow_expire = str(
                self.converter.datetime_to_days_number(new_shadow_expire_datetime))
            self.ldap.update_uid_attribute(user_uid,
                                           [('shadowExpire', new_shadow_expire)]
            )
        # Dropping pwdAccountLockedTime re-enables the account.
        self.ldap.remove_uid_attribute(user_uid,
                                       [('pwdAccountLockedTime', None)])
        print(u'{0} : Compte {1} activé'.format(
            session['uid'],
            user_uid).encode('utf-8')
        )
def get_search_user_fieldz(self):
page = Page.query.filter_by(label = "search_user").first()
page_attributez = Field.query.filter_by(page_id = page.id).all()
return page_attributez
def get_search_user_attributez(self):
search_attributez = [attr.label.encode('utf-8')
for attr in self.get_search_user_fieldz()]
return search_attributez
    def get_resultz_from_search_user_form(self, form):
        """Build an LDAP filter from the user-search *form* fields and
        return the matching posixAccount entries."""
        filter_list =[]
        # Each non-empty form field contributes one LDAP filter clause.
        if form.uid_number.data != "" :
            filter_list.append("(uidNumber={0})".format(
                strip(form.uid_number.data)
            ))
        if form.sn.data != "" :
            filter_list.append("(sn=*{0}*)".format(
                strip(form.sn.data)
            ))
        if form.uid.data != "" :
            filter_list.append("(uid=*{0}*)".format(
                strip(form.uid.data)
            ))
        if form.mail.data != "":
            filter_list.append("(mail=*{0}*)".format(
                strip(form.mail.data)
            ))
        if form.user_disabled.data :
            # Disabled accounts carry shadowExpire=0.
            filter_list.append("(shadowExpire=0)")
        if form.ip.data :
            filter_list.append("(cinesIpClient={0})".format(
                strip(form.ip.data)
            ))
        if form.nationality.data :
            filter_list.append("(cinesNationality={0})".format(
                strip(form.nationality.data)
            ))
        # Empty user_type searches the whole people branch.
        if form.user_type.data == "":
            base_dn = "ou=people,{0}".format(self.ldap_search_base)
        else:
            base_dn = "ou={0},ou=people,{1}".format(form.user_type.data,
                                                    self.ldap_search_base)
        if filter_list != [] :
            ldap_filter = "(&(objectClass=posixAccount){0})".format("".join(
                filter_list
            ))
        else:
            ldap_filter = "(objectClass=posixAccount)"
        search_resultz = self.ldap.search(
            ldap_filter=ldap_filter,
            attributes=self.get_search_user_attributez(),
            base_dn=base_dn)
        return search_resultz
def get_search_group_fieldz(self):
page = Page.query.filter_by(label = "search_group").first()
page_attributez = Field.query.filter_by(page_id = page.id, display=True).all()
return page_attributez
def get_search_group_attributez(self):
search_attributez = [attr.label.encode('utf-8')
for attr in self.get_search_group_fieldz()]
return search_attributez
def get_resultz_from_search_group_form(self, form):
filter_list =[]
if form.gid_number.data != "" :
filter_list.append("(gidNumber={0})".format(
strip(form.gid_number.data)
))
if form.cn.data != "" :
filter_list.append("(cn=*{0}*)".format(
strip(form.cn.data)
))
if form.description.data :
filter_list.append(
"(description=*{0}*)".format(
strip(form.description.data)
)
)
if form.group_type.data == "":
base_dn = "ou=groupePosix,{0}".format(self.ldap_search_base)
else:
base_dn = "ou={0},ou=groupePosix,{1}".format(
strip(form.group_type.data),
self.ldap_search_base)
if filter_list != [] :
ldap_filter = "(&(objectClass=posixGroup){0})".format("".join(
filter_list
))
else:
ldap_filter = "(objectClass=posixGroup)"
search_resultz = self.ldap.search(
ldap_filter=ldap_filter,
attributes=self.get_search_group_attributez(),
base_dn=base_dn)
return search_resultz
def update_group_memberz_cines_c4(self, branch, group, comite):
memberz_uid = self.ldap.get_posix_group_memberz(branch, group)
if len(memberz_uid)>1:
ldap_filter = '(&(objectClass=posixAccount)(|{0}))'.format(
''.join(['(uid={0})'.format(uid) for uid in memberz_uid]))
elif len(memberz_uid)==1:
ldap_filter = '(&(objectClass=posixAccount)(uid={0}))'.format(
memberz_uid[0])
else:
return
memberz = self.ldap.search(
ldap_filter=ldap_filter,
attributes=['cinesC4', 'dn', 'uid', 'gidNumber']
)
for member in memberz:
self.update_user_cines_c4(member, group, comite)
    def update_user_cines_c4(self, user, group, comite):
        """Keep a user's cinesC4 attribute and container membership in sync.

        Only acts on members of CCC groups whose stored cinesC4 differs from
        *comite*.  An empty *comite* clears the attribute and leaves the old
        container; otherwise the user is moved to the *comite* container and
        the attribute is rewritten.
        """
        user_attrz = user.get_attributes()
        user_uid = user_attrz['uid'][0]
        if (
                self.is_ccc_group(user_uid)
                and (
                    'cinesC4' not in user_attrz
                    or user_attrz['cinesC4'][0] != comite
                )
        ):
            if not comite and 'cinesC4' in user_attrz:
                # Comite removed: drop the attribute, then leave the old
                # container group.
                old_comite = user_attrz['cinesC4'][0]
                self.ldap.remove_uid_attribute(
                    user_uid,
                    [('cinesC4', None)]
                )
                self.rem_user_from_container(user_uid, old_comite)
            elif comite:
                if "cinesC4" in user_attrz:
                    # Moving between comites: leave the previous container
                    # before joining the new one.
                    old_comite = user_attrz['cinesC4'][0]
                    self.rem_user_from_container(user_uid, old_comite)
                self.add_user_to_container(user_uid, comite)
                self.ldap.update_uid_attribute(
                    user_uid,
                    [('cinesC4', comite.encode('utf-8'))])
                print(u'{0} : Nouveau comité pour {1} -> {2}'.format(
                    session['uid'],
                    user_uid,
                    comite).encode('utf-8')
                )
def add_user_to_container(self, user_uid, container_cn):
container_dn = "cn={0},ou=grConteneur,ou=groupePosix,{1}".format(
container_cn,
self.ldap_search_base
)
pre_modlist = []
pre_modlist.append(('memberUid', [user_uid.encode('utf-8')]))
self.ldap.add_dn_attribute(container_dn, pre_modlist)
def rem_user_from_container(self, user_uid, container_cn):
container_dn = "cn={0},ou=grConteneur,ou=groupePosix,{1}".format(
container_cn,
self.ldap_search_base
)
pre_modlist = []
pre_modlist.append(('memberUid', [user_uid.encode('utf-8')]))
self.ldap.remove_dn_attribute(container_dn, pre_modlist)
    def update_password_from_form(self, form, uid):
        """Update the LDAP password of *uid* from a password-change form.

        Two paths:
        - admin resetting someone else (uid != session uid): writes
          userPassword directly, flags pwdReset for most populations, then
          redirects to that user's page;
        - self-service: performs an LDAP password change bound with the old
          password, then redirects home.
        'cines' accounts additionally get their Samba NT hash refreshed.
        """
        pre_modlist = []
        if self.cache.get_group_from_member_uid(uid) == 'cines':
            # Samba stores its own MD4 hash of the UTF-16LE password.
            nt_hash = hashlib.new(
                'md4',
                strip(form.new_pass.data).encode('utf-16le')
            ).hexdigest().upper()
            pre_modlist = [('sambaPwdLastSet', str(int(time.time()))),
                           ('sambaNTPassword', nt_hash)]
        if uid != session['uid']:
            # Admin path: set the password attribute directly.
            pre_modlist.append(('userPassword',
                                strip(form.new_pass.data).encode('utf-8')))
            if self.cache.get_group_from_member_uid(uid) not in ['autre', 'soft']:
                # Force a password change at next bind.
                pre_modlist.append(('pwdReset', 'TRUE'))
            if self.ldap.update_uid_attribute(uid, pre_modlist):
                flash(u'Mot de passe pour {0} mis à jour avec succès.'.format(uid))
                print(u'{0} : Mise à jour du mot de passe pour {1}'.format(
                    session['uid'],
                    uid).encode('utf-8')
                )
            return redirect(url_for('show_user',
                                    page= self.cache.get_group_from_member_uid(uid),
                                    uid=uid))
        else:
            # Self-service path: bind as the user with the current password.
            if self.ldap.change_passwd(
                uid,
                session['password'],
                strip(form.new_pass.data)
            ):
                flash(
                    u'Votre mot de passe a été mis à jour avec succès.'.format(uid)
                )
                print(u'{0} : Mise à jour du mot de passe pour {1}'.format(
                    session['uid'],
                    uid).encode('utf-8')
                )
                # Samba attributes (if any) still need a separate modify.
                if pre_modlist:
                    self.ldap.update_uid_attribute(uid, pre_modlist)
            return redirect(url_for('home'))
    def update_page_from_form(self, page, raw_form):
        """Apply an admin page-edit form: fields, object classes, attributes.

        *raw_form* bundles the WTForm plus the page's current objectClass ids,
        its standalone attribute ids and its attribute labels.  Removed object
        classes cascade to delete the fields they brought in.
        """
        form = raw_form['form']
        page_oc_id_list = raw_form['page_oc_id_list']
        page_unic_attr_id_list = raw_form['page_unic_attr_id_list']
        attr_label_list = raw_form['attr_label_list']
        if attr_label_list is not None:
            self.update_fields_from_edit_page_admin_form(form, attr_label_list, page)
        if form.oc_form.selected_oc.data is not None:
            # Handle added ObjectClasses.
            for oc_id in form.oc_form.selected_oc.data :
                if oc_id not in page_oc_id_list:
                    print("Creation de l'Object Class id {0}".format(oc_id))
                    page_oc = PageObjectClass(page.id, oc_id)
                    db.session.add(page_oc)
            # Handle removed ObjectClasses, cascading to their
            # associated fields.
            for oc_id in page_oc_id_list:
                if oc_id not in form.oc_form.selected_oc.data:
                    print("Suppression de l'Object Class id {0}".format(oc_id))
                    PageObjectClass.query.filter_by(page_id=page.id,
                                                    ldapobjectclass_id= oc_id
                    ).delete()
                    attr_to_del_list = [
                        attr.id for attr in get_attr_from_oc_id_list([oc_id])
                    ]
                    print("Attributs à supprimer {0}".format(attr_to_del_list))
                    Field.query.filter(Field.page_id==page.id,
                                       Field.ldapattribute_id.in_(
                                           attr_to_del_list
                                       )
                    ).delete(synchronize_session='fetch')
        if form.attr_form.selected_attr.data is not None:
            # Handle added standalone attributes.
            for attr_id in form.attr_form.selected_attr.data :
                if attr_id not in page_unic_attr_id_list:
                    print("Creation de l'attribut id {0}".format(attr_id))
                    attr = LDAPAttribute.query.filter_by(id = attr_id).first()
                    self.create_default_field(attr, page)
        if page_unic_attr_id_list is not None:
            # Handle removed standalone attributes.
            for attr_id in page_unic_attr_id_list:
                if attr_id not in form.attr_form.selected_attr.data:
                    print("Suppression de l'attribut id {0}".format(attr_id))
                    Field.query.filter_by(
                        id=attr_id
                    ).delete()
        db.session.commit()
        print(u'{0} : Page {1} mise à jour'.format(
            session['uid'],
            page.label).encode('utf-8')
        )
def update_lac_admin_from_form(self, form):
group_dn = "cn=lacadmin,ou=system,{0}".format(self.ldap_search_base)
memberz = [ get_uid_from_dn(dn)
for dn in self.ldap.get_lac_admin_memberz() ]
if form.selected_memberz.data is not None:
memberz_to_add = []
for member in form.selected_memberz.data:
if member not in memberz:
memberz_to_add.append(self.ldap.get_full_dn_from_uid(member))
if memberz_to_add:
self.ldap.add_dn_attribute(group_dn,
[('member', member.encode('utf8'))
for member in memberz_to_add]
)
if memberz is not None:
memberz_to_del = []
for member in memberz:
if member not in form.selected_memberz.data:
memberz_to_del.append(self.ldap.get_full_dn_from_uid(member))
if memberz_to_del:
self.ldap.remove_dn_attribute(group_dn,
[('member', member.encode('utf8'))
for member in memberz_to_del]
)
self.fm.populate_ldap_admin_choices(form)
print(u'{0} : Update des admin lac : {1}'.format(
session['uid'],
form.selected_memberz.data
if form.selected_memberz.data is not None
else "vide").encode('utf-8')
)
def update_ldap_admin_from_form(self, form):
group_dn = "cn=ldapadmin,ou=system,{0}".format(self.ldap_search_base)
memberz = [ get_uid_from_dn(dn)
for dn in self.ldap.get_ldap_admin_memberz() ]
if form.selected_memberz.data is not None:
memberz_to_add = []
for member in form.selected_memberz.data:
if member not in memberz:
memberz_to_add.append(self.ldap.get_full_dn_from_uid(member))
if memberz_to_add:
self.ldap.add_dn_attribute(group_dn,
[('member', member.encode('utf8'))
for member in memberz_to_add]
)
if memberz is not None:
memberz_to_del = []
for member in memberz:
if member not in form.selected_memberz.data:
memberz_to_del.append(self.ldap.get_full_dn_from_uid(member))
if memberz_to_del:
self.ldap.remove_dn_attribute(group_dn,
[('member', member.encode('utf8'))
for member in memberz_to_del]
)
self.fm.populate_ldap_admin_choices(form)
print(u'{0} : Update des admin ldap : {1}'.format(
session['uid'],
form.selected_memberz.data
if form.selected_memberz.data is not None
else "vide").encode('utf-8')
)
def get_last_used_id(self, ldap_ot):
attributes=['gidNumber'] if ldap_ot.apply_to == 'group' else ['uidNumber']
if ldap_ot.apply_to == 'group':
ldap_filter = '(objectClass=posixGroup)'
else:
ldap_filter = '(objectClass=posixAccount)'
base_dn='ou={0},ou={1},{2}'.format(
ldap_ot.label,
'groupePosix' if ldap_ot.apply_to == 'group' else 'people',
self.ldap_search_base
)
resultz = self.ldap.search(base_dn,ldap_filter,attributes)
if not resultz:
return 0
max_id= 0
for result in resultz:
result_id = int(result.get_attributes()[attributes[0]][0])
if result_id > max_id:
max_id = result_id
return str(max_id)
def is_active(self, user):
user_attrz = user.get_attributes()
if ('shadowExpire' in user_attrz and datetime.now()> self.converter.days_number_to_datetime(
user_attrz['shadowExpire'][0]
)) or ('pwdAccountLockedTime' in user_attrz):
return False
else:
return True
def create_default_quota(self, form):
cn = strip(form.common_name.data).encode('utf-8')
dn = 'cn={0},ou=quota,ou=system,{1}'.format(cn, self.ldap_search_base)
cinesQuotaSizeHard = self.fm.get_quota_value_from_form(
form,
'cinesQuotaSizeHard')
cinesQuotaSizeSoft = self.fm.get_quota_value_from_form(
form,
'cinesQuotaSizeSoft')
cinesQuotaInodeHard = self.fm.get_quota_value_from_form(
form,
'cinesQuotaInodeHard')
cinesQuotaInodeSoft = self.fm.get_quota_value_from_form(
form,
'cinesQuotaInodeSoft')
pre_modlist = [
('cn', cn),
('objectClass', ['top', 'cinesQuota']),
('cinesQuotaSizeHard', cinesQuotaSizeHard),
('cinesQuotaSizeSoft', cinesQuotaSizeSoft),
('cinesQuotaInodeHard', cinesQuotaInodeHard),
('cinesQuotaInodeSoft', cinesQuotaInodeSoft)]
self.ldap.add(dn, pre_modlist)
print(u'{0} : Quota par défaut {1} créé'.format(
session['uid'],
cn).encode('utf-8')
)
def update_default_quota(self, storage_cn, form):
storage_dn = "cn={0},ou=quota,ou=system,{1}".format(
storage_cn,
self.ldap_search_base
)
cinesQuotaSizeHard = self.fm.get_quota_value_from_form(
form,
'cinesQuotaSizeHard')
cinesQuotaSizeSoft = self.fm.get_quota_value_from_form(
form,
'cinesQuotaSizeSoft')
cinesQuotaInodeHard = self.fm.get_quota_value_from_form(
form,
'cinesQuotaInodeHard')
cinesQuotaInodeSoft = self.fm.get_quota_value_from_form(
form,
'cinesQuotaInodeSoft')
pre_modlist = [('cinesQuotaSizeHard', cinesQuotaSizeHard),
('cinesQuotaSizeSoft', cinesQuotaSizeSoft),
('cinesQuotaInodeHard', cinesQuotaInodeHard),
('cinesQuotaInodeSoft', cinesQuotaInodeSoft)]
self.ldap.update_dn_attribute(storage_dn, pre_modlist)
print(u'{0} : Quota par défaut {1} mis à jour'.format(
session['uid'],
storage_cn).encode('utf-8')
)
    def update_quota(self, storage, form):
        """Update a group-specific quota entry from the quota-edit form.

        *storage* is the attribute dict of a quota entry named
        "<default_cn>.G.<gid>".  Only values that differ from both the
        default storage entry and the entry's current value are written;
        temporary-extension expiry dates are handled separately.
        """
        storage_cn = storage['cn'][0]
        default_storage_cn, group_id = storage_cn.split('.G.')
        default_storage = self.ldap.get_default_storage(default_storage_cn).get_attributes()
        storage_dn = "cn={0},cn={1},ou={2},ou=groupePosix,{3}".format(
            storage_cn,
            self.cache.get_posix_group_cn_by_gid(group_id),
            self.ldap.get_branch_from_posix_group_gidnumber(group_id),
            self.ldap_search_base
        )
        pre_modlist = []
        for field_name in self.app.config['QUOTA_FIELDZ']:
            form_value = self.fm.get_quota_value_from_form(
                form,
                field_name)
            default_field = self.app.config['QUOTA_FIELDZ'][field_name]['default']
            # Skip values that just restate the default or the stored value.
            if (
                    form_value != default_storage[default_field][0]
                    and (field_name not in storage
                         or form_value != storage[field_name][0])
            ):
                pre_modlist.append((field_name, form_value))
        if form.cinesQuotaSizeTempExpire.data:
            cinesQuotaSizeTempExpire = self.converter.datetime_to_timestamp(
                form.cinesQuotaSizeTempExpire.data
            ).encode('utf-8')
            if (form.cinesQuotaSizeTempExpire.data is not None
                and (
                    'cinesQuotaSizeTempExpire' not in storage
                    or cinesQuotaSizeTempExpire != storage[
                        'cinesQuotaSizeTempExpire'
                    ]
                )
            ):
                pre_modlist.append(('cinesQuotaSizeTempExpire',
                                    cinesQuotaSizeTempExpire))
        if form.cinesQuotaInodeTempExpire.data:
            cinesQuotaInodeTempExpire = self.converter.datetime_to_timestamp(
                form.cinesQuotaInodeTempExpire.data
            ).encode('utf-8')
            if (form.cinesQuotaInodeTempExpire.data is not None
                and (
                    'cinesQuotaInodeTempExpire' not in storage
                    or cinesQuotaInodeTempExpire != storage[
                        'cinesQuotaInodeTempExpire'
                    ]
                )
            ):
                pre_modlist.append(('cinesQuotaInodeTempExpire',
                                    cinesQuotaInodeTempExpire))
        self.ldap.update_dn_attribute(storage_dn, pre_modlist)
        # NOTE(review): the expiry comparisons match the raw bytes against the
        # stored *list* value, so they may always differ — confirm intent.
        print(u'{0} : Quota par spécifique {1} mis à jour'.format(
            session['uid'],
            storage_cn).encode('utf-8')
        )
def populate_last_used_idz(self):
ignore_ot_list = ['reserved', 'grLight', 'grPrace']
ldap_otz = LDAPObjectType.query.all()
for ldap_ot in ldap_otz:
if ldap_ot.label not in ignore_ot_list:
last_used_id = self.get_last_used_id(ldap_ot)
id_range = self.get_range_list_from_string(ldap_ot.ranges)
if int(last_used_id) not in id_range:
last_used_id = id_range[0]
ldap_ot.last_used_id = last_used_id
db.session.add(ldap_ot)
db.session.commit()
def create_ldapattr_if_not_exists(self, label):
db_attr = LDAPAttribute.query.filter_by(
label = label
).first()
if db_attr is None:
db_attr = LDAPAttribute(label=label)
return db_attr
    def create_ldap_object_from_add_group_form(self, form, page_label):
        """Create a posix group under ou=<page_label>,ou=groupePosix.

        Allocates the next gidNumber for the object type, builds the entry
        from the form, persists the id counter and refreshes caches.
        Returns 1 on success, 0 when the object type has no objectClasses.
        """
        ot = LDAPObjectType.query.filter_by(label = page_label).first()
        cn = strip(form.cn.data).encode('utf-8')
        description = strip(form.description.data).encode('utf-8')
        id_number = str(self.get_next_id_from_ldap_ot(ot))
        object_classes = [oc_ot.ldapobjectclass.label.encode('utf-8')
                    for oc_ot in LDAPObjectTypeObjectClass.query.filter_by(
                        ldapobjecttype_id = ot.id).all()]
        if not object_classes:
            flash(u'ObjectClasss manquants pour ce type d\'objet')
            return 0
        full_dn = "cn={0},ou={1},ou=groupePosix,{2}".format(
            cn,
            ot.label,
            self.ldap_search_base)
        add_record = [('cn', [cn]),
                      ('gidNumber', [id_number]),
                      ('objectClass', object_classes)]
        # grProjet groups carry no fileSystem attribute.
        if page_label != 'grProjet':
            add_record.append(('fileSystem', [form.filesystem.data.encode('utf-8')]))
        if description and description != '':
            add_record.append(('description', [description]))
        if hasattr(form, 'responsable'):
            add_record.append(('cinesProjResp', [form.responsable.data.encode('utf-8')]))
        if 'sambaGroupMapping' in object_classes:
            # Samba-mapped groups need a SID derived from the gid.
            add_record.extend([
                ('sambaSID', "{0}-{1}".format(self.ldap.get_sambasid_prefix(),
                                              id_number)),
                ('sambaGroupType', ['2'])
            ])
        if self.ldap.add(full_dn, add_record):
            ot.last_used_id= id_number
            db.session.add(ot)
            db.session.commit()
            self.cache.populate_grouplist()
            self.cache.populate_people_group()
            flash(u'Groupe créé')
            print(u'{0} : Groupe posix {1} créé'.format(
                session['uid'],
                cn).encode('utf-8')
            )
        return 1
    def create_ldap_object_from_add_workgroup_form(self, form):
        """Create a work group (grTravail) entry from the add-workgroup form.

        The LDAP admin DN is seeded as initial uniqueMember.  Returns 1 on
        success, 0 when the grTravail type has no objectClasses configured.
        """
        ot = LDAPObjectType.query.filter_by(label = 'grTravail').first()
        cn = strip(form.cn.data).encode('utf-8')
        description = strip(form.description.data).encode('utf-8')
        object_classes = [oc_ot.ldapobjectclass.label.encode('utf-8')
                    for oc_ot in LDAPObjectTypeObjectClass.query.filter_by(
                        ldapobjecttype_id = ot.id).all()]
        if not object_classes:
            flash(u'ObjectClasss manquants pour ce type d\'objet')
            return 0
        full_dn = "cn={0},ou=grTravail,{1}".format(
            cn,
            self.ldap_search_base)
        add_record = [('cn', [cn]),
                      ('cinesGrWorkType', [
                          getattr(form, 'cinesGrWorkType').data.encode('utf-8')
                      ]),
                      ('uniqueMember', [self.ldap_admin]),
                      ('objectClass', object_classes)]
        if description and description != '':
            add_record.append(('description', [description]))
        if self.ldap.add(full_dn, add_record):
            db.session.add(ot)
            db.session.commit()
            self.cache.populate_work_group()
            flash(u'Groupe créé')
            print(u'{0} : Groupe de travail {1} créé'.format(
                session['uid'],
                cn).encode('utf-8')
            )
        return 1
    def create_ldap_object_from_add_container_form(self, form):
        """Create a container group (grConteneur) from the add-container form.

        Allocates the next gidNumber for the grConteneur type and persists
        the counter.  Returns 1 on success, 0 when no objectClasses are
        configured for the type.
        """
        ot = LDAPObjectType.query.filter_by(label = 'grConteneur').first()
        cn = strip(form.cn.data).encode('utf-8')
        description = strip(form.description.data).encode('utf-8')
        object_classes = [oc_ot.ldapobjectclass.label.encode('utf-8')
                    for oc_ot in LDAPObjectTypeObjectClass.query.filter_by(
                        ldapobjecttype_id = ot.id).all()]
        id_number = str(self.get_next_id_from_ldap_ot(ot))
        if not object_classes:
            flash(u'ObjectClasss manquants pour ce type d\'objet')
            return 0
        full_dn = "cn={0},ou=grConteneur,ou=groupePosix,{1}".format(
            cn,
            self.ldap_search_base)
        add_record = [('cn', [cn]),
                      ('gidNumber', [id_number]),
                      ('objectClass', object_classes)]
        if description and description != '':
            add_record.append(('description', [description]))
        if self.ldap.add(full_dn, add_record):
            ot.last_used_id= id_number
            db.session.add(ot)
            db.session.commit()
            self.cache.populate_work_group()
            flash(u'Groupe créé')
            print(u'{0} : Conteneur {1} créé'.format(
                session['uid'],
                cn).encode('utf-8')
            )
        return 1
    def create_ldap_object_from_add_user_form(self, form, fieldz, page):
        """Create a posix user entry from the add-user form of *page*.

        Builds the attribute list from the page's fields, allocates the next
        uidNumber, adds Samba/CINES/ppolicy attributes when the object type
        requires them, and for 'ccc' users links the C4 comite of their
        primary group.  Always returns True (the LDAP add result only
        controls the flash message).
        """
        ldap_ot = LDAPObjectType.query.filter_by(
            label=page.label
        ).first()
        ldap_ot_ocz = LDAPObjectTypeObjectClass.query.filter_by(
            ldapobjecttype_id = ldap_ot.id
        ).all()
        ot_oc_list = [oc.ldapobjectclass.label.encode('utf-8')
                      for oc in ldap_ot_ocz]
        form_attributez = []
        uid = form.uid.data
        for field in fieldz:
            # Convert every form entry from its display representation.
            form_field_values = [
                strip(
                    self.converter.from_display_mode(
                        entry.data,
                        field.fieldtype.type
                    )
                )
                for entry in getattr(form, field.label).entries
            ]
            if field.label == 'cinesdaterenew' :
                # A renew date equal to today carries no information: drop it.
                now_days_number = self.converter.datetime_to_days_number(datetime.now())
                if form_field_values[0] == now_days_number :
                    form_field_values = []
            if (field.label not in ['cinesUserToPurge', 'cn', 'cinesIpClient']
                and form_field_values != [''] ):
                form_attributez.append((field.label, form_field_values))
            if (field.label == 'cinesIpClient' and form_field_values != ['']):
                # Multiple client IPs are stored as one ';'-joined value.
                form_attributez.append((field.label, ';'.join(form_field_values)))
            if field.label == 'gidNumber':
                # NOTE(review): gid_number stays unbound when the page has no
                # gidNumber field, yet it is read below for 'ccc' — confirm
                # every 'ccc' page defines that field.
                gid_number = form_field_values[0]
                self.cache.add_to_people_group_if_not_member(
                    self.cache.get_posix_group_cn_by_gid(gid_number),
                    [uid.encode('utf-8')])
        uid_number = self.get_next_id_from_ldap_ot(ldap_ot)
        add_record = [('uid', [uid.encode('utf-8')]),
                      ('cn', [uid.encode('utf-8')]),
                      ('uidNumber', [str(uid_number).encode('utf-8')]),
                      ('objectClass', ot_oc_list)]
        add_record.extend(form_attributez)
        add_record.append(
            ('homeDirectory', "/home/{0}".format(uid).encode('utf-8')))
        add_record.append(
            ('shadowlastchange',
             [str(self.converter.datetime_to_days_number(datetime.now()))]
         )
        )
        if 'cinesusr' in ot_oc_list:
            add_record.append(
                ('cinesSoumission', [self.ldap.get_initial_submission()])
            )
        if 'sambaSamAccount' in ot_oc_list:
            add_record.append(
                ('sambaSID', "{0}-{1}".format(self.ldap.get_sambasid_prefix(),
                                              uid_number))
            )
        if page.label == 'ccc' and gid_number:
            # Look up the C4 comite of the user's primary group.
            group_cn = self.cache.get_posix_group_cn_by_gid(gid_number)
            ressource = C4Ressource.query.filter_by(
                code_projet = group_cn).first()
            if ressource:
                comite = ressource.comite.ct
            else:
                comite = ''
            if comite != '':
                add_record.append(
                    ('cinesC4', comite.encode('utf-8'))
                )
                self.add_user_to_container(uid, comite)
        if ldap_ot.ppolicy != '':
            add_record.append(
                ('pwdPolicySubentry',
                 'cn={0},ou=policies,ou=system,{1}'.format(
                     ldap_ot.ppolicy,
                     self.ldap_search_base)
             )
            )
        parent_dn = self.ldap.get_people_dn_from_ou(ldap_ot.label)
        # NOTE(review): ';' as RDN separator looks unusual — LDAP DNs
        # normally use ','; confirm self.ldap.add expects this form.
        full_dn = "uid={0};{1}".format(uid,parent_dn)
        if self.ldap.add(full_dn, add_record):
            ldap_ot.last_used_id= uid_number
            db.session.add(ldap_ot)
            db.session.commit()
            self.cache.populate_grouplist()
            self.cache.populate_people_group()
            print(u'{0} : Utilisateur {1} créé'.format(
                session['uid'],
                uid).encode('utf-8')
            )
        else:
            flash(u'L\'utilisateur n\'a pas été créé')
        return True
    def create_ldap_object_from_add_object_type_form(self,
                                                     form,
                                                     ldap_object_type ):
        """Create/update an LDAPObjectType and its objectClass associations.

        Copies the scalar fields from the form, adds newly-selected
        objectClasses, removes deselected ones, and optionally pushes the
        chosen ppolicy onto the whole group branch.
        """
        selected_oc_choices = self.fm.get_ot_oc_choices(ldap_object_type)
        ot_oc_id_list = [oc[0] for oc in selected_oc_choices]
        ldap_object_type.label = strip(form.label.data)
        ldap_object_type.description = strip(form.description.data)
        ldap_object_type.ranges = strip(form.ranges.data)
        ldap_object_type.apply_to = strip(form.apply_to.data)
        ldap_object_type.ppolicy = form.ppolicy.data
        db.session.add(ldap_object_type)
        if form.object_classes.selected_oc.data is not None:
            # Handle added ObjectClasses.
            for oc_id in form.object_classes.selected_oc.data :
                if oc_id not in ot_oc_id_list:
                    print("Creation de l'Object Class id {0}".format(oc_id))
                    ot_oc = LDAPObjectTypeObjectClass(
                        ldap_object_type.id, oc_id)
                    db.session.add(ot_oc)
            # Handle removed ObjectClasses, cascading to their
            # associated fields.
            for oc_id in ot_oc_id_list:
                if oc_id not in form.object_classes.selected_oc.data:
                    print("Suppression de l'Object Class id {0}".format(oc_id))
                    LDAPObjectTypeObjectClass.query.filter_by(
                        ldapobjecttype_id=ldap_object_type.id,
                        ldapobjectclass_id= oc_id
                    ).delete()
        db.session.commit()
        if form.set_ppolicy.data :
            # Optionally apply the ppolicy to every member of the branch.
            self.ldap.set_group_ppolicy(ldap_object_type.label,
                                        ldap_object_type.ppolicy)
        flash(u'{0} mis à jour'.format(ldap_object_type.description))
        print(u'{0} : Type d\'objet {1} créé'.format(
            session['uid'],
            ldap_object_type.label).encode('utf-8')
        )
def create_ldap_quota(self, storage, group_id):
niou_cn = '{0}.G.{1}'.format(
storage,
group_id)
default_storage = self.ldap.get_default_storage(
storage).get_attributes()
add_record = [
('cn', [niou_cn]),
('objectClass', ['top', 'cinesQuota']),
('cinesQuotaSizeHard', default_storage['cinesQuotaSizeHard']),
('cinesQuotaSizeSoft', default_storage['cinesQuotaSizeSoft']),
('cinesQuotaInodeHard', default_storage['cinesQuotaInodeHard']),
('cinesQuotaInodeSoft', default_storage['cinesQuotaInodeSoft'])
]
group_full_dn = self.ldap.get_full_dn_from_cn(
self.cache.get_posix_group_cn_by_gid(group_id))
full_dn = 'cn={0},{1}'.format(niou_cn,group_full_dn)
self.ldap.add(full_dn, add_record)
flash(u'Quota initialisé')
print(u'{0} : Quota spécifique {1} créé'.format(
session['uid'],
niou_cn).encode('utf-8')
)
def update_users_by_file(self, edit_form, attrz_list):
fieldz = Field.query.filter(Field.id.in_(attrz_list)).all()
pre_modlist = []
for field in fieldz:
pre_modlist.append(
(field.label,
strip(getattr(edit_form, field.label).data).encode('utf-8'))
)
if edit_form.action.data == '0':
for uid in userz:
ldap.add_uid_attribute(uid, pre_modlist)
elif edit_form.action.data == '1':
for uid in userz:
ldap.update_uid_attribute(uid, pre_modlist)
flash(u'Les utilisateurs ont été mis à jour')
def generate_backup_file(self, userz, attributez):
userz = [strip(user).encode('utf-8') for user in userz.split(',')]
attributez = [strip(attribute).encode('utf-8')
for attribute in attributez.split(',')]
file_name = "backup_{0}.txt".format(
datetime.now().strftime("%d%b%HH%M_%Ss"))
file_content = " ".join(attributez)
for user in userz:
user_attr = ldap.search(
ldap_filter="(uid={0})".format(user),
attributes=attributez)[0].get_attributes()
line = ",".join(["{0}={1}".format(key, value)
for key, value in user_attr.iteritems()])
line = ",".join(["uid={0}".format(user),line])
file_content = "\n".join([file_content, line])
response = make_response(file_content)
response.headers[
'Content-Disposition'
] = 'attachment; filename={0}'.format(file_name)
return response
def get_next_id_from_ldap_ot(self, ldap_ot):
id_range = self.get_range_list_from_string(ldap_ot.ranges)
next_index = id_range.index(ldap_ot.last_used_id)+1
if ldap_ot.apply_to == 'group':
id_type = 'gidNumber'
else:
id_type = 'uidNumber'
while 1:
test_id = id_range[next_index]
ldap_filter = '({0}={1})'.format(id_type, test_id)
result = self.ldap.search(ldap_filter=ldap_filter,
attributes = [id_type])
if not result:
return test_id
next_index += 1
def get_storagez_labelz(self):
storagez = self.ldap.get_group_quota_list()
storagez_labelz = [storage.get_attributes()['cn'][0]
for storage in storagez]
return storagez_labelz
def get_range_list_from_string(self, rangez_string):
rangez = rangez_string.split(';')
rangez_lst = []
for range_string in rangez:
if range_string != '':
range_split = range_string.split('-')
rangez_lst.extend(range(
int(range_split[0]),
int(range_split[1])+1))
return sorted(set(rangez_lst))
    def update_ldap_object_from_edit_ppolicy_form(self, form, attributes, cn):
        """Update the password policy entry *cn* from its edit form.

        Only attributes whose form value differs from the stored value are
        written.
        """
        dn = "cn={0},ou=policies,ou=system,{1}".format(
            cn,
            self.ldap_search_base)
        ppolicy_attrz = self.ldap.get_ppolicy(cn).get_attributes()
        pre_modlist = []
        for attr in attributes:
            field_value = strip(getattr(form, attr).data).encode('utf-8')
            if attr not in ppolicy_attrz or ppolicy_attrz[attr][0] != field_value:
                pre_modlist.append((attr, [field_value]))
        self.ldap.update_dn_attribute(dn, pre_modlist)
        # NOTE(review): the log says "créée" although this is an update.
        print(u'{0} : Ppolicy {1} créée'.format(
            session['uid'],
            cn).encode('utf-8')
        )
    def update_ldap_object_from_edit_user_form(self, form, fieldz, uid, page):
        """Update user *uid* from the edit form of *page*.

        Only fields whose form value differs from the stored value are
        written; an empty value deletes the attribute.  For 'ccc' users a
        primary-group change also resyncs the C4 comite.
        """
        user = self.ldap.get_uid_detailz(uid)
        uid_attributez = user.get_attributes()
        pre_modlist = []
        for field in fieldz:
            # Convert every form entry from its display representation.
            form_values = [
                strip(
                    self.converter.from_display_mode(
                        entry.data,
                        field.fieldtype.type
                    )
                )
                for entry in getattr(form, field.label).entries
            ]
            if field.label == 'cinesIpClient':
                # Multiple client IPs are stored as one ';'-joined value.
                form_values = [';'.join(form_values)]
            if (field.label not in uid_attributez
                or uid_attributez[field.label] != form_values):
                # Empty value (or an unticked purge flag) deletes the attr.
                if form_values == [''] or (field.label == 'cinesUserToPurge'
                                           and True not in form_values):
                    form_values = None
                if (
                        field.label == 'cinesUserToPurge'
                        and form_values
                        and True in form_values
                ):
                    form_values = ['1']
                pre_modlist.append((field.label, form_values))
                if page.label == 'ccc' and field.label == 'gidNumber':
                    # Primary group changed: resync the C4 comite.
                    group_cn = self.cache.get_posix_group_cn_by_gid(form_values[0])
                    ressource = C4Ressource.query.filter_by(
                        code_projet = group_cn).first()
                    if ressource:
                        comite = ressource.comite.ct
                    else:
                        comite = ''
                    if (
                            comite != ''
                            and "cinesC4" in uid_attributez
                            and uid_attributez['cinesC4'] != comite
                    ):
                        pre_modlist.append(
                            ('cinesC4', comite.encode('utf-8'))
                        )
                        self.update_user_cines_c4(user, group_cn, comite)
        self.ldap.update_uid_attribute(uid, pre_modlist)
        self.cache.populate_people_group()
        print(u'{0} : Mise à jour de l\'utilisteur {1}'.format(
            session['uid'],
            uid).encode('utf-8')
        )
def upsert_otrs_user(self, uid):
user_attrz = self.ldap.get_uid_detailz(uid).get_attributes()
otrs_user = OTRSCustomerUser.query.filter_by(login = uid).first()
if not otrs_user:
otrs_user = OTRSCustomerUser(login = uid)
if 'telephoneNumber' in user_attrz:
telephone_number = ';'.join(
[phone for phone in user_attrz['telephoneNumber']])
else:
telephone_number = ''
user_type = LDAPObjectType.query.filter_by(
label = self.cache.get_group_from_member_uid(uid)
).first().description
first_gr_name = self.cache.get_posix_group_cn_by_gid(user_attrz['gidNumber'][0])
otrs_user.email = user_attrz['mail'][0]
otrs_user.customer_id = user_attrz['uidNumber'][0]
otrs_user.first_name = user_attrz['givenName'][0]
otrs_user.last_name = user_attrz['sn'][0]
otrs_user.phone = telephone_number
otrs_user.comments = '{0}; {1}'.format(user_type, first_gr_name)
otrs_user.valid_id = 1
otrs_user.create_time = datetime.now()
db.session.add(otrs_user)
db.session.commit()
def delete_otrs_user(self, uid):
date = datetime.now().strftime("%Y%m%d%H%M")
disabled_login = "".join(['ZDEL', date, "_", uid])
# print(disabled_login)
otrs_user = OTRSCustomerUser.query.filter_by(login = uid).first()
if otrs_user:
otrs_user.login = disabled_login
otrs_user.valid_id = 2
db.session.add(otrs_user)
otrs_ticketz = OTRSTicket.query.filter_by(customer_user_id = uid).all()
for ticket in otrs_ticketz:
ticket.customer_user_id = disabled_login
db.session.add(ticket)
if self.is_cines_group(uid):
OTRSUser.query.filter_by(login=uid).update(
{
'valid_id': 2,
'login': disabled_login,
'change_time': datetime.now()
}, synchronize_session=False
)
db.session.commit()
    def update_user_table_on_deletion(self, uid):
        """Archive identity data of LDAP user *uid* into the SQL User table.

        Records names, ids and contact details together with the deletion
        timestamp so the information survives the LDAP entry's removal.
        """
        ldap_user = self.ldap.get_uid_detailz(uid).get_attributes()
        db_user = db.session.query(User).filter_by(uid=uid).first()
        # Create user if doesn't already exists
        if not db_user:
            db_user = User(uid=uid)
            db.session.add(db_user)
        db_user.uid_number = ldap_user['uidNumber'][0].decode('utf-8')
        db_user.firstname = ldap_user['givenName'][0].decode('utf-8')
        db_user.lastname = ldap_user['sn'][0].decode('utf-8')
        db_user.deletion_timestamp = datetime.now()
        # mail and phone are optional LDAP attributes.
        if 'mail' in ldap_user:
            db_user.email = ldap_user['mail'][0].decode('utf-8')
        if 'telephoneNumber' in ldap_user:
            db_user.phone_number = ldap_user['telephoneNumber'][0].decode('utf-8')
        db.session.commit()
def remove_user_from_all_groupz(self, uid, posix_groupz, work_groupz):
user_dn = self.ldap.get_full_dn_from_uid(uid)
for group_cn in work_groupz:
group_dn = 'cn={0},ou=grTravail,{1}'.format(
group_cn,
self.ldap_search_base
)
pre_modlist = [('uniqueMember', user_dn.encode('utf-8'))]
self.ldap.remove_dn_attribute(group_dn,pre_modlist)
for (group_cn, group_branch) in posix_groupz:
group_dn = self.ldap.get_group_full_dn(group_branch, group_cn)
pre_modlist = [('memberUid', uid.encode('utf-8'))]
self.ldap.remove_dn_attribute(group_dn,pre_modlist)
def set_user_submissionz(self, uid, formz):
for group in self.ldap.get_submission_groupz_list():
form = getattr(formz, group)
is_submission = form.submission.data
is_member = form.member.data
if is_submission and is_member:
self.cache.add_to_work_group_if_not_member(group, [uid])
self.ldap.set_submission(uid, group, '1')
elif is_member and not is_submission:
self.cache.add_to_work_group_if_not_member(group, [uid])
self.ldap.set_submission(uid, group, '0')
elif not is_member:
self.cache.rem_from_workgroup_if_member(group, [uid])
self.ldap.set_submission(uid, group, '0')
def update_user_submission(self, uid, form):
wrk_group = strip(form.wrk_group.data)
is_submission = form.submission.data
is_member = form.member.data
if is_submission and is_member:
self.cache.add_to_work_group_if_not_member(wrk_group, [uid])
self.ldap.set_submission(uid, wrk_group, '1')
elif is_member and not is_submission:
self.cache.add_to_work_group_if_not_member(wrk_group, [uid])
self.ldap.set_submission(uid, wrk_group, '0')
elif not is_member:
self.cache.rem_from_workgroup_if_member(wrk_group, [uid])
self.ldap.set_submission(uid, wrk_group, '0')
def update_group_submission(self, form):
groupz_id = form.group_form.selected_groupz.data
groupz_infoz = [
(self.ldap.get_branch_from_posix_group_gidnumber(id),
self.cache.get_posix_group_cn_by_gid(id))
for id in groupz_id
]
groupz_memberz = self.ldap.get_posix_groupz_memberz(groupz_infoz)
wrk_group = form.submission_form.wrk_group.data
is_submission = form.submission_form.submission.data
is_member = form.submission_form.member.data
if is_submission and is_member:
self.cache.add_to_work_group_if_not_member(
wrk_group,
groupz_memberz_uid)
for uid in groupz_memberz_uid:
self.ldap.set_submission(uid, wrk_group, '1')
elif is_member and not is_submission:
self.cache.add_to_work_group_if_not_member(
wrk_group,
groupz_memberz_uid)
for uid in groupz_memberz_uid:
self.ldap.set_submission(uid, wrk_group, '0')
elif not is_member:
self.cache.rem_from_workgroup_if_member(wrk_group, groupz_memberz_uid)
for uid in groupz_memberz_uid:
self.ldap.set_submission(uid, wrk_group, '0')
    def update_ldap_object_from_edit_group_form(self, form, page, group_cn):
        """Update posix group *group_cn* from the edit form of *page*.

        Only editable fields whose value changed are written; an empty value
        deletes the attribute.  The memberUid list is always rewritten from
        the form's selection.
        """
        ldap_filter='(&(cn={0})(objectClass=posixGroup))'.format(group_cn)
        attributes=['*','+']
        group_attributez = self.ldap.search(
            ldap_filter=ldap_filter,
            attributes=attributes)[0].get_attributes()
        pagefieldz = Field.query.filter_by(page_id = page.id,
                                           edit = True).all()
        pre_modlist = []
        for field in pagefieldz:
            form_field_values = [strip(entry.data).encode('utf-8')
                                 for entry in getattr(form, field.label).entries]
            # Empty value deletes the attribute.
            if form_field_values == ['']:
                form_field_values = None
            if ((field.label not in group_attributez)
                or (field.label in group_attributez
                    and group_attributez[field.label] != form_field_values)):
                pre_modlist.append((field.label, form_field_values))
        # The member list is replaced wholesale from the selection.
        pre_modlist.append(
            ('memberuid',
             [
                 member.encode('utf-8') for member in
                 form.memberz.selected_memberz.data
             ])
        )
        group_dn="cn={0},ou={1},ou=groupePosix,{2}".format(
            group_cn,
            page.label,
            self.ldap_search_base)
        self.ldap.update_dn_attribute(group_dn, pre_modlist)
        print(u'{0} : Groupe posix {1} mis à jour'.format(
            session['uid'],
            group_cn).encode('utf-8')
        )
    def update_ldap_object_from_edit_workgroup_form(self, form, page, group_cn):
        """Apply the "edit work group" form to the LDAP entry of *group_cn*.

        Like the posix-group variant, but membership is stored as full DNs
        in ``uniqueMember``; members removed from the group additionally get
        their submission flag reset when they belong to the "cines" group.
        """
        dn="cn={0},ou=grTravail,{1}".format(
            group_cn,
            self.ldap_search_base)
        ldap_filter='(&(cn={0})(objectClass=cinesGrWork))'.format(group_cn)
        attributes=['*','+']  # '*' = user attributes, '+' = operational attributes
        group_attributez = self.ldap.search(
            ldap_filter=ldap_filter,
            attributes=attributes)[0].get_attributes()
        # Only fields marked editable for this page may be written back.
        pagefieldz = Field.query.filter_by(page_id = page.id,
                                           edit = True).all()
        pre_modlist = []
        for field in pagefieldz:
            form_field_values = [strip(entry.data).encode('utf-8')
                                 for entry in
                                 getattr(form, field.label).entries]
            # A single empty entry means "clear this attribute".
            if form_field_values == ['']:
                form_field_values = None
            # Queue the attribute only when it is new or actually changed.
            if ((field.label not in group_attributez)
                or (field.label in group_attributez
                    and group_attributez[field.label] != form_field_values)):
                pre_modlist.append((field.label, form_field_values))
        old_memberz = group_attributez['uniqueMember']
        # Resolve the selected uids to DNs; unknown uids are dropped.
        new_memberz = [
            self.ldap.get_full_dn_from_uid(member).encode('utf-8')
            for member in form.memberz.selected_memberz.data
            if self.ldap.get_full_dn_from_uid(member) is not None
        ]
        # Members leaving the group lose their submission flag
        # (only for accounts whose primary group is "cines").
        for member in old_memberz:
            if (member not in new_memberz
                and self.cache.get_group_from_member_uid(
                    get_uid_from_dn(member)) == "cines"):
                self.ldap.set_submission( get_uid_from_dn(member), group_cn, '0')
        pre_modlist.append(
            ('uniqueMember',
             new_memberz
            )
        )
        # Refresh the cached work-group membership before writing to LDAP.
        self.cache.populate_work_group()
        self.ldap.update_dn_attribute(dn, pre_modlist)
        # Audit log line: "<uid> : work group <cn> updated".
        print(u'{0} : Groupe de travail {1} mis à jour'.format(
            session['uid'],
            group_cn).encode('utf-8')
        )
def update_fields_from_edit_page_admin_form(self, form, attributes, page):
Field.query.filter(Field.page_id == page.id,
~Field.label.in_(attributes)
).delete(synchronize_session='fetch')
for attribute in attributes:
attribute_field = getattr(form, attribute)
self.upsert_field(attribute, attribute_field, page)
def create_default_field(self, attribute, page):
print("Create default {0} for {1}".format(attribute, page.label))
field_type = FieldType.query.filter_by(type='Text').first()
page_attr = Field(label = attribute.label,
page = page,
ldapattribute = attribute,
fieldtype=field_type)
db.session.add(page_attr)
db.session.commit()
return page_attr
def upsert_field(self, attr_label, form_field, page):
attribute = LDAPAttribute.query.filter_by(label = attr_label).first()
field_type = FieldType.query.filter_by(
id=form_field.display_mode.data
).first()
existing_field = Field.query.filter_by(page_id=page.id,
label=attribute.label,
ldapattribute_id=attribute.id
).first()
if existing_field is not None:
existing_field.display = form_field.display.data
existing_field.edit = form_field.edit.data
existing_field.restrict = form_field.restrict.data
existing_field.fieldtype = field_type
existing_field.description = strip(form_field.desc.data)
existing_field.multivalue = form_field.multivalue.data
existing_field.mandatory = form_field.mandatory.data
existing_field.priority = form_field.priority.data
existing_field.block = strip(form_field.block.data)
else:
new_field = Field(label=attribute.label,
page=page,
ldapattribute=attribute,
display=form_field.display.data,
edit=form_field.edit.data,
restrict=form_field.restrict.data,
fieldtype=field_type,
description=form_field.desc.data,
multivalue=form_field.multivalue.data,
mandatory=form_field.mandatory.data,
priority=form_field.priority.data,
block=form_field.block.data)
db.session.add(new_field)
def get_dict_from_raw_log_valuez(self, raw_valuez):
valuez = {}
for raw_value in raw_valuez:
# print(raw_value)
raw_value_split = raw_value.split(":")
attr_name = raw_value_split[0]
attr_operation = raw_value_split[1][:1]
attr_value = raw_value_split[1][1:]
if attr_name in [
'userPassword',
'sambaNTPassword',
'pwdHistory'
]:
attr_value = '<PASSWORD_HASH>'
elif attr_name in [
'pwdChangedTime',
'modifyTimestamp',
'pwdFailureTime',
]:
if attr_value != "":
attr_value = self.converter.generalized_time_to_datetime(
attr_value.strip())
if attr_name not in [
'entryCSN',
'modifiersName',
'modifyTimestamp',
'uidNumber'
]:
if attr_name in valuez:
valuez[attr_name].append(
(attr_value ,
attr_operation)
)
else:
valuez[attr_name] = [(attr_value, attr_operation)]
return valuez
def allowed_file(self, filename):
print("filename : {0}".format(filename))
print(filename.rsplit('.', 1)[1])
return '.' in filename and \
filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
    def get_cinesdaterenew_from_uid(self, uid):
        """Return the display-formatted ``cinesdaterenew`` value for *uid*.

        Looks up the attribute under ``ou=people`` and renders it with the
        field type configured on the 'ccc' page; returns '' when the user
        has no ``cinesdaterenew`` attribute.
        """
        # The 'ccc' page holds the display configuration for this attribute.
        page = Page.query.filter_by(label='ccc').first()
        field = Field.query.filter_by(
            page_id = page.id,
            label = 'cinesdaterenew').first()
        ldap_filter='(uid={0})'.format(uid)
        attributes=['cinesdaterenew']
        base_dn='ou=people,{0}'.format(self.ldap_search_base)
        # Assumes the uid matches exactly one entry — TODO confirm.
        uid_attrz= self.ldap.search(base_dn,ldap_filter,attributes)[0].get_attributes()
        if 'cinesdaterenew' in uid_attrz:
            date_renew = self.converter.to_display_mode(
                uid_attrz['cinesdaterenew'][0], field.fieldtype.type
            )
        else:
            date_renew = ''
        return date_renew
    def add_ppolicy(self, cn):
        """Create a new password policy entry named *cn*.

        The entry is added under ``ou=policies,ou=system`` with the
        ``device`` + ``pwdPolicy`` object classes; on success a flash
        message is shown and an audit line is printed.
        """
        dn = "cn={0},ou=policies,ou=system,{1}".format(
            cn,
            self.ldap_search_base)
        add_record=[('cn',[cn]),
                    ('pwdAttribute', ['userPassword']),
                    ('objectClass', ['device', 'pwdPolicy'])]
        # self.ldap.add presumably returns truthy on success — verify.
        if self.ldap.add(dn, add_record):
            flash(u'PPolicy {0} ajoutée'.format(cn))
            # Audit log line: "<uid> : PPolicy <cn> created".
            print(u'{0} : PPolicy {1} créé'.format(
                session['uid'],
                cn).encode('utf-8')
            )
| T3h-N1k0/LAC | lac/engine.py | Python | gpl-3.0 | 57,871 |
# -*- coding: utf-8 -*-
"""
"""
# Copyright (C) 2015 ZetaOps Inc.
#
# This file is licensed under the GNU General Public License v3
# (GPLv3). See LICENSE.txt for details.
from ulakbus.models import User
from .general import fake
__author__ = 'Ali Riza Keles'
def new_user(username=None, password=None, superuser=False):
    """Create, persist and return a :class:`User`.

    Any credential left as a falsy value (``None``, empty string) is
    replaced with a generated fake one.
    """
    account = User(username=username or fake.user_name(),
                   password=password or fake.password(),
                   superuser=superuser)
    account.save()
    return account
| yetercatikkas/ulakbus | tests/fake/user.py | Python | gpl-3.0 | 515 |
# Mark Gatheman <[email protected]>
#
# This file is part of Hydrus.
#
# Hydrus is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Hydrus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Hydrus. If not, see <http://www.gnu.org/licenses/>.
import logging
import numpy as np
from numpy.testing import assert_almost_equal, assert_approx_equal
from scipy import stats
from hypothesis.strategies import floats
from hypothesis.extra.numpy import arrays
# Silence all log output while the test suite runs.
logging.disable(logging.CRITICAL)

# NOTE: the deprecated ``np.float`` alias was removed in NumPy 1.24 and
# raises AttributeError there; ``np.float64`` is the equivalent dtype.

# 1-D arrays of bounded floats.
strat_1d = arrays(
    np.float64, (100,), floats(min_value=-9., max_value=9.)
)
# Small 3-D volumes over the same bounded range.
strat_3d = arrays(
    np.float64, (5, 5, 5), floats(min_value=-9., max_value=9.)
)
# Strictly positive variants.
strat_pos_1d = arrays(
    np.float64, (100,), floats(min_value=1e-6, max_value=9.)
)
strat_pos_3d = arrays(
    np.float64, (5, 5, 5), floats(min_value=1e-6, max_value=9.)
)
# Variants that may contain NaN (never +/-inf).
strat_nan_1d = arrays(
    np.float64, (100,), floats(allow_nan=True, allow_infinity=False)
)
strat_nan_2d = arrays(
    np.float64, (20, 20), floats(allow_nan=True, allow_infinity=False)
)
| mark-r-g/hydrus | tests/__init__.py | Python | gpl-3.0 | 1,488 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial migration: create the ``Setting`` model/table."""
    # No prior migrations in this app.
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Setting',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=50)),
                ('label', models.CharField(max_length=255)),
                ('description', models.TextField()),
                ('data_type', models.CharField(max_length=10, choices=[(b'string', 'string'), (b'boolean', 'boolean'), (b'integer', 'int'), (b'file', 'file')])),
                ('value', models.TextField(blank=True)),
                ('default_value', models.TextField(blank=True)),
                ('input_type', models.CharField(max_length=25, choices=[(b'text', 'Text'), (b'textarea', 'Textarea'), (b'select', 'Select'), (b'file', 'File')])),
                ('input_value', models.CharField(max_length=1000, blank=True)),
                ('client_editable', models.BooleanField(default=True)),
                ('store', models.BooleanField(default=True)),
                ('update_dt', models.DateTimeField(auto_now=True, null=True)),
                ('updated_by', models.CharField(max_length=50, blank=True)),
                ('scope', models.CharField(max_length=50)),
                ('scope_category', models.CharField(max_length=50)),
                ('parent_id', models.IntegerField(default=0, blank=True)),
                ('is_secure', models.BooleanField(default=False)),
            ],
        ),
    ]
| alirizakeles/tendenci | tendenci/apps/site_settings/migrations/0001_initial.py | Python | gpl-3.0 | 1,684 |
import player
import gui
import distro
class master:
    """Top-level game controller.

    Builds the UI, lets the player distribute starting points over the
    three distro stats, then runs the turn loop forever.
    """

    def startplayer(self):
        """Create the player character (not currently wired into __init__)."""
        person = player.person()

    def __init__(self):
        # Total points the player may spread across the three stats.
        total = 1.8
        self.userin = gui.cligui()
        # startup() returns the player's chosen split of `total`.
        stability, community, bleedingedge = self.userin.startup(total)
        self.playerdistro = distro.distrobution(stability, community, bleedingedge)
        # self.startplayer()
        # Main game loop; never returns (process must be killed to exit).
        while True:
            self.turn()

    def turn(self):
        """Play one turn: show stats, ask how to spend points, apply result."""
        stability = self.playerdistro.stability
        community = self.playerdistro.community
        bleedingedge = self.playerdistro.bleedingedge
        self.userin.turnbanner(stability, community, bleedingedge)
        amount = self.playerdistro.spendturn()
        self.playerdistro.turn(self.userin.turn(amount))
# Instantiating master starts the game immediately; __init__ never returns
# because it enters the turn loop.
a = master()
| scifi6546/distrowars | master.py | Python | gpl-3.0 | 773 |
#!/usr/bin/python
"""
Read a bulkConverted file
and create a matix with top
genes to visualize it in R
Patrick's analysis without Excel ;-)
"""
import sys
# gene -> set of samples carrying a (retained) mutation in that gene
result = dict()
# every sample seen in the file, mutated or not in a given gene
allsamples = set()
for i in open(sys.argv[1]):
    # skip header/comment lines
    if i.startswith("#"):
        continue
    fields = i.rstrip().split()
    sample = fields[1]
    gene = fields[10]
    mtype = fields[16]
    # keep only protein-changing variants: drop UTR/complex/up-downstream/
    # intronic/non-coding/silent calls
    if "UTR" in mtype or "complex" in mtype or "stream" in mtype or "intronic" in mtype or "non-coding" in mtype or "silent" in mtype:
        continue
    # Titin is excluded (huge gene, recurrent passenger mutations)
    if gene == "Ttn":
        continue
    if gene not in result:
        result[gene] = set()
    result[gene].add(sample)
    allsamples.add(sample)
# rank genes by how many distinct samples carry a mutation
order = sorted(result.keys(), key = lambda x: len(result[x]), reverse=True)
print "samplewithmut allsamplenum"
# report the top 40 genes: name, mutated-sample count, total sample count
for o in order[:40]:
    print o, len(result[o]), len(allsamples)
| TravisCG/SI_scripts | pattab.py | Python | gpl-3.0 | 804 |
# -*- coding: utf-8 -*-
# Copyright (c) 2012 - 2014 Detlev Offenbach <[email protected]>
#
"""
Module implementing a dialog to show GreaseMonkey script information.
"""
from __future__ import unicode_literals
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtWidgets import QDialog
from .Ui_GreaseMonkeyConfigurationScriptInfoDialog import \
Ui_GreaseMonkeyConfigurationScriptInfoDialog
from ..GreaseMonkeyScript import GreaseMonkeyScript
import UI.PixmapCache
class GreaseMonkeyConfigurationScriptInfoDialog(
        QDialog, Ui_GreaseMonkeyConfigurationScriptInfoDialog):
    """
    Class implementing a dialog to show GreaseMonkey script information.
    """
    def __init__(self, script, parent=None):
        """
        Constructor
        
        @param script reference to the script (GreaseMonkeyScript)
        @param parent reference to the parent widget (QWidget)
        """
        super(GreaseMonkeyConfigurationScriptInfoDialog, self).__init__(parent)
        self.setupUi(self)
        
        self.iconLabel.setPixmap(
            UI.PixmapCache.getPixmap("greaseMonkey48.png"))
        
        # remember the file so the source can be opened on demand
        self.__scriptFileName = script.fileName()
        
        self.setWindowTitle(
            self.tr("Script Details of {0}").format(script.name()))
        
        # populate the read-only info widgets from the script's metadata
        self.nameLabel.setText(script.fullName())
        self.versionLabel.setText(script.version())
        self.urlLabel.setText(script.downloadUrl().toString())
        # injection point of the userscript (document-start vs document-end)
        if script.startAt() == GreaseMonkeyScript.DocumentStart:
            self.startAtLabel.setText("document-start")
        else:
            self.startAtLabel.setText("document-end")
        self.descriptionBrowser.setHtml(script.description())
        self.runsAtBrowser.setHtml("<br/>".join(script.include()))
        self.doesNotRunAtBrowser.setHtml("<br/>".join(script.exclude()))
    
    @pyqtSlot()
    def on_showScriptSourceButton_clicked(self):
        """
        Private slot to show an editor window with the script source code.
        """
        from QScintilla.MiniEditor import MiniEditor
        editor = MiniEditor(self.__scriptFileName, "JavaScript", self)
        editor.show()
| davy39/eric | Helpviewer/GreaseMonkey/GreaseMonkeyConfiguration/GreaseMonkeyConfigurationScriptInfoDialog.py | Python | gpl-3.0 | 2,163 |
#!/usr/bin/python
import sys
# Parse an R ANOVA text dump: "[1] <hormone>" lines start a block, the
# genotype / temp / genotype:temp rows carry the p-values in column 6.
print "genotype\ttemp\tgenotype:temp"
for i in open(sys.argv[1]):
    fields = i.rstrip().split()
    if i.startswith("[1]"):
        # hormone name, quotes stripped
        hormon = fields[1].replace('"', '')
    if i.startswith("genotype "):
        pg = fields[5]
    if i.startswith("temp"):
        pt = fields[5]
    if i.startswith("genotype:temp"):
        pgt = fields[5]
# NOTE(review): this prints only the values of the LAST block in the file;
# if one line per hormone was intended, the print belongs inside the loop
# (triggered on the genotype:temp row) — confirm against expected output.
print hormon + "\t" + pg + "\t" + pt + "\t" + pgt
import os
import datetime
import calendar
import sqlite3 as sqlite
import geomag
import tkinter
from tkinter import ttk, filedialog, messagebox #separate imports needed due to tkinter idiosyncrasies
import sqlparse
# local objects
from classes import stdevs, meanw, stdevw
### begin function defintion ###
def run_sqlscript(conn, script_path, form = None, msg = None):
    """Execute every statement of a SQL script file against *conn*.

    Statements that fail with sqlite OperationalError are retried on later
    passes until a full pass removes nothing (this resolves simple
    dependency ordering inside the script).  When *script_path* is None the
    user is asked to pick a file; *form*/*msg* optionally drive a GUI
    status label.

    Returns (True, None) when everything ran, (False, remaining_stmts) when
    some statements never succeeded, and (False, None) when the script file
    does not exist.
    """
    if script_path == None:
        script_path = tkinter.filedialog.askopenfilename(title = "Choose SQL script to run on Database.", filetypes = (("SQL files", "*.sql"),("All files", "*.*")))
    if os.path.isfile(script_path):
        with open(script_path) as f:
            script = f.read()
        # split the script text into individual statements
        stmts = sqlparse.split(script)
        ### update messages
        if msg != None:
            print(msg)
            if form != None:
                form.lblAction['text'] = msg
                form.lblAction.update_idletasks()
        ### initial values get the while loop to run at least once.
        curlength = 1
        pastlength = 2
        counter = 0
        # keep making passes while each pass still shrinks the statement list
        while curlength < pastlength and curlength > 0:
            errors = []
            counter += 1
            #print("Script pass",str(counter))
            pastlength = len(stmts)
            for stmt in stmts:
                try:
                    #print(stmt,'\n','------------------------------------------------------','\n','------------------------------------------------------')
                    conn.execute(stmt)
                    # NOTE: removal is by value, so duplicate statements are
                    # removed together once one copy succeeds.
                    stmts = [x for x in stmts if x != stmt] #removes SQL statement from list if completed successfully]
                except sqlite.OperationalError:
                    errors.append(stmt)
            curlength = len(stmts)
        if len(stmts) == 0:
            return (True, None)
        else:
            return (False, stmts)
    else:
        return (False, None)
def Update(var, form = None):
    """Run all post-import update steps on the reporting database.

    *var* is a settings dict with at least 'RDpath' (SQLite db path),
    'SQLpath' (directory with the update scripts) and 'WMMpath' (NOAA
    magnetic model file, may be None); *form* is an optional GUI form whose
    lblAction label receives progress messages.  Returns the (possibly
    updated) *var* dict.
    """
    RDpath = var['RDpath']
    log = ""
    sqldir = var['SQLpath']
    ### connect to SQLite3 DB
    dirpath = os.path.dirname(RDpath)
    dbname = os.path.basename(RDpath)
    connection = sqlite.connect(RDpath)
    ### creating these functions allows for custom aggreate functions within the database. See classes.py for definition.
    connection.create_aggregate("stdev", 1, stdevs)
    connection.create_aggregate("meanw", 2, meanw)
    connection.create_aggregate("stdevw", 2, stdevw)
    connection.enable_load_extension(True)
    connection.row_factory = sqlite.Row
    c = connection.cursor()
    ### converts DIMA species list semi-colon concatenated values to individual species records for ease of processing.
    speciesrichness(connection)
    ### runs update SQL script to perform various post import updates given in the script.
    run_sqlscript(connection, script_path = os.path.join(sqldir, 'update.sql'), form = form, msg = 'Running update script...')
    ### runs insert_tags SQL script to automatically create some species and plot tags given in the SQL script (e.g. sagebrush = woody Artemisia sp.)
    run_sqlscript(connection, script_path = os.path.join(sqldir, 'insert_tags.sql'), form = form, msg = r'Inserting plot/species tags into database...')
    ### runs insert_custom SQL script to insert custom data defined by the user into the db.
    run_sqlscript(connection, script_path = os.path.join(sqldir, 'insert_custom.sql'), form = form, msg = r'Inserting custom data into the database...')
    ### defines how to group plots together when looking at plot level info. Only one plot with the same plotkey is shown per season.
    SeasonsCalc(connection)
    ### add declination information to tblPlots
    msg = "Adding declination information to plots."
    print(msg)
    if form != None:
        form.lblAction['text'] = msg
        form.lblAction.update_idletasks()
    # Decide whether we still need to locate the World Magnetic Model file.
    if var['WMMpath'] == None:
        getwmm = True
    elif not os.path.isfile(var['WMMpath']):
        getwmm = True
    else:
        getwmm = False
    mmpath = var['WMMpath']
    if getwmm:
        getmm = tkinter.messagebox.askyesno("Calculate declination?", "Would you like to calulate the magnetic declination of imported plots (is required for some spatial QC checks)?")
        if getmm:
            mmpath = tkinter.filedialog.askopenfilename(title = "Choose NOAA World Magnetic Model location (i.e. WMM.COF).",
                                                        filetypes = (("Magnetic Model files", "*.COF"),("All files", "*.*")))
            var['WMMpath'] = mmpath
    if mmpath:
        gm = geomag.geomag.GeoMag(mmpath)
        i = connection.cursor()
        # Only plots that do not yet have a declination (and are not the
        # special placeholder keys) are processed.
        rows = connection.execute("SELECT PlotKey, PlotID, Latitude, Longitude, Elevation, ElevationType, EstablishDate, "
                                  "Declination FROM tblPlots WHERE PlotKey NOT IN ('888888888','999999999') AND Declination IS NULL;")
        for row in rows:
            if row['EstablishDate']:
                dt = datetime.datetime.strptime(row['EstablishDate'],'%Y-%m-%d %H:%M:%S')
                # GeoMag expects elevation in feet: type 1 = metres (convert),
                # type 2 = feet, anything else treated as sea level.
                if row['ElevationType'] == 1:
                    elev = row['Elevation']*3.28084
                elif row['ElevationType'] == 2:
                    elev = row['Elevation']
                else:
                    elev = 0
                mag = gm.GeoMag(row['Latitude'],row['Longitude'], elev, dt.date())
                i.execute("UPDATE tblPlots SET Declination = ? WHERE PlotKey = ?;",(mag.dec, row['PlotKey']),)
            else:
                print("Plot", row['PlotID'], "has no EstablishDate. Skipping.")
    connection.commit()
    #connection.execute("VACUUM")
    connection.close()
    return var
### defines seasons, which the database uses to separate out plot revisits. When data are shown at the plot level,
### a season is given to it in order to view multiple visitations of the same plot. For above plot summations,
### only the most recent data in a revisit cycle is used.
def SeasonsCalc(connection):
    """Rebuild the SeasonDefinition table from the Data_DateRange settings.

    One row is written per season: start datetime, end datetime (one
    microsecond before the next season starts) and a 'YYYYMMDD-YYYYMMDD'
    label.  When Data_DateRange is empty, a default full-span range (January
    of the first plot year through December of the last, 12-month seasons)
    is derived from tblPlots first.
    """
    connection.execute("DELETE FROM SeasonDefinition")
    ### checks if a data date range is provided and if not inserts a default range based on date values from tblPlots
    n_ranges = connection.execute("SELECT Count(*) FROM Data_DateRange").fetchone()[0]
    if n_ranges == 0:
        sql = """INSERT INTO Data_DateRange SELECT strftime('%Y', Min(EstablishDate)) ||
        '-01-01' AS StartDate, strftime('%Y', Max(EstablishDate)) || '-12-31'
        AS EndDate, 12 AS SeasonLength_Months FROM tblPlots;"""
        connection.execute(sql)
    range_row = connection.execute("SELECT * FROM Data_DateRange").fetchone()
    range_start = datetime.datetime.strptime(range_row['StartDate'], '%Y-%m-%d')
    range_end = datetime.datetime.strptime(range_row['EndDate'], '%Y-%m-%d')
    season_months = range_row['SeasonLength_Months']
    year_fraction = season_months / 12
    cursor_date = range_start
    while cursor_date < range_end:
        # Season length in days honours leap years of the starting year.
        days_in_year = 366 if calendar.isleap(cursor_date.year) else 365
        following = cursor_date + datetime.timedelta(days=(year_fraction * days_in_year))
        # End one microsecond before the next season begins.
        season_close = following - datetime.timedelta(microseconds=1)
        label = cursor_date.strftime('%Y%m%d') + "-" + season_close.strftime('%Y%m%d')
        connection.execute(
            "INSERT INTO SeasonDefinition (SeasonStart, SeasonEnd, SeasonLabel) VALUES (?,?,?);",
            (cursor_date, season_close, label,))
        cursor_date = following
    return
### this function is used to convert the semicolon delimitted species richness fields into individual records for ease of processing.
def speciesrichness(connection):
    """Explode the semicolon-delimited DIMA species lists into SR_Raw rows.

    SR_Raw is cleared, then every tblSpecRichDetail record contributes one
    (RecKey, subPlotID, species) row per non-empty species code.  Rows with
    a falsy RecKey or subPlotID are skipped, duplicates are ignored.
    """
    connection.execute("DELETE FROM SR_Raw;")
    detail_rows = connection.execute(
        "SELECT RecKey, subPlotID, SpeciesList FROM tblSpecRichDetail;")
    for rec_key, subplot_id, species_list in detail_rows:
        exploded = [
            (rec_key, subplot_id, code)
            for code in species_list.split(sep=';')
            if code and rec_key and subplot_id
        ]
        #print(exploded)
        connection.executemany('INSERT OR IGNORE INTO SR_Raw VALUES (?,?,?)',
                               exploded)
    connection.commit()
### end function definition ###
| wlieurance/aim-reporting | update.py | Python | gpl-3.0 | 8,077 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for the FIDUCEO MVIRI FCDR Reader."""
from __future__ import annotations
import os
from unittest import mock
import dask.array as da
import numpy as np
import pytest
import xarray as xr
from pyresample.geometry import AreaDefinition
from pyresample.utils import proj4_radius_parameters
from satpy.readers.mviri_l1b_fiduceo_nc import (
ALTITUDE,
EQUATOR_RADIUS,
POLE_RADIUS,
DatasetWrapper,
FiduceoMviriEasyFcdrFileHandler,
FiduceoMviriFullFcdrFileHandler,
)
from satpy.tests.utils import make_dataid
# Expected attributes attached to every dataset returned by the reader.
attrs_exp: dict = {
    'platform': 'MET7',
    'raw_metadata': {'foo': 'bar'},
    'sensor': 'MVIRI',
    'orbital_parameters': {
        'projection_longitude': 57.0,
        'projection_latitude': 0.0,
        'projection_altitude': 35785860.0,
        'satellite_actual_longitude': 57.1,
        'satellite_actual_latitude': 0.1,
    }
}
# Reflectance datasets additionally carry the sun-earth distance correction.
attrs_refl_exp = attrs_exp.copy()
attrs_refl_exp.update(
    {'sun_earth_distance_correction_applied': True,
     'sun_earth_distance_correction_factor': 1.}
)
# Per-scanline acquisition times for the 4-line VIS grid.
acq_time_vis_exp = [np.datetime64('1970-01-01 00:30'),
                    np.datetime64('1970-01-01 00:30'),
                    np.datetime64('1970-01-01 02:30'),
                    np.datetime64('1970-01-01 02:30')]
# Expected VIS channel in the three calibration levels.
vis_counts_exp = xr.DataArray(
    np.array(
        [[0., 17., 34., 51.],
         [68., 85., 102., 119.],
         [136., 153., np.nan, 187.],
         [204., 221., 238., 255]],
        dtype=np.float32
    ),
    dims=('y', 'x'),
    coords={
        'acq_time': ('y', acq_time_vis_exp),
    },
    attrs=attrs_exp
)
vis_rad_exp = xr.DataArray(
    np.array(
        [[np.nan, 18.56, 38.28, 58.],
         [77.72, 97.44, 117.16, 136.88],
         [156.6, 176.32, np.nan, 215.76],
         [235.48, 255.2, 274.92, 294.64]],
        dtype=np.float32
    ),
    dims=('y', 'x'),
    coords={
        'acq_time': ('y', acq_time_vis_exp),
    },
    attrs=attrs_exp
)
vis_refl_exp = xr.DataArray(
    np.array(
        [[np.nan, 23.440929, np.nan, np.nan],
         [40.658744, 66.602233, 147.970867, np.nan],
         [75.688217, 92.240733, np.nan, np.nan],
         [np.nan, np.nan, np.nan, np.nan]],
        dtype=np.float32
    ),
    # (0, 0) and (2, 2) are NaN because radiance is NaN
    # (0, 2) is NaN because SZA >= 90 degrees
    # Last row/col is NaN due to SZA interpolation
    dims=('y', 'x'),
    coords={
        'acq_time': ('y', acq_time_vis_exp),
    },
    attrs=attrs_refl_exp
)
# Independent uncertainty of the VIS reflectance (easy FCDR only).
u_vis_refl_exp = xr.DataArray(
    np.array(
        [[0.1, 0.2, 0.3, 0.4],
         [0.5, 0.6, 0.7, 0.8],
         [0.9, 1.0, 1.1, 1.2],
         [1.3, 1.4, 1.5, 1.6]],
        dtype=np.float32
    ),
    dims=('y', 'x'),
    coords={
        'acq_time': ('y', acq_time_vis_exp),
    },
    attrs=attrs_exp
)
# Per-scanline acquisition times for the 2-line IR/WV grid.
acq_time_ir_wv_exp = [np.datetime64('1970-01-01 00:30'),
                      np.datetime64('1970-01-01 02:30')]
# Expected WV channel in the three calibration levels.
wv_counts_exp = xr.DataArray(
    np.array(
        [[0, 85],
         [170, 255]],
        dtype=np.uint8
    ),
    dims=('y', 'x'),
    coords={
        'acq_time': ('y', acq_time_ir_wv_exp),
    },
    attrs=attrs_exp
)
wv_rad_exp = xr.DataArray(
    np.array(
        [[np.nan, 3.75],
         [8, 12.25]],
        dtype=np.float32
    ),
    dims=('y', 'x'),
    coords={
        'acq_time': ('y', acq_time_ir_wv_exp),
    },
    attrs=attrs_exp
)
wv_bt_exp = xr.DataArray(
    np.array(
        [[np.nan, 230.461366],
         [252.507448, 266.863289]],
        dtype=np.float32
    ),
    dims=('y', 'x'),
    coords={
        'acq_time': ('y', acq_time_ir_wv_exp),
    },
    attrs=attrs_exp
)
# Expected IR channel in the three calibration levels.
ir_counts_exp = xr.DataArray(
    np.array(
        [[0, 85],
         [170, 255]],
        dtype=np.uint8
    ),
    dims=('y', 'x'),
    coords={
        'acq_time': ('y', acq_time_ir_wv_exp),
    },
    attrs=attrs_exp
)
ir_rad_exp = xr.DataArray(
    np.array(
        [[np.nan, 80],
         [165, 250]],
        dtype=np.float32
    ),
    dims=('y', 'x'),
    coords={
        'acq_time': ('y', acq_time_ir_wv_exp),
    },
    attrs=attrs_exp
)
ir_bt_exp = xr.DataArray(
    np.array(
        [[np.nan, 178.00013189],
         [204.32955838, 223.28709913]],
        dtype=np.float32
    ),
    dims=('y', 'x'),
    coords={
        'acq_time': ('y', acq_time_ir_wv_exp),
    },
    attrs=attrs_exp
)
# Expected quality bitmask (1 marks the invalid pixel in the fake dataset).
quality_pixel_bitmask_exp = xr.DataArray(
    np.array(
        [[0, 0, 0, 0],
         [0, 0, 0, 0],
         [0, 0, 1, 0],
         [0, 0, 0, 0]],
        dtype=np.uint8
    ),
    dims=('y', 'x'),
    coords={
        'acq_time': ('y', acq_time_vis_exp),
    },
    attrs=attrs_exp
)
# Expected solar zenith angle interpolated to both grids.
sza_vis_exp = xr.DataArray(
    np.array(
        [[45., 67.5, 90., np.nan],
         [22.5, 45., 67.5, np.nan],
         [0., 22.5, 45., np.nan],
         [np.nan, np.nan, np.nan, np.nan]],
        dtype=np.float32
    ),
    dims=('y', 'x'),
    attrs=attrs_exp
)
sza_ir_wv_exp = xr.DataArray(
    np.array(
        [[45, 90],
         [0, 45]],
        dtype=np.float32
    ),
    dims=('y', 'x'),
    attrs=attrs_exp
)
# Expected geostationary area definitions for the two grid sizes.
area_vis_exp = AreaDefinition(
    area_id='geos_mviri_4x4',
    proj_id='geos_mviri_4x4',
    description='MVIRI Geostationary Projection',
    projection={
        'proj': 'geos',
        'lon_0': 57.0,
        'h': ALTITUDE,
        'a': EQUATOR_RADIUS,
        'b': POLE_RADIUS
    },
    width=4,
    height=4,
    area_extent=[5621229.74392, 5621229.74392, -5621229.74392, -5621229.74392]
)
area_ir_wv_exp = area_vis_exp.copy(
    area_id='geos_mviri_2x2',
    proj_id='geos_mviri_2x2',
    width=2,
    height=2
)
@pytest.fixture(name='fake_dataset')
def fixture_fake_dataset():
    """Create fake dataset."""
    # Raw counts: 2x2 IR/WV grids and a 4x4 VIS grid spanning 0..255.
    count_ir = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2)
    count_wv = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2)
    count_vis = da.linspace(0, 255, 16, dtype=np.uint8).reshape(4, 4)
    # Solar zenith angle on the tie-point grid.
    sza = da.from_array(
        np.array(
            [[45, 90],
             [0, 45]],
            dtype=np.float32
        )
    )
    mask = da.from_array(
        np.array(
            [[0, 0, 0, 0],
             [0, 0, 0, 0],
             [0, 0, 1, 0],   # 1 = "invalid"
             [0, 0, 0, 0]],
            dtype=np.uint8
        )
    )
    # Acquisition times on the IR/WV grid (one per scanline pair).
    time = np.arange(4).astype('datetime64[h]').reshape(2, 2)
    ds = xr.Dataset(
        data_vars={
            'count_vis': (('y', 'x'), count_vis),
            'count_wv': (('y_ir_wv', 'x_ir_wv'), count_wv),
            'count_ir': (('y_ir_wv', 'x_ir_wv'), count_ir),
            'toa_bidirectional_reflectance_vis': vis_refl_exp / 100,
            'u_independent_toa_bidirectional_reflectance': u_vis_refl_exp / 100,
            'quality_pixel_bitmask': (('y', 'x'), mask),
            'solar_zenith_angle': (('y_tie', 'x_tie'), sza),
            'time_ir_wv': (('y_ir_wv', 'x_ir_wv'), time),
            # Calibration coefficients (radiance and BT conversion).
            'a_ir': -5.0,
            'b_ir': 1.0,
            'bt_a_ir': 10.0,
            'bt_b_ir': -1000.0,
            'a_wv': -0.5,
            'b_wv': 0.05,
            'bt_a_wv': 10.0,
            'bt_b_wv': -2000.0,
            'years_since_launch': 20.0,
            'a0_vis': 1.0,
            'a1_vis': 0.01,
            'a2_vis': -0.0001,
            'mean_count_space_vis': 1.0,
            'distance_sun_earth': 1.0,
            'solar_irradiance_vis': 650.0,
            # Deliberately partial satellite position (NaN halves) to test
            # the reader's start/end fallback handling.
            'sub_satellite_longitude_start': 57.1,
            'sub_satellite_longitude_end': np.nan,
            'sub_satellite_latitude_start': np.nan,
            'sub_satellite_latitude_end': 0.1,
        },
        coords={
            'y': [1, 2, 3, 4],
            'x': [1, 2, 3, 4],
            'y_ir_wv': [1, 2],
            'x_ir_wv': [1, 2],
            'y_tie': [1, 2],
            'x_tie': [1, 2]
        },
        attrs={'foo': 'bar'}
    )
    ds['count_ir'].attrs['ancillary_variables'] = 'a_ir b_ir'
    ds['count_wv'].attrs['ancillary_variables'] = 'a_wv b_wv'
    return ds
@pytest.fixture(
    name='file_handler',
    params=[FiduceoMviriEasyFcdrFileHandler,
            FiduceoMviriFullFcdrFileHandler]
)
def fixture_file_handler(fake_dataset, request):
    """Create mocked file handler."""
    # Tests may override mask_bad_quality via the 'file_handler_data' marker.
    marker = request.node.get_closest_marker("file_handler_data")
    mask_bad_quality = True
    if marker:
        mask_bad_quality = marker.kwargs['mask_bad_quality']
    fh_class = request.param
    # Patch xr.open_dataset so the handler reads the in-memory fake dataset.
    with mock.patch('satpy.readers.mviri_l1b_fiduceo_nc.xr.open_dataset') as open_dataset:
        open_dataset.return_value = fake_dataset
        return fh_class(
            filename='filename',
            filename_info={'platform': 'MET7',
                           'sensor': 'MVIRI',
                           'projection_longitude': '57.0'},
            filetype_info={'foo': 'bar'},
            mask_bad_quality=mask_bad_quality
        )
@pytest.fixture(name='reader')
def fixture_reader():
    """Return MVIRI FIDUCEO FCDR reader."""
    from satpy._config import config_search_paths
    from satpy.readers import load_reader
    # Load the reader from its YAML configuration as satpy would.
    reader_configs = config_search_paths(
        os.path.join("readers", "mviri_l1b_fiduceo_nc.yaml"))
    reader = load_reader(reader_configs)
    return reader
class TestFiduceoMviriFileHandlers:
    """Unit tests for FIDUCEO MVIRI file handlers."""
    def test_init(self, file_handler):
        """Test file handler initialization."""
        assert file_handler.projection_longitude == 57.0
        assert file_handler.mask_bad_quality is True
    @pytest.mark.parametrize(
        ('name', 'calibration', 'resolution', 'expected'),
        [
            ('VIS', 'counts', 2250, vis_counts_exp),
            ('VIS', 'radiance', 2250, vis_rad_exp),
            ('VIS', 'reflectance', 2250, vis_refl_exp),
            ('WV', 'counts', 4500, wv_counts_exp),
            ('WV', 'radiance', 4500, wv_rad_exp),
            ('WV', 'brightness_temperature', 4500, wv_bt_exp),
            ('IR', 'counts', 4500, ir_counts_exp),
            ('IR', 'radiance', 4500, ir_rad_exp),
            ('IR', 'brightness_temperature', 4500, ir_bt_exp),
            ('quality_pixel_bitmask', None, 2250, quality_pixel_bitmask_exp),
            ('solar_zenith_angle', None, 2250, sza_vis_exp),
            ('solar_zenith_angle', None, 4500, sza_ir_wv_exp),
            ('u_independent_toa_bidirectional_reflectance', None, 4500, u_vis_refl_exp)
        ]
    )
    def test_get_dataset(self, file_handler, name, calibration, resolution,
                         expected):
        """Test getting datasets."""
        id_keys = {'name': name, 'resolution': resolution}
        if calibration:
            id_keys['calibration'] = calibration
        dataset_id = make_dataid(**id_keys)
        dataset_info = {'platform': 'MET7'}
        is_easy = isinstance(file_handler, FiduceoMviriEasyFcdrFileHandler)
        is_vis = name == 'VIS'
        is_refl = calibration == 'reflectance'
        if is_easy and is_vis and not is_refl:
            # VIS counts/radiance not available in easy FCDR
            with pytest.raises(ValueError):
                file_handler.get_dataset(dataset_id, dataset_info)
        else:
            ds = file_handler.get_dataset(dataset_id, dataset_info)
            xr.testing.assert_allclose(ds, expected)
            assert ds.dtype == expected.dtype
            assert ds.attrs == expected.attrs
    def test_get_dataset_corrupt(self, file_handler):
        """Test getting datasets with known corruptions."""
        # Time may have different names and satellite position might be missing
        file_handler.nc.nc = file_handler.nc.nc.rename(
            {'time_ir_wv': 'time'}
        )
        file_handler.nc.nc = file_handler.nc.nc.drop_vars(
            ['sub_satellite_longitude_start']
        )
        dataset_id = make_dataid(
            name='VIS',
            calibration='reflectance',
            resolution=2250
        )
        ds = file_handler.get_dataset(dataset_id, {'platform': 'MET7'})
        assert 'actual_satellite_longitude' not in ds.attrs['orbital_parameters']
        assert 'actual_satellite_latitude' not in ds.attrs['orbital_parameters']
        xr.testing.assert_allclose(ds, vis_refl_exp)
    @mock.patch(
        'satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_acq_time'
    )
    def test_time_cache(self, interp_acq_time, file_handler):
        """Test caching of acquisition times."""
        dataset_id = make_dataid(
            name='VIS',
            resolution=2250,
            calibration='reflectance'
        )
        info = {}
        interp_acq_time.return_value = xr.DataArray([1, 2, 3, 4], dims='y')
        # Cache init
        file_handler.get_dataset(dataset_id, info)
        interp_acq_time.assert_called()
        # Cache hit
        interp_acq_time.reset_mock()
        file_handler.get_dataset(dataset_id, info)
        interp_acq_time.assert_not_called()
        # Cache miss (different grid resolution needs a new interpolation)
        interp_acq_time.return_value = xr.DataArray([1, 2], dims='y')
        another_id = make_dataid(
            name='IR',
            resolution=4500,
            calibration='brightness_temperature'
        )
        interp_acq_time.reset_mock()
        file_handler.get_dataset(another_id, info)
        interp_acq_time.assert_called()
    @mock.patch(
        'satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_tiepoints'
    )
    def test_angle_cache(self, interp_tiepoints, file_handler):
        """Test caching of angle datasets."""
        dataset_id = make_dataid(name='solar_zenith_angle',
                                 resolution=2250)
        info = {}
        # Cache init
        file_handler.get_dataset(dataset_id, info)
        interp_tiepoints.assert_called()
        # Cache hit
        interp_tiepoints.reset_mock()
        file_handler.get_dataset(dataset_id, info)
        interp_tiepoints.assert_not_called()
        # Cache miss (different grid resolution needs a new interpolation)
        another_id = make_dataid(name='solar_zenith_angle',
                                 resolution=4500)
        interp_tiepoints.reset_mock()
        file_handler.get_dataset(another_id, info)
        interp_tiepoints.assert_called()
    @pytest.mark.parametrize(
        ('name', 'resolution', 'area_exp'),
        [
            ('VIS', 2250, area_vis_exp),
            ('WV', 4500, area_ir_wv_exp),
            ('IR', 4500, area_ir_wv_exp),
            ('quality_pixel_bitmask', 2250, area_vis_exp),
            ('solar_zenith_angle', 2250, area_vis_exp),
            ('solar_zenith_angle', 4500, area_ir_wv_exp)
        ]
    )
    def test_get_area_definition(self, file_handler, name, resolution,
                                 area_exp):
        """Test getting area definitions."""
        dataset_id = make_dataid(name=name, resolution=resolution)
        area = file_handler.get_area_def(dataset_id)
        a, b = proj4_radius_parameters(area.proj_dict)
        a_exp, b_exp = proj4_radius_parameters(area_exp.proj_dict)
        assert a == a_exp
        assert b == b_exp
        assert area.width == area_exp.width
        assert area.height == area_exp.height
        for key in ['h', 'lon_0', 'proj', 'units']:
            assert area.proj_dict[key] == area_exp.proj_dict[key]
        np.testing.assert_allclose(area.area_extent, area_exp.area_extent)
    def test_calib_exceptions(self, file_handler):
        """Test calibration exceptions."""
        # Angle datasets cannot be calibrated at all.
        with pytest.raises(KeyError):
            file_handler.get_dataset(
                make_dataid(name='solar_zenith_angle', calibration='counts'),
                {}
            )
        # VIS has no BT calibration; IR has no reflectance calibration.
        with pytest.raises(KeyError):
            file_handler.get_dataset(
                make_dataid(
                    name='VIS',
                    resolution=2250,
                    calibration='brightness_temperature'),
                {}
            )
        with pytest.raises(KeyError):
            file_handler.get_dataset(
                make_dataid(
                    name='IR',
                    resolution=4500,
                    calibration='reflectance'),
                {}
            )
        if isinstance(file_handler, FiduceoMviriEasyFcdrFileHandler):
            with pytest.raises(KeyError):
                file_handler.get_dataset(
                    {'name': 'VIS', 'calibration': 'counts'},
                    {}
                )  # not available in easy FCDR
    @pytest.mark.file_handler_data(mask_bad_quality=False)
    def test_bad_quality_warning(self, file_handler):
        """Test warning about bad VIS quality."""
        # Flag every pixel as bad; the handler should warn but not mask.
        file_handler.nc.nc['quality_pixel_bitmask'] = 2
        vis = make_dataid(name='VIS', resolution=2250,
                          calibration='reflectance')
        with pytest.warns(UserWarning):
            file_handler.get_dataset(vis, {})
    def test_file_pattern(self, reader):
        """Test file pattern matching."""
        filenames = [
            "FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_FULL_v2.6_fv3.1.nc",
            "FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc",
            "FIDUCEO_FCDR_L15_MVIRI_MET7-00.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc",
            "abcde",
        ]
        files = reader.select_files_from_pathnames(filenames)
        # only 3 out of 4 above should match
        assert len(files) == 3
class TestDatasetWrapper:
    """Unit tests for DatasetWrapper class."""

    def test_reassign_coords(self):
        """Test reassigning of coordinates.

        For some reason xarray does not always assign (y, x) coordinates to
        the high resolution datasets, although they have dimensions (y, x) and
        coordinates y and x exist. A dataset with these properties seems
        impossible to create (neither dropping, resetting or deleting
        coordinates seems to work). Instead use mock as a workaround.
        """
        y_coords = [.1, .2]
        x_coords = [.3, .4]
        values = [[1, 2],
                  [3, 4]]
        # Fake dataset: has y/x coords at the dataset level, but the
        # variable returned by item access lacks them.
        nc = mock.MagicMock(
            coords={'y': y_coords, 'x': x_coords},
            dims=('y', 'x')
        )
        nc.__getitem__.return_value = xr.DataArray(values, dims=('y', 'x'))
        expected = xr.DataArray(
            values,
            dims=('y', 'x'),
            coords={'y': y_coords, 'x': x_coords}
        )
        # The wrapper should reattach the dataset-level coordinates.
        wrapper = DatasetWrapper(nc)
        xr.testing.assert_equal(wrapper['foo'], expected)
| pytroll/satpy | satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | Python | gpl-3.0 | 19,070 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-05-26 22:08
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):

    # Must run after the subscription-limit migration so the Event model
    # state it alters already exists.
    dependencies = [
        ('event', '0022_event_number_limit_subscription'),
    ]

    # Meta-options-only change: newest-first default ordering plus
    # Portuguese verbose names. No database schema change is involved.
    operations = [
        migrations.AlterModelOptions(
            name='event',
            options={'ordering': ('-date',), 'verbose_name': 'evento', 'verbose_name_plural': 'eventos'},
        ),
    ]
| sandrofolk/girox | girox/event/migrations/0023_auto_20170526_2208.py | Python | gpl-3.0 | 482 |
'''
Created on Dec 23, 2013

@author: yusuf
'''


# list by: type, name etc.
def listFilesAndDirs(listType='a', mypath="."):
    """Return (kind, name) tuples for the entries of a directory.

    :param listType: 'f' -> files only, 'd' -> directories only,
        anything else (historical default 'a') -> directories first,
        then files.
    :param mypath: directory to list; defaults to the current working
        directory (matching the original hard-coded behaviour).
    :return: list of ("f", filename) / ("d", dirname) tuples.
    """
    from os import listdir
    from os.path import isfile, isdir, join
    entries = listdir(mypath)
    files = [f for f in entries if isfile(join(mypath, f))]
    directories = [d for d in entries if isdir(join(mypath, d))]
    # Build as lists so the result supports concatenation and repeated
    # iteration on Python 3 (zip() is a lazy iterator there).
    filesWithType = [("f", f) for f in files]
    # BUG FIX: the original used zip(["d"] * len(files), directories),
    # silently truncating the directory list whenever there were fewer
    # files than directories.
    directoriesWithType = [("d", d) for d in directories]
    if listType == 'f':
        return filesWithType
    elif listType == 'd':
        return directoriesWithType
    else:
        return directoriesWithType + filesWithType
import discord
from discord.ext import commands
class Helputil:
    """Shane's custom help utility for Dolores."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command(pass_context=True)
    async def dolores(self, ctx):
        """Dolores command reference."""
        # React to the invoking ".dolores" message with the custom emoji.
        await self.bot.add_reaction(ctx.message, ":TVbarrydab:346706667595104268")
        # Intro embed with the bot's avatar as a thumbnail.
        intro = discord.Embed(title="Getting Started with Dolores", description="Dolores is a fork of Red, a bot written in Python by Twentysix. Any code modifications have been made by Shane#1044. You can view this command reference again at any time by typing '.dolores'.")
        intro.set_thumbnail(url='https://i.imgur.com/fchVvu9.png')
        await self.bot.whisper(embed=intro)
        # One embed per command category, whispered in order; each field
        # is a (label, command) pair rendered inline.
        categories = [
            ("Economy", 0x2eaa17, [
                ("Open a bank account", ".register"),
                ("Check your balance", ".balance (or .$)"),
                ("Buy 'Trusted' role", ".buy Trusted"),
                ("View leaderboards", ".leaderboard"),
                ("Play the slot machine", ".slot <bid>"),
                ("Get some free credits", ".payday (Patrons Only)"),
            ]),
            ("Gambling", 0xFFFFFF, [
                ("Join the Casino", ".cjoin"),
                ("Exchange for chips", ".exchange credits <#>"),
                ("Exchange for credits", ".exchange chips <#>"),
                ("Play All-In", ".allin"),
                ("Play Blackjack", ".blackjack"),
                ("Flip a coin", ".coin"),
                ("Play cups", ".cups"),
                ("Roll some dice", ".dice"),
                ("Play Hi-Lo", ".hilo"),
                ("Play War", ".war"),
                ("Check casino balance", ".cbalance (or .chips)"),
                ("View casino leaderboard", ".ctop"),
                ("Get some free chips", ".cpayday (Patrons Only)"),
            ]),
            ("Music", 0x6000ff, [
                ("Play a song", ".play <song name/link>"),
                ("Search YouTube", ".yt <search>"),
                ("Access the queue", ".queue"),
                ("Toggle repeat", ".repeat"),
                ("Shuffle the queue", ".shuffle"),
                ("Vote skip", ".skip"),
                ("Display song info", ".song"),
                ("Make Dolores sing", ".sing"),
                ("Return to previous song", ".prev"),
            ]),
        ]
        for title, color, fields in categories:
            embed = discord.Embed(title=title, color=color)
            for name, value in fields:
                embed.add_field(name=name, value=value, inline=True)
            await self.bot.whisper(embed=embed)
        # Closing embed: rules reminder plus maintainer credit.
        closing = discord.Embed(title="Other", description="As a reminder, using any of these commands outside of the designated #commands channel is forbidden, per the Server Rules. Please help us keep our discussion channels tidy! :)")
        closing.set_footer(text="Bot created and maintained by Shane#1044 for The Lounge.")
        await self.bot.whisper(embed=closing)
def setup(bot):
    # Cog entry point: called by the bot framework to register Helputil.
    bot.add_cog(Helputil(bot))
| bucklingspring/dolores | cogs/helputil/helputil.py | Python | gpl-3.0 | 3,931 |
# coding: utf-8
__author__ = 'Math'
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import random
from scipy.interpolate import interp1d, PchipInterpolator, splrep, splev
import matplotlib.patches as patches
import matplotlib.path as path
import matplotlib as mpl
from mpltools import style
from scipy.stats import gaussian_kde
mpl.rc("figure", facecolor="white")
class kernelhandler(object):
    """Nadaraya-Watson kernel regression over a fixed set of samples.

    Calling the instance at a point ``x`` returns the Gaussian-kernel
    weighted average of the sample ``y`` values, where each weight is the
    kernel evaluated at the (bandwidth-scaled) distance between ``x`` and
    the sample's own ``x`` coordinate.
    """

    def __init__(self, points, bandwidth):
        """
        :param points: iterable of (x, y) sample pairs.
        :param bandwidth: smoothing bandwidth (larger -> smoother estimate).
        """
        self.bandwidth = bandwidth
        self.points = points

    def kernel(self):
        """Return the standard Gaussian probability density function."""
        def f(x):
            return 1.0 / np.sqrt(2 * np.pi) * np.exp(-1.0 / 2 * x ** 2)
        return f

    def __call__(self, *args, **kwargs):
        """Evaluate the estimator at ``x``.

        ``x`` may be passed positionally (``k(0.5)``) or as a keyword
        (``k(x=0.5)``, the historical calling convention).

        :raises ZeroDivisionError: if every kernel weight underflows to
            zero (query point extremely far from all samples) — same
            behaviour as the original implementation.
        """
        # Backward compatible: keyword 'x' wins, else first positional arg.
        x = kwargs['x'] if 'x' in kwargs else args[0]
        f = self.kernel()
        # Compute each weight once (the original evaluated the kernel
        # twice per sample) and form the normalized weighted average.
        weights = [f((x - x_i) * 1.0 / self.bandwidth)
                   for x_i, _ in self.points]
        weighted_sum = sum(w * y_i
                           for w, (_, y_i) in zip(weights, self.points))
        return weighted_sum / sum(weights)
class Shape(object):
    """Animated order-book depth chart rendered with matplotlib.

    ``data`` is an iterable of order-book snapshots shaped like
    ``{-1: [(level, volume), ...], 1: [(level, volume), ...]}`` where
    key ``-1`` holds the ask side and key ``1`` the bid side (this is
    exactly what ``levels_to_shape`` produces below). Each animation
    frame draws per-level bars plus a smooth interpolated outline and a
    mid-price annotation.
    """
    def __init__(self, data, size_k):
        """
        :param data: iterable of order-book snapshots (see class docstring).
        :param size_k: number of price levels drawn per side.
        :return: None
        """
        self.data = data
        self.size_k = size_k
        # Bar-center x positions per side, assigned by the caller
        # ('nondes_bid' is a historical typo kept for compatibility).
        self.nodes_ask = None
        self.nondes_bid = None
        # Matplotlib handles; created lazily in cargando_figuras().
        self.__fig = None
        self.__ax = None
        self.__line_bid = None
        self.__line_ask = None
        self.__scat = None
        self.__patch_ask = None
        self.__patch_bid = None
        self.anot = None
        # Number of samples used for the interpolated outline curves.
        self.ticks = 500
        # Path vertices/codes backing the bar patches.
        self.__vert_ask = None
        self.__codes = None
        self.__vert_bid = None
    def cargando_figuras(self):
        """Create the figure, axes, outline lines and bar patches.

        Must be called (after the node positions are set) before the
        animation is started via __call__.
        :return: None
        """
        self.__fig, self.__ax = plt.subplots()
        self.__line_bid, = self.__ax.plot([], [], lw=2, c='#286090')
        self.__line_ask, = self.__ax.plot([], [], lw=2, c='#286090')
        self.__scat = self.__ax.scatter([], [], c='black', s=2)
        self.__ax.set_ylim(-1, 30)
        self.__ax.set_xlim(-6, 6)
        self.__ax.grid(linestyle='-', color='#808080', alpha=0.2)
        # Two identical vertices per node: bar corners start at height 0
        # and are raised later, frame by frame, in __run_data().
        f = lambda x: [(float(x) + 0.5, 0.), (float(x) + 0.5, 0.)]
        self.__vert_ask = [f(x) for x in self.nodes_ask]
        self.__vert_ask = np.array(sum(self.__vert_ask, []))
        f = lambda x: [(float(x) - 0.5, 0.), (float(x) - 0.5, 0.)]
        self.__vert_bid = [f(x) for x in self.nondes_bid]
        self.__vert_bid = np.array(sum(self.__vert_bid, []))
        self.__codes = [path.Path.LINETO for _ in xrange(len(self.__vert_ask))]
        self.__codes[0] = path.Path.MOVETO
        self.__codes = np.array(self.__codes)
        # Green patch for the ask side, red patch for the bid side.
        barpath_ask = path.Path(self.__vert_ask, self.__codes)
        self.__patch_ask = patches.PathPatch(barpath_ask, facecolor='#5cb85c',
                                             edgecolor='#4cae4c', alpha=0.5)
        self.__ax.add_patch(self.__patch_ask)
        barpath_bid = path.Path(self.__vert_bid, self.__codes)
        self.__patch_bid = patches.PathPatch(barpath_bid, facecolor='#c9302c',
                                             edgecolor='#ac2925', alpha=0.5)
        self.__ax.add_patch(self.__patch_bid)
        # Remove the tick marks.
        self.__ax.tick_params(width=0)
        # Hide the axis spines.
        self.__ax.spines["top"].set_visible(False)
        self.__ax.spines["right"].set_visible(False)
        self.__ax.spines["bottom"].set_visible(False)
        self.__ax.spines["left"].set_visible(False)
        self.__ax.tick_params(axis='x', colors='#404040')
        self.__ax.tick_params(axis='y', colors='#404040')
        self.anot = self.__ax.text(0.02, 0.95, '', transform=self.__ax.transAxes)
    def __update_limits_x(self, domain):
        """Grow/shrink the x axis so the given x values stay in view.

        :param domain: x values that must remain visible.
        :return: None
        """
        xmin, xmax = self.__ax.get_xlim()
        min_domain = min(domain)
        max_domain = max(domain)
        any_change = False
        # Widen when data approaches an edge; tighten when the view has
        # drifted more than 10 units away from the data.
        if xmax - 2 < max_domain:
            xmax = max_domain + 4
            any_change = True
        elif abs(xmax - max_domain) > 10:
            xmax = max_domain + 3
            any_change = True
        if xmin + 2 > min_domain :
            xmin = min_domain - 2
            any_change = True
        elif abs(xmin - min_domain) > 10:
            xmin = min_domain - 4
            any_change = True
        if any_change:
            # Changing limits requires a full canvas redraw (blitting
            # alone would keep the stale axes).
            self.__ax.set_xlim(xmin , xmax)
            self.__ax.figure.canvas.draw()
    def __update_limits_y(self, rango):
        """Grow/shrink the y axis so the given y values stay in view.

        :param rango: y values that must remain visible.
        :return: None
        """
        ymin, ymax = self.__ax.get_ylim()
        min_rango = min(rango)
        max_rango = max(rango)
        any_change = False
        if ymax < max_rango:
            ymax = max_rango + 2
            any_change = True
        elif abs(ymax - max_rango) > 10:
            ymax = max_rango + 3
            any_change = True
        if ymin > min_rango :
            ymin = min_rango - 2
            any_change = True
        elif abs(ymin - min_rango) > 10:
            ymin = min_rango - 3
            any_change = True
        if any_change:
            self.__ax.set_ylim(ymin, ymax)
            self.__ax.figure.canvas.draw()
    def __interpolate_data(self, x, y):
        """Monotone cubic (PCHIP) interpolation of y over x.

        The inputs are reversed first if x is not ascending, as required
        by the interpolator; the returned samples still span the
        original x[0]..x[-1] direction.

        :param x: level positions.
        :param y: volumes at those positions.
        :return: list of ``self.ticks`` interpolated y samples.
        """
        x1 = x[:]
        y1 = y[:]
        if sorted(x1) != x1:
            x1.reverse()
            y1.reverse()
        f = PchipInterpolator(x1 , y1 , extrapolate=True)
        domain = np.linspace(x[0], x[-1], num=self.ticks, endpoint=True)
        return [f(a) for a in domain]
    def filter_data(self, x , y):
        # Drop leading zero-volume levels from the front of both lists
        # (mutates and returns them). If every volume is zero the inputs
        # are returned unchanged.
        index = -1
        for i, data in enumerate(y):
            if data != 0 :
                index = i
                break
        if index == -1:
            return x , y
        for i in xrange(index):
            x.pop(0)
            y.pop(0)
        return x, y
    def __generator_data(self):
        """Yield per-frame plotting data for the animation.

        :return: generator of (ask outline, bid outline, raw x/y per
            side, zero-filtered x/y per side) tuples.
        """
        for orderbook in self.data:
            y_ask = [data[1] for data in orderbook[-1]]
            y_bid = [data[1] for data in orderbook[1]]
            x_ask = [data[0] for data in orderbook[-1]]
            x_bid = [data[0] for data in orderbook[1]]
            # Outlines are interpolated over the non-empty levels only.
            x_ask_fill, y_ask_fill= self.filter_data(x_ask[:] , y_ask[:])
            x_bid_fill, y_bid_fill= self.filter_data(x_bid[:] , y_bid[:])
            yield self.__interpolate_data(x_ask_fill, y_ask_fill), \
                  self.__interpolate_data(x_bid_fill, y_bid_fill), \
                  x_ask, x_bid, y_ask, y_bid, \
                  x_ask_fill, x_bid_fill, y_ask_fill, y_bid_fill
    def __run_data(self, data):
        """Animation callback: update outlines, bars and mid-price text.

        :param data: one tuple yielded by __generator_data().
        :return: list of artists for blitting.
        """
        ask_inter_points, bid_inter_points, x_ask, x_bid, y_ask, y_bid, x_ask_fill, x_bid_fill, y_ask_fill, y_bid_fill = data
        # The first filtered level on each side straddles the mid price.
        mid_price = (y_ask_fill[0] + y_bid_fill[0])*1.0 / 2
        # print mid_price
        self.anot.set_text('Mid Price = %.1f' % mid_price)
        # bid_inter_points.reverse()
        x_bid.reverse()
        y_bid.reverse()
        self.__update_limits_x(x_ask + x_bid)
        self.__update_limits_y(ask_inter_points + bid_inter_points)
        self.__line_bid.set_data(np.linspace(x_bid_fill[0], x_bid_fill[-1], num=self.ticks, endpoint=True), bid_inter_points,)
        self.__line_ask.set_data(np.linspace(x_ask_fill[0], x_ask_fill[-1], num=self.ticks, endpoint=True), ask_inter_points)
        self.__scat.set_offsets(np.array(zip(x_bid_fill + x_ask_fill, y_bid_fill + y_ask_fill)))
        # Raise the two top corners of each ask bar to the new volume
        # (vertex pairs were laid out in cargando_figuras()).
        for x, point in enumerate(y_ask):
            self.__vert_ask[x*2+1][1] = point
            self.__vert_ask[x*2+2][1] = point
        # Flatten any leftover bars from a previous, longer snapshot.
        for x in xrange(len(y_ask), self.size_k):
            self.__vert_ask[x*2+1][1] = 0
            self.__vert_ask[x*2+2][1] = 0
        y_bid.reverse()
        # Bid vertices are indexed from the right-hand end.
        a = len(self.__vert_bid) - 1
        for x, point in enumerate(y_bid):
            self.__vert_bid[a-(x*2+1)][1] = point
            self.__vert_bid[a-(x*2+2)][1] = point
        for x in xrange(len(y_bid), self.size_k):
            self.__vert_bid[a-(x*2+1)][1] = 0
            self.__vert_bid[a-(x*2+2)][1] = 0
        return [self.__patch_ask, self.__patch_bid,self.__line_bid, self.__line_ask, self.__scat, self.anot]
    def __call__(self, *args, **kwargs):
        # Start the animation (one frame per snapshot) and block in the
        # matplotlib event loop until the window is closed.
        ani = animation.FuncAnimation(self.__fig, self.__run_data, self.__generator_data, blit=True, interval=100, repeat=False)
        plt.show()
class levels_to_shape(object):
    """Convert raw order-book snapshots into the layout Shape expects.

    Input snapshots map -1 to the ask levels (prices ascending) and 1 to
    the bid levels (prices descending); the conversion re-expresses the
    level positions either relative to the mid price or relative to the
    book's extremes, then feeds the result to a Shape instance.
    """
    def __init__(self, data, k):
        # 'data' goes through the validating property setter below.
        self.data = data
        self.size_k = k
        # Converted snapshots, filled by one of the distance_to_* methods.
        self.aux_data = []
        self.shape = Shape(None, None)
        # Which conversion was applied: 'mid_price' or 'extreme'.
        self.type_levels = None
    @property
    def data(self):
        return self.__data
    @data.setter
    def data(self, data):
        # Validate every snapshot: ask levels strictly ascending, bid
        # levels strictly descending.
        for orderbook in data:
            # print orderbook
            domain_ask = [x[0] for x in orderbook[-1]]
            assert all(domain_ask[i] < domain_ask[i + 1] for i in xrange(len(domain_ask) - 1))
            domain_bid = [x[0] for x in orderbook[1]]
            # print domain_bid
            assert all(domain_bid[i] > domain_bid[i + 1] for i in xrange(len(domain_bid) - 1))
        self.__data = data
    def distance_to_midprice(self):
        # Re-index levels by distance from the mid price: asks become
        # 1..n, bids become -1..-m.
        for orderbook in self.data:
            new_orderbook = {-1: [], 1: []}
            y_ask = [data[1] for data in orderbook[-1]]
            y_bid = [data[1] for data in orderbook[1]]
            x_ask = self.__ask_normalizade(y_ask)
            x_bid = self.__bid_normalizade(y_bid)
            new_orderbook[-1] = zip(x_ask, y_ask)
            new_orderbook[1] = zip(x_bid, y_bid)
            self.aux_data.append(new_orderbook)
            # print new_orderbook[-1]
            # print new_orderbook[1]
            # exit()
        self.type_levels = 'mid_price'
        self.set_nodes_barr()
    def distance_to_extreme(self):
        # Re-index levels by distance from the opposite book extreme and
        # pad the gap between the two sides with zero-volume levels.
        for orderbook in self.data:
            new_orderbook = {-1: [], 1: []}
            y_ask = [data[1] for data in orderbook[-1]]
            y_bid = [data[1] for data in orderbook[1]]
            len_ask = len(y_ask)
            len_bid = len(y_bid)
            x_ask = [len_bid + i for i in xrange(len_ask)]
            x_bid = [-(len_ask+i) for i in xrange(len_bid)]
            new_orderbook[-1] = zip(x_ask, y_ask)
            new_orderbook[1] = zip(x_bid, y_bid)
            #
            # print len_bid
            # print len_ask
            # print new_orderbook[-1]
            # print new_orderbook[1]
            # exit()
            # Zero-volume filler levels between the mid point and each
            # side's first real level.
            for i in xrange(0, abs(x_bid[0]) - 1):
                new_orderbook[1].insert(i, (-(i+1), 0))
            for i in xrange(1, x_ask[0]):
                new_orderbook[-1].insert(i - 1, (i, 0))
            # print new_orderbook[-1]
            # print new_orderbook[1]
            # exit()
            self.aux_data.append(new_orderbook)
        self.type_levels = 'extreme'
        self.set_nodes_barr()
    def set_nodes_barr(self):
        # Hand the bar positions to the Shape; the 'extreme' layout needs
        # twice as many nodes per side because of the filler levels.
        if self.type_levels == 'mid_price':
            nodes_ask = xrange(self.size_k + 1)
            nodes_bid = xrange(-(self.size_k), 1)
            self.shape.nondes_bid = nodes_bid
            self.shape.nodes_ask = nodes_ask
            self.shape.size_k = self.size_k
        elif self.type_levels == 'extreme':
            nodes_ask = xrange(self.size_k*2 + 1)
            nodes_bid = xrange(-(self.size_k*2), 1)
            self.shape.nondes_bid = nodes_bid
            self.shape.nodes_ask = nodes_ask
            self.shape.size_k = self.size_k*2
    def draw_shape(self):
        # Push the converted snapshots into the Shape and start the
        # (blocking) animation.
        self.shape.data = self.aux_data
        self.shape.cargando_figuras()
        self.shape()
    def __bid_normalizade(self, p_bid):
        # Bid positions -1, -2, ... -n (closest to mid price first).
        l = range(-len(p_bid), 0)
        l.reverse()
        return l
    def __ask_normalizade(self, p_ask):
        # Ask positions 1, 2, ... n (closest to mid price first).
        return range(1, len(p_ask) + 1)
def generar_orderbook_lines(n_lines):
    # Generate n_lines random order-book snapshots for the demo: each
    # maps -1 to ask levels (positions ascending) and 1 to bid levels
    # (positions descending), as required by levels_to_shape's setter.
    lines = []
    for i in xrange(n_lines):
        n_ask = random.randint(1, 4)
        ask = [(i, random.random() * 5 + 1) for i in xrange(0, 10, n_ask)]
        ask.sort(key = lambda x: x[0])
        n_bid = random.randint(1, 4)
        bid = [(-(i+1), random.random() * 5 + 1) for i in xrange(0, 10, n_bid)]
        lines.append({-1: ask, 1: bid})
    print lines
    return lines
if __name__ == '__main__':
    # Demo: animate 500 random order-book snapshots, 10 levels per side,
    # laid out by distance to the book's extremes.
    lines = generar_orderbook_lines(500)
    shape_orderbook = levels_to_shape(lines, 10)
    # levels.distance_to_midprice()
    shape_orderbook.distance_to_extreme()
    shape_orderbook.draw_shape()
    # l = [(i, random.random()) for i in xrange(10)]
    # k = kernelhandler(l, 0.5)
    # s = [k(**{'x': x}) for x, y in l]
    #
    # print [p[1] for p in l]
    # print s
    #
    # plt.plot([p[0] for p in l], [p[1] for p in l] )
    #
    # domain = np.linspace(0, 10, 100)
    #
    # plt.plot(domain, [k(**{'x': x}) for x in domain])
    #
    # plt.show()
| grupoanfi/orderbook-data-analysis | ODA/shape.py | Python | gpl-3.0 | 12,925 |
Subsets and Splits