Dataset columns (each row below lists its values in this order, with the file content in between):

    column            dtype    value stats
    repo_name         string   lengths 5 to 92
    path              string   lengths 4 to 221
    copies            string   19 values
    size              string   lengths 4 to 6
    content           string   lengths 766 to 896k
    license           string   15 values
    hash              int64    -9,223,277,421,539,062,000 to 9,223,102,107B
    line_mean         float64  6.51 to 99.9
    line_max          int64    32 to 997
    alpha_frac        float64  0.25 to 0.96
    autogenerated     bool     1 class
    ratio             float64  1.5 to 13.6
    config_test       bool     2 classes
    has_no_keywords   bool     2 classes
    few_assignments   bool     1 class
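The boolean and numeric columns are quality indicators for each file. Below is a minimal sketch of filtering rows on them; pandas, the local file name "code_rows.jsonl", and the exact thresholds are illustrative assumptions, not part of this dump.

import pandas as pd

# Hypothetical local export of the rows shown in this dump (one JSON object per line).
df = pd.read_json("code_rows.jsonl", lines=True)

# Keep hand-written, non-test files; the numeric bounds mirror the ranges in the schema above.
kept = df[
    (~df["autogenerated"])
    & (~df["config_test"])
    & (~df["has_no_keywords"])
    & (df["alpha_frac"].between(0.25, 0.96))
    & (df["line_max"] <= 997)
]

print(kept[["repo_name", "path", "license", "size"]].head())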
repo_name: NaohiroTamura/python-ironicclient | path: ironicclient/v1/chassis.py | copies: 1 | size: 6569
# -*- coding: utf-8 -*- # # Copyright © 2013 Red Hat, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from ironicclient.common import base from ironicclient.common.i18n import _ from ironicclient.common import utils from ironicclient import exc class Chassis(base.Resource): def __repr__(self): return "<Chassis %s>" % self._info class ChassisManager(base.CreateManager): resource_class = Chassis _resource_name = 'chassis' _creation_attributes = ['description', 'extra', 'uuid'] def list(self, marker=None, limit=None, sort_key=None, sort_dir=None, detail=False, fields=None): """Retrieve a list of chassis. :param marker: Optional, the UUID of a chassis, eg the last chassis from a previous result set. Return the next result set. :param limit: The maximum number of results to return per request, if: 1) limit > 0, the maximum number of chassis to return. 2) limit == 0, return the entire list of chassis. 3) limit param is NOT specified (None), the number of items returned respect the maximum imposed by the Ironic API (see Ironic's api.max_limit option). :param sort_key: Optional, field used for sorting. :param sort_dir: Optional, direction of sorting, either 'asc' (the default) or 'desc'. :param detail: Optional, boolean whether to return detailed information about chassis. :param fields: Optional, a list with a specified set of fields of the resource to be returned. Can not be used when 'detail' is set. :returns: A list of chassis. """ if limit is not None: limit = int(limit) if detail and fields: raise exc.InvalidAttribute(_("Can't fetch a subset of fields " "with 'detail' set")) filters = utils.common_filters(marker, limit, sort_key, sort_dir, fields) path = '' if detail: path += 'detail' if filters: path += '?' + '&'.join(filters) if limit is None: return self._list(self._path(path), "chassis") else: return self._list_pagination(self._path(path), "chassis", limit=limit) def list_nodes(self, chassis_id, marker=None, limit=None, sort_key=None, sort_dir=None, detail=False, fields=None, associated=None, maintenance=None, provision_state=None): """List all the nodes for a given chassis. :param chassis_id: The UUID of the chassis. :param marker: Optional, the UUID of a node, eg the last node from a previous result set. Return the next result set. :param limit: The maximum number of results to return per request, if: 1) limit > 0, the maximum number of nodes to return. 2) limit == 0, return the entire list of nodes. 3) limit param is NOT specified (None), the number of items returned respect the maximum imposed by the Ironic API (see Ironic's api.max_limit option). :param sort_key: Optional, field used for sorting. :param sort_dir: Optional, direction of sorting, either 'asc' (the default) or 'desc'. :param detail: Optional, boolean whether to return detailed information about nodes. :param fields: Optional, a list with a specified set of fields of the resource to be returned. Can not be used when 'detail' is set. :param associated: Optional. 
Either a Boolean or a string representation of a Boolean that indicates whether to return a list of associated (True or "True") or unassociated (False or "False") nodes. :param maintenance: Optional. Either a Boolean or a string representation of a Boolean that indicates whether to return nodes in maintenance mode (True or "True"), or not in maintenance mode (False or "False"). :param provision_state: Optional. String value to get only nodes in that provision state. :returns: A list of nodes. """ if limit is not None: limit = int(limit) if detail and fields: raise exc.InvalidAttribute(_("Can't fetch a subset of fields " "with 'detail' set")) filters = utils.common_filters(marker, limit, sort_key, sort_dir, fields) if associated is not None: filters.append('associated=%s' % associated) if maintenance is not None: filters.append('maintenance=%s' % maintenance) if provision_state is not None: filters.append('provision_state=%s' % provision_state) path = "%s/nodes" % chassis_id if detail: path += '/detail' if filters: path += '?' + '&'.join(filters) if limit is None: return self._list(self._path(path), "nodes") else: return self._list_pagination(self._path(path), "nodes", limit=limit) def get(self, chassis_id, fields=None): return self._get(resource_id=chassis_id, fields=fields) def delete(self, chassis_id): return self._delete(resource_id=chassis_id) def update(self, chassis_id, patch): return self._update(resource_id=chassis_id, patch=patch)
license: apache-2.0 | hash: 1,744,163,797,057,530,600 | line_mean: 37.635294 | line_max: 79 | alpha_frac: 0.564708 | autogenerated: false | ratio: 4.668088 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: 1905410/Misago | path: misago/threads/views/admin/attachments.py | copies: 1 | size: 3500
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.db import transaction
from django.db.models import Count
from django.shortcuts import redirect
from django.utils.translation import ugettext_lazy as _

from misago.admin.views import generic

from ...forms import SearchAttachmentsForm
from ...models import Attachment, Post


class AttachmentAdmin(generic.AdminBaseMixin):
    root_link = 'misago:admin:system:attachments:index'
    Model = Attachment
    templates_dir = 'misago/admin/attachments'
    message_404 = _("Requested attachment could not be found.")

    def get_queryset(self):
        qs = super(AttachmentAdmin, self).get_queryset()
        return qs.select_related('filetype', 'uploader', 'post', 'post__thread', 'post__category')


class AttachmentsList(AttachmentAdmin, generic.ListView):
    items_per_page = 20
    ordering = (
        ('-id', _("From newest")),
        ('id', _("From oldest")),
        ('filename', _("A to z")),
        ('-filename', _("Z to a")),
        ('size', _("Smallest files")),
        ('-size', _("Largest files")),
    )
    selection_label = _('With attachments: 0')
    empty_selection_label = _('Select attachments')
    mass_actions = [
        {
            'action': 'delete',
            'name': _("Delete attachments"),
            'icon': 'fa fa-times-circle',
            'confirmation': _("Are you sure you want to delete selected attachments?"),
            'is_atomic': False
        }
    ]

    def get_search_form(self, request):
        return SearchAttachmentsForm

    def action_delete(self, request, attachments):
        deleted_attachments = []
        desynced_posts = []

        for attachment in attachments:
            if attachment.post:
                deleted_attachments.append(attachment.pk)
                desynced_posts.append(attachment.post_id)

        if desynced_posts:
            with transaction.atomic():
                for post in Post.objects.select_for_update().filter(id__in=desynced_posts):
                    self.delete_from_cache(post, deleted_attachments)

        for attachment in attachments:
            attachment.delete()

        message = _("Selected attachments have been deleted.")
        messages.success(request, message)

    def delete_from_cache(self, post, attachments):
        if not post.attachments_cache:
            return  # admin action may be taken due to desynced state

        clean_cache = []
        for a in post.attachments_cache:
            if a['id'] not in attachments:
                clean_cache.append(a)

        post.attachments_cache = clean_cache or None
        post.save(update_fields=['attachments_cache'])


class DeleteAttachment(AttachmentAdmin, generic.ButtonView):
    def button_action(self, request, target):
        if target.post:
            self.delete_from_cache(target)
        target.delete()
        message = _('Attachment "%(filename)s" has been deleted.')
        messages.success(request, message % {'filename': target.filename})

    def delete_from_cache(self, attachment):
        if not attachment.post.attachments_cache:
            return  # admin action may be taken due to desynced state

        clean_cache = []
        for a in attachment.post.attachments_cache:
            if a['id'] != attachment.id:
                clean_cache.append(a)

        attachment.post.attachments_cache = clean_cache or None
        attachment.post.save(update_fields=['attachments_cache'])
license: gpl-2.0 | hash: -8,330,892,099,958,133,000 | line_mean: 33.653465 | line_max: 98 | alpha_frac: 0.626857 | autogenerated: false | ratio: 4.358655 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: pombredanne/blivet-1 | path: tests/devicelibs_test/edd_test.py | copies: 1 | size: 9403
import mock class EddTestCase(mock.TestCase): def setUp(self): self.setupModules( ['_isys', 'logging', 'pyanaconda.anaconda_log', 'block']) def tearDown(self): self.tearDownModules() def test_biosdev_to_edd_dir(self): from blivet.devicelibs import edd path = edd.biosdev_to_edd_dir(138) self.assertEqual("/sys/firmware/edd/int13_dev8a", path) def test_collect_edd_data(self): from blivet.devicelibs import edd # test with vda, vdb fs = EddTestFS(self, edd).vda_vdb() edd_dict = edd.collect_edd_data() self.assertEqual(len(edd_dict), 2) self.assertEqual(edd_dict[0x80].type, "SCSI") self.assertEqual(edd_dict[0x80].scsi_id, 0) self.assertEqual(edd_dict[0x80].scsi_lun, 0) self.assertEqual(edd_dict[0x80].pci_dev, "00:05.0") self.assertEqual(edd_dict[0x80].channel, 0) self.assertEqual(edd_dict[0x80].sectors, 16777216) self.assertEqual(edd_dict[0x81].pci_dev, "00:06.0") # test with sda, vda fs = EddTestFS(self, edd).sda_vda() edd_dict = edd.collect_edd_data() self.assertEqual(len(edd_dict), 2) self.assertEqual(edd_dict[0x80].type, "ATA") self.assertEqual(edd_dict[0x80].scsi_id, None) self.assertEqual(edd_dict[0x80].scsi_lun, None) self.assertEqual(edd_dict[0x80].pci_dev, "00:01.1") self.assertEqual(edd_dict[0x80].channel, 0) self.assertEqual(edd_dict[0x80].sectors, 2097152) self.assertEqual(edd_dict[0x80].ata_device, 0) self.assertEqual(edd_dict[0x80].mbr_signature, "0x000ccb01") def test_collect_edd_data_cciss(self): from blivet.devicelibs import edd fs = EddTestFS(self, edd).sda_cciss() edd_dict = edd.collect_edd_data() self.assertEqual(edd_dict[0x80].pci_dev, None) self.assertEqual(edd_dict[0x80].channel, None) def test_edd_entry_str(self): from blivet.devicelibs import edd fs = EddTestFS(self, edd).sda_vda() edd_dict = edd.collect_edd_data() expected_output = """\ttype: ATA, ata_device: 0 \tchannel: 0, mbr_signature: 0x000ccb01 \tpci_dev: 00:01.1, scsi_id: None \tscsi_lun: None, sectors: 2097152""" self.assertEqual(str(edd_dict[0x80]), expected_output) def test_matcher_device_path(self): from blivet.devicelibs import edd fs = EddTestFS(self, edd).sda_vda() edd_dict = edd.collect_edd_data() analyzer = edd.EddMatcher(edd_dict[0x80]) path = analyzer.devname_from_pci_dev() self.assertEqual(path, "sda") analyzer = edd.EddMatcher(edd_dict[0x81]) path = analyzer.devname_from_pci_dev() self.assertEqual(path, "vda") def test_bad_device_path(self): from blivet.devicelibs import edd fs = EddTestFS(self, edd).sda_vda_no_pcidev() edd_dict = edd.collect_edd_data() analyzer = edd.EddMatcher(edd_dict[0x80]) path = analyzer.devname_from_pci_dev() self.assertEqual(path, None) def test_bad_host_bus(self): from blivet.devicelibs import edd fs = EddTestFS(self, edd).sda_vda_no_host_bus() edd_dict = edd.collect_edd_data() # 0x80 entry is basted so fail without an exception analyzer = edd.EddMatcher(edd_dict[0x80]) devname = analyzer.devname_from_pci_dev() self.assertEqual(devname, None) # but still succeed on 0x81 analyzer = edd.EddMatcher(edd_dict[0x81]) devname = analyzer.devname_from_pci_dev() self.assertEqual(devname, "vda") def test_get_edd_dict_1(self): """ Test get_edd_dict()'s pci_dev matching. """ from blivet.devicelibs import edd fs = EddTestFS(self, edd).sda_vda() self.assertEqual(edd.get_edd_dict([]), {'sda' : 0x80, 'vda' : 0x81}) def test_get_edd_dict_2(self): """ Test get_edd_dict()'s pci_dev matching. 
""" from blivet.devicelibs import edd edd.collect_mbrs = mock.Mock(return_value = { 'sda' : '0x000ccb01', 'vda' : '0x0006aef1'}) fs = EddTestFS(self, edd).sda_vda_missing_details() self.assertEqual(edd.get_edd_dict([]), {'sda' : 0x80, 'vda' : 0x81}) def test_get_edd_dict_3(self): """ Test scenario when the 0x80 and 0x81 edd directories contain the same data and give no way to distinguish among the two devices. """ from blivet.devicelibs import edd edd.log = mock.Mock() edd.collect_mbrs = mock.Mock(return_value={'sda' : '0x000ccb01', 'vda' : '0x0006aef1'}) fs = EddTestFS(self, edd).sda_sdb_same() self.assertEqual(edd.get_edd_dict([]), {}) self.assertIn((('edd: both edd entries 0x80 and 0x81 seem to map to sda',), {}), edd.log.info.call_args_list) class EddTestFS(object): def __init__(self, test_case, target_module): self.fs = mock.DiskIO() test_case.take_over_io(self.fs, target_module) def sda_vda_missing_details(self): self.fs["/sys/firmware/edd/int13_dev80"] = self.fs.Dir() self.fs["/sys/firmware/edd/int13_dev80/mbr_signature"] = "0x000ccb01\n" self.fs["/sys/firmware/edd/int13_dev81"] = self.fs.Dir() self.fs["/sys/firmware/edd/int13_dev81/mbr_signature"] = "0x0006aef1\n" def sda_vda(self): self.fs["/sys/firmware/edd/int13_dev80"] = self.fs.Dir() self.fs["/sys/firmware/edd/int13_dev80/host_bus"] = "PCI 00:01.1 channel: 0\n" self.fs["/sys/firmware/edd/int13_dev80/interface"] = "ATA device: 0\n" self.fs["/sys/firmware/edd/int13_dev80/mbr_signature"] = "0x000ccb01\n" self.fs["/sys/firmware/edd/int13_dev80/sectors"] = "2097152\n" self.fs["/sys/firmware/edd/int13_dev81"] = self.fs.Dir() self.fs["/sys/firmware/edd/int13_dev81/host_bus"] = "PCI 00:05.0 channel: 0\n" self.fs["/sys/firmware/edd/int13_dev81/interface"] = "SCSI id: 0 lun: 0\n" self.fs["/sys/firmware/edd/int13_dev81/mbr_signature"] = "0x0006aef1\n" self.fs["/sys/firmware/edd/int13_dev81/sectors"] = "16777216\n" self.fs["/sys/devices/pci0000:00/0000:00:01.1/host0/target0:0:0/0:0:0:0/block"] = self.fs.Dir() self.fs["/sys/devices/pci0000:00/0000:00:01.1/host0/target0:0:0/0:0:0:0/block/sda"] = self.fs.Dir() self.fs["/sys/devices/pci0000:00/0000:00:05.0/virtio2/block"] = self.fs.Dir() self.fs["/sys/devices/pci0000:00/0000:00:05.0/virtio2/block/vda"] = self.fs.Dir() return self.fs def sda_vda_no_pcidev(self): self.sda_vda() entries = [e for e in self.fs.fs if e.startswith("/sys/devices/pci")] map(self.fs.os_remove, entries) return self.fs def sda_vda_no_host_bus(self): self.sda_vda() self.fs["/sys/firmware/edd/int13_dev80/host_bus"] = "PCI 00:01.1 channel: \n" self.fs.os_remove("/sys/firmware/edd/int13_dev80/mbr_signature") self.fs.os_remove("/sys/firmware/edd/int13_dev81/mbr_signature") def sda_cciss(self): self.fs["/sys/firmware/edd/int13_dev80"] = self.fs.Dir() self.fs["/sys/firmware/edd/int13_dev80/host_bus"] = "PCIX 05:00.0 channel: 0\n" self.fs["/sys/firmware/edd/int13_dev80/interface"] = "RAID identity_tag: 0\n" self.fs["/sys/firmware/edd/int13_dev80/mbr_signature"] = "0x000ccb01\n" self.fs["/sys/firmware/edd/int13_dev80/sectors"] = "2097152\n" return self.fs def vda_vdb(self): self.fs["/sys/firmware/edd/int13_dev80"] = self.fs.Dir() self.fs["/sys/firmware/edd/int13_dev80/host_bus"] = "PCI 00:05.0 channel: 0\n" self.fs["/sys/firmware/edd/int13_dev80/interface"] = "SCSI id: 0 lun: 0\n" self.fs["/sys/firmware/edd/int13_dev80/sectors"] = "16777216\n" self.fs["/sys/firmware/edd/int13_dev81"] = self.fs.Dir() self.fs["/sys/firmware/edd/int13_dev81/host_bus"] = "PCI 00:06.0 channel: 0\n" self.fs["/sys/firmware/edd/int13_dev81/interface"] = 
"SCSI id: 0 lun: 0\n" self.fs["/sys/firmware/edd/int13_dev81/sectors"] = "4194304\n" return self.fs def sda_sdb_same(self): self.fs["/sys/firmware/edd/int13_dev80"] = self.fs.Dir() self.fs["/sys/firmware/edd/int13_dev80/host_bus"] = "PCI 00:01.1 channel: 0\n" self.fs["/sys/firmware/edd/int13_dev80/interface"] = "ATA device: 0\n" self.fs["/sys/firmware/edd/int13_dev80/mbr_signature"] = "0x000ccb01" self.fs["/sys/firmware/edd/int13_dev80/sectors"] = "2097152\n" self.fs["/sys/firmware/edd/int13_dev81"] = self.fs.Dir() self.fs["/sys/firmware/edd/int13_dev81/host_bus"] = "PCI 00:01.1 channel: 0\n" self.fs["/sys/firmware/edd/int13_dev81/interface"] = "ATA device: 0\n" self.fs["/sys/firmware/edd/int13_dev81/mbr_signature"] = "0x0006aef1" self.fs["/sys/firmware/edd/int13_dev81/sectors"] = "2097152\n" self.fs["/sys/devices/pci0000:00/0000:00:01.1/host0/target0:0:0/0:0:0:0/block"] = self.fs.Dir() self.fs["/sys/devices/pci0000:00/0000:00:01.1/host0/target0:0:0/0:0:0:0/block/sda"] = self.fs.Dir()
license: gpl-2.0 | hash: 2,521,215,230,570,225,700 | line_mean: 43.353774 | line_max: 107 | alpha_frac: 0.605764 | autogenerated: false | ratio: 2.787726 | config_test: true | has_no_keywords: false | few_assignments: false

repo_name: Marcelpv96/SITWprac2017 | path: sportsBetting/migrations/0018_auto_20170515_1009.py | copies: 1 | size: 1050
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-05-15 10:09
from __future__ import unicode_literals

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('sportsBetting', '0017_auto_20170510_1614'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='event',
            name='api_id',
        ),
        migrations.AddField(
            model_name='event',
            name='id',
            field=models.AutoField(auto_created=True, default=1, primary_key=True, serialize=False, verbose_name='ID'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='team',
            name='created_by',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
            preserve_default=False,
        ),
    ]
license: gpl-3.0 | hash: 7,969,951,120,777,904,000 | line_mean: 29.882353 | line_max: 121 | alpha_frac: 0.612381 | autogenerated: false | ratio: 4.007634 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: djb1815/Essex-MuSoc | path: musoc_web/schedule/views.py | copies: 1 | size: 1465
from django.shortcuts import render, redirect
from django.db import transaction
from django.contrib.auth.decorators import login_required
from .forms import ProfileNameForm, ProfileDetailForm
from django.contrib import messages

# Create your views here.


def index(request):
    # Add variables in the custom_variables dict below to make them available within the rendered page
    title = "Welcome"
    custom_variables = {
        'title': title
    }
    return render(request, "schedule/home.html", custom_variables)


@login_required
@transaction.atomic
def profile(request):
    title = "Account Settings"
    if request.method == 'POST':
        name_form = ProfileNameForm(request.POST, instance=request.user)
        detail_form = ProfileDetailForm(request.POST, instance=request.user.profile)
        if name_form.is_valid() and detail_form.is_valid():
            name_form.save()
            detail_form.save()
            messages.success(request, 'Your profile has been successfully updated!')
            return redirect('profile')
        else:
            messages.error(request, 'Please correct the error below.')
    else:
        name_form = ProfileNameForm(instance=request.user)
        detail_form = ProfileDetailForm(instance=request.user.profile)
    custom_variables = {
        'title': title,
        'name_form': name_form,
        'detail_form': detail_form
    }
    return render(request, "account/profile.html", custom_variables)
license: mit | hash: 6,622,172,651,528,371,000 | line_mean: 34.731707 | line_max: 102 | alpha_frac: 0.686689 | autogenerated: false | ratio: 4.221902 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: cloughrm/Flask-Angular-Template | path: backend/pastry/resources/v1/users.py | copies: 1 | size: 1720
from pastry.db import mongo
from pastry.models import User
from pastry.resources.auth import login_required
from pastry.resources import validators, httpcodes

from bson.objectid import ObjectId
from flask import request
from flask.ext.restful import Resource, reqparse


class UsersResource(Resource):
    @login_required
    def get(self, id):
        return mongo.db.users.find_one_or_404({'_id': ObjectId(id)})

    @login_required
    def delete(self, id):
        return mongo.db.users.remove({'_id': ObjectId(id)})


class UsersListResource(Resource):
    def __init__(self):
        self.parser = reqparse.RequestParser()
        if request.method == 'GET':
            self.parser.add_argument('limit', type=int, default=20)
            self.parser.add_argument('offset', type=int, default=0)
        elif request.method == 'POST':
            self.parser.add_argument('username', type=validators.email_address, required=True)
            self.parser.add_argument('password', type=str, required=True)
        super(UsersListResource, self).__init__()

    @login_required
    def get(self):
        args = self.parser.parse_args()
        users = mongo.db.users.find().skip(args.offset).limit(args.limit)
        return {
            'objects': users,
            'offset': args.offset,
            'limit': args.limit,
        }

    @login_required
    def post(self):
        args = self.parser.parse_args()
        user = User(args.username, args.password)
        if mongo.db.users.find_one({'username': user.username}):
            return {'message': 'User {} already exists'.format(user.username)}, httpcodes.BAD_REQUEST
        user_id = user.create()
        return {'id': user_id}, httpcodes.CREATED
license: mit | hash: -4,205,179,780,906,292,700 | line_mean: 33.4 | line_max: 101 | alpha_frac: 0.640116 | autogenerated: false | ratio: 3.813747 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: shanksauce/mintr | path: mintr/__init__.py | copies: 1 | size: 1862
import requests
import time
import re
from pprint import pprint

auth_headers = {}


def _validate_credentials(fn):
    def wrapper(*args):
        def is_not_populated(d, r):
            return reduce(
                lambda x, y: x or y,
                map(lambda k: k not in d or not d[k], r)
            )
        if is_not_populated(auth_headers, ('cookie', 'token')):
            raise Exception('Login first')
        return fn(*args)
    return wrapper


def login(username, password):
    if username is None or password is None:
        raise Exception('Use valid credentials')
    a = requests.get('https://wwws.mint.com/login.event')
    session_id = a.cookies.get('MINTJSESSIONID')
    route_id = a.cookies.get('ROUTEID')
    b = requests.post(
        'https://wwws.mint.com/loginUserSubmit.xevent',
        cookies = a.cookies,
        headers = {
            'Accept': 'application/json',
            'Content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'Cookie': 'MINTJSESSIONID={0}; ROUTEID={1}'.format(session_id, route_id)
        },
        data = {
            'username': username,
            'password': password,
            'task': 'L'
        }
    )
    csrf_token = b.json()['CSRFToken']
    match = re.search('MINTJSESSIONID=(.*?);', b.headers['set-cookie'])
    if match is None:
        raise Exception('No MINTJSESSIONID')
    b_session_id = match.groups(0)[0]


#@_validate_credentials
def get_account_summaries(jwt=None):
    if jwt is None:
        return {}
    try:
        d = requests.get(
            'https://mint.finance.intuit.com/v1/accounts?limit=1000',
            headers = {'Authorization': 'Bearer ' + jwt}
        )
        accounts = dict(map(
            lambda x: (
                x['fiName'] + ' ' + x['cpAccountName'],
                x['currentBalance']
            ),
            filter(
                lambda x: x['accountStatus'] == 'ACTIVE' and x['currentBalance'] > 0,
                d.json()['Account']
            )
        ))
        return accounts
    except Exception as ex:
        return {}
license: mit | hash: 3,864,896,717,037,192,700 | line_mean: 24.162162 | line_max: 77 | alpha_frac: 0.605263 | autogenerated: false | ratio: 3.342908 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: davidtrem/ThunderStorm | path: thunderstorm/lightning/utils.py | copies: 1 | size: 5027
# -*- coding: utf-8 -*- # Copyright (C) 2010-2013 Trémouilles David #This file is part of Thunderstorm. # #ThunderStrom is free software: you can redistribute it and/or modify #it under the terms of the GNU Lesser General Public License as published by #the Free Software Foundation, either version 3 of the License, or #(at your option) any later version. # #ThunderStorm is distributed in the hope that it will be useful, #but WITHOUT ANY WARRANTY; without even the implied warranty of #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #GNU Lesser General Public License for more details. # #You should have received a copy of the GNU Lesser General Public License #along with ThunderStorm. If not, see <http://www.gnu.org/licenses/>. """ Various utility functions """ import matplotlib from weakref import WeakValueDictionary from weakref import WeakKeyDictionary import warnings class UniversalCursors(object): def __init__(self): self.all_cursor_orient = WeakKeyDictionary() self.all_canvas = WeakValueDictionary() self.all_axes = WeakValueDictionary() self.backgrounds = {} self.visible = True self.needclear = False def _onmove(self, event): for canvas in self.all_canvas.values(): if not canvas.widgetlock.available(self): return if event.inaxes is None or not self.visible: if self.needclear: self._update(event) for canvas in self.all_canvas.values(): canvas.draw() self.needclear = False return self._update(event) def _update(self, event): # 1/ Reset background for canvas in self.all_canvas.values(): canvas.restore_region(self.backgrounds[id(canvas)]) # 2/ update cursors for cursors in self.all_cursor_orient.keys(): orient = self.all_cursor_orient[cursors] if (event.inaxes in [line.get_axes() for line in cursors] and self.visible): visible = True self.needclear = True else: visible = False for line in cursors: if orient == 'vertical': line.set_xdata((event.xdata, event.xdata)) if orient == 'horizontal': line.set_ydata((event.ydata, event.ydata)) line.set_visible(visible) ax = line.get_axes() ax.draw_artist(line) # 3/ update canvas for canvas in self.all_canvas.values(): canvas.blit(canvas.figure.bbox) def _clear(self, event): """clear the cursor""" self.backgrounds = {} for canvas in self.all_canvas.values(): self.backgrounds[id(canvas)] = ( canvas.copy_from_bbox(canvas.figure.bbox)) for cursor in self.all_cursor_orient.keys(): for line in cursor: line.set_visible(False) def add_cursor(self, axes=(), orient='vertical', **lineprops): class CursorList(list): def __hash__(self): return hash(tuple(self)) cursors = CursorList() # Required to keep weakref for ax in axes: self.all_axes[id(ax)] = ax ax_canvas = ax.get_figure().canvas if ax_canvas not in self.all_canvas.values(): #if not ax_canvas.supports_blit: # warnings.warn("Must use canvas that support blit") # return self.all_canvas[id(ax_canvas)] = ax_canvas ax_canvas.mpl_connect('motion_notify_event', self._onmove) ax_canvas.mpl_connect('draw_event', self._clear) if orient == 'vertical': line = ax.axvline(ax.get_xbound()[0], visible=False, animated=True, **lineprops) if orient == 'horizontal': line = ax.axhline(ax.get_ybound()[0], visible=False, animated=True, **lineprops) cursors.append(line) self.all_cursor_orient[cursors] = orient return cursors def autoscale_visible_lines(axs): """ Function to autoscale only on visible lines. 
""" mplt_ver = [int(elem) for elem in matplotlib.__version__.split('.')[0:2]] ignore = True for line in (axs.lines): if not line.get_visible(): continue # jump to next line if this one is not visible if mplt_ver[0] == 0 and mplt_ver[1] < 98: axs.dataLim.update_numerix(line.get_xdata(), line.get_ydata(), ignore) else: axs.dataLim.update_from_data_xy(line.get_xydata(), ignore) ignore = False axs.autoscale_view() return None def neg_bool_list(a_list): return [not elem for elem in a_list]
license: gpl-3.0 | hash: 895,360,067,562,563,700 | line_mean: 35.686131 | line_max: 77 | alpha_frac: 0.569041 | autogenerated: false | ratio: 4.129827 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: MicroPyramid/forex-python | path: setup.py | copies: 1 | size: 1589
import io
import os
from setuptools import setup, find_packages

VERSION = '1.6'

long_description_text = """Forex Python is a Free Foreign exchange rates and currency conversion.

Features:
List all currency rates.
BitCoin price for all curuncies.
Converting amount to BitCoins.
Get historical rates for any day since 1999.
Conversion rate for one currency(ex; USD to INR).
Convert amount from one currency to other.('USD 10$' to INR).
Currency symbols.
Currency names.

Documentation: http://forex-python.readthedocs.io/en/latest/usage.html
GitHub: https://github.com/MicroPyramid/forex-python
"""

setup(
    name='forex-python',
    version=VERSION,
    author='Micro Pyramid Informatic Pvt. Ltd.',
    author_email='[email protected]',
    url='https://github.com/MicroPyramid/forex-python',
    description='Foreign exchange rates and currency conversion.',
    long_description=long_description_text,
    packages=find_packages(exclude=['tests', 'tests.*']),
    include_package_data=True,
    install_requires=[
        'requests',
        'simplejson',
    ],
    classifiers=[
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Software Development :: Internationalization',
    ],
)
license: mit | hash: 7,102,093,925,092,458,000 | line_mean: 32.104167 | line_max: 97 | alpha_frac: 0.680931 | autogenerated: false | ratio: 4.032995 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: answer-huang/StatisticsCodeLines | path: statistics.py | copies: 1 | size: 3024
#coding=utf-8 __author__ = 'answer-huang' import sys reload(sys) sys.setdefaultencoding('utf8') """ 代码行统计工具 """ import wx from MyInfo import AboutMe from AHDropTarget import AHDropTarget import os class AHFrame(wx.Frame): def __init__(self, parent, title): wx.Frame.__init__(self, parent, -1, title, wx.DefaultPosition, wx.Size(500, 380), style=wx.DEFAULT_FRAME_STYLE ^ (wx.RESIZE_BORDER | wx.MAXIMIZE_BOX)) #style=wx.SYSTEM_MENU | wx.CAPTION | wx.CLOSE_BOX | wx.MINIMIZE_BOX) self.SetTransparent(250) self.statusbar = self.CreateStatusBar() self.statusbar.SetForegroundColour('red') self.statusbar.SetFieldsCount(2) self.statusbar.SetStatusWidths([-2, -1]) #大小比例2:1 toolbar = self.CreateToolBar() toolbar.AddSeparator() toolbar.AddSimpleTool(1, wx.Image('about.png', wx.BITMAP_TYPE_PNG).ConvertToBitmap(), "关于我", "") toolbar.AddSeparator() toolbar.AddSimpleTool(2, wx.Image('donate.png', wx.BITMAP_TYPE_PNG).ConvertToBitmap(), "捐助我", "") toolbar.Realize() #准备显示工具栏 wx.EVT_TOOL(self, 1, self.OnAboutMe) wx.EVT_TOOL(self, 2, self.OnDonate) self.panel = wx.Panel(self) self.panel.SetDropTarget(AHDropTarget(self)) self.font = wx.Font(18, wx.SCRIPT, wx.BOLD, wx.LIGHT) self.selectedPath = wx.StaticText(self.panel, -1, u'将项目拖拽到这里', pos=(178, 280)) self.selectedPath.SetFont(self.font) self.panel.Bind(wx.EVT_ENTER_WINDOW, self.OnEnterWindow) self.panel.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeaveWindow) self.panel.Bind(wx.EVT_MOTION, self.OnMotion) def OnEnterWindow(self, event): #print event.LeftIsDown() event.Skip() def OnLeaveWindow(self, event): #print "leave" event.Skip() def OnMotion(self, event): if event.Dragging() and event.LeftIsDown(): print '按住了鼠标移动' event.Skip() def UpdateStatus(self, path, codes_num): self.statusbar.SetStatusText(path, 0) self.statusbar.SetStatusText(codes_num, 1) def ShowImage(self, img): self.image = wx.Image(img, wx.BITMAP_TYPE_JPEG).Rescale(500, 350, quality=wx.IMAGE_QUALITY_HIGH) bitmap = self.image.ConvertToBitmap() self.logo = wx.StaticBitmap(self.panel, bitmap=bitmap, pos=(0, 0), size=(500, 350)) def ShowPathDir(self, dirList): wx.CheckListBox(self.panel, -1, choices=dirList) def OnAboutMe(self, event): aboutMe = AboutMe(self) aboutMe.ShowModal() aboutMe.Destroy() def OnDonate(self, event): #wx.BeginBusyCursor() import webbrowser webbrowser.open('https://me.alipay.com/huangaiwu') #wx.EndBusyCursor() if __name__ == '__main__': app = wx.App(redirect=False) frame = AHFrame(None, '代码统计工具') frame.Show(True) app.MainLoop()
license: mit | hash: 6,652,973,643,853,062,000 | line_mean: 30.902174 | line_max: 105 | alpha_frac: 0.630198 | autogenerated: false | ratio: 3.043568 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: klahnakoski/ActiveData | path: vendor/mo_testing/fuzzytestcase.py | copies: 1 | size: 9712
# encoding: utf-8 # # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. # # Contact: Kyle Lahnakoski ([email protected]) # from __future__ import unicode_literals import datetime import types import unittest from mo_collections.unique_index import UniqueIndex import mo_dots from mo_dots import coalesce, is_container, is_list, literal_field, unwrap, to_data, is_data, is_many from mo_future import is_text, zip_longest, first from mo_logs import Except, Log, suppress_exception from mo_logs.strings import expand_template, quote import mo_math from mo_math import is_number, log10 from mo_times import dates class FuzzyTestCase(unittest.TestCase): """ COMPARE STRUCTURE AND NUMBERS! ONLY THE ATTRIBUTES IN THE expected STRUCTURE ARE TESTED TO EXIST EXTRA ATTRIBUTES ARE IGNORED. NUMBERS ARE MATCHED BY ... * places (UP TO GIVEN SIGNIFICANT DIGITS) * digits (UP TO GIVEN DECIMAL PLACES, WITH NEGATIVE MEANING LEFT-OF-UNITS) * delta (MAXIMUM ABSOLUTE DIFFERENCE FROM expected) """ def __init__(self, *args, **kwargs): unittest.TestCase.__init__(self, *args, **kwargs) self.default_places=15 def set_default_places(self, places): """ WHEN COMPARING float, HOW MANY DIGITS ARE SIGNIFICANT BY DEFAULT """ self.default_places=places def assertAlmostEqual(self, test_value, expected, msg=None, digits=None, places=None, delta=None): if delta or digits: assertAlmostEqual(test_value, expected, msg=msg, digits=digits, places=places, delta=delta) else: assertAlmostEqual(test_value, expected, msg=msg, digits=digits, places=coalesce(places, self.default_places), delta=delta) def assertEqual(self, test_value, expected, msg=None, digits=None, places=None, delta=None): self.assertAlmostEqual(test_value, expected, msg=msg, digits=digits, places=places, delta=delta) def assertRaises(self, problem=None, function=None, *args, **kwargs): if function is None: return RaiseContext(self, problem=problem or Exception) with RaiseContext(self, problem=problem): function(*args, **kwargs) class RaiseContext(object): def __init__(self, this, problem=Exception): self.this = this self.problem = problem def __enter__(self): pass def __exit__(self, exc_type, exc_val, exc_tb): if not exc_val: Log.error("Expecting an error") f = Except.wrap(exc_val) if isinstance(self.problem, (list, tuple)): problems = self.problem else: problems = [self.problem] causes = [] for problem in problems: if isinstance(problem, object.__class__) and issubclass(problem, BaseException) and isinstance(exc_val, problem): return True try: self.this.assertIn(problem, f) return True except Exception as cause: causes.append(cause) Log.error("problem is not raised", cause=first(causes)) def assertAlmostEqual(test, expected, digits=None, places=None, msg=None, delta=None): show_detail = True test = unwrap(test) expected = unwrap(expected) try: if test is None and (is_null_op(expected) or expected is None): return elif test is expected: return elif is_text(expected): assertAlmostEqualValue(test, expected, msg=msg, digits=digits, places=places, delta=delta) elif isinstance(test, UniqueIndex): if test ^ expected: Log.error("Sets do not match") elif is_data(expected) and is_data(test): for k, e in unwrap(expected).items(): t = test.get(k) assertAlmostEqual(t, e, msg=coalesce(msg, "")+"key "+quote(k)+": ", digits=digits, places=places, delta=delta) elif is_data(expected): if is_many(test): test = list(test) if len(test) != 1: 
Log.error("Expecting data, not a list") test = test[0] for k, e in expected.items(): try: t = test[k] assertAlmostEqual(t, e, msg=msg, digits=digits, places=places, delta=delta) continue except: pass t = mo_dots.get_attr(test, literal_field(k)) assertAlmostEqual(t, e, msg=msg, digits=digits, places=places, delta=delta) elif is_container(test) and isinstance(expected, set): test = set(to_data(t) for t in test) if len(test) != len(expected): Log.error( "Sets do not match, element count different:\n{{test|json|indent}}\nexpecting{{expectedtest|json|indent}}", test=test, expected=expected ) try: return len(set(test)|expected) == len(expected) except: for e in expected: for t in test: try: assertAlmostEqual(t, e, msg=msg, digits=digits, places=places, delta=delta) break except Exception as _: pass else: Log.error("Sets do not match. {{value|json}} not found in {{test|json}}", value=e, test=test) elif isinstance(expected, types.FunctionType): return expected(test) elif hasattr(test, "__iter__") and hasattr(expected, "__iter__"): if test.__class__.__name__ == "ndarray": # numpy test = test.tolist() elif test.__class__.__name__ == "DataFrame": # pandas test = test[test.columns[0]].values.tolist() elif test.__class__.__name__ == "Series": # pandas test = test.values.tolist() if not expected and test == None: return if expected == None: expected = [] # REPRESENT NOTHING for t, e in zip_longest(test, expected): assertAlmostEqual(t, e, msg=msg, digits=digits, places=places, delta=delta) else: assertAlmostEqualValue(test, expected, msg=msg, digits=digits, places=places, delta=delta) except Exception as cause: Log.error( "{{test|json|limit(10000)}} does not match expected {{expected|json|limit(10000)}}", test=test if show_detail else "[can not show]", expected=expected if show_detail else "[can not show]", cause=cause ) def assertAlmostEqualValue(test, expected, digits=None, places=None, msg=None, delta=None): """ Snagged from unittest/case.py, then modified (Aug2014) """ if is_null_op(expected): if test == None: # pandas dataframes reject any comparision with an exception! 
return else: raise AssertionError(expand_template("{{test|json}} != NULL", locals())) if expected == None: # None has no expectations return if test == expected: # shortcut return if isinstance(expected, (dates.Date, datetime.datetime, datetime.date)): return assertAlmostEqualValue( dates.Date(test).unix, dates.Date(expected).unix, msg=msg, digits=digits, places=places, delta=delta ) if not is_number(expected): # SOME SPECIAL CASES, EXPECTING EMPTY CONTAINERS IS THE SAME AS EXPECTING NULL if is_list(expected) and len(expected) == 0 and test == None: return if is_data(expected) and not expected.keys() and test == None: return if test != expected: raise AssertionError(expand_template("{{test|json}} != {{expected|json}}", locals())) return elif not is_number(test): try: # ASSUME IT IS A UTC DATE test = dates.parse(test).unix except Exception as e: raise AssertionError(expand_template("{{test|json}} != {{expected}}", locals())) num_param = 0 if digits != None: num_param += 1 if places != None: num_param += 1 if delta != None: num_param += 1 if num_param > 1: raise TypeError("specify only one of digits, places or delta") if digits is not None: with suppress_exception: diff = log10(abs(test-expected)) if diff < digits: return standardMsg = expand_template("{{test|json}} != {{expected|json}} within {{digits}} decimal places", locals()) elif delta is not None: if abs(test - expected) <= delta: return standardMsg = expand_template("{{test|json}} != {{expected|json}} within {{delta}} delta", locals()) else: if places is None: places = 15 with suppress_exception: diff = mo_math.log10(abs(test-expected)) if diff == None: return # Exactly the same if diff < mo_math.ceiling(mo_math.log10(abs(test)))-places: return standardMsg = expand_template("{{test|json}} != {{expected|json}} within {{places}} places", locals()) raise AssertionError(coalesce(msg, "") + ": (" + standardMsg + ")") def is_null_op(v): return v.__class__.__name__ == "NullOp"
license: mpl-2.0 | hash: -932,436,513,672,928,600 | line_mean: 36.210728 | line_max: 134 | alpha_frac: 0.576091 | autogenerated: false | ratio: 4.138049 | config_test: true | has_no_keywords: false | few_assignments: false

repo_name: PopCap/GameIdea | path: Engine/Extras/Maya_AnimationRiggingTools/MayaTools/General/Scripts/perforceUtils.py | copies: 1 | size: 32585
import maya.cmds as cmds from P4 import P4,P4Exception import os, cPickle from functools import partial # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # def p4_getLatestRevision(fileName, *args): fileArg = fileName #try to connect p4 = P4() try: p4.connect() except: cmds.confirmDialog(title = "Perforce", icon = "critical", message = "Unable to connect to perforce server.") return #find currently opened file name if fileName == None: fileName = cmds.file(q = True, sceneName = True) syncFiles = [] try: #client info spec = p4.run( "client", "-o" )[0] client = spec.get("Client") owner = spec.get("Owner") p4.user = owner p4.client = client except: cmds.confirmDialog(title = "Perforce", icon = "critical", message = "Unable to obtain client spec information.") #try to get the current file revision on local, and compare to depot try: #Find out the revision of the local version of the file myFile = p4.run_have(fileName)[0] #This will find the revision number of your local file. localRevision = int(myFile['haveRev']) #find out the revision number of the depot version of the file depotVersion = p4.run_files(myFile['depotFile'])[0] #find the depot file path depotFile = depotVersion['depotFile'] #find the depot revision number of the file depotRevision = int(depotVersion['rev']) #check for latest if localRevision != depotRevision: syncFiles.append(depotFile) #Check for scene references in the file allRefs = [] references = cmds.file(q = True, reference = True) for reference in references: nestedRef = cmds.file(reference, q = True, reference = True) allRefs.append(reference) allRefs.append(nestedRef) #loop through all found references and check for latest for ref in allRefs: #get revision of local file myFile = p4.run_have(ref)[0] #get revision number localRefRevision = int(myFile['haveRev']) #grab depot file info depotRefVersion = p4.run_files(myFile['depotFile'])[0] #depot file path depotFile = depotRefVersion['depotFile'] #get depot's revision # depotRefRevision = int(depotRefVersion['rev']) #compare if localRefRevision != depotRefRevision: syncFiles.append(depotFile) #if there are files to sync, do it now if len(syncFiles) > 0: message = "The following files are not at latest revision:\n\n" for file in syncFiles: message += file + "\n" result = cmds.confirmDialog(title = "Perforce", icon = "warning", message = message, button = ["Sync", "Cancel"]) if result == "Sync": #sync files for f in syncFiles: p4.run_sync(f) #ask if user would like to reopen if fileArg == None: result = cmds.confirmDialog(title = "Perforce", icon = "question", message = "Sync Complete. 
Reopen file to get changes?", button = ["Yes", "Cancel"]) if result == "Yes": cmds.file(fileName, open = True, force = True) else: cmds.confirmDialog(title = "Perforce", icon = "information", message = "This file is already at head revision.", button = "Close") #disconnect from server p4.disconnect() #Handle any p4 errors that come back from trying to run the above code except P4Exception: errorString = "The following errors were encountered:\n\n" for e in p4.errors: errorString += e + "\n" cmds.confirmDialog(title = "Perforce", icon = "critical", message = errorString) p4.disconnect() return # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # def p4_checkOutCurrentFile(fileName, *args): fileArg = fileName #try to connect p4 = P4() try: p4.connect() except: cmds.confirmDialog(title = "Perforce", icon = "critical", message = "Unable to connect to perforce server.") return False #find currently opened file name if fileName == None: fileName = cmds.file(q = True, sceneName = True) reopen = False syncFiles = [] try: #client info spec = p4.run( "client", "-o" )[0] client = spec.get("Client") owner = spec.get("Owner") p4.user = owner p4.client = client except: cmds.confirmDialog(title = "Perforce", icon = "critical", message = "Unable to obtain client spec information.") try: #check to see if file is at head revision myFile = p4.run_have(fileName)[0] #This will find the revision number of your local file. localRevision = int(myFile['haveRev']) #find out the revision number of the depot version of the file depotVersion = p4.run_files(myFile['depotFile'])[0] #find the depot file path depotFile = depotVersion['depotFile'] #find the depot revision number of the file depotRevision = int(depotVersion['rev']) #check for latest if localRevision != depotRevision: result = cmds.confirmDialog(title = "Perforce", icon = "warning", message = "This file is not at head revision. 
Please get latest and try again.", button = ["Get Latest", "Cancel"]) if result == "Get Latest": p4_getLatestRevision(fileArg) p4.disconnect() else: return False else: try: #check to see if file is checked out opened = p4.run_opened(depotFile) if len(opened) > 0: user = opened[0]['user'] cmds.confirmDialog(title = "Perforce", icon = "warning", message = "This file is already checked out by: " + user, button = "Close") p4.disconnect() else: #check out the file p4.run_edit(depotFile) cmds.confirmDialog(title = "Perfoce", icon = "information", message = "This file is now checked out.", button = "Close") p4.disconnect() #tools path toolsPath = cmds.internalVar(usd = True) + "mayaTools.txt" if os.path.exists(toolsPath): f = open(toolsPath, 'r') mayaToolsDir = f.readline() f.close() return True #Handle any p4 errors that come back from trying to run the above code except P4Exception: errorString = "The following errors were encountered:\n\n" for e in p4.errors: errorString += e + "\n" cmds.confirmDialog(title = "Perforce", icon = "critical", message = errorString) return False #Handle any p4 errors that come back from trying to run the above code except P4Exception: errorString = "The following errors were encountered:\n\n" for e in p4.errors: errorString += e + "\n" cmds.confirmDialog(title = "Perforce", icon = "critical", message = errorString) p4.disconnect() return False # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # def p4_getRevisionHistory(*args): #try to connect p4 = P4() try: p4.connect() except: cmds.confirmDialog(title = "Perforce", icon = "critical", message = "Unable to connect to perforce server.") return #find currently opened file name clientFile = cmds.file(q = True, sceneName = True) reopen = False syncFiles = [] try: #client info spec = p4.run( "client", "-o" )[0] client = spec.get("Client") owner = spec.get("Owner") p4.user = owner p4.client = client except: cmds.confirmDialog(title = "Perforce", icon = "critical", message = "Unable to obtain client spec information.") #get revision history of current file try: #check to see if file is at head revision myFile = p4.run_have(clientFile)[0] depotVersion = p4.run_files(myFile['depotFile'])[0] depotFile = depotVersion['depotFile'] history = p4.run_changes(depotFile) info = "" for h in history: user = h.get("user") change = h.get("change") desc = h.get("desc") if desc.find("\n") == -1: desc = desc + "...\n" else: desc = desc.partition("\n")[0] + "...\n" info += change + " by " + user + ": " + desc #print report into a confirm dialog cmds.confirmDialog(title = "History", icon = "information", ma = "left", message = info, button = "Close") p4.disconnect() except P4Exception: errorString = "The following errors were encountered:\n\n" for e in p4.errors: errorString += e + "\n" cmds.confirmDialog(title = "Perforce", icon = "critical", message = errorString) p4.disconnect() return # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # def p4_submitCurrentFile(fileName, desc, *args): fileArg = fileName #try to connect p4 = P4() try: p4.connect() except: cmds.confirmDialog(title = "Perforce", icon = "critical", message = "Unable to connect to perforce server.") return #find currently opened file name if fileName == None: fileName = cmds.file(q = True, sceneName = True) reopen = False syncFiles = [] try: #client info spec = p4.run( "client", "-o" )[0] client = spec.get("Client") owner = spec.get("Owner") p4.user = owner p4.client = client except: cmds.confirmDialog(title = "Perforce", icon = 
"critical", message = "Unable to obtain client spec information.") #SUBMIT try: if desc == None: result = cmds.promptDialog(title = "Perforce", message = "Please Enter a Description..", button = ["Accept", "Cancel"], defaultButton = "Accept", dismissString = "Cancel", cancelButton = "Cancel") else: result = "Accept" #process if result == "Accept": #description = "test" myFile = p4.run_have(fileName)[0] depotVersion = p4.run_files(myFile['depotFile'])[0] depotFile = depotVersion['depotFile'] #check to see if file is checked out opened = p4.run_opened(depotFile) if len(opened) > 0: opendBy = opened[0]['user'] if opendBy.lower() != owner.lower(): cmds.confirmDialog(title = "Perforce", icon = "warning", message = "This file is already checked out by: " + opendBy, button = "Close") p4.disconnect() return else: #fetch the description if desc == None: desc = cmds.promptDialog(q = True, text = True) #save the file locally (so depot and HD are in sync) openedFile = cmds.file(q = True, sceneName = True) saveFileName = openedFile.rpartition("/")[2] if fileArg == None: cmds.file(f = True, save = True, options = "v = 0", type = "mayaBinary") #grab the name of the file fileNameWithExt = openedFile.rpartition("/")[2] fileName = fileNameWithExt.rpartition(".")[0] description = (desc + "\n Affected Files: " + openedFile) #create new changelist newChange = p4.fetch_change() newChange._description = description #make sure we don't add existing default changelist files. newChange._files = [] #determine the new number so we can refetch it. newChangeNum = int(p4.save_change(newChange)[0].split()[1]) #change changelist number p4.run_reopen('-c', newChangeNum, depotFile) #submit the changelist p4.run_submit('-c', newChangeNum) #tell the user submit was successful if fileArg == None: cmds.confirmDialog(title = "Perforce", icon = "information", message = "Submit Operation was successful!", button = "Close") else: return True else: #if the file is not checked out by the user, let them know result = cmds.confirmDialog(title = "Perforce", icon = "warning", message = "File is not checked out. 
Unable to continue submit operation on this file:\n\n" + fileName) except P4Exception: if fileArg == None: errorString = "The following errors were encountered:\n\n" for e in p4.errors: errorString += e + "\n" cmds.confirmDialog(title = "Perforce", icon = "critical", message = errorString) p4.disconnect() return False # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # def p4_addAndSubmitCurrentFile(fileName, description, *args): fileArg = fileName #try to connect p4 = P4() try: p4.connect() except: cmds.confirmDialog(title = "Perforce", icon = "critical", message = "Unable to connect to perforce server.") return #find currently opened file name if fileName == None: fileName = cmds.file(q = True, sceneName = True) reopen = False syncFiles = [] try: #client info spec = p4.run( "client", "-o" )[0] client = spec.get("Client") owner = spec.get("Owner") p4.user = owner p4.client = client except: cmds.confirmDialog(title = "Perforce", icon = "critical", message = "Unable to obtain client spec information.") #find currently opened file name proceed = False if fileArg == None: fileName = cmds.file(q = True, sceneName = True) if fileName == "": cmds.confirmDialog(title = "Perforce", icon = "warning", message = "Cannot Add file to perforce as file has no name.", button = "Close") p4.disconnect() return else: proceed = True else: proceed = True #if the file has a filename, if proceed: try: clientRoot = p4.fetch_client(p4.client)._Root #check to make sure client root is in the client file path if os.path.normpath(fileName).find(os.path.normpath(clientRoot)) == 0: #if it was, then get a description for the changelist if description == None: result = cmds.promptDialog(title = "Perforce", message = "Please Enter a Description..", button = ["Accept", "Cancel"], defaultButton = "Accept", dismissString = "Cancel", cancelButton = "Cancel") else: result = "Accept" if result == "Accept": #get changelist description if description == None: description = cmds.promptDialog(q = True, text = True) #create changelist newChange = p4.fetch_change() newChange._description = description #make sure we don't add existing default changelist files. newChange._files = [] #determine the new number so we can refetch it. newChangeNum = int(p4.save_change(newChange)[0].split()[1]) #description = "test" p4.run_add('-c', newChangeNum, fileName) #submit the changelist p4.run_submit('-c', newChangeNum) #tell user operation was successful if fileArg == None: result = cmds.confirmDialog(title = "Perforce", icon = "information", message = "File has been successfully added to perforce and submitted!", button = ["Close", "Check Out File"]) if result == "Close": p4.disconnect() return if result == "Check Out File": p4_checkOutCurrentFile(fileName) #return operation succuessful return True else: p4.disconnect() return else: cmds.confirmDialog(title = "Perforce", icon = "warning", message = "Cannot proceed. 
File is not under client's root, " + clientRoot, button = "Close") p4.disconnect() return False except P4Exception: errorString = "The following errors were encountered:\n\n" for e in p4.errors: errorString += e + "\n" cmds.confirmDialog(title = "Perforce", icon = "critical", message = errorString) p4.disconnect() return False # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # def p4_checkForUpdates(*args): #try to connect p4 = P4() #get maya tools path toolsPath = cmds.internalVar(usd = True) + "mayaTools.txt" if os.path.exists(toolsPath): f = open(toolsPath, 'r') mayaToolsDir = f.readline() f.close() #connect to p4 try: p4.connect() except: cmds.confirmDialog(title = "Perforce", icon = "critical", message = "Unable to connect to perforce server.") return #find currently opened file name clientFile = cmds.file(q = True, sceneName = True) reopen = False syncFiles = [] try: #client info spec = p4.run( "client", "-o" )[0] client = spec.get("Client") owner = spec.get("Owner") p4.user = owner p4.client = client except: cmds.confirmDialog(title = "Perforce", icon = "critical", message = "Unable to obtain client spec information.") #this will check the maya tools directory in p4 for any updates try: syncFiles = [] clientRoot = p4.fetch_client(p4.client)._Root depotDirectories = [] #get current project if os.path.exists(mayaToolsDir + "/General/Scripts/projectSettings.txt"): #read to find current project f = open(mayaToolsDir + "/General/Scripts/projectSettings.txt", 'r') settings = cPickle.load(f) f.close() #write out new settings project = settings.get("CurrentProject") if os.path.exists(mayaToolsDir + "/General/Scripts/" + project + "_Project_Settings.txt"): #read the depot paths to sync from the project settings f = open(mayaToolsDir + "/General/Scripts/" + project + "_Project_Settings.txt", 'r') settings = cPickle.load(f) f.close() depotDirectories = settings.get("depotPaths") print depotDirectories #look at each directory inside MayaTools for dir in depotDirectories: depotFiles = p4.run_files(dir + "...") for each in depotFiles: #try to compare depot to local. 
It is possible that there are local files not in depot, and vise versa try: fileInfo = p4.run_files(each['depotFile'])[0] depotFilePath = fileInfo['depotFile'] fileName = depotFilePath.rpartition("/")[2] #compare local files localFile = p4.run_have(depotFilePath)[0] localRevision = int(localFile['haveRev']) depotRevision = int(fileInfo['rev']) if localRevision < depotRevision: syncFiles.append(depotFilePath) except: try: #check to see if it errors out because we don't have a local version of the file fileInfo = p4.run_files(each['depotFile'])[0] depotFilePath = fileInfo['depotFile'] fileName = depotFilePath.rpartition("/")[2] localFile = p4.run_have(depotFilePath)[0] except: action = each.get("action") if action != "delete": syncFiles.append(depotFilePath) pass #check size of syncFiles and ask user if they want to sync if len(syncFiles) > 0: result = cmds.confirmDialog(title = "MayaTools", icon = "warning", message = "There are new updates available to the depot directories specified by your project settings.", button = ["Update", "Not Now"]) if result == "Update": for file in syncFiles: p4.run_sync(file) cmds.confirmDialog(title = "MayaTools", icon = "information", message = "Tools are now up to date!", button = "Close") p4.disconnect() else: p4.disconnect() return #handle any errors except P4Exception: errorString = "The following errors were encountered:\n\n" for e in p4.errors: errorString += e + "\n" cmds.confirmDialog(title = "Perforce", icon = "critical", message = errorString) p4.disconnect() return # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # def createNewProject(*args): if cmds.window("createNewARTProject_Window", exists = True): cmds.deleteUI("createNewARTProject_Window") #create window window = cmds.window("createNewARTProject_Window", w = 400, h = 600, mnb = False, mxb = False, title = "Create New Project") #frameLayouts for settings: Perforce/Auto-Sync mainLayout = cmds.columnLayout(w = 400, h = 600) #project name field projectName = cmds.textFieldGrp("newARTProjectNameField", label = "Project Name: ", w = 400, h = 30, parent = mainLayout, cal = [1, "left"]) scrollLayout = cmds.scrollLayout(w = 400, h = 520, parent = mainLayout) columnLayout = cmds.columnLayout(w = 380, parent = scrollLayout) #perforce/auto-sync layout p4Frame = cmds.frameLayout(parent = columnLayout, w = 370, cll = True, label='Perforce/Auto-Sync', borderStyle='in') p4Layout = cmds.columnLayout(w = 360, parent = p4Frame, co = ["both", 5], rs = 5) #create the scrollField with the information cmds.scrollField(parent = p4Layout, w = 350, h = 100, editable = False, wordWrap = True, text = "Add depot paths you would like the tools to check for updates on. If updates are found, you will be notified, and asked if you would like to sync. 
Valid depot paths look like:\n\n//depot/usr/jeremy_ernst/MayaTools") #crete the add button cmds.button(w = 350, label = "Add Perforce Depot Path", parent = p4Layout, c = partial(addPerforceDepotPath, p4Layout)) #save settings button cmds.button(parent = mainLayout, w = 400, h = 50, label = "Save Settings and Close", c = partial(saveProjectSettings, p4Layout, False)) #show window cmds.showWindow(window) # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # def editProject(project, *args): if cmds.window("createNewARTProject_Window", exists = True): cmds.deleteUI("createNewARTProject_Window") #create window window = cmds.window("createNewARTProject_Window", w = 400, h = 600, mnb = False, mxb = False, title = "Edit Project") #frameLayouts for settings: Perforce/Auto-Sync mainLayout = cmds.columnLayout(w = 400, h = 600) #project name field projectName = cmds.textFieldGrp("newARTProjectNameField", label = "Project Name: ", text = project, w = 400, h = 30, parent = mainLayout, cal = [1, "left"]) scrollLayout = cmds.scrollLayout(w = 400, h = 520, parent = mainLayout) columnLayout = cmds.columnLayout(w = 380, parent = scrollLayout) #perforce/auto-sync layout p4Frame = cmds.frameLayout(parent = columnLayout, w = 370, cll = True, label='Perforce/Auto-Sync', borderStyle='in') p4Layout = cmds.columnLayout(w = 360, parent = p4Frame, co = ["both", 5], rs = 5) #create the scrollField with the information cmds.scrollField(parent = p4Layout, w = 350, h = 100, editable = False, wordWrap = True, text = "Add depot paths you would like the tools to check for updates on. If updates are found, you will be notified, and asked if you would like to sync. Valid depot paths look like:\n\n//depot/usr/jeremy_ernst/MayaTools") #crete the add button cmds.button(w = 350, label = "Add Perforce Depot Path", parent = p4Layout, c = partial(addPerforceDepotPath, p4Layout)) #get maya tools path toolsPath = cmds.internalVar(usd = True) + "mayaTools.txt" if os.path.exists(toolsPath): f = open(toolsPath, 'r') mayaToolsDir = f.readline() f.close() #open the project settings and auto-fill in the info if os.path.exists(mayaToolsDir + "/General/Scripts/" + project + "_Project_Settings.txt"): f = open(mayaToolsDir + "/General/Scripts/" + project + "_Project_Settings.txt", 'r') settings = cPickle.load(f) f.close() paths = settings.get("depotPaths") if len(paths) > 0: for path in paths: #add the path field = addPerforceDepotPath(p4Layout) cmds.textField(field, edit = True, text = path) #save settings button cmds.button(parent = mainLayout, w = 400, h = 50, label = "Save Settings and Close", c = partial(saveProjectSettings, p4Layout, True)) #show window cmds.showWindow(window) # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # def addPerforceDepotPath(layout, *args): field = cmds.textField(docTag = "P4DepotPath", w = 350, parent = layout) #add a RMB menu to remove the field menu = cmds.popupMenu(parent = field, b = 3) cmds.menuItem(parent = menu, label = "Remove Path", c = partial(removePerforceDepotPath, field)) return field # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # def removePerforceDepotPath(field, *args): cmds.textField(field, edit = True, visible = False, h = 1) #cmds.deleteUI(field) This crashes maya instantly. 
Come ON AUTODESK # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # def saveProjectSettings(perforceLayout, edit, *args): #find p4 depot path textfields children = cmds.columnLayout(perforceLayout, q = True, childArray = True) textFields = [] for child in children: if child.find("textField") == 0: data = cmds.textField(child, q = True, docTag = True) if data == "P4DepotPath": textFields.append(child) #make sure paths are valid savePaths = [] for field in textFields: path = cmds.textField(field, q = True, text = True) if path != "": try: p4 = P4() p4.connect() except: cmds.confirmDialog(title = "Perforce", icon = "critical", message = "Unable to connect to perforce server.") return try: #client info spec = p4.run( "client", "-o" )[0] client = spec.get("Client") owner = spec.get("Owner") p4.user = owner p4.client = client except: cmds.confirmDialog(title = "Perforce", icon = "critical", message = "Unable to obtain client spec information.") #now check try: depotFiles = p4.run_files(path + "...") if len(depotFiles) > 0: savePaths.append(path) #handle any errors except P4Exception: errorString = "The following errors were encountered:\n\n" for e in p4.errors: errorString += e + "\n" cmds.confirmDialog(title = "Perforce", icon = "critical", message = errorString) p4.disconnect() return else: #see if the text field is just hidden or if it is actually blank vis = cmds.textField(field, q = True, visible = True) if vis == True: cmds.confirmDialog(title = "Error", icon = "critical", message = "Empty string not allowed as a path name. Either remove that field, or enter a correct depot path.") return #write out to disk projectName = cmds.textFieldGrp("newARTProjectNameField", q = True, text = True) if projectName == "": cmds.confirmDialog(title = "Error", icon = "critical", message = "Empty string not allowed as a project name.") return #save the new project file under MayaTools/General/Scripts as projName + "_Project_Settings.txt" toolsPath = cmds.internalVar(usd = True) + "mayaTools.txt" if os.path.exists(toolsPath): f = open(toolsPath, 'r') mayaToolsDir = f.readline() f.close() if edit == False: if os.path.exists(mayaToolsDir + "/General/Scripts/" + projectName + "_Project_Settings.txt"): cmds.confirmDialog(title = "Error", icon = "critical", message = "Project already exists with that name") return #save out f = open(mayaToolsDir + "/General/Scripts/" + projectName + "_Project_Settings.txt", 'w') #create a dictionary with values settings = {} settings["depotPaths"] = savePaths #write our dictionary to file cPickle.dump(settings, f) f.close() #delete the UI cmds.deleteUI("createNewARTProject_Window") #add the project to the menu create = True items = cmds.lsUI(menuItems = True) for i in items: data = cmds.menuItem(i, q = True, docTag = True) if data == "P4Proj": label = cmds.menuItem(i, q = True, label = True) print label if label == projectName: create = False if create: menuItem = cmds.menuItem(label = projectName, parent = "perforceProjectList", cl = "perforceProjectRadioMenuCollection", rb = True, docTag = "P4Proj", c = partial(setCurrentProject, projectName)) cmds.menuItem(parent = "perforceProjectList", optionBox = True, c = partial(editProject, projectName)) #open up the projectSettings.txt file and add an entry for current project if os.path.exists(mayaToolsDir + "/General/Scripts/projectSettings.txt"): f = open(mayaToolsDir + "/General/Scripts/projectSettings.txt", 'r') oldSettings = cPickle.load(f) useSourceControl = oldSettings.get("UseSourceControl") f.close() 
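#preserve the user's existing source control preference and mark the newly saved project as the current one when re-writing projectSettings.txt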
#write out new settings settings = {} settings["UseSourceControl"] = useSourceControl settings["CurrentProject"] = projectName f = open(mayaToolsDir + "/General/Scripts/projectSettings.txt", 'w') cPickle.dump(settings, f) f.close() # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # def setCurrentProject(projectName, *args): #get access to maya tools path toolsPath = cmds.internalVar(usd = True) + "mayaTools.txt" if os.path.exists(toolsPath): f = open(toolsPath, 'r') mayaToolsDir = f.readline() f.close() #re-write settings if os.path.exists(mayaToolsDir + "/General/Scripts/projectSettings.txt"): f = open(mayaToolsDir + "/General/Scripts/projectSettings.txt", 'r') oldSettings = cPickle.load(f) useSourceControl = oldSettings.get("UseSourceControl") f.close() #write out new settings settings = {} settings["UseSourceControl"] = useSourceControl settings["CurrentProject"] = projectName f = open(mayaToolsDir + "/General/Scripts/projectSettings.txt", 'w') cPickle.dump(settings, f) f.close()
bsd-2-clause
-8,773,135,227,386,069,000
14.551724
316
0.561792
false
3.307115
false
false
false
priomsrb/vimswitch
vimswitch/UpdateProfileAction.py
1
1247
from .Action import Action from .Settings import getSettings from .SwitchProfileAction import createSwitchProfileAction class UpdateProfileAction(Action): def __init__(self, settings, switchProfileAction): Action.__init__(self) self.settings = settings self.switchProfileAction = switchProfileAction self.profile = None def execute(self): self.profile = self._getProfile() if self.profile == self.settings.defaultProfile: print('Cannot update default profile') self.exitCode = -1 return self.switchProfileAction.update = True self.switchProfileAction.profile = self.profile self.switchProfileAction.execute() def _getProfile(self): if self.profile is None: if self.settings.currentProfile is None: return self.settings.defaultProfile else: return self.settings.currentProfile else: return self.profile def createUpdateProfileAction(app): settings = getSettings(app) switchProfileAction = createSwitchProfileAction(app) updateProfileAction = UpdateProfileAction(settings, switchProfileAction) return updateProfileAction
gpl-2.0
-8,137,162,348,852,659,000
30.974359
76
0.677626
false
4.85214
false
false
false
indico/indico
indico/modules/events/registration/models/invitations.py
1
3543
# This file is part of Indico. # Copyright (C) 2002 - 2021 CERN # # Indico is free software; you can redistribute it and/or # modify it under the terms of the MIT License; see the # LICENSE file for more details. from uuid import uuid4 from sqlalchemy.dialects.postgresql import UUID from indico.core.db import db from indico.core.db.sqlalchemy import PyIntEnum from indico.util.enum import RichIntEnum from indico.util.i18n import L_ from indico.util.locators import locator_property from indico.util.string import format_repr class InvitationState(RichIntEnum): __titles__ = [L_('Pending'), L_('Accepted'), L_('Declined')] pending = 0 accepted = 1 declined = 2 class RegistrationInvitation(db.Model): """An invitation for someone to register.""" __tablename__ = 'invitations' __table_args__ = (db.CheckConstraint('(state = {state}) OR (registration_id IS NULL)' .format(state=InvitationState.accepted), name='registration_state'), db.UniqueConstraint('registration_form_id', 'email'), {'schema': 'event_registration'}) #: The ID of the invitation id = db.Column( db.Integer, primary_key=True ) #: The UUID of the invitation uuid = db.Column( UUID, index=True, unique=True, nullable=False, default=lambda: str(uuid4()) ) #: The ID of the registration form registration_form_id = db.Column( db.Integer, db.ForeignKey('event_registration.forms.id'), index=True, nullable=False ) #: The ID of the registration (if accepted) registration_id = db.Column( db.Integer, db.ForeignKey('event_registration.registrations.id'), index=True, unique=True, nullable=True ) #: The state of the invitation state = db.Column( PyIntEnum(InvitationState), nullable=False, default=InvitationState.pending ) #: Whether registration moderation should be skipped skip_moderation = db.Column( db.Boolean, nullable=False, default=False ) #: The email of the invited person email = db.Column( db.String, nullable=False ) #: The first name of the invited person first_name = db.Column( db.String, nullable=False ) #: The last name of the invited person last_name = db.Column( db.String, nullable=False ) #: The affiliation of the invited person affiliation = db.Column( db.String, nullable=False ) #: The associated registration registration = db.relationship( 'Registration', lazy=True, backref=db.backref( 'invitation', lazy=True, uselist=False ) ) # relationship backrefs: # - registration_form (RegistrationForm.invitations) @locator_property def locator(self): return dict(self.registration_form.locator, invitation_id=self.id) @locator.uuid def locator(self): """A locator suitable for 'display' pages. Instead of the numeric ID it uses the UUID. """ assert self.uuid is not None return dict(self.registration_form.locator, invitation=self.uuid) def __repr__(self): full_name = f'{self.first_name} {self.last_name}' return format_repr(self, 'id', 'registration_form_id', 'email', 'state', _text=full_name)
mit
-4,382,065,804,384,135,000
27.344
109
0.61558
false
3.998871
false
false
false
ajroussel/shell-nouns-data
src/extractor.py
1
3851
#!/usr/bin/env python3 import os import argparse import pickle from lxml import etree from sys import argv from objects import * ## returns a list of ints def to_index(s): outlist = list() spl1 = s.split(',') try: for item in spl1: spl2 = item.split('..') start = int(spl2[0].split('_')[1]) end = int(spl2[1].split('_')[1]) if len(spl2) > 1 else start outlist.extend([i - 1 for i in range(start, end + 1)]) except ValueError: print(s) return outlist def get_SNs(node): snes = list() try: for sn in node.find("shellnouns").iter("shellnoun"): snes.append((sn.get("content_phrases"), to_index(sn.get("span")), sn.get("value"))) except AttributeError: pass return snes if __name__ == '__main__': ap = argparse.ArgumentParser() ap.add_argument("inputfiles", type=str, nargs='+', help="xml input files") ap.add_argument("-o", "--outputfile", type=str, default="sn_data.pickle", help="name of output pickle") ap.add_argument("-a", "--annotated", action="store_true", help="use if xml files are annotated w/SN info") userargs = ap.parse_args() i = 0 corpus = Corpus() for fname in userargs.inputfiles: docroot = etree.parse(fname).getroot() myname, ext = os.path.splitext(fname) print("processing", myname + "...") session_start = i for turn in docroot.iter("turn"): turn_start = i mylang = "de" if "de" in turn.get("turn_id") else "en" for sentence in turn.iter("sent"): sent_start = i for tok in sentence.iter("tok"): corpus.tokens.append(Token(tok.text, tok.attrib, i, mylang, session_start)) i += 1 sent_end = i corpus.sentences.append(range(sent_start, sent_end)) turn_end = i corpus.turns.append(range(turn_start, turn_end)) session_end = i corpus.sessions.append(range(session_start, session_end)) if userargs.annotated: # dict: CP id -> Antecedent cps = dict() for cp in docroot.find("content_phrases").iter("content_phrase"): cp_indices = to_index(cp.get("span")) is_nom = cp.get("nominal") new_ante = Antecedent([corpus.tokens[x + session_start] for x in cp_indices], is_nom) corpus.antecedents.append(new_ante) cps[cp.get("id")] = new_ante # list[tuples] "proto-Anaphor" snes = get_SNs(docroot) for cp_key, sn_indices, val in snes: my_anaphor = Anaphor([corpus.tokens[x + session_start] for x in sn_indices], val) corpus.anaphors.append(my_anaphor) my_antecedents = list() for key in cp_key.split(";"): try: my_antecedents.append(cps[key]) except KeyError: pass my_instance = RefInstance(my_anaphor, *my_antecedents) # only keep (non-empty) entries if my_instance.antecedents: corpus.ref_instances.append(my_instance) with open(userargs.outputfile, 'wb') as outfile: print("read corpus with", len(corpus.tokens), "tokens...") pickle.dump(corpus, outfile) print("done!")
gpl-3.0
7,349,968,066,567,672,000
32.780702
77
0.491041
false
4.015641
false
false
false
CybOXProject/python-cybox
cybox/__init__.py
1
4103
# Copyright (c) 2020, The MITRE Corporation. All rights reserved. # See LICENSE.txt for complete terms. from mixbox import entities from mixbox.vendor import six from .version import __version__ # noqa #: Mapping of xsi:types to implementation/extension classes _EXTENSION_MAP = {} def _lookup_unprefixed(typename): """Attempts to resolve a class for the input XML type `typename`. Args: typename: The name of an CybOX XML type (e.g., UnixProcessStatusType) without a namespace prefix. Returns: A stix.Entity implementation class for the `typename`. Raises: ValueError: If no class has been registered for the input `typename`. """ for xsi_type, klass in six.iteritems(_EXTENSION_MAP): if typename in xsi_type: return klass error = "Unregistered extension type: %s" % typename raise ValueError(error) def _lookup_extension(xsi_type): """Returns a Python class for the `xsi_type` value. Args: xsi_type: An xsi:type value string. Returns: An Entity implementation class for the `xsi_type`. Raises: ValueError: If no class has been registered for the `xsi_type`. """ if xsi_type in _EXTENSION_MAP: return _EXTENSION_MAP[xsi_type] raise ValueError("Unregistered xsi:type %s" % xsi_type) def lookup_extension(typeinfo, default=None): """Returns an Entity class for that has been registered for the `typeinfo` value. Note: This is for internal use only. Args: typeinfo: An object or string containing type information. This can be either an xsi:type attribute value or a stix.bindings object. default: Return class if typeinfo is None or contains no xml type information. Returns: An Entity implementation class for the `xsi_type`. Raises: ValueError: If no class has been registered for the `xsi_type`. """ if typeinfo is None and default: return default # If the `typeinfo` was a string, consider it a full xsi:type value. if isinstance(typeinfo, six.string_types): return _lookup_extension(typeinfo) # Most extension bindings include this attribute. if not hasattr(typeinfo, 'xml_type'): if default: return default error = "Input %s is missing xml_type attribute. Cannot lookup class." raise ValueError(error % type(typeinfo)) # Extension binding classes usually (always?) have an `xmlns_prefix` # class attribute. if hasattr(typeinfo, 'xmlns_prefix'): xsi_type = "%s:%s" % (typeinfo.xmlns_prefix, typeinfo.xml_type) return _lookup_extension(xsi_type) # no xmlns_prefix found, try to resolve the class by just the `xml_type` return _lookup_unprefixed(typeinfo.xml_type) def add_extension(cls): """Registers an Entity class as an implementation of an xml type. Classes must have an ``_XSI_TYPE`` class attributes to be registered. The value of this attribute must be a valid xsi:type. Note: This was designed for internal use. """ _EXTENSION_MAP[cls._XSI_TYPE] = cls # noqa def register_extension(cls): """Class decorator for registering a stix.Entity class as an implementation of an xml type. Classes must have an ``_XSI_TYPE`` class attributes to be registered. Note: This was designed for internal use. """ add_extension(cls) return cls # TODO: Should this get moved to mixbox or not? class Unicode(entities.Entity): """Shim class to allow xs:string's in EntityList""" def __init__(self, value): super(Unicode, self).__init__() self.value = value @property def value(self): return self._value @value.setter def value(self, value): self._value = six.text_type(value) def to_obj(self, ns_info=None): return self.value def to_dict(self): return self.value @classmethod def from_obj(cls, cls_obj): return cls(cls_obj) from_dict = from_obj
bsd-3-clause
-1,116,159,880,867,913,500
25.816993
79
0.654155
false
3.995131
false
false
false
mlsecproject/gglsbl-rest
config.py
1
1443
from os import environ import logging.config from apscheduler.schedulers.background import BackgroundScheduler from multiprocessing import cpu_count from subprocess import Popen logging.config.fileConfig('logging.conf') bind = "0.0.0.0:5000" workers = int(environ.get('WORKERS', cpu_count() * 8 + 1)) timeout = int(environ.get('TIMEOUT', 120)) access_log_format = '%(h)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s" "%({X-Forwarded-For}i)s" "%({X-Forwarded-Port}i)s" "%({X-Forwarded-Proto}i)s" "%({X-Amzn-Trace-Id}i)s"' max_requests = int(environ.get('MAX_REQUESTS', 16384)) limit_request_line = int(environ.get('LIMIT_REQUEST_LINE', 8190)) keepalive = int(environ.get('KEEPALIVE', 60)) log = logging.getLogger(__name__) def update(): log.info("Starting update process...") po = Popen("python3 update.py", shell=True) log.info("Update started as PID %d", po.pid) rc = po.wait() log.info("Update process finished with status code %d", rc) sched = None def on_starting(server): log.info("Initial database load...") po = Popen("python3 update.py", shell=True) log.info("Update started as PID %d", po.pid) rc = po.wait() log.info("Update process finished with status code %d", rc) log.info("Starting scheduler...") global sched sched = BackgroundScheduler(timezone="UTC") sched.start() sched.add_job(update, id="update", coalesce=True, max_instances=1, trigger='interval', minutes=30)
apache-2.0
-1,130,091,579,888,611,300
33.357143
167
0.677755
false
3.150655
false
false
false
estaban/pyload
module/plugins/hoster/UnrestrictLi.py
1
4420
# -*- coding: utf-8 -*- ############################################################################ # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU Affero General Public License as # # published by the Free Software Foundation, either version 3 of the # # License, or (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU Affero General Public License for more details. # # # # You should have received a copy of the GNU Affero General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################ import re from datetime import datetime, timedelta from module.plugins.Hoster import Hoster from module.common.json_layer import json_loads def secondsToMidnight(gmt=0): now = datetime.utcnow() + timedelta(hours=gmt) if now.hour is 0 and now.minute < 10: midnight = now else: midnight = now + timedelta(days=1) midnight = midnight.replace(hour=0, minute=10, second=0, microsecond=0) return int((midnight - now).total_seconds()) class UnrestrictLi(Hoster): __name__ = "UnrestrictLi" __version__ = "0.12" __type__ = "hoster" __pattern__ = r'https?://(?:[^/]*\.)?(unrestrict|unr)\.li' __description__ = """Unrestrict.li hoster plugin""" __author_name__ = "stickell" __author_mail__ = "[email protected]" def setup(self): self.chunkLimit = 16 self.resumeDownload = True def process(self, pyfile): if re.match(self.__pattern__, pyfile.url): new_url = pyfile.url elif not self.account: self.logError(_("Please enter your %s account or deactivate this plugin") % "Unrestrict.li") self.fail("No Unrestrict.li account provided") else: self.logDebug("Old URL: %s" % pyfile.url) for _ in xrange(5): page = self.req.load('https://unrestrict.li/unrestrict.php', post={'link': pyfile.url, 'domain': 'long'}) self.logDebug("JSON data: " + page) if page != '': break else: self.logInfo("Unable to get API data, waiting 1 minute and retry") self.retry(5, 60, "Unable to get API data") if 'Expired session' in page or ("You are not allowed to " "download from this host" in page and self.premium): self.account.relogin(self.user) self.retry() elif "File offline" in page: self.offline() elif "You are not allowed to download from this host" in page: self.fail("You are not allowed to download from this host") elif "You have reached your daily limit for this host" in page: self.logWarning("Reached daily limit for this host") self.retry(5, secondsToMidnight(gmt=2), "Daily limit for this host reached") elif "ERROR_HOSTER_TEMPORARILY_UNAVAILABLE" in page: self.logInfo("Hoster temporarily unavailable, waiting 1 minute and retry") self.retry(5, 60, "Hoster is temporarily unavailable") page = json_loads(page) new_url = page.keys()[0] self.api_data = page[new_url] if new_url != pyfile.url: self.logDebug("New URL: " + new_url) if hasattr(self, 'api_data'): self.setNameSize() self.download(new_url, disposition=True) if self.getConfig("history"): self.load("https://unrestrict.li/history/&delete=all") self.logInfo("Download history deleted") def setNameSize(self): if 'name' in self.api_data: self.pyfile.name = self.api_data['name'] if 'size' in self.api_data: self.pyfile.size = self.api_data['size']
gpl-3.0
7,935,763,041,935,266,000
43.646465
104
0.541629
false
4.25409
false
false
false
mikeh69/JammerDetect
src/audio_tones.py
1
3644
import math #import needed modules import pyaudio #sudo apt-get install python-pyaudio import struct import pickle from time import sleep PyAudio = pyaudio.PyAudio #initialize pyaudio #See http://en.wikipedia.org/wiki/Bit_rate#Audio BITRATE = 48000 #number of frames per second - 44.1kHz does not work properly on RPi BCM2538! LENGTH = 0.2 #seconds to play sound CHUNKSIZE = int(BITRATE * LENGTH) WAVEDATA_FILE = "/home/pi/wavedata.pickled" class AudioTones: def init(self): self.player = PyAudio() defaultCapability = self.player.get_default_host_api_info() print("Player defaults:") print(defaultCapability) # fmt = self.player.get_format_from_width(2) # fmt = pyaudio.paInt8 # supposedly 8-bit signed-integer fmt = pyaudio.paInt16 # 16-bit signed-integer print(self.player.is_format_supported(output_format = fmt, output_channels = 1, rate = BITRATE, output_device = 3)) self.stream = self.player.open(format = fmt, channels = 1, rate = BITRATE, output = True, frames_per_buffer = CHUNKSIZE) try: print("Trying to load wavedata from file...") f = open(WAVEDATA_FILE, "rb") print(" File opened OK") self.WAVEDATA = pickle.load(f) print(" Wavedata read from file OK") f.close() return except Exception as ex: print(ex) print("Failed to load wavedata from file, re-generating") frequency = 200.0 # start frequency 200Hz self.WAVEDATA = [] for index in range(0, 46): # valid index range 0 - 45, ~10log(32768) num_fadein_frames = int(BITRATE * LENGTH * 0.05) num_loud_frames = int(BITRATE * LENGTH * 0.7) num_fadeout_frames = CHUNKSIZE - (num_loud_frames + num_fadein_frames) self.WAVEDATA.append(struct.pack( "<H", 0 )) for xx in range(num_fadein_frames): x = xx next_sample = int(math.sin(x/((BITRATE/frequency)/math.pi)) * 32000 * (xx/num_fadein_frames)) self.WAVEDATA[index] = self.WAVEDATA[index] + struct.pack( "<h", next_sample ) # little-endian int16 for xx in range(num_loud_frames): x = xx + num_fadein_frames next_sample = int(math.sin(x/((BITRATE/frequency)/math.pi)) * 32000) self.WAVEDATA[index] = self.WAVEDATA[index] + struct.pack( "<h", next_sample ) # little-endian int16 for xx in range(num_fadeout_frames): x = xx + num_loud_frames + num_fadein_frames next_sample = int(math.sin(x/((BITRATE/frequency)/math.pi)) * 32000 * (1 - (xx/num_fadeout_frames))) # next_sample = 0 self.WAVEDATA[index] = self.WAVEDATA[index] + struct.pack( "<h", next_sample) frequency *= 1.0594631 # semitone ratio # Save the newly-generated data to a file using Pickle: print("Saving wavedata to file") f = open(WAVEDATA_FILE, "wb") pickle.dump(self.WAVEDATA, f) f.close() def test(self): for index in range(0, 40): self.stream.write(self.WAVEDATA[index]) index += 1 self.stream.stop_stream() def play(self, index): self.stream.write(self.WAVEDATA[index]) def close(self): self.stream.stop_stream() self.stream.close() self.player.terminate() if __name__ == "__main__": tones = AudioTones() tones.init() for i in range(0, 40): tones.play(i) sleep(0.3)
mit
-6,694,274,719,032,910,000
37.357895
128
0.586718
false
3.370953
false
false
false
mirkobronzi/finance-analyzer
lib/entries.py
1
3600
""" classes Entry and Entries """ __author__ = 'bronzi' from datetime import datetime import re #TODO: should be configurable PUNCTUATION_REMOVER = re.compile("[0-9,\.#\-_/']") SPACE_REMOVER = re.compile(" +") def string_to_float(string): """ simply convert a string into a float string : basestring """ return 0.0 if string.strip() == '' else float(string.replace(',', '.')) class Entry: """ classes Entry - representing an expense/income entry """ def __init__(self, date, name, money_out=0.0, money_in=0.0, comments=None): self.date = date self.name = name self.money_in = money_in self.money_out = money_out self.comments = comments if comments else {} self.normalized_name = Entry._normalize_entry(name) @staticmethod def parse(date, name, money_out='0', money_in='0', comments=None): """ method to parse strings and convert them into an Entry object (all parameter are basestring) """ parsed_date = datetime.strptime(date, "%m/%d/%Y").date() parsed_comments = [] if not comments else\ [x.strip().split(':') for x in comments.split(',')] fixed_parsed_comments = map( lambda x : x if len(x) > 1 else (x[0], ''), parsed_comments) return Entry(parsed_date, name, string_to_float(money_out), string_to_float(money_in), dict(fixed_parsed_comments)) def as_tuple(self): return (self.date, self.name, self.money_in, self.money_out, self.comments) @staticmethod def _normalize_entry(name): normalized_name = re.sub(PUNCTUATION_REMOVER, "", name) normalized_name = re.sub(SPACE_REMOVER, " ", normalized_name) return normalized_name.strip() def __eq__(self, other): return (self.date == other.date and self.name == other.name and self.money_in == other.money_in and self.money_out == other.money_out and self.comments == other.comments) def __hash__(self): # TODO: check a better way to implement this # try to generate a frozen dictionary from the beginning comments_hash = hash(frozenset(self.comments.keys())) +\ hash(frozenset(self.comments.values())) return hash(self.date) +\ hash(self.name) +\ hash(self.money_in) +\ hash(self.money_out) +\ comments_hash def __repr__(self): return (str(self.date) + ' : ' + self.name + ' => (' + str(self.money_in) + ', -' + str(self.money_out) + ') [' + str(self.comments) + ']') class Entries: """ classes Entries - representing a collection of Entry """ def __init__(self, *args): self.entries = list(args) def add(self, entry): """ method to add an Entry to the collection entry : Entry """ self.entries.append(entry) def extend(self, entries): """ method to merge the given Entries into this one entries : Entries """ self.entries.extend(entries.entries) def sorted(self): entries_as_tuple = [entry.as_tuple() for entry in self.entries] to_return = sorted(entries_as_tuple) return Entries(*to_return) def __eq__(self, other): return self.entries == other.entries def __repr__(self): return str(self.entries) def __iter__(self): return iter(self.entries)
gpl-3.0
-7,055,621,584,990,460,000
29
76
0.563889
false
3.829787
false
false
false
Si-elegans/Web-based_GUI_Tools
spirit/forms/comment_like.py
1
1042
#-*- coding: utf-8 -*- from django import forms from django.utils.translation import ugettext as _ from spirit.models.comment_like import CommentLike class LikeForm(forms.ModelForm): class Meta: model = CommentLike fields = [] def __init__(self, user=None, comment=None, *args, **kwargs): super(LikeForm, self).__init__(*args, **kwargs) self.user = user self.comment = comment def clean(self): cleaned_data = super(LikeForm, self).clean() like = CommentLike.objects.filter(user=self.user, comment=self.comment) if like.exists(): # Do this since some of the unique_together fields are excluded. raise forms.ValidationError(_("This like already exists")) return cleaned_data def save(self, commit=True): if not self.instance.pk: self.instance.user = self.user self.instance.comment = self.comment return super(LikeForm, self).save(commit)
apache-2.0
6,114,016,247,906,903,000
27.189189
76
0.606526
false
4.288066
false
false
false
vIiRuS/Lagerregal
users/forms.py
1
1438
from django import forms from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext from users.models import Lageruser, DepartmentUser from Lagerregal import settings class SettingsForm(forms.ModelForm): error_css_class = 'has-error' class Meta: model = Lageruser fields = ["pagelength", "timezone", "theme", "main_department"] help_texts = { "pagelength": _("The number of items displayed on one page in a list."), "main_department": _("Your Main department determines, which department devices you create are assigned to."), } def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields["timezone"].choices[0] = ("", ugettext("Default ({0})".format(settings.TIME_ZONE))) self.fields["timezone"].widget.choices[0] = ("", ugettext("Default ({0})".format(settings.TIME_ZONE))) class AvatarForm(forms.ModelForm): error_css_class = 'has-error' avatar_clear = forms.BooleanField(required=False) class Meta: model = Lageruser fields = ["avatar"] widgets = { "avatar": forms.FileInput() } class DepartmentAddUserForm(forms.ModelForm): error_css_class = 'has-error' class Meta: model = DepartmentUser widgets = { "department": forms.HiddenInput() } fields = '__all__'
bsd-3-clause
492,863,469,276,929,150
30.26087
122
0.626565
false
4.144092
false
false
false
dmsimard/ansible
lib/ansible/plugins/filter/core.py
1
21532
# (c) 2012, Jeroen Hoekx <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import base64 import glob import hashlib import json import ntpath import os.path import re import sys import time import uuid import yaml import datetime from functools import partial from random import Random, SystemRandom, shuffle from jinja2.filters import environmentfilter, do_groupby as _do_groupby from ansible.errors import AnsibleError, AnsibleFilterError, AnsibleFilterTypeError from ansible.module_utils.six import string_types, integer_types, reraise, text_type from ansible.module_utils.six.moves import shlex_quote from ansible.module_utils._text import to_bytes, to_native, to_text from ansible.module_utils.common.collections import is_sequence from ansible.module_utils.common._collections_compat import Mapping from ansible.parsing.ajson import AnsibleJSONEncoder from ansible.parsing.yaml.dumper import AnsibleDumper from ansible.template import recursive_check_defined from ansible.utils.display import Display from ansible.utils.encrypt import passlib_or_crypt from ansible.utils.hashing import md5s, checksum_s from ansible.utils.unicode import unicode_wrap from ansible.utils.vars import merge_hash display = Display() UUID_NAMESPACE_ANSIBLE = uuid.UUID('361E6D51-FAEC-444A-9079-341386DA8E2E') def to_yaml(a, *args, **kw): '''Make verbose, human readable yaml''' default_flow_style = kw.pop('default_flow_style', None) transformed = yaml.dump(a, Dumper=AnsibleDumper, allow_unicode=True, default_flow_style=default_flow_style, **kw) return to_text(transformed) def to_nice_yaml(a, indent=4, *args, **kw): '''Make verbose, human readable yaml''' transformed = yaml.dump(a, Dumper=AnsibleDumper, indent=indent, allow_unicode=True, default_flow_style=False, **kw) return to_text(transformed) def to_json(a, *args, **kw): ''' Convert the value to JSON ''' return json.dumps(a, cls=AnsibleJSONEncoder, *args, **kw) def to_nice_json(a, indent=4, sort_keys=True, *args, **kw): '''Make verbose, human readable JSON''' return to_json(a, indent=indent, sort_keys=sort_keys, separators=(',', ': '), *args, **kw) def to_bool(a): ''' return a bool for the arg ''' if a is None or isinstance(a, bool): return a if isinstance(a, string_types): a = a.lower() if a in ('yes', 'on', '1', 'true', 1): return True return False def to_datetime(string, format="%Y-%m-%d %H:%M:%S"): return datetime.datetime.strptime(string, format) def strftime(string_format, second=None): ''' return a date string using string. 
See https://docs.python.org/2/library/time.html#time.strftime for format ''' if second is not None: try: second = float(second) except Exception: raise AnsibleFilterError('Invalid value for epoch value (%s)' % second) return time.strftime(string_format, time.localtime(second)) def quote(a): ''' return its argument quoted for shell usage ''' if a is None: a = u'' return shlex_quote(to_text(a)) def fileglob(pathname): ''' return list of matched regular files for glob ''' return [g for g in glob.glob(pathname) if os.path.isfile(g)] def regex_replace(value='', pattern='', replacement='', ignorecase=False, multiline=False): ''' Perform a `re.sub` returning a string ''' value = to_text(value, errors='surrogate_or_strict', nonstring='simplerepr') flags = 0 if ignorecase: flags |= re.I if multiline: flags |= re.M _re = re.compile(pattern, flags=flags) return _re.sub(replacement, value) def regex_findall(value, regex, multiline=False, ignorecase=False): ''' Perform re.findall and return the list of matches ''' value = to_text(value, errors='surrogate_or_strict', nonstring='simplerepr') flags = 0 if ignorecase: flags |= re.I if multiline: flags |= re.M return re.findall(regex, value, flags) def regex_search(value, regex, *args, **kwargs): ''' Perform re.search and return the list of matches or a backref ''' value = to_text(value, errors='surrogate_or_strict', nonstring='simplerepr') groups = list() for arg in args: if arg.startswith('\\g'): match = re.match(r'\\g<(\S+)>', arg).group(1) groups.append(match) elif arg.startswith('\\'): match = int(re.match(r'\\(\d+)', arg).group(1)) groups.append(match) else: raise AnsibleFilterError('Unknown argument') flags = 0 if kwargs.get('ignorecase'): flags |= re.I if kwargs.get('multiline'): flags |= re.M match = re.search(regex, value, flags) if match: if not groups: return match.group() else: items = list() for item in groups: items.append(match.group(item)) return items def ternary(value, true_val, false_val, none_val=None): ''' value ? true_val : false_val ''' if value is None and none_val is not None: return none_val elif bool(value): return true_val else: return false_val def regex_escape(string, re_type='python'): string = to_text(string, errors='surrogate_or_strict', nonstring='simplerepr') '''Escape all regular expressions special characters from STRING.''' if re_type == 'python': return re.escape(string) elif re_type == 'posix_basic': # list of BRE special chars: # https://en.wikibooks.org/wiki/Regular_Expressions/POSIX_Basic_Regular_Expressions return regex_replace(string, r'([].[^$*\\])', r'\\\1') # TODO: implement posix_extended # It's similar to, but different from python regex, which is similar to, # but different from PCRE. It's possible that re.escape would work here. 
# https://remram44.github.io/regex-cheatsheet/regex.html#programs elif re_type == 'posix_extended': raise AnsibleFilterError('Regex type (%s) not yet implemented' % re_type) else: raise AnsibleFilterError('Invalid regex type (%s)' % re_type) def from_yaml(data): if isinstance(data, string_types): return yaml.safe_load(data) return data def from_yaml_all(data): if isinstance(data, string_types): return yaml.safe_load_all(data) return data @environmentfilter def rand(environment, end, start=None, step=None, seed=None): if seed is None: r = SystemRandom() else: r = Random(seed) if isinstance(end, integer_types): if not start: start = 0 if not step: step = 1 return r.randrange(start, end, step) elif hasattr(end, '__iter__'): if start or step: raise AnsibleFilterError('start and step can only be used with integer values') return r.choice(end) else: raise AnsibleFilterError('random can only be used on sequences and integers') def randomize_list(mylist, seed=None): try: mylist = list(mylist) if seed: r = Random(seed) r.shuffle(mylist) else: shuffle(mylist) except Exception: pass return mylist def get_hash(data, hashtype='sha1'): try: h = hashlib.new(hashtype) except Exception as e: # hash is not supported? raise AnsibleFilterError(e) h.update(to_bytes(data, errors='surrogate_or_strict')) return h.hexdigest() def get_encrypted_password(password, hashtype='sha512', salt=None, salt_size=None, rounds=None): passlib_mapping = { 'md5': 'md5_crypt', 'blowfish': 'bcrypt', 'sha256': 'sha256_crypt', 'sha512': 'sha512_crypt', } hashtype = passlib_mapping.get(hashtype, hashtype) try: return passlib_or_crypt(password, hashtype, salt=salt, salt_size=salt_size, rounds=rounds) except AnsibleError as e: reraise(AnsibleFilterError, AnsibleFilterError(to_native(e), orig_exc=e), sys.exc_info()[2]) def to_uuid(string, namespace=UUID_NAMESPACE_ANSIBLE): uuid_namespace = namespace if not isinstance(uuid_namespace, uuid.UUID): try: uuid_namespace = uuid.UUID(namespace) except (AttributeError, ValueError) as e: raise AnsibleFilterError("Invalid value '%s' for 'namespace': %s" % (to_native(namespace), to_native(e))) # uuid.uuid5() requires bytes on Python 2 and bytes or text or Python 3 return to_text(uuid.uuid5(uuid_namespace, to_native(string, errors='surrogate_or_strict'))) def mandatory(a, msg=None): from jinja2.runtime import Undefined ''' Make a variable mandatory ''' if isinstance(a, Undefined): if a._undefined_name is not None: name = "'%s' " % to_text(a._undefined_name) else: name = '' if msg is not None: raise AnsibleFilterError(to_native(msg)) else: raise AnsibleFilterError("Mandatory variable %s not defined." % name) return a def combine(*terms, **kwargs): recursive = kwargs.pop('recursive', False) list_merge = kwargs.pop('list_merge', 'replace') if kwargs: raise AnsibleFilterError("'recursive' and 'list_merge' are the only valid keyword arguments") # allow the user to do `[dict1, dict2, ...] | combine` dictionaries = flatten(terms, levels=1) # recursively check that every elements are defined (for jinja2) recursive_check_defined(dictionaries) if not dictionaries: return {} if len(dictionaries) == 1: return dictionaries[0] # merge all the dicts so that the dict at the end of the array have precedence # over the dict at the beginning. 
# we merge the dicts from the highest to the lowest priority because there is # a huge probability that the lowest priority dict will be the biggest in size # (as the low prio dict will hold the "default" values and the others will be "patches") # and merge_hash create a copy of it's first argument. # so high/right -> low/left is more efficient than low/left -> high/right high_to_low_prio_dict_iterator = reversed(dictionaries) result = next(high_to_low_prio_dict_iterator) for dictionary in high_to_low_prio_dict_iterator: result = merge_hash(dictionary, result, recursive, list_merge) return result def comment(text, style='plain', **kw): # Predefined comment types comment_styles = { 'plain': { 'decoration': '# ' }, 'erlang': { 'decoration': '% ' }, 'c': { 'decoration': '// ' }, 'cblock': { 'beginning': '/*', 'decoration': ' * ', 'end': ' */' }, 'xml': { 'beginning': '<!--', 'decoration': ' - ', 'end': '-->' } } # Pointer to the right comment type style_params = comment_styles[style] if 'decoration' in kw: prepostfix = kw['decoration'] else: prepostfix = style_params['decoration'] # Default params p = { 'newline': '\n', 'beginning': '', 'prefix': (prepostfix).rstrip(), 'prefix_count': 1, 'decoration': '', 'postfix': (prepostfix).rstrip(), 'postfix_count': 1, 'end': '' } # Update default params p.update(style_params) p.update(kw) # Compose substrings for the final string str_beginning = '' if p['beginning']: str_beginning = "%s%s" % (p['beginning'], p['newline']) str_prefix = '' if p['prefix']: if p['prefix'] != p['newline']: str_prefix = str( "%s%s" % (p['prefix'], p['newline'])) * int(p['prefix_count']) else: str_prefix = str( "%s" % (p['newline'])) * int(p['prefix_count']) str_text = ("%s%s" % ( p['decoration'], # Prepend each line of the text with the decorator text.replace( p['newline'], "%s%s" % (p['newline'], p['decoration'])))).replace( # Remove trailing spaces when only decorator is on the line "%s%s" % (p['decoration'], p['newline']), "%s%s" % (p['decoration'].rstrip(), p['newline'])) str_postfix = p['newline'].join( [''] + [p['postfix'] for x in range(p['postfix_count'])]) str_end = '' if p['end']: str_end = "%s%s" % (p['newline'], p['end']) # Return the final string return "%s%s%s%s%s" % ( str_beginning, str_prefix, str_text, str_postfix, str_end) @environmentfilter def extract(environment, item, container, morekeys=None): if morekeys is None: keys = [item] elif isinstance(morekeys, list): keys = [item] + morekeys else: keys = [item, morekeys] value = container for key in keys: value = environment.getitem(value, key) return value @environmentfilter def do_groupby(environment, value, attribute): """Overridden groupby filter for jinja2, to address an issue with jinja2>=2.9.0,<2.9.5 where a namedtuple was returned which has repr that prevents ansible.template.safe_eval.safe_eval from being able to parse and eval the data. jinja2<2.9.0,>=2.9.5 is not affected, as <2.9.0 uses a tuple, and >=2.9.5 uses a standard tuple repr on the namedtuple. The adaptation here, is to run the jinja2 `do_groupby` function, and cast all of the namedtuples to a regular tuple. See https://github.com/ansible/ansible/issues/20098 We may be able to remove this in the future. 
""" return [tuple(t) for t in _do_groupby(environment, value, attribute)] def b64encode(string, encoding='utf-8'): return to_text(base64.b64encode(to_bytes(string, encoding=encoding, errors='surrogate_or_strict'))) def b64decode(string, encoding='utf-8'): return to_text(base64.b64decode(to_bytes(string, errors='surrogate_or_strict')), encoding=encoding) def flatten(mylist, levels=None, skip_nulls=True): ret = [] for element in mylist: if skip_nulls and element in (None, 'None', 'null'): # ignore null items continue elif is_sequence(element): if levels is None: ret.extend(flatten(element, skip_nulls=skip_nulls)) elif levels >= 1: # decrement as we go down the stack ret.extend(flatten(element, levels=(int(levels) - 1), skip_nulls=skip_nulls)) else: ret.append(element) else: ret.append(element) return ret def subelements(obj, subelements, skip_missing=False): '''Accepts a dict or list of dicts, and a dotted accessor and produces a product of the element and the results of the dotted accessor >>> obj = [{"name": "alice", "groups": ["wheel"], "authorized": ["/tmp/alice/onekey.pub"]}] >>> subelements(obj, 'groups') [({'name': 'alice', 'groups': ['wheel'], 'authorized': ['/tmp/alice/onekey.pub']}, 'wheel')] ''' if isinstance(obj, dict): element_list = list(obj.values()) elif isinstance(obj, list): element_list = obj[:] else: raise AnsibleFilterError('obj must be a list of dicts or a nested dict') if isinstance(subelements, list): subelement_list = subelements[:] elif isinstance(subelements, string_types): subelement_list = subelements.split('.') else: raise AnsibleFilterTypeError('subelements must be a list or a string') results = [] for element in element_list: values = element for subelement in subelement_list: try: values = values[subelement] except KeyError: if skip_missing: values = [] break raise AnsibleFilterError("could not find %r key in iterated item %r" % (subelement, values)) except TypeError: raise AnsibleFilterTypeError("the key %s should point to a dictionary, got '%s'" % (subelement, values)) if not isinstance(values, list): raise AnsibleFilterTypeError("the key %r should point to a list, got %r" % (subelement, values)) for value in values: results.append((element, value)) return results def dict_to_list_of_dict_key_value_elements(mydict, key_name='key', value_name='value'): ''' takes a dictionary and transforms it into a list of dictionaries, with each having a 'key' and 'value' keys that correspond to the keys and values of the original ''' if not isinstance(mydict, Mapping): raise AnsibleFilterTypeError("dict2items requires a dictionary, got %s instead." % type(mydict)) ret = [] for key in mydict: ret.append({key_name: key, value_name: mydict[key]}) return ret def list_of_dict_key_value_elements_to_dict(mylist, key_name='key', value_name='value'): ''' takes a list of dicts with each having a 'key' and 'value' keys, and transforms the list into a dictionary, effectively as the reverse of dict2items ''' if not is_sequence(mylist): raise AnsibleFilterTypeError("items2dict requires a list, got %s instead." % type(mylist)) return dict((item[key_name], item[value_name]) for item in mylist) def path_join(paths): ''' takes a sequence or a string, and return a concatenation of the different members ''' if isinstance(paths, string_types): return os.path.join(paths) elif is_sequence(paths): return os.path.join(*paths) else: raise AnsibleFilterTypeError("|path_join expects string or sequence, got %s instead." 
% type(paths)) class FilterModule(object): ''' Ansible core jinja2 filters ''' def filters(self): return { # jinja2 overrides 'groupby': do_groupby, # base 64 'b64decode': b64decode, 'b64encode': b64encode, # uuid 'to_uuid': to_uuid, # json 'to_json': to_json, 'to_nice_json': to_nice_json, 'from_json': json.loads, # yaml 'to_yaml': to_yaml, 'to_nice_yaml': to_nice_yaml, 'from_yaml': from_yaml, 'from_yaml_all': from_yaml_all, # path 'basename': partial(unicode_wrap, os.path.basename), 'dirname': partial(unicode_wrap, os.path.dirname), 'expanduser': partial(unicode_wrap, os.path.expanduser), 'expandvars': partial(unicode_wrap, os.path.expandvars), 'path_join': path_join, 'realpath': partial(unicode_wrap, os.path.realpath), 'relpath': partial(unicode_wrap, os.path.relpath), 'splitext': partial(unicode_wrap, os.path.splitext), 'win_basename': partial(unicode_wrap, ntpath.basename), 'win_dirname': partial(unicode_wrap, ntpath.dirname), 'win_splitdrive': partial(unicode_wrap, ntpath.splitdrive), # file glob 'fileglob': fileglob, # types 'bool': to_bool, 'to_datetime': to_datetime, # date formatting 'strftime': strftime, # quote string for shell usage 'quote': quote, # hash filters # md5 hex digest of string 'md5': md5s, # sha1 hex digest of string 'sha1': checksum_s, # checksum of string as used by ansible for checksumming files 'checksum': checksum_s, # generic hashing 'password_hash': get_encrypted_password, 'hash': get_hash, # regex 'regex_replace': regex_replace, 'regex_escape': regex_escape, 'regex_search': regex_search, 'regex_findall': regex_findall, # ? : ; 'ternary': ternary, # random stuff 'random': rand, 'shuffle': randomize_list, # undefined 'mandatory': mandatory, # comment-style decoration 'comment': comment, # debug 'type_debug': lambda o: o.__class__.__name__, # Data structures 'combine': combine, 'extract': extract, 'flatten': flatten, 'dict2items': dict_to_list_of_dict_key_value_elements, 'items2dict': list_of_dict_key_value_elements_to_dict, 'subelements': subelements, 'split': partial(unicode_wrap, text_type.split), }
gpl-3.0
2,740,070,881,585,711,600
31.476621
120
0.611694
false
3.874055
false
false
false
stormi/tsunami
src/secondaires/familier/editeurs/famedit/__init__.py
1
6790
# -*-coding:Utf-8 -* # Copyright (c) 2014 LE GOFF Vincent # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT # OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """Package contenant l'éditeur 'famedit'. Si des redéfinitions de contexte-éditeur standard doivent être faites, elles seront placées dans ce package """ from primaires.interpreteur.editeur.choix import Choix from primaires.interpreteur.editeur.flag import Flag from primaires.interpreteur.editeur.entier import Entier from primaires.interpreteur.editeur.presentation import Presentation from primaires.interpreteur.editeur.selection import Selection from primaires.interpreteur.editeur.uniligne import Uniligne from primaires.scripting.editeurs.edt_script import EdtScript from secondaires.familier.constantes import * class EdtFamedit(Presentation): """Classe définissant l'éditeur de fiche de familier famedit.""" nom = "famedit" def __init__(self, personnage, fiche): """Constructeur de l'éditeur""" if personnage: instance_connexion = personnage.instance_connexion else: instance_connexion = None Presentation.__init__(self, instance_connexion, fiche) if personnage and fiche: self.construire(fiche) def __getnewargs__(self): return (None, None) def construire(self, fiche): """Construction de l'éditeur""" # Régimes regime = self.ajouter_choix("régime alimentaire", "r", Choix, fiche, "regime", REGIMES) regime.parent = self regime.prompt = "Régime alimentaire du familier : " regime.apercu = "{objet.regime}" regime.aide_courte = \ "Entrez le |ent|régime|ff| du familier ou |cmd|/|ff| pour revenir " \ "à la fenêtre parente.\n\nRégimes disponibles : {}.\n\n" \ "Régime actuel : |bc|{{objet.regime}}|ff|".format( ", ".join(REGIMES)) # Harnachements supportés harnachements = self.ajouter_choix("harnachement supportés", "h", Selection, fiche, "harnachements", TYPES_HARNACHEMENT) harnachements.parent = self harnachements.prompt = "Harnachements supportés : " harnachements.apercu = "{objet.str_harnachements}" harnachements.aide_courte = \ "Entrez un |ent|harnachement supporté|ff| pour l'ajouter " \ "ou le retirer\nou |cmd|/|ff| pour revenir à la fenêtre " \ "parente.\n\nHarnachements possibles 
: " + \ ", ".join(sorted(TYPES_HARNACHEMENT)) + "\nHarnachements " \ "supportés actuellement : {objet.str_harnachements}" # Stats pouvant progresser stats = self.ajouter_choix("stats pouvant progresser", "st", Selection, fiche, "stats_progres", ["force", "agilite", "robustesse", "intelligence", "charisme", "sensibilite"]) stats.parent = self stats.prompt = "Stats pouvant augmenter automatiquement : " stats.apercu = "{objet.str_stats_progres}" stats.aide_courte = \ "Entrez un |ent|nom de stat|ff| pour l'ajouter " \ "ou le retirer\nou |cmd|/|ff| pour revenir à la fenêtre " \ "parente.\n\nQuand le familier gagne un niveau, il va " \ "choisir aléatoirement parmi ces stats et les\naugmenter " \ "si il a des points d'entraînement disponibles\n\nStats " \ "automatiques actuelles : {objet.str_stats_progres}" # Monture monture = self.ajouter_choix("peut être monté", "m", Flag, fiche, "monture") monture.parent = self # Sorties verticales verticales = self.ajouter_choix( "peut emprunter les sorties verticales", "v", Flag, fiche, "sorties_verticales") verticales.parent = self # Aller en intérieur interieur = self.ajouter_choix("peut aller en intérieur", "l", Flag, fiche, "aller_interieur") interieur.parent = self # Difficulté d'apprivoisement difficulte = self.ajouter_choix("difficulté d'apprivoisement", "d", Entier, fiche, "difficulte_apprivoisement") difficulte.parent = self difficulte.apercu = "{objet.difficulte_apprivoisement}%" difficulte.prompt = "Entrez la difficulté d'apprivoisement du " \ "familier : " difficulte.aide_courte = \ "Entrez |ent|la difficulté d'apprivoisement|ff| du familier\n" \ "(entre |ent|1|ff| et |ent|100|ff|) ou |cmd|/|ff| pour " \ "revenir à la fenêtre parente.\n\nDifficulté actuelle : " \ "{objet.difficulte_apprivoisement}%" # Prix unitaire prix = self.ajouter_choix("prix unitaire", "u", Entier, fiche, "m_valeur") prix.parent = self prix.apercu = "{objet.m_valeur} pièces de bronze" prix.prompt = "Entrez le prix unitaire du familier : " prix.aide_courte = \ "Entrez |ent|le prix unitaire|ff| du familier" \ "ou |cmd|/|ff| pour revenir à la fenêtre parente.\n\n" \ "Prix unitaire actuel : {objet.m_valeur}" # Script scripts = self.ajouter_choix("scripts", "sc", EdtScript, fiche.script) scripts.parent = self
bsd-3-clause
-1,508,693,470,477,261,300
43.394737
81
0.655009
false
3.256757
false
false
false
JukeboxPipeline/jukebox-core
src/jukeboxcore/addons/guerilla/guerillamgmt.py
1
82524
from PySide import QtGui from jukeboxcore.log import get_logger log = get_logger(__name__) from jukeboxcore import ostool from jukeboxcore import djadapter from jukeboxcore.gui.main import JB_MainWindow, JB_Dialog, dt_to_qdatetime from jukeboxcore.gui import treemodel from jukeboxcore.gui import djitemdata from jukeboxcore.plugins import JB_CoreStandaloneGuiPlugin from jukeboxcore.gui.widgets.guerillamgmt_ui import Ui_guerillamgmt_mwin from jukeboxcore.gui.widgets.guerilla.projectcreator_ui import Ui_projectcreator_dialog from jukeboxcore.gui.widgets.guerilla.prjadder_ui import Ui_prjadder_dialog from jukeboxcore.gui.widgets.guerilla.seqcreator_ui import Ui_seqcreator_dialog from jukeboxcore.gui.widgets.guerilla.atypecreator_ui import Ui_atypecreator_dialog from jukeboxcore.gui.widgets.guerilla.atypeadder_ui import Ui_atypeadder_dialog from jukeboxcore.gui.widgets.guerilla.depcreator_ui import Ui_depcreator_dialog from jukeboxcore.gui.widgets.guerilla.depadder_ui import Ui_depadder_dialog from jukeboxcore.gui.widgets.guerilla.usercreator_ui import Ui_usercreator_dialog from jukeboxcore.gui.widgets.guerilla.useradder_ui import Ui_useradder_dialog from jukeboxcore.gui.widgets.guerilla.shotcreator_ui import Ui_shotcreator_dialog from jukeboxcore.gui.widgets.guerilla.assetcreator_ui import Ui_assetcreator_dialog from jukeboxcore.gui.widgets.guerilla.assetadder_ui import Ui_assetadder_dialog from jukeboxcore.gui.widgets.guerilla.taskcreator_ui import Ui_taskcreator_dialog class ProjectCreatorDialog(JB_Dialog, Ui_projectcreator_dialog): """A Dialog to create a project """ def __init__(self, parent=None, flags=0): """Initialize a new project creator dialog :param parent: the parent object :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(ProjectCreatorDialog, self).__init__(parent, flags) self.project = None self.setupUi(self) self.create_pb.clicked.connect(self.create_prj) def create_prj(self, ): """Create a project and store it in the self.project :returns: None :rtype: None :raises: None """ name = self.name_le.text() short = self.short_le.text() path = self.path_le.text() semester = self.semester_le.text() try: prj = djadapter.models.Project(name=name, short=short, path=path, semester=semester) prj.save() self.project = prj self.accept() except: log.exception("Could not create new project") class ProjectAdderDialog(JB_Dialog, Ui_prjadder_dialog): """A Dialog to add project to a project """ def __init__(self, atype=None, department=None, user=None, parent=None, flags=0): """Initialize a new project creator dialog :param atype: the atype to add the project to :type atype: :class:`djadapter.models.Atype` :param department: the department to add the project to :type department: :class:`djadapter.models.Department` :param parent: the parent object :param user: the user to tadd the project to :type user: :class:`djadapter.models.User` :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(ProjectAdderDialog, self).__init__(parent, flags) self._atype = atype self._dep = department self._user = user self.projects = [] self.setupUi(self) self.add_pb.clicked.connect(self.add_project) rootdata = treemodel.ListItemData(["Name", "Description"]) rootitem = treemodel.TreeItem(rootdata) if atype: projects = djadapter.projects.exclude(pk__in = atype.projects.all()) elif department: projects = djadapter.projects.exclude(pk__in = 
department.projects.all()) else: projects = djadapter.projects.exclude(users=user) for project in projects: projectdata = djitemdata.ProjectItemData(project) treemodel.TreeItem(projectdata, rootitem) self.model = treemodel.TreeModel(rootitem) self.prj_tablev.setModel(self.model) def add_project(self, ): """Add a project and store it in the self.projects :returns: None :rtype: None :raises: None """ i = self.prj_tablev.currentIndex() item = i.internalPointer() if item: project = item.internal_data() if self._atype: self._atype.projects.add(project) elif self._dep: self._dep.projects.add(project) else: project.users.add(self._user) self.projects.append(project) item.set_parent(None) class SequenceCreatorDialog(JB_Dialog, Ui_seqcreator_dialog): """A Dialog to create a sequence """ def __init__(self, project, parent=None, flags=0): """Initialize a new sequence creator dialog :param project: The project for the sequence :type project: :class:`jukeboxcore.djadapter.models.Project` :param parent: the parent object :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(SequenceCreatorDialog, self).__init__(parent, flags) self._project = project self.sequence = None self.setupUi(self) self.create_pb.clicked.connect(self.create_seq) def create_seq(self, ): """Create a sequence and store it in the self.sequence :returns: None :rtype: None :raises: None """ name = self.name_le.text() desc = self.desc_pte.toPlainText() try: seq = djadapter.models.Sequence(name=name, project=self._project, description=desc) seq.save() self.sequence = seq self.accept() except: log.exception("Could not create new sequence") class AtypeCreatorDialog(JB_Dialog, Ui_atypecreator_dialog): """A Dialog to create a atype """ def __init__(self, projects=None, parent=None, flags=0): """Initialize a new atype creator dialog :param parent: the parent object :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(AtypeCreatorDialog, self).__init__(parent, flags) self.projects = projects or [] self.atype = None self.setupUi(self) self.create_pb.clicked.connect(self.create_atype) def create_atype(self, ): """Create a atype and store it in the self.atype :returns: None :rtype: None :raises: None """ name = self.name_le.text() desc = self.desc_pte.toPlainText() try: atype = djadapter.models.Atype(name=name, description=desc) atype.save() for prj in self.projects: atype.projects.add(prj) self.atype = atype self.accept() except: log.exception("Could not create new assettype") class AtypeAdderDialog(JB_Dialog, Ui_atypeadder_dialog): """A Dialog to add atype to a project """ def __init__(self, project, parent=None, flags=0): """Initialize a new atype creator dialog :param project: The project for the atypes :type project: :class:`jukeboxcore.djadapter.models.Project` :param parent: the parent object :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(AtypeAdderDialog, self).__init__(parent, flags) self._project = project self.atypes = [] self.setupUi(self) self.add_pb.clicked.connect(self.add_atype) rootdata = treemodel.ListItemData(["Name", "Description"]) rootitem = treemodel.TreeItem(rootdata) atypes = djadapter.atypes.exclude(projects=project) for atype in atypes: atypedata = djitemdata.AtypeItemData(atype) treemodel.TreeItem(atypedata, rootitem) self.model = treemodel.TreeModel(rootitem) 
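#display the asset types that are not yet assigned to this project in the dialog's table view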
self.atype_tablev.setModel(self.model) def add_atype(self, ): """Add a atype and store it in the self.atypes :returns: None :rtype: None :raises: None """ i = self.atype_tablev.currentIndex() item = i.internalPointer() if item: atype = item.internal_data() atype.projects.add(self._project) self.atypes.append(atype) item.set_parent(None) class DepCreatorDialog(JB_Dialog, Ui_depcreator_dialog): """A Dialog to create a dep """ def __init__(self, projects=None, parent=None, flags=0): """Initialize a new dep creator dialog :param parent: the parent object :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(DepCreatorDialog, self).__init__(parent, flags) self.projects = projects or [] self.dep = None self.setupUi(self) self.create_pb.clicked.connect(self.create_dep) def create_dep(self, ): """Create a dep and store it in the self.dep :returns: None :rtype: None :raises: None """ name = self.name_le.text() short = self.short_le.text() assetflag = self.asset_rb.isChecked() ordervalue = self.ordervalue_sb.value() desc = self.desc_pte.toPlainText() try: dep = djadapter.models.Department(name=name, short=short, assetflag=assetflag, ordervalue=ordervalue, description=desc) dep.save() for prj in self.projects: dep.projects.add(prj) self.dep = dep self.accept() except: log.exception("Could not create new department.") class DepAdderDialog(JB_Dialog, Ui_depadder_dialog): """A Dialog to add departments to a project """ def __init__(self, project, parent=None, flags=0): """Initialize a new dep creator dialog :param project: The project for the deps :type project: :class:`jukeboxcore.djadapter.models.Project` :param parent: the parent object :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(DepAdderDialog, self).__init__(parent, flags) self._project = project self.deps = [] self.setupUi(self) self.add_pb.clicked.connect(self.add_dep) rootdata = treemodel.ListItemData(["Name", "Description", "Ordervalue"]) rootitem = treemodel.TreeItem(rootdata) deps = djadapter.departments.exclude(projects=project) for dep in deps: depdata = djitemdata.DepartmentItemData(dep) treemodel.TreeItem(depdata, rootitem) self.model = treemodel.TreeModel(rootitem) self.dep_tablev.setModel(self.model) def add_dep(self, ): """Add a dep and store it in the self.deps :returns: None :rtype: None :raises: None """ i = self.dep_tablev.currentIndex() item = i.internalPointer() if item: dep = item.internal_data() dep.projects.add(self._project) self.deps.append(dep) item.set_parent(None) class UserCreatorDialog(JB_Dialog, Ui_usercreator_dialog): """A Dialog to create a user """ def __init__(self, projects=None, tasks=None, parent=None, flags=0): """Initialize a new user creator dialog :param projects: The projects for the user :type projects: list of :class:`jukeboxcore.djadapter.models.Project` :param tasks: The tasks for the user :type tasks: list of :class:`jukeboxcore.djadapter.models.Task` :param parent: the parent object :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(UserCreatorDialog, self).__init__(parent, flags) self.projects = projects or [] self.tasks = tasks or [] self.user = None self.setupUi(self) self.create_pb.clicked.connect(self.create_user) def create_user(self, ): """Create a user and store it in the self.user :returns: None :rtype: None :raises: None """ name = 
self.username_le.text() if not name: self.username_le.setPlaceholderText("Please provide a username.") return first = self.first_le.text() last = self.last_le.text() email = self.email_le.text() try: user = djadapter.models.User(username=name, first_name=first, last_name=last, email=email) user.save() for prj in self.projects: prj.users.add(user) for task in self.tasks: task.users.add(user) self.user = user self.accept() except: log.exception("Could not create new assettype") class UserAdderDialog(JB_Dialog, Ui_useradder_dialog): """A Dialog to add user to a project """ def __init__(self, project=None, task=None, parent=None, flags=0): """Initialize a new user creator dialog :param project: The project for the users :type project: :class:`jukeboxcore.djadapter.models.Project` :param task: The task for the users :type task: :class:`jukeboxcore.djadapter.models.Task` :param parent: the parent object :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(UserAdderDialog, self).__init__(parent, flags) self._project = project self._task = task self.users = [] self.setupUi(self) self.add_pb.clicked.connect(self.add_user) rootdata = treemodel.ListItemData(["Name", "Description"]) rootitem = treemodel.TreeItem(rootdata) if project: users = djadapter.users.exclude(project = project) else: users = djadapter.users.exclude(task = task) for user in users: userdata = djitemdata.UserItemData(user) treemodel.TreeItem(userdata, rootitem) self.model = treemodel.TreeModel(rootitem) self.user_tablev.setModel(self.model) def add_user(self, ): """Add a user and store it in the self.users :returns: None :rtype: None :raises: None """ i = self.user_tablev.currentIndex() item = i.internalPointer() if item: user = item.internal_data() if self._project: self._project.users.add(user) else: self._task.users.add(user) self.users.append(user) item.set_parent(None) class ShotCreatorDialog(JB_Dialog, Ui_shotcreator_dialog): """A Dialog to create a shot """ def __init__(self, sequence, parent=None, flags=0): """Initialize a new shot creator dialog :param sequence: the sequence for the shot :type sequence: :class:`jukeboxcore.djadapter.models.Shot` :param parent: the parent object :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(ShotCreatorDialog, self).__init__(parent, flags) self.sequence = sequence self.shot = None self.setupUi(self) self.create_pb.clicked.connect(self.create_shot) def create_shot(self, ): """Create a shot and store it in the self.shot :returns: None :rtype: None :raises: None """ name = self.name_le.text() if not name: self.name_le.setPlaceholderText("Please enter a name!") return desc = self.desc_pte.toPlainText() try: shot = djadapter.models.Shot(sequence=self.sequence, project=self.sequence.project, name=name, description=desc) shot.save() self.shot = shot self.accept() except: log.exception("Could not create new shot") class AssetCreatorDialog(JB_Dialog, Ui_assetcreator_dialog): """A Dialog to create a asset """ def __init__(self, project, atype=None, parent=None, flags=0): """Initialize a new asset creator dialog :param project: the project of the asset :type project: :class:`jukeboxcore.djadapter.models.Project` :param parent: the parent object :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(AssetCreatorDialog, self).__init__(parent, flags) 
self.project = project self.atype = atype self.asset = None self.setupUi(self) if not self.atype: self.atypes = list(project.atype_set.all()) atrootdata = treemodel.ListItemData(["Name"]) atrootitem = treemodel.TreeItem(atrootdata) for at in self.atypes: data = djitemdata.AtypeItemData(at) treemodel.TreeItem(data, atrootitem) self.atypemodel = treemodel.TreeModel(atrootitem) self.atype_cb.setModel(self.atypemodel) else: self.atype_cb.setVisible(False) self.atype_lb.setVisible(False) self.create_pb.clicked.connect(self.create_asset) def create_asset(self, ): """Create a asset and store it in the self.asset :returns: None :rtype: None :raises: None """ name = self.name_le.text() if not name: self.name_le.setPlaceholderText("Please enter a name!") return desc = self.desc_pte.toPlainText() if not self.atype: atypei = self.atype_cb.currentIndex() assert atypei >= 0 self.atype = self.atypes[atypei] try: asset = djadapter.models.Asset(atype=self.atype, project=self.project, name=name, description=desc) asset.save() self.asset = asset self.accept() except: log.exception("Could not create new asset") class AssetAdderDialog(JB_Dialog, Ui_assetadder_dialog): """A Dialog to add asset to a project """ def __init__(self, shot=None, asset=None, parent=None, flags=0): """Initialize a new asset creator dialog :param shot: The shot for the assets :type shot: :class:`jukeboxcore.djadapter.models.Shot` :param asset: The asset for the assets :type asset: :class:`jukeboxcore.djadapter.models.Asset` :param parent: the parent object :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(AssetAdderDialog, self).__init__(parent, flags) self._shot = shot self._asset = asset self.assets = [] self.setupUi(self) self.add_pb.clicked.connect(self.add_asset) rootdata = treemodel.ListItemData(["Name"]) rootitem = treemodel.TreeItem(rootdata) self.model = treemodel.TreeModel(rootitem) self.asset_treev.setModel(self.model) atypes = {} if shot: assets = djadapter.assets.exclude(pk__in = shot.assets.all()).filter(project=shot.project) else: assets = djadapter.assets.exclude(pk__in = asset.assets.all()).filter(project=asset.project) for asset in assets: atype = asset.atype atypeitem = atypes.get(atype) if not atypeitem: atypedata = djitemdata.AtypeItemData(atype) atypeitem = treemodel.TreeItem(atypedata, rootitem) atypes[atype] = atypeitem assetdata = djitemdata.AssetItemData(asset) treemodel.TreeItem(assetdata, atypeitem) def add_asset(self, ): """Add a asset and store it in the self.assets :returns: None :rtype: None :raises: None """ i = self.asset_treev.currentIndex() item = i.internalPointer() if item: asset = item.internal_data() if not isinstance(asset, djadapter.models.Asset): return if self._shot: self._shot.assets.add(asset) else: self._asset.assets.add(asset) self.assets.append(asset) item.set_parent(None) class TaskCreatorDialog(JB_Dialog, Ui_taskcreator_dialog): """A Dialog to create a task """ def __init__(self, element, parent=None, flags=0): """Initialize a new task creator dialog :param element: the element for the task :type element: :class:`jukeboxcore.djadapter.models.Asset` | :class:`jukeboxcore.djadapter.models.Shot` :param parent: the parent object :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(TaskCreatorDialog, self).__init__(parent, flags) self.element = element self.task = None self.setupUi(self) 
self.create_pb.clicked.connect(self.create_task) qs = djadapter.departments.filter(projects=element.project).exclude(pk__in = element.tasks.all().values_list('department', flat=True)) qs = qs.filter(assetflag=isinstance(element, djadapter.models.Asset)) self.deps = list(qs) atrootdata = treemodel.ListItemData(["Name"]) atrootitem = treemodel.TreeItem(atrootdata) for dep in self.deps: data = djitemdata.DepartmentItemData(dep) treemodel.TreeItem(data, atrootitem) self.model = treemodel.TreeModel(atrootitem) self.dep_cb.setModel(self.model) def create_task(self, ): """Create a task and store it in the self.task :returns: None :rtype: None :raises: None """ depi = self.dep_cb.currentIndex() assert depi >= 0 dep = self.deps[depi] deadline = self.deadline_de.dateTime().toPython() try: task = djadapter.models.Task(department=dep, project=self.element.project, element=self.element, deadline=deadline) task.save() self.task = task self.accept() except: log.exception("Could not create new task") class GuerillaMGMTWin(JB_MainWindow, Ui_guerillamgmt_mwin): """A tool for creating entries in the database and a little project management. """ def __init__(self, parent=None, flags=0): """Initialize a new GuerillaMGMTwin :param parent: the parent object :type parent: :class:`QtCore.QObject` :param flags: the window flags :type flags: :data:`QtCore.Qt.WindowFlags` :raises: None """ super(GuerillaMGMTWin, self).__init__(parent, flags) self.cur_prj = None self.cur_seq = None self.cur_shot = None self.cur_atype = None self.cur_asset = None self.cur_dep = None self.cur_task = None self.cur_user = None self.setupUi(self) self.setup_ui() try: self.setup_signals() except: log.exception("Exception setting up signals") def setup_ui(self, ): """Create all necessary ui elements for the tool :returns: None :rtype: None :raises: None """ log.debug("Setting up the ui") self.setup_prjs_page() self.setup_prj_page() self.setup_seq_page() self.setup_shot_page() self.setup_atype_page() self.setup_asset_page() self.setup_dep_page() self.setup_task_page() self.setup_users_page() self.setup_user_page() def setup_prjs_page(self, ): """Create and set the model on the projects page :returns: None :rtype: None :raises: None """ self.prjs_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) log.debug("Loading projects for projects page.") rootdata = treemodel.ListItemData(['Name', 'Short', 'Path', 'Created', 'Semester', 'Status', 'Resolution', 'FPS', 'Scale']) rootitem = treemodel.TreeItem(rootdata) prjs = djadapter.projects.all() for prj in prjs: prjdata = djitemdata.ProjectItemData(prj) treemodel.TreeItem(prjdata, rootitem) self.prjs_model = treemodel.TreeModel(rootitem) self.prjs_tablev.setModel(self.prjs_model) def setup_prj_page(self, ): """Create and set the model on the project page :returns: None :rtype: None :raises: None """ self.prj_seq_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) self.prj_atype_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) self.prj_dep_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) self.prj_user_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) def setup_seq_page(self, ): """Create and set the model on the sequence page :returns: None :rtype: None :raises: None """ self.seq_shot_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) def setup_shot_page(self, ): """Create and set the model on the shot page :returns: None :rtype: None :raises: None """ 
self.shot_asset_treev.header().setResizeMode(QtGui.QHeaderView.ResizeToContents) self.shot_task_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) def setup_atype_page(self, ): """Create and set the model on the atype page :returns: None :rtype: None :raises: None """ pass def setup_asset_page(self, ): """Create and set the model on the asset page :returns: None :rtype: None :raises: None """ self.asset_asset_treev.header().setResizeMode(QtGui.QHeaderView.ResizeToContents) self.asset_task_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) def setup_dep_page(self, ): """Create and set the model on the department page :returns: None :rtype: None :raises: None """ self.dep_prj_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) def setup_task_page(self, ): """Create and set the model on the task page :returns: None :rtype: None :raises: None """ self.task_user_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) def setup_users_page(self, ): """Create and set the model on the users page :returns: None :rtype: None :raises: None """ self.users_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) log.debug("Loading users for users page.") rootdata = treemodel.ListItemData(['Username', 'First', 'Last', 'Email']) rootitem = treemodel.TreeItem(rootdata) users = djadapter.users.all() for usr in users: usrdata = djitemdata.UserItemData(usr) treemodel.TreeItem(usrdata, rootitem) self.users_model = treemodel.TreeModel(rootitem) self.users_tablev.setModel(self.users_model) def setup_user_page(self, ): """Create and set the model on the user page :returns: None :rtype: None :raises: None """ self.user_prj_tablev.horizontalHeader().setResizeMode(QtGui.QHeaderView.ResizeToContents) self.user_task_treev.header().setResizeMode(QtGui.QHeaderView.ResizeToContents) def setup_signals(self, ): """Connect the signals with the slots to make the ui functional :returns: None :rtype: None :raises: None """ log.debug("Setting up signals.") self.setup_prjs_signals() self.setup_prj_signals() self.setup_seq_signals() self.setup_shot_signals() self.setup_atype_signals() self.setup_asset_signals() self.setup_dep_signals() self.setup_task_signals() self.setup_users_signals() self.setup_user_signals() log.debug("Signals are set up.") def setup_prjs_signals(self, ): """Setup the signals for the projects page :returns: None :rtype: None :raises: None """ log.debug("Setting up projects page signals.") self.prjs_prj_view_pb.clicked.connect(self.prjs_view_prj) self.prjs_prj_create_pb.clicked.connect(self.prjs_create_prj) def setup_prj_signals(self, ): """Setup the signals for the project page :returns: None :rtype: None :raises: None """ log.debug("Setting up project page signals.") self.prj_seq_view_pb.clicked.connect(self.prj_view_seq) self.prj_seq_create_pb.clicked.connect(self.prj_create_seq) self.prj_atype_view_pb.clicked.connect(self.prj_view_atype) self.prj_atype_add_pb.clicked.connect(self.prj_add_atype) self.prj_atype_create_pb.clicked.connect(self.prj_create_atype) self.prj_dep_view_pb.clicked.connect(self.prj_view_dep) self.prj_dep_add_pb.clicked.connect(self.prj_add_dep) self.prj_dep_create_pb.clicked.connect(self.prj_create_dep) self.prj_user_view_pb.clicked.connect(self.prj_view_user) self.prj_user_add_pb.clicked.connect(self.prj_add_user) self.prj_user_remove_pb.clicked.connect(self.prj_remove_user) self.prj_user_create_pb.clicked.connect(self.prj_create_user) 
self.prj_path_view_pb.clicked.connect(self.prj_show_path) self.prj_desc_pte.textChanged.connect(self.prj_save) self.prj_semester_le.editingFinished.connect(self.prj_save) self.prj_fps_dsb.valueChanged.connect(self.prj_save) self.prj_res_x_sb.valueChanged.connect(self.prj_save) self.prj_res_y_sb.valueChanged.connect(self.prj_save) self.prj_scale_cb.currentIndexChanged.connect(self.prj_save) def setup_seq_signals(self, ): """Setup the signals for the sequence page :returns: None :rtype: None :raises: None """ log.debug("Setting up sequence page signals.") self.seq_prj_view_pb.clicked.connect(self.seq_view_prj) self.seq_shot_view_pb.clicked.connect(self.seq_view_shot) self.seq_shot_create_pb.clicked.connect(self.seq_create_shot) self.seq_desc_pte.textChanged.connect(self.seq_save) def setup_shot_signals(self, ): """Setup the signals for the shot page :returns: None :rtype: None :raises: None """ log.debug("Setting up shot page signals.") self.shot_prj_view_pb.clicked.connect(self.shot_view_prj) self.shot_seq_view_pb.clicked.connect(self.shot_view_seq) self.shot_asset_view_pb.clicked.connect(self.shot_view_asset) self.shot_asset_create_pb.clicked.connect(self.shot_create_asset) self.shot_asset_add_pb.clicked.connect(self.shot_add_asset) self.shot_asset_remove_pb.clicked.connect(self.shot_remove_asset) self.shot_task_view_pb.clicked.connect(self.shot_view_task) self.shot_task_create_pb.clicked.connect(self.shot_create_task) self.shot_start_sb.valueChanged.connect(self.shot_save) self.shot_end_sb.valueChanged.connect(self.shot_save) self.shot_handle_sb.valueChanged.connect(self.shot_save) self.shot_desc_pte.textChanged.connect(self.shot_save) def setup_atype_signals(self, ): """Setup the signals for the assettype page :returns: None :rtype: None :raises: None """ log.debug("Setting up atype page signals.") self.asset_prj_view_pb.clicked.connect(self.asset_view_prj) self.asset_atype_view_pb.clicked.connect(self.asset_view_atype) self.atype_asset_view_pb.clicked.connect(self.atype_view_asset) self.atype_asset_create_pb.clicked.connect(self.atype_create_asset) self.atype_desc_pte.textChanged.connect(self.atype_save) def setup_asset_signals(self, ): """Setup the signals for the asset page :returns: None :rtype: None :raises: None """ log.debug("Setting up asset signals.") self.asset_asset_view_pb.clicked.connect(self.asset_view_asset) self.asset_asset_create_pb.clicked.connect(self.asset_create_asset) self.asset_asset_add_pb.clicked.connect(self.asset_add_asset) self.asset_asset_remove_pb.clicked.connect(self.asset_remove_asset) self.asset_task_view_pb.clicked.connect(self.asset_view_task) self.asset_task_create_pb.clicked.connect(self.asset_create_task) self.asset_desc_pte.textChanged.connect(self.asset_save) def setup_dep_signals(self, ): """Setup the signals for the department page :returns: None :rtype: None :raises: None """ log.debug("Setting up department page signals.") self.dep_prj_view_pb.clicked.connect(self.dep_view_prj) self.dep_prj_add_pb.clicked.connect(self.dep_add_prj) self.dep_prj_remove_pb.clicked.connect(self.dep_remove_prj) self.dep_desc_pte.textChanged.connect(self.dep_save) self.dep_ordervalue_sb.valueChanged.connect(self.dep_save) def setup_task_signals(self, ): """Setup the signals for the task page :returns: None :rtype: None :raises: None """ log.debug("Setting up task page signals.") self.task_user_view_pb.clicked.connect(self.task_view_user) self.task_user_add_pb.clicked.connect(self.task_add_user) self.task_user_remove_pb.clicked.connect(self.task_remove_user) 
self.task_dep_view_pb.clicked.connect(self.task_view_dep) self.task_link_view_pb.clicked.connect(self.task_view_link) self.task_deadline_de.dateChanged.connect(self.task_save) self.task_status_cb.currentIndexChanged.connect(self.task_save) def setup_users_signals(self, ): """Setup the signals for the users page :returns: None :rtype: None :raises: None """ log.debug("Setting up users page signals.") self.users_user_view_pb.clicked.connect(self.users_view_user) self.users_user_create_pb.clicked.connect(self.create_user) def setup_user_signals(self, ): """Setup the signals for the user page :returns: None :rtype: None :raises: None """ log.debug("Setting up user page signals.") self.user_task_view_pb.clicked.connect(self.user_view_task) self.user_prj_view_pb.clicked.connect(self.user_view_prj) self.user_prj_add_pb.clicked.connect(self.user_add_prj) self.user_prj_remove_pb.clicked.connect(self.user_remove_prj) self.user_username_le.editingFinished.connect(self.user_save) self.user_first_le.editingFinished.connect(self.user_save) self.user_last_le.editingFinished.connect(self.user_save) self.user_email_le.editingFinished.connect(self.user_save) def prjs_view_prj(self, *args, **kwargs): """View the, in the projects table view selected, project. :returns: None :rtype: None :raises: None """ i = self.prjs_tablev.currentIndex() item = i.internalPointer() if item: prj = item.internal_data() self.view_prj(prj) def prjs_create_prj(self, *args, **kwargs): """Create a new project :returns: None :rtype: None :raises: None """ self.create_prj() def view_prj(self, prj): """View the given project on the project page :param prj: the project to view :type prj: :class:`jukeboxcore.djadapter.models.Project` :returns: None :rtype: None :raises: None """ log.debug('Viewing project %s', prj.name) self.cur_prj = None self.pages_tabw.setCurrentIndex(1) self.prj_name_le.setText(prj.name) self.prj_short_le.setText(prj.short) self.prj_path_le.setText(prj.path) self.prj_desc_pte.setPlainText(prj.description) self.prj_created_dte.setDateTime(dt_to_qdatetime(prj.date_created)) self.prj_semester_le.setText(prj.semester) self.prj_fps_dsb.setValue(prj.framerate) self.prj_res_x_sb.setValue(prj.resx) self.prj_res_y_sb.setValue(prj.resy) scalemap = {"m": 2, "meter": 2, "mm": 0, "millimeter": 0, "cm": 1, "centimeter": 1, "km": 3, "kilometer": 3, "inch": 4, "foot": 5, "yard": 6, "mile": 7} scaleindex = scalemap.get(prj.scale, -1) log.debug("Setting index of project scale combobox to %s. 
Scale is %s", scaleindex, prj.scale) self.prj_scale_cb.setCurrentIndex(scaleindex) seqrootdata = treemodel.ListItemData(['Name', "Description"]) seqrootitem = treemodel.TreeItem(seqrootdata) for seq in prj.sequence_set.all(): seqdata = djitemdata.SequenceItemData(seq) treemodel.TreeItem(seqdata, seqrootitem) self.prj_seq_model = treemodel.TreeModel(seqrootitem) self.prj_seq_tablev.setModel(self.prj_seq_model) atyperootdata = treemodel.ListItemData(['Name', "Description"]) atyperootitem = treemodel.TreeItem(atyperootdata) for atype in prj.atype_set.all(): atypedata = djitemdata.AtypeItemData(atype) treemodel.TreeItem(atypedata, atyperootitem) self.prj_atype_model = treemodel.TreeModel(atyperootitem) self.prj_atype_tablev.setModel(self.prj_atype_model) deprootdata = treemodel.ListItemData(['Name', "Description", "Ordervalue"]) deprootitem = treemodel.TreeItem(deprootdata) for dep in prj.department_set.all(): depdata = djitemdata.DepartmentItemData(dep) treemodel.TreeItem(depdata, deprootitem) self.prj_dep_model = treemodel.TreeModel(deprootitem) self.prj_dep_tablev.setModel(self.prj_dep_model) userrootdata = treemodel.ListItemData(['Username', 'First', 'Last', 'Email']) userrootitem = treemodel.TreeItem(userrootdata) for user in prj.users.all(): userdata = djitemdata.UserItemData(user) treemodel.TreeItem(userdata, userrootitem) self.prj_user_model = treemodel.TreeModel(userrootitem) self.prj_user_tablev.setModel(self.prj_user_model) self.cur_prj = prj def create_prj(self, atypes=None, deps=None): """Create and return a new project :param atypes: add the given atypes to the project :type atypes: list | None :param deps: add the given departmetns to the project :type deps: list | None :returns: The created project or None :rtype: None | :class:`jukeboxcore.djadapter.models.Project` :raises: None """ dialog = ProjectCreatorDialog(parent=self) dialog.exec_() prj = dialog.project if prj and atypes: for at in atypes: at.projects.add(prj) at.save() if prj and deps: for dep in deps: dep.projects.add(prj) dep.save() if prj: prjdata = djitemdata.ProjectItemData(prj) treemodel.TreeItem(prjdata, self.prjs_model.root) return prj def prj_view_seq(self, *args, **kwargs): """View the, in the prj_seq_tablev selected, sequence. 
:returns: None :rtype: None :raises: None """ if not self.cur_prj: return i = self.prj_seq_tablev.currentIndex() item = i.internalPointer() if item: seq = item.internal_data() self.view_seq(seq) def prj_create_seq(self, *args, **kwargs): """Create a new Sequence for the current project :returns: None :rtype: None :raises: None """ if not self.cur_prj: return seq = self.create_seq(project=self.cur_prj) if seq: seqdata = djitemdata.SequenceItemData(seq) treemodel.TreeItem(seqdata, self.prj_seq_model.root) def view_seq(self, seq): """View the given sequence on the sequence page :param seq: the sequence to view :type seq: :class:`jukeboxcore.djadapter.models.Sequence` :returns: None :rtype: None :raises: None """ log.debug('Viewing sequence %s', seq.name) self.cur_seq = None self.pages_tabw.setCurrentIndex(2) self.seq_name_le.setText(seq.name) self.seq_prj_le.setText(seq.project.name) self.seq_desc_pte.setPlainText(seq.description) shotrootdata = treemodel.ListItemData(['Name', "Description", "Duration", "Start", "End"]) shotrootitem = treemodel.TreeItem(shotrootdata) for shot in seq.shot_set.all(): shotdata = djitemdata.ShotItemData(shot) treemodel.TreeItem(shotdata, shotrootitem) self.seq_shot_model = treemodel.TreeModel(shotrootitem) self.seq_shot_tablev.setModel(self.seq_shot_model) self.cur_seq = seq def create_seq(self, project): """Create and return a new sequence :param project: the project for the sequence :type deps: :class:`jukeboxcore.djadapter.models.Project` :returns: The created sequence or None :rtype: None | :class:`jukeboxcore.djadapter.models.Sequence` :raises: None """ dialog = SequenceCreatorDialog(project=project, parent=self) dialog.exec_() seq = dialog.sequence return seq def prj_view_atype(self, *args, **kwargs): """View the, in the atype table view selected, assettype. :returns: None :rtype: None :raises: None """ if not self.cur_prj: return i = self.prj_atype_tablev.currentIndex() item = i.internalPointer() if item: atype = item.internal_data() self.view_atype(atype) def prj_add_atype(self, *args, **kwargs): """Add more assettypes to the project. 
:returns: None :rtype: None :raises: None """ if not self.cur_prj: return dialog = AtypeAdderDialog(project=self.cur_prj) dialog.exec_() atypes = dialog.atypes for atype in atypes: atypedata = djitemdata.AtypeItemData(atype) treemodel.TreeItem(atypedata, self.prj_atype_model.root) def prj_create_atype(self, *args, **kwargs): """Create a new project :returns: None :rtype: None :raises: None """ if not self.cur_prj: return atype = self.create_atype(projects=[self.cur_prj]) if atype: atypedata = djitemdata.AtypeItemData(atype) treemodel.TreeItem(atypedata, self.prj_atype_model.root) def create_atype(self, projects): """Create and return a new atype :param projects: the projects for the atype :type projects: :class:`jukeboxcore.djadapter.models.Project` :returns: The created atype or None :rtype: None | :class:`jukeboxcore.djadapter.models.Atype` :raises: None """ dialog = AtypeCreatorDialog(projects=projects, parent=self) dialog.exec_() atype = dialog.atype return atype def view_atype(self, atype): """View the given atype on the atype page :param atype: the atype to view :type atype: :class:`jukeboxcore.djadapter.models.Atype` :returns: None :rtype: None :raises: None """ if not self.cur_prj: return log.debug('Viewing atype %s', atype.name) self.cur_atype = None self.pages_tabw.setCurrentIndex(4) self.atype_name_le.setText(atype.name) self.atype_desc_pte.setPlainText(atype.description) assetrootdata = treemodel.ListItemData(['Name', 'Description']) assetrootitem = treemodel.TreeItem(assetrootdata) self.atype_asset_model = treemodel.TreeModel(assetrootitem) self.atype_asset_treev.setModel(self.atype_asset_model) for a in djadapter.assets.filter(project=self.cur_prj, atype=atype): assetdata = djitemdata.AssetItemData(a) treemodel.TreeItem(assetdata, assetrootitem) self.cur_atype = atype def prj_view_dep(self, *args, **kwargs): """View the, in the dep table view selected, department. :returns: None :rtype: None :raises: None """ if not self.cur_prj: return i = self.prj_dep_tablev.currentIndex() item = i.internalPointer() if item: dep = item.internal_data() self.view_dep(dep) def prj_add_dep(self, *args, **kwargs): """Add more departments to the project. 
:returns: None :rtype: None :raises: None """ if not self.cur_prj: return dialog = DepAdderDialog(project=self.cur_prj) dialog.exec_() deps = dialog.deps for dep in deps: depdata = djitemdata.DepartmentItemData(dep) treemodel.TreeItem(depdata, self.prj_dep_model.root) def prj_create_dep(self, *args, **kwargs): """Create a new project :returns: None :rtype: None :raises: None """ if not self.cur_prj: return dep = self.create_dep(projects=[self.cur_prj]) if dep: depdata = djitemdata.DepartmentItemData(dep) treemodel.TreeItem(depdata, self.prj_dep_model.root) def create_dep(self, projects): """Create and return a new dep :param projects: the projects for the dep :type projects: :class:`jukeboxcore.djadapter.models.Project` :returns: The created dep or None :rtype: None | :class:`jukeboxcore.djadapter.models.Dep` :raises: None """ dialog = DepCreatorDialog(projects=projects, parent=self) dialog.exec_() dep = dialog.dep return dep def view_dep(self, dep): """View the given department on the department page :param dep: the dep to view :type dep: :class:`jukeboxcore.djadapter.models.Department` :returns: None :rtype: None :raises: None """ log.debug('Viewing department %s', dep.name) self.cur_dep = None self.pages_tabw.setCurrentIndex(6) self.dep_name_le.setText(dep.name) self.dep_short_le.setText(dep.short) self.dep_shot_rb.setChecked(not dep.assetflag) self.dep_asset_rb.setChecked(dep.assetflag) self.dep_ordervalue_sb.setValue(dep.ordervalue) self.dep_desc_pte.setPlainText(dep.description) rootdata = treemodel.ListItemData(['Name', 'Short', 'Path', 'Created', 'Semester', 'Status', 'Resolution', 'FPS', 'Scale']) rootitem = treemodel.TreeItem(rootdata) prjs = dep.projects.all() for prj in prjs: prjdata = djitemdata.ProjectItemData(prj) treemodel.TreeItem(prjdata, rootitem) self.dep_prj_model = treemodel.TreeModel(rootitem) self.dep_prj_tablev.setModel(self.dep_prj_model) self.cur_dep = dep def prj_view_user(self, *args, **kwargs): """View the, in the user table view selected, user. :returns: None :rtype: None :raises: None """ if not self.cur_prj: return i = self.prj_user_tablev.currentIndex() item = i.internalPointer() if item: user = item.internal_data() self.view_user(user) def prj_add_user(self, *args, **kwargs): """Add more users to the project. :returns: None :rtype: None :raises: None """ if not self.cur_prj: return dialog = UserAdderDialog(project=self.cur_prj) dialog.exec_() users = dialog.users for user in users: userdata = djitemdata.UserItemData(user) treemodel.TreeItem(userdata, self.prj_user_model.root) self.cur_prj.save() def prj_remove_user(self, *args, **kwargs): """Remove the, in the user table view selected, user. 
:returns: None :rtype: None :raises: None """ if not self.cur_prj: return i = self.prj_user_tablev.currentIndex() item = i.internalPointer() if item: user = item.internal_data() log.debug("Removing user %s.", user.username) item.set_parent(None) self.cur_prj.users.remove(user) def prj_create_user(self, *args, **kwargs): """Create a new project :returns: None :rtype: None :raises: None """ if not self.cur_prj: return user = self.create_user(projects=[self.cur_prj]) if user: userdata = djitemdata.UserItemData(user) treemodel.TreeItem(userdata, self.prj_user_model.root) def create_user(self, projects=None, tasks=None): """Create and return a new user :param projects: the projects for the user :type projects: list of :class:`jukeboxcore.djadapter.models.Project` :param tasks: the tasks for the user :type tasks: list of :class:`jukeboxcore.djadapter.models.Task` :returns: The created user or None :rtype: None | :class:`jukeboxcore.djadapter.models.User` :raises: None """ projects = projects or [] tasks = tasks or [] dialog = UserCreatorDialog(projects=projects, tasks=tasks, parent=self) dialog.exec_() user = dialog.user if user: userdata = djitemdata.UserItemData(user) treemodel.TreeItem(userdata, self.users_model.root) return user def view_user(self, user): """View the given user on the user page :param user: the user to view :type user: :class:`jukeboxcore.djadapter.models.User` :returns: None :rtype: None :raises: None """ log.debug('Viewing user %s', user.username) self.cur_user = None self.pages_tabw.setCurrentIndex(9) self.user_username_le.setText(user.username) self.user_first_le.setText(user.first_name) self.user_last_le.setText(user.last_name) self.user_email_le.setText(user.email) prjrootdata = treemodel.ListItemData(['Name', 'Short', 'Path', 'Created', 'Semester', 'Status', 'Resolution', 'FPS', 'Scale']) prjrootitem = treemodel.TreeItem(prjrootdata) prjs = djadapter.projects.filter(users=user) for prj in prjs: prjdata = djitemdata.ProjectItemData(prj) treemodel.TreeItem(prjdata, prjrootitem) self.user_prj_model = treemodel.TreeModel(prjrootitem) self.user_prj_tablev.setModel(self.user_prj_model) taskrootdata = treemodel.ListItemData(['Name']) taskrootitem = treemodel.TreeItem(taskrootdata) self.user_task_model = treemodel.TreeModel(taskrootitem) self.user_task_treev.setModel(self.user_task_model) tasks = djadapter.tasks.filter(users=user) assets = {} shots = {} atypes = {} seqs = {} prjs = {} for t in tasks: tdata = djitemdata.TaskItemData(t) titem = treemodel.TreeItem(tdata) e = t.element if isinstance(e, djadapter.models.Asset): eitem = assets.get(e) if not eitem: edata = djitemdata.AssetItemData(e) eitem = treemodel.TreeItem(edata) assets[e] = eitem egrp = e.atype egrpitem = atypes.get(egrp) if not egrpitem: egrpdata = djitemdata.AtypeItemData(egrp) egrpitem = treemodel.TreeItem(egrpdata) atypes[egrp] = egrpitem else: eitem = shots.get(e) if not eitem: edata = djitemdata.ShotItemData(e) eitem = treemodel.TreeItem(edata) shots[e] = eitem egrp = e.sequence egrpitem = seqs.get(egrp) if not egrpitem: egrpdata = djitemdata.SequenceItemData(egrp) egrpitem = treemodel.TreeItem(egrpdata) seqs[egrp] = egrpitem if eitem not in egrpitem.childItems: eitem.set_parent(egrpitem) prj = egrp.project prjitem = prjs.get(prj) if not prjitem: prjdata = djitemdata.ProjectItemData(prj) prjitem = treemodel.TreeItem(prjdata, taskrootitem) prjs[prj] = prjitem assetdata = treemodel.ListItemData(["Asset"]) assetitem = treemodel.TreeItem(assetdata, prjitem) shotdata = treemodel.ListItemData(["Shot"]) shotitem 
= treemodel.TreeItem(shotdata, prjitem) else: assetitem = prjitem.child(0) shotitem = prjitem.child(1) if isinstance(egrp, djadapter.models.Atype) and egrpitem not in assetitem.childItems: egrpitem.set_parent(assetitem) elif isinstance(egrp, djadapter.models.Sequence) and egrpitem not in shotitem.childItems: egrpitem.set_parent(shotitem) titem.set_parent(eitem) self.cur_user = user def prj_show_path(self, ): """Show the dir in the a filebrowser of the project :returns: None :rtype: None :raises: None """ f = self.prj_path_le.text() osinter = ostool.get_interface() osinter.open_path(f) def prj_save(self): """Save the current project :returns: None :rtype: None :raises: None """ if not self.cur_prj: return desc = self.prj_desc_pte.toPlainText() semester = self.prj_semester_le.text() fps = self.prj_fps_dsb.value() resx = self.prj_res_x_sb.value() resy = self.prj_res_y_sb.value() scale = self.prj_scale_cb.currentText() self.cur_prj.description = desc self.cur_prj.semester = semester self.cur_prj.framerate = fps self.cur_prj.resx = resx self.cur_prj.resy = resy self.cur_prj.scale = scale self.cur_prj.save() def seq_save(self): """Save the current sequence :returns: None :rtype: None :raises: None """ if not self.cur_seq: return desc = self.seq_desc_pte.toPlainText() self.cur_seq.description = desc self.cur_seq.save() def seq_view_prj(self, ): """View the project or the current sequence :returns: None :rtype: None :raises: None """ if not self.cur_seq: return self.view_prj(self.cur_seq.project) def seq_view_shot(self, ): """View the shot that is selected in the table view of the sequence page :returns: None :rtype: None :raises: None """ if not self.cur_seq: return i = self.seq_shot_tablev.currentIndex() item = i.internalPointer() if item: shot = item.internal_data() self.view_shot(shot) def seq_create_shot(self, *args, **kwargs): """Create a new shot :returns: None :rtype: None :raises: None """ if not self.cur_seq: return shot = self.create_shot(sequence=self.cur_seq) if shot: shotdata = djitemdata.ShotItemData(shot) treemodel.TreeItem(shotdata, self.seq_shot_model.root) def view_shot(self, shot): """View the given shot :param shot: the shot to view :type shot: :class:`jukeboxcore.djadapter.models.Shot` :returns: None :rtype: None :raises: None """ log.debug('Viewing shot %s', shot.name) self.cur_shot = None self.pages_tabw.setCurrentIndex(3) self.shot_name_le.setText(shot.name) self.shot_prj_le.setText(shot.project.name) self.shot_seq_le.setText(shot.sequence.name) self.shot_start_sb.setValue(shot.startframe) self.shot_end_sb.setValue(shot.endframe) self.shot_handle_sb.setValue(shot.handlesize) self.shot_desc_pte.setPlainText(shot.description) assetsrootdata = treemodel.ListItemData(["Name", "Description"]) assetsrootitem = treemodel.TreeItem(assetsrootdata) self.shot_asset_model = treemodel.TreeModel(assetsrootitem) self.shot_asset_treev.setModel(self.shot_asset_model) atypes = {} assets = shot.assets.all() for a in assets: atype = a.atype atypeitem = atypes.get(atype) if not atypeitem: atypedata = djitemdata.AtypeItemData(atype) atypeitem = treemodel.TreeItem(atypedata, assetsrootitem) atypes[atype] = atypeitem assetdata = djitemdata.AssetItemData(a) treemodel.TreeItem(assetdata, atypeitem) tasksrootdata = treemodel.ListItemData(["Name", "Short"]) tasksrootitem = treemodel.TreeItem(tasksrootdata) self.shot_task_model = treemodel.TreeModel(tasksrootitem) self.shot_task_tablev.setModel(self.shot_task_model) tasks = shot.tasks.all() for t in tasks: tdata = djitemdata.TaskItemData(t) 
treemodel.TreeItem(tdata, tasksrootitem) self.cur_shot = shot def create_shot(self, sequence): """Create and return a new shot :param sequence: the sequence for the shot :type sequence: :class:`jukeboxcore.djadapter.models.Sequence` :returns: The created shot or None :rtype: None | :class:`jukeboxcore.djadapter.models.Shot` :raises: None """ dialog = ShotCreatorDialog(sequence=sequence, parent=self) dialog.exec_() shot = dialog.shot return shot def shot_view_prj(self, ): """View the project of the current shot :returns: None :rtype: None :raises: None """ if not self.cur_shot: return self.view_prj(self.cur_shot.project) def shot_view_seq(self, ): """View the sequence of the current shot :returns: None :rtype: None :raises: None """ if not self.cur_shot: return self.view_seq(self.cur_shot.sequence) def shot_view_task(self, ): """View the task that is currently selected on the shot page :returns: None :rtype: None :raises: None """ if not self.cur_shot: return i = self.shot_task_tablev.currentIndex() item = i.internalPointer() if item: task = item.internal_data() self.view_task(task) def shot_view_asset(self, ): """View the task that is currently selected on the shot page :returns: None :rtype: None :raises: None """ if not self.cur_shot: return i = self.shot_asset_treev.currentIndex() item = i.internalPointer() if item: asset = item.internal_data() if isinstance(asset, djadapter.models.Asset): self.view_asset(asset) def shot_create_task(self, *args, **kwargs): """Create a new task :returns: None :rtype: None :raises: None """ if not self.cur_shot: return task = self.create_task(element=self.cur_shot) if task: taskdata = djitemdata.TaskItemData(task) treemodel.TreeItem(taskdata, self.shot_task_model.root) def create_task(self, element): """Create a new task for the given element :param element: the element for the task :type element: :class:`jukeboxcore.djadapter.models.Shot` | :class:`jukeboxcore.djadapter.models.Asset` :returns: None :rtype: None :raises: None """ dialog = TaskCreatorDialog(element=element, parent=self) dialog.exec_() task = dialog.task return task def view_asset(self, asset): """View the given asset :param asset: the asset to view :type asset: :class:`jukeboxcore.djadapter.models.Asset` :returns: None :rtype: None :raises: None """ log.debug('Viewing asset %s', asset.name) self.cur_asset = None self.pages_tabw.setCurrentIndex(5) name = asset.name prj = asset.project.name atype = asset.atype.name desc = asset.description self.asset_name_le.setText(name) self.asset_prj_le.setText(prj) self.asset_atype_le.setText(atype) self.asset_desc_pte.setPlainText(desc) assetsrootdata = treemodel.ListItemData(["Name", "Description"]) assetsrootitem = treemodel.TreeItem(assetsrootdata) self.asset_asset_model = treemodel.TreeModel(assetsrootitem) self.asset_asset_treev.setModel(self.asset_asset_model) atypes = {} assets = asset.assets.all() for a in assets: atype = a.atype atypeitem = atypes.get(atype) if not atypeitem: atypedata = djitemdata.AtypeItemData(atype) atypeitem = treemodel.TreeItem(atypedata, assetsrootitem) atypes[atype] = atypeitem assetdata = djitemdata.AssetItemData(a) treemodel.TreeItem(assetdata, atypeitem) tasksrootdata = treemodel.ListItemData(["Name", "Short"]) tasksrootitem = treemodel.TreeItem(tasksrootdata) self.asset_task_model = treemodel.TreeModel(tasksrootitem) self.asset_task_tablev.setModel(self.asset_task_model) tasks = asset.tasks.all() for t in tasks: tdata = djitemdata.TaskItemData(t) treemodel.TreeItem(tdata, tasksrootitem) self.cur_asset = asset def 
shot_add_asset(self, *args, **kwargs): """Add more assets to the shot. :returns: None :rtype: None :raises: None """ if not self.cur_shot: return dialog = AssetAdderDialog(shot=self.cur_shot) dialog.exec_() assets = dialog.assets atypes = {} for c in self.shot_asset_model.root.childItems: atypes[c.internal_data()] = c for asset in assets: atypeitem = atypes.get(asset.atype) if not atypeitem: atypedata = djitemdata.AtypeItemData(asset.atype) atypeitem = treemodel.TreeItem(atypedata, self.shot_asset_model.root) atypes[asset.atype] = atypeitem assetdata = djitemdata.AssetItemData(asset) treemodel.TreeItem(assetdata, atypeitem) self.cur_shot.save() def shot_remove_asset(self, *args, **kwargs): """Remove the, in the asset table view selected, asset. :returns: None :rtype: None :raises: None """ if not self.cur_shot: return i = self.shot_asset_treev.currentIndex() item = i.internalPointer() if item: asset = item.internal_data() if not isinstance(asset, djadapter.models.Asset): return log.debug("Removing asset %s.", asset.name) item.set_parent(None) self.cur_shot.assets.remove(asset) def shot_create_asset(self, *args, **kwargs): """Create a new shot :returns: None :rtype: None :raises: None """ if not self.cur_shot: return asset = self.create_asset(project=self.cur_shot.project, shot=self.cur_shot) if not asset: return atypes = {} for c in self.shot_asset_model.root.childItems: atypes[c.internal_data()] = c atypeitem = atypes.get(asset.atype) if not atypeitem: atypedata = djitemdata.AtypeItemData(asset.atype) atypeitem = treemodel.TreeItem(atypedata, self.shot_asset_model.root) atypes[asset.atype] = atypeitem assetdata = djitemdata.AssetItemData(asset) treemodel.TreeItem(assetdata, atypeitem) def create_asset(self, project, atype=None, shot=None, asset=None): """Create and return a new asset :param project: the project for the asset :type project: :class:`jukeboxcore.djadapter.models.Project` :param atype: the assettype of the asset :type atype: :class:`jukeboxcore.djadapter.models.Atype` :param shot: the shot to add the asset to :type shot: :class:`jukeboxcore.djadapter.models.Shot` :param asset: the asset to add the new asset to :type asset: :class:`jukeboxcore.djadapter.models.Asset` :returns: The created asset or None :rtype: None | :class:`jukeboxcore.djadapter.models.Asset` :raises: None """ element = shot or asset dialog = AssetCreatorDialog(project=project, atype=atype, parent=self) dialog.exec_() asset = dialog.asset if not atype: element.assets.add(asset) return asset def view_task(self, task): """View the given task :param task: the task to view :type task: :class:`jukeboxcore.djadapter.models.Task` :returns: None :rtype: None :raises: None """ log.debug('Viewing task %s', task.name) self.cur_task = None self.pages_tabw.setCurrentIndex(7) self.task_dep_le.setText(task.name) statusmap = {"New": 0, "Open": 1, "Done":2} self.task_status_cb.setCurrentIndex(statusmap.get(task.status, -1)) dt = dt_to_qdatetime(task.deadline) if task.deadline else None self.task_deadline_de.setDateTime(dt) self.task_link_le.setText(task.element.name) userrootdata = treemodel.ListItemData(['Username', 'First', 'Last', 'Email']) userrootitem = treemodel.TreeItem(userrootdata) for user in task.users.all(): userdata = djitemdata.UserItemData(user) treemodel.TreeItem(userdata, userrootitem) self.task_user_model = treemodel.TreeModel(userrootitem) self.task_user_tablev.setModel(self.task_user_model) self.cur_task = task def shot_save(self, ): """Save the current shot :returns: None :rtype: None :raises: None """ if 
not self.cur_shot: return desc = self.shot_desc_pte.toPlainText() start = self.shot_start_sb.value() end = self.shot_end_sb.value() handle = self.shot_handle_sb.value() self.cur_shot.description = desc self.cur_shot.startframe = start self.cur_shot.endframe = end self.cur_shot.handlesize = handle self.cur_shot.save() def asset_view_prj(self, ): """View the project of the current asset :returns: None :rtype: None :raises: None """ if not self.cur_asset: return prj = self.cur_asset.project self.view_prj(prj) def asset_view_atype(self, ): """View the project of the current atype :returns: None :rtype: None :raises: None """ if not self.cur_asset: return atype = self.cur_asset.atype self.view_atype(atype) def atype_view_asset(self, ): """View the project of the current assettype :returns: None :rtype: None :raises: None """ if not self.cur_atype: return i = self.atype_asset_treev.currentIndex() item = i.internalPointer() if item: asset = item.internal_data() if isinstance(asset, djadapter.models.Asset): self.view_asset(asset) def atype_create_asset(self, ): """Create a new asset :returns: None :rtype: None :raises: None """ if not self.cur_atype: return asset = self.create_asset(project=self.cur_prj, atype=self.cur_atype) if not asset: return assetdata = djitemdata.AssetItemData(asset) treemodel.TreeItem(assetdata, self.atype_asset_model.root) def atype_save(self): """Save the current atype :returns: None :rtype: None :raises: None """ if not self.cur_atype: return desc = self.atype_desc_pte.toPlainText() self.cur_atype.description = desc self.cur_atype.save() def asset_view_asset(self, ): """View the task that is currently selected on the asset page :returns: None :rtype: None :raises: None """ if not self.cur_asset: return i = self.asset_asset_treev.currentIndex() item = i.internalPointer() if item: asset = item.internal_data() if isinstance(asset, djadapter.models.Asset): self.view_asset(asset) def asset_add_asset(self, *args, **kwargs): """Add more assets to the asset. :returns: None :rtype: None :raises: None """ if not self.cur_asset: return dialog = AssetAdderDialog(asset=self.cur_asset) dialog.exec_() assets = dialog.assets atypes = {} for c in self.asset_asset_model.root.childItems: atypes[c.internal_data()] = c for asset in assets: atypeitem = atypes.get(asset.atype) if not atypeitem: atypedata = djitemdata.AtypeItemData(asset.atype) atypeitem = treemodel.TreeItem(atypedata, self.asset_asset_model.root) atypes[asset.atype] = atypeitem assetdata = djitemdata.AssetItemData(asset) treemodel.TreeItem(assetdata, atypeitem) self.cur_asset.save() def asset_remove_asset(self, *args, **kwargs): """Remove the, in the asset table view selected, asset. 
:returns: None :rtype: None :raises: None """ if not self.cur_asset: return i = self.asset_asset_treev.currentIndex() item = i.internalPointer() if item: asset = item.internal_data() if not isinstance(asset, djadapter.models.Asset): return log.debug("Removing asset %s.", asset.name) item.set_parent(None) self.cur_asset.assets.remove(asset) def asset_create_asset(self, *args, **kwargs): """Create a new asset :returns: None :rtype: None :raises: None """ if not self.cur_asset: return asset = self.create_asset(project=self.cur_asset.project, asset=self.cur_asset) if not asset: return atypes = {} for c in self.asset_asset_model.root.childItems: atypes[c.internal_data()] = c atypeitem = atypes.get(asset.atype) if not atypeitem: atypedata = djitemdata.AtypeItemData(asset.atype) atypeitem = treemodel.TreeItem(atypedata, self.asset_asset_model.root) atypes[asset.atype] = atypeitem assetdata = djitemdata.AssetItemData(asset) treemodel.TreeItem(assetdata, atypeitem) def asset_view_task(self, ): """View the task that is currently selected on the asset page :returns: None :rtype: None :raises: None """ if not self.cur_asset: return i = self.asset_task_tablev.currentIndex() item = i.internalPointer() if item: task = item.internal_data() self.view_task(task) def asset_create_task(self, *args, **kwargs): """Create a new task :returns: None :rtype: None :raises: None """ if not self.cur_asset: return task = self.create_task(element=self.cur_asset) if task: taskdata = djitemdata.TaskItemData(task) treemodel.TreeItem(taskdata, self.asset_task_model.root) def asset_save(self): """Save the current asset :returns: None :rtype: None :raises: None """ if not self.cur_asset: return desc = self.asset_desc_pte.toPlainText() self.cur_asset.description = desc self.cur_asset.save() def dep_view_prj(self, ): """View the project that is currently selected :returns: None :rtype: None :raises: None """ if not self.cur_dep: return i = self.dep_prj_tablev.currentIndex() item = i.internalPointer() if item: prj = item.internal_data() self.view_prj(prj) def dep_add_prj(self, *args, **kwargs): """Add projects to the current department :returns: None :rtype: None :raises: None """ if not self.cur_dep: return dialog = ProjectAdderDialog(department=self.cur_dep) dialog.exec_() prjs = dialog.projects for prj in prjs: prjdata = djitemdata.ProjectItemData(prj) treemodel.TreeItem(prjdata, self.dep_prj_model.root) def dep_remove_prj(self, *args, **kwargs): """Remove the selected project from the department :returns: None :rtype: None :raises: None """ if not self.cur_dep: return i = self.dep_prj_tablev.currentIndex() item = i.internalPointer() if item: prj = item.internal_data() self.cur_dep.projects.remove(prj) item.set_parent(None) def dep_save(self, ): """Save the current department :returns: None :rtype: None :raises: None """ if not self.cur_dep: return ordervalue = self.dep_ordervalue_sb.value() desc = self.dep_desc_pte.toPlainText() self.cur_dep.ordervalue = ordervalue self.cur_dep.description = desc self.cur_dep.save() def task_view_user(self, ): """View the user that is currently selected :returns: None :rtype: None :raises: None """ if not self.cur_task: return i = self.task_user_tablev.currentIndex() item = i.internalPointer() if item: user = item.internal_data() self.view_user(user) def task_add_user(self, *args, **kwargs): """Add users to the current task :returns: None :rtype: None :raises: None """ if not self.cur_task: return dialog = UserAdderDialog(task=self.cur_task) dialog.exec_() users = dialog.users for user in 
users: userdata = djitemdata.UserItemData(user) treemodel.TreeItem(userdata, self.task_user_model.root) def task_remove_user(self, *args, **kwargs): """Remove the selected user from the task :returns: None :rtype: None :raises: None """ if not self.cur_task: return i = self.task_user_tablev.currentIndex() item = i.internalPointer() if item: user = item.internal_data() self.cur_task.users.remove(user) item.set_parent(None) def task_view_dep(self, ): """View the departmetn of the current task :returns: None :rtype: None :raises: None """ if not self.cur_task: return self.view_dep(self.cur_task.department) def task_view_link(self, ): """View the link of the current task :returns: None :rtype: None :raises: None """ if not self.cur_task: return e = self.cur_task.element if isinstance(e, djadapter.models.Asset): self.view_asset(e) else: self.view_shot(e) def task_save(self, ): """Save the current task :returns: None :rtype: None :raises: None """ if not self.cur_task: return deadline = self.task_deadline_de.dateTime().toPython() status = self.task_status_cb.currentText() self.cur_task.deadline = deadline self.cur_task.status = status self.cur_task.save() def users_view_user(self, ): """View the user that is currently selected :returns: None :rtype: None :raises: None """ i = self.users_tablev.currentIndex() item = i.internalPointer() if item: user = item.internal_data() self.view_user(user) def user_view_task(self, ): """View the task that is selected :returns: None :rtype: None :raises: None """ if not self.cur_user: return i = self.user_task_treev.currentIndex() item = i.internalPointer() if item: task = item.internal_data() if isinstance(task, djadapter.models.Task): self.view_task(task) def user_view_prj(self, ): """View the project that is currently selected :returns: None :rtype: None :raises: None """ i = self.user_prj_tablev.currentIndex() item = i.internalPointer() if item: prj = item.internal_data() self.view_prj(prj) def user_add_prj(self, *args, **kwargs): """Add projects to the current user :returns: None :rtype: None :raises: None """ if not self.cur_user: return dialog = ProjectAdderDialog(user=self.cur_user) dialog.exec_() prjs = dialog.projects for prj in prjs: prjdata = djitemdata.ProjectItemData(prj) treemodel.TreeItem(prjdata, self.user_prj_model.root) def user_remove_prj(self, *args, **kwargs): """Remove the selected project from the user :returns: None :rtype: None :raises: None """ if not self.cur_user: return i = self.user_prj_tablev.currentIndex() item = i.internalPointer() if item: prj = item.internal_data() prj.users.remove(self.cur_user) item.set_parent(None) def user_save(self): """Save the current user :returns: None :rtype: None :raises: None """ if not self.cur_user: return username = self.user_username_le.text() first = self.user_first_le.text() last = self.user_last_le.text() email = self.user_email_le.text() self.cur_user.username = username self.cur_user.first_name = first self.cur_user.last_name = last self.cur_user.email = email self.cur_user.save() class GuerillaMGMT(JB_CoreStandaloneGuiPlugin): """A plugin that can run a GuerillaMGMT tool This can be used as a standalone plugin. Before you call run, make sure that there is a running QApplication running. See :mod:`jukeboxcore.gui.main` for helpful functions. """ author = "David Zuber" copyright = "2015" version = "0.1" description = "A guerilla tool for projectmanagement and creating entries in the database." def init(self, ): """Do nothing on init! 
Call run() if you want to start the GuerillaMGMT tool :returns: None :rtype: None :raises: None """ pass def uninit(self, ): """Do nothing on uninit! :returns: None :rtype: None :raises: None """ pass def run(self, parent=None): """Start the GuerillaMGMT tool :returns: None :rtype: None :raises: None """ self.gw = GuerillaMGMTWin(parent=parent) self.gw.show()
bsd-3-clause
-4,366,240,837,862,051,000
32.697019
142
0.592131
false
3.71462
false
false
false
tbullmann/heuhaufen
publication/generators_and_depth/aggregate.py
1
5320
import os import pandas import numpy as np from bokeh.palettes import Viridis4 as palette from bokeh.layouts import layout, column, row from bokeh.plotting import figure, output_file, show, ColumnDataSource from bokeh.models import HoverTool, Div, DataTable, TableColumn, NumberFormatter, LinearAxis, Select, CustomJS, Slider, Button import json # must be imported after bokeh def main(test_path='temp/publication/how_deep/test'): labels = ['membranes', 'synapses', 'mitochondria'] # concatenate the evaluation and parameters for all runs dfs = [] for label in labels: for run in range(1,21): df = read_run_from_json_and_csv(test_path, run, label) dfs.append(df) data = pandas.concat(dfs) # save aggregated data (in long format) data.to_csv(os.path.join(test_path, 'summary_long.csv')) # convert long to wide: label x metric --> label_metric metrics = data.columns.to_series().groupby(data.dtypes).groups[np.dtype('float64')] data2 = data.pivot_table(index=['generator', 'layers', 'sample'], columns='label', values=metrics) data2.columns = ['{}_{}'.format(x, y) for x, y in zip(data2.columns.get_level_values(1), data2.columns.get_level_values(0))] data2 = data2.reset_index() # save aggregated data (in wide format) data2.to_csv(os.path.join(test_path, 'summary_wide.csv')) # TODO: interactive plot with bokeh # bokeh_plot(data2, test_path) # not fully functional, e.g. cannot change label and metric def read_run_from_json_and_csv(test_path, run, label): # path to the test result for a particular model base_path = os.path.join(test_path, '%d' % run) # getting parameters from the options json file with open(os.path.join(base_path, "options.json")) as f: options = dict(json.loads(f.read()).items()) generator = options['generator'] # calculate the number of layers depending on generator network and its specific parameters if generator == 'unet': layers = options['u_depth'] * 2 # 1 for down sampling and 1 for up sampling at each level elif generator == 'densenet': layers = options['n_dense_blocks'] * options['n_dense_layers'] + 6 # 3 for each encoder and decoder elif generator == 'resnet': layers = options['n_res_blocks'] * 2 + 6 # 2 for transformation, 3 for each encoder and decoder elif generator == 'highwaynet': layers = options['n_highway_units'] * 2 + 6 # 2 for transformation, 3 for each encoder and decoder # read evaluation results df = pandas.read_csv(os.path.join(base_path, 'evaluation/%s.csv' % label)) # no index_col # add parameters df['generator'] = generator df['layers'] = layers df['label'] = label df['run'] = run return df def bokeh_plot(data, test_path): networks = ['unet', 'resnet', 'highwaynet', 'densenet'] # assuming all float values are metrics metrics = data.columns.to_series().groupby(data.dtypes).groups[np.dtype('float64')] # calculate mean for each data_mean = data.groupby(['generator', 'layers'])[metrics].mean().reset_index() source = dict() source_mean = dict() for network in networks: source[network] = ColumnDataSource(data[data.generator == network]) source_mean[network] = ColumnDataSource(data_mean[data_mean.generator == network]) output_file(os.path.join(test_path, "select.html")) description = Div(text=""" <h1>Evaluation of network type and depth for generator</h1> <p> Interact with the widgets to select metric and evaluated label. 
</p> """, width=1000) fig = figure(plot_width=1000, plot_height=1000, tools=['box_select', 'reset']) fig.xaxis.axis_label = "layers" fig.yaxis.axis_label = "value of metric" plots = [] for network, column_color in zip(networks, palette): plot = fig.line('layers', metrics[0], legend=dict(value=network), color=column_color, source=source_mean[network]) plot = fig.scatter('layers', metrics[0], legend=dict(value=network), color=column_color, source=source[network]) # legend which can hide/select a specific metric fig.legend.location = "bottom_right" fig.legend.click_policy = "hide" choices = metrics axis = 'y' axis_callback_code = """ plot.glyph.{axis}.field = cb_obj.value axis.attributes.axis_label = cb_obj.value; axis.trigger('change'); source.change.emit(); """ if axis == 'x': fig.xaxis.visible = None position = 'below' initial_choice = 0 else: fig.yaxis.visible = None position = 'left' initial_choice = 1 linear_axis = LinearAxis(axis_label=choices[initial_choice]) fig.add_layout(linear_axis, position) callback1 = CustomJS(args=dict(source=source[network], axis=linear_axis, plot=plot), code=axis_callback_code.format(axis=axis)) ticker = Select(value=choices[initial_choice], options=choices, title=axis + '-axis') ticker.js_on_change('value', callback1) l = layout([ [description], [ticker], [fig] ], sizing_mode='fixed') show(l) if __name__ == "__main__": main() else: main()
mit
2,667,029,470,658,940,000
37
126
0.644361
false
3.658872
true
false
false
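A minimal, self-contained sketch (not part of the record above) of the long-to-wide pivot that aggregate.py performs; the toy DataFrame, the 'f1' metric name and its values are invented for illustration.

import pandas as pd

# Toy "long" table: one row per (generator, layers, sample, label) with one metric value.
data = pd.DataFrame({
    'generator': ['unet', 'unet', 'resnet', 'resnet'],
    'layers':    [4, 4, 8, 8],
    'sample':    [0, 1, 0, 1],
    'label':     ['membranes', 'synapses', 'membranes', 'synapses'],
    'f1':        [0.91, 0.85, 0.88, 0.80],
})

# Pivot long -> wide: one column per label/metric combination.
wide = data.pivot_table(index=['generator', 'layers', 'sample'],
                        columns='label', values=['f1'])

# Flatten the resulting MultiIndex columns into 'label_metric' strings,
# mirroring the list comprehension used in the record above.
wide.columns = ['{}_{}'.format(lbl, met) for met, lbl in wide.columns]
wide = wide.reset_index()
print(wide)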
Statoil/libres
python/res/enkf/ert_template.py
1
2187
# Copyright (C) 2012 Equinor ASA, Norway. # # The file 'ert_template.py' is part of ERT - Ensemble based Reservoir Tool. # # ERT is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ERT is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. # # See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html> # for more details. from cwrap import BaseCClass from res import ResPrototype class ErtTemplate(BaseCClass): TYPE_NAME = "ert_template" _free = ResPrototype("void ert_template_free( ert_template )") _get_template_file = ResPrototype("char* ert_template_get_template_file(ert_template)") _get_target_file = ResPrototype("char* ert_template_get_target_file(ert_template)") _get_arg_list = ResPrototype("subst_list_ref ert_template_get_arg_list( ert_template )") def __init__(self): raise NotImplementedError("Class can not be instantiated directly!") def get_template_file(self): """ @rtype: str """ return self._get_template_file() def get_target_file(self): """ @rtype: str """ return self._get_target_file() def get_args_as_string(self): """ @rtype: str """ args_list = self._get_arg_list() return ", ".join(["{}={}".format(key, args_list.get(key)) for key in args_list.keys()]) def __eq__(self, other): return self.get_template_file() == other.get_template_file() and\ self.get_target_file() == other.get_target_file() and\ self.get_args_as_string() == other.get_args_as_string() def __ne__(self, other): return not self == other def __repr__(self): return "ErtTemplate({}, {}, {})".format( self.get_template_file(), self.get_target_file(), self.get_args_as_string()) def free(self): self._free()
gpl-3.0
87,037,044,405,299,280
36.067797
98
0.631459
false
3.521739
false
false
false
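Not part of the record: a standalone sketch of the key=value joining that ErtTemplate.get_args_as_string performs, with a plain dict standing in for the C-backed subst_list (keys and values are invented).

# Stand-in for the object returned by _get_arg_list().
args = {"<MAGIC>": "42", "<CASE>": "base_run"}

# Same formatting as get_args_as_string: "key=value" pairs joined by ", ".
args_as_string = ", ".join("{}={}".format(key, args[key]) for key in args)
print(args_as_string)  # <MAGIC>=42, <CASE>=base_run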
oemof/oemof_base
src/oemof/solph/groupings.py
1
2875
# -*- coding: utf-8 -*- """Groupings needed on an energy system for it to work with solph. If you want to use solph on an energy system, you need to create it with these groupings specified like this: .. code-block: python from oemof.network import EnergySystem import solph energy_system = EnergySystem(groupings=solph.GROUPINGS) SPDX-FileCopyrightText: Uwe Krien <[email protected]> SPDX-FileCopyrightText: Simon Hilpert SPDX-FileCopyrightText: Cord Kaldemeyer SPDX-FileCopyrightText: Stephan Günther SPDX-License-Identifier: MIT """ from oemof.network import groupings as groupings from oemof.solph import blocks def constraint_grouping(node, fallback=lambda *xs, **ks: None): """Grouping function for constraints. This function can be passed in a list to :attr:`groupings` of :class:`oemof.solph.network.EnergySystem`. Parameters ---------- node : :class:`Node <oemof.network.Node` The node for which the figure out a constraint group. fallback : callable, optional A function of one argument. If `node` doesn't have a `constraint_group` attribute, this is used to group the node instead. Defaults to not group the node at all. """ # TODO: Refactor this for looser coupling between modules. # This code causes an unwanted tight coupling between the `groupings` and # `network` modules, resulting in having to do an import at runtime in the # init method of solph's `EnergySystem`. A better way would be to add a # method (maybe `constraints`, `constraint_group`, `constraint_type` or # something like that) to solph's node hierarchy, which gets overridden in # each subclass to return the appropriate value. Then we can just call the # method here. # This even gives other users/us the ability to customize/extend how # constraints are grouped by overriding the method in future subclasses. cg = getattr(node, "constraint_group", fallback) return cg() standard_flow_grouping = groupings.FlowsWithNodes(constant_key=blocks.Flow) def _investment_grouping(stf): if hasattr(stf[2], "investment"): if stf[2].investment is not None: return True else: return False investment_flow_grouping = groupings.FlowsWithNodes( constant_key=blocks.InvestmentFlow, # stf: a tuple consisting of (source, target, flow), so stf[2] is the flow. filter=_investment_grouping, ) def _nonconvex_grouping(stf): if hasattr(stf[2], "nonconvex"): if stf[2].nonconvex is not None: return True else: return False nonconvex_flow_grouping = groupings.FlowsWithNodes( constant_key=blocks.NonConvexFlow, filter=_nonconvex_grouping ) GROUPINGS = [ constraint_grouping, investment_flow_grouping, standard_flow_grouping, nonconvex_flow_grouping, ]
gpl-3.0
3,557,383,444,322,008,600
29.252632
79
0.707376
false
3.615094
false
false
false
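An illustrative, self-contained sketch of how the _investment_grouping filter above inspects the flow element of a (source, target, flow) tuple; DummyFlow and the tuple contents are invented, and the helper below is only truth-value equivalent to the original filter.

class DummyFlow:
    """Stand-in for a solph flow object; only the attribute matters here."""
    def __init__(self, investment=None):
        self.investment = investment

def has_investment(stf):
    # stf is a (source, target, flow) tuple; the filter looks only at stf[2].
    return getattr(stf[2], "investment", None) is not None

print(has_investment(("source", "target", DummyFlow(investment=object()))))  # True
print(has_investment(("source", "target", DummyFlow())))                     # False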
NIRALUser/RodentThickness
Applications/RodentThickness/Testing/Data/vtkPointAttributes.py
2
3379
import sys # path="/tools/Python/Python-2.7.3/lib/python2.7/site-packages/setuptools-0.6c11-py2.7.egg:/tools/Python/Python-2.7.3/lib/python2.7/site-packages/pip-1.0-py2.7.egg:/tools/Python/Python-2.7.3/lib/python2.7/site-packages/VTK-6.0-py2.7.egg:/tools/Python/Python-2.7.3/lib/python2.7/site-packages/distribute-0.6.28-py2.7.egg:/tools/Python/Python-2.7.3/lib/python2.7/site-packages/SimpleITK-0.6.0.dev208-py2.7.egg:/tools/Python/Python-2.7.3/lib/python27.zip:/tools/Python/Python-2.7.3/lib/python2.7:/tools/Python/Python-2.7.3/lib/python2.7/plat-linux2:/tools/Python/Python-2.7.3/lib/python2.7/lib-tk:/tools/Python/Python-2.7.3/lib/python2.7/lib-old:/tools/Python/Python-2.7.3/lib/python2.7/lib-dynload:/tools/Python/Python-2.7.3/lib/python2.7/site-packages".split(":") # for p in path: # sys.path.insert(0, p) from vtk import * from optparse import OptionParser import csv,sys import niralvtk as nv import numpy as np def removeAttributes(opts, args): print args inputvtk = args[0] outputvtk = args[1] inVTK = nv.readVTK(inputvtk); nArrays = inVTK.GetPointData().GetNumberOfArrays() arrayNames = [] for k in range(0,nArrays): arrayNames.append(inVTK.GetPointData().GetArrayName(k)) print arrayNames for name in arrayNames: inVTK.GetPointData().RemoveArray(name) nv.writeVTK(outputvtk, inVTK) def main(opts, argv): inputvtk = argv[0] outputvtk = argv[1] inVTK = nv.readVTK(inputvtk); if (opts.sep == "0"): csvreader = csv.reader(open(opts.csvfile, "r")) elif (opts.sep == "1"): csvreader = csv.reader(open(opts.csvfile, "r"), delimiter=',') elif (opts.sep == "2"): csvreader = csv.reader(open(opts.csvfile, "r"), delimiter='\t') first = csvreader.next() if (opts.header): header = first first = csvreader.next() if (opts.names != ""): header = opts.names.split(",") print header nCols = len(first) nPoints = inVTK.GetNumberOfPoints() data = np.zeros([nPoints,nCols]) for k in range(0,nCols): data[0,k] = float(first[k]) print "# points:", nPoints for j in range(1,nPoints): print j first = csvreader.next() for k in range(0,nCols): data[j,k] = float(first[k]) for k in range(0,nCols): arr = vtkDoubleArray() if (len(header) > 0): arr.SetName(header[k]) arr.SetNumberOfTuples(nPoints) for j in range(0,nPoints): arr.SetValue(j,data[j,k]) inVTK.GetPointData().AddArray(arr) nv.writeVTK(outputvtk, inVTK) if (__name__ == "__main__"): parser = OptionParser(usage="usage: %prog [options] input-vtk output-vtk") parser.add_option("-i", "--input", dest="csvfile", help="input attribute csv file", metavar="CSVFILE") parser.add_option("-t", "--title", dest="header", help="use first line as header", action="store_true", default=False); parser.add_option("-n", "--columnNames", dest="names", help="use this as column names", metavar="NAME1,NAME2,...", default=""); parser.add_option("-r", "--removeAttributes", dest="removeAttrs", help="remove all attributes", action="store_true", default=False); parser.add_option("-s", "--separator", dest="sep", help="separator (0=space, 1=comma, 2=tab)", default="0") (opts, args) = parser.parse_args() if (len(args) < 2): parser.print_help() else: if (opts.removeAttrs): removeAttributes(opts, args) else: main(opts, args)
gpl-3.0
4,282,565,386,402,492,400
33.479592
762
0.669725
false
2.837112
false
false
false
imsut/commons
src/python/twitter/pants/targets/jvm_target.py
1
2954
# ================================================================================================== # Copyright 2011 Twitter, Inc. # -------------------------------------------------------------------------------------------------- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this work except in compliance with the License. # You may obtain a copy of the License in the LICENSE file, or at: # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ================================================================================================== import os from twitter.pants.targets.internal import InternalTarget from twitter.pants.targets.jar_dependency import JarDependency from twitter.pants.targets.with_sources import TargetWithSources class JvmTarget(InternalTarget, TargetWithSources): """A base class for all java module targets that provides path and dependency translation.""" def __init__(self, name, sources, dependencies, excludes=None, buildflags=None, is_meta=False, configurations=None): InternalTarget.__init__(self, name, dependencies, is_meta) TargetWithSources.__init__(self, name, is_meta) self.declared_dependencies = set(dependencies or []) self.add_label('jvm') self.sources = self._resolve_paths(self.target_base, sources) or [] for source in self.sources: rel_path = os.path.join(self.target_base, source) TargetWithSources.register_source(rel_path, self) self.excludes = excludes or [] self.buildflags = buildflags or [] custom_antxml = '%s.xml' % self.name buildfile = self.address.buildfile.full_path custom_antxml_path = os.path.join(os.path.dirname(buildfile), custom_antxml) self.custom_antxml_path = custom_antxml_path if os.path.exists(custom_antxml_path) else None self.configurations = configurations def _as_jar_dependency(self): jar_dependency, _, _ = self._get_artifact_info() jar = JarDependency(org = jar_dependency.org, name = jar_dependency.name, rev = None) jar.id = self.id return jar def _as_jar_dependencies(self): yield self._as_jar_dependency() def _get_artifact_info(self): provides = self._provides() exported = bool(provides) org = provides.org if exported else 'internal' module = provides.name if exported else self.id version = provides.rev if exported else None id = "%s-%s" % (provides.org, provides.name) if exported else self.id return JarDependency(org = org, name = module, rev = version), id, exported def _provides(self): return None
apache-2.0
-2,108,860,342,207,043,300
40.027778
100
0.636425
false
4.21398
false
false
false
ver228/tierpsy-tracker
tierpsy/helper/misc/file_processing.py
1
2071
import os import tables from .misc import TABLE_FILTERS RESERVED_EXT = ['_skeletons.hdf5', '_trajectories.hdf5', '_features.hdf5', '_intensities.hdf5', '_feat_manual.hdf5', '_subsample.avi', '.wcon.zip', '_featuresN.hdf5' ] IMG_EXT = ['.png', '.jpeg', '.jpg', '.tif', '.tiff', '.bmp'] def remove_ext(fname): for rext in RESERVED_EXT: if fname.endswith(rext): return fname.replace(rext, '') return os.path.splitext(fname)[0] def get_base_name(fname): return os.path.basename(remove_ext(fname)) def replace_subdir(original_dir, original_subdir, new_subdir): # construct the results dir on base of the mask_dir_root original_dir = os.path.normpath(original_dir) subdir_list = original_dir.split(os.sep) for ii in range(len(subdir_list))[::-1]: if subdir_list[ii] == original_subdir: subdir_list[ii] = new_subdir break # the counter arrived to zero, add new_subdir at the end of the directory if ii == 0: if subdir_list[-1] == '': del subdir_list[-1] subdir_list.append(new_subdir) return (os.sep).join(subdir_list) def save_modified_table(file_name, modified_table, table_name): tab_recarray = modified_table.to_records(index=False) with tables.File(file_name, "r+") as fid: dum_name = table_name + '_d' if '/' + dum_name in fid: fid.remove_node('/', dum_name) newT = fid.create_table( '/', dum_name, obj=tab_recarray, filters=TABLE_FILTERS) oldT = fid.get_node('/' + table_name) old_args = [x for x in dir(oldT._v_attrs) if not x.startswith('_')] for key in old_args: if not key in newT._v_attrs and not key.startswith('FIELD'): newT.attrs[key] = oldT.attrs[key] fid.remove_node('/', table_name) newT.rename(table_name)
mit
-487,200,462,895,574,000
30.861538
77
0.551424
false
3.50423
false
false
false
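Illustrative only: a self-contained restatement of the reserved-extension stripping done by remove_ext/get_base_name in the record above; the example file names are invented.

import os

RESERVED_EXT = ['_skeletons.hdf5', '_trajectories.hdf5', '_features.hdf5',
                '_intensities.hdf5', '_feat_manual.hdf5', '_subsample.avi',
                '.wcon.zip', '_featuresN.hdf5']

def remove_ext(fname):
    # Reserved multi-part extensions are stripped as a whole ...
    for rext in RESERVED_EXT:
        if fname.endswith(rext):
            return fname.replace(rext, '')
    # ... anything else falls back to a plain splitext.
    return os.path.splitext(fname)[0]

print(remove_ext('/data/video_skeletons.hdf5'))  # /data/video
print(remove_ext('/data/video.avi'))             # /data/video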
attia42/twitter_word2vec
kmeans/experimentm.py
1
3559
import csv import nltk from nltk.tokenize import word_tokenize import string from nltk import pos_tag from gensim.models.word2vec import Word2Vec from gensim import matutils from numpy import array, float32 as REAL from sklearn.cluster import MiniBatchKMeans, KMeans from multiprocessing import Pool from collections import Counter #string.punctuation #string.digits file = 'training.1600000.processed.noemoticon2.csv' #file = 'testdata.manual.2009.06.14.csv' tags = ["NNP", "NN", "NNS"] ncls = 1000 niters = 1000 nreplay_kmeans = 1 lower = False redundant = ["aw", "aww", "awww", "awwww", "haha", "lol", "wow", "wtf", "xd", "yay", "http", "www", "com", "ah", "ahh", "ahhh", "amp"] def preprocess(tweet): ret_tweet = "" i = -1 nn = [] raw_tweet = tweet for ch in string.punctuation.replace("'","") + string.digits: tweet = tweet.replace(ch, " ") tweet_pos = {} if lower: tweet = tweet.lower() try: toks = word_tokenize(tweet) pos = pos_tag(toks) nn = [p for p in pos if p[1] in tags] #nn = [p for p in pos if p == 'NNP'] except: pass if(len(nn)): tweet_pos["NN"] = nn ret_tweet = tweet_pos return ret_tweet raw = [] with open(file, 'rb') as csvfile: content = csv.reader(csvfile, delimiter=',', quotechar='"') for row in content: tweet = row[5] raw.append(tweet) p = Pool(6) tweets = p.map(preprocess, raw) t1 = [] t2 = [] for i in range(len(tweets)): if len(tweets[i]): t1.append(raw[i]) t2.append(tweets[i]) raw = t1 tweets = t2 print "Loading model..." wv = Word2Vec.load_word2vec_format('GoogleNews-vectors-negative300.bin', binary=True) vectors = [] for i in range(len(tweets)): tweet = tweets[i] nns = tweet['NN'] vector = [] #print nns mean = [] no_wv_tweet = True for w in nns: if len(w[0]) > 1 and w[0] in wv and w[0].lower() not in redundant: no_wv_tweet = False #print w[0] weight = 1 if w[1] == 'NNP': weight = 100 mean.append(weight * wv[w[0]]) if(len(mean)): vectors.append(matutils.unitvec(array(mean).mean(axis=0)).astype(REAL)) else: vectors.append([]) t1 = [] t2 = [] t3 = [] for i in range(len(vectors)): if vectors[i] != None and len(vectors[i]): t1.append(raw[i]) t2.append(tweets[i]) t3.append(vectors[i]) raw = t1 tweets = t2 vectors = t3 #kmeans = KMeans(init='k-means++', n_clusters=ncls, n_init=1) kmeans = MiniBatchKMeans(init='k-means++', n_clusters=ncls, n_init=nreplay_kmeans, max_iter=niters) kmeans.fit(vectors) clss = kmeans.predict(vectors) clusters = [[] for i in range(ncls)] for i in range(len(vectors)): cls = clss[i] clusters[cls].append(i) clusterstags = [[] for i in range(ncls)] countarr = [] for c in clusters: counts = Counter() for i in c: t = [x[0] for x in tweets[i]["NN"] ]#if x[1] == "NNP"] #tn = [x[1] for x in tweets[i]["NN"]] sentence = " ".join(t) #+ tn) counts.update(word.strip('.,?!"\'').lower() for word in sentence.split()) countarr.append(counts) output = "" for i in range(ncls): output = "Most common words for this cluster:\n" output += str(countarr[i].most_common(12)) output += "\n\n\n\n\n\n" output += "Word2vec space of related words:\n" wv_rel = wv.most_similar([kmeans.cluster_centers_[i]], topn=10) output += str(wv_rel) output += "\n\n\n\n\n\n" for t in clusters[i]: output += str(raw[t]) + "\n" #output += "\n\n\n" nm = [x[0] for x in countarr[i].most_common(5)] nm = str(" ".join(nm)) for ch in string.punctuation: nm = nm.replace(ch, " ") f = open('clusters/' + nm +'.txt', 'wb') f.write(output) f.close()
mit
-3,103,478,720,361,940,000
18.662983
134
0.629952
false
2.527699
false
false
false
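A numpy-only sketch (toy 3-d vectors and invented weights) of the weighted-mean-then-normalize step the script above applies to each tweet's word vectors; unitvec here only mimics the idea of gensim.matutils.unitvec.

import numpy as np

def unitvec(v):
    # Same idea as gensim.matutils.unitvec: scale to unit L2 norm.
    norm = np.linalg.norm(v)
    return v / norm if norm > 0 else v

# Toy word vectors; proper nouns (NNP) get a larger weight, as in the script.
word_vecs = [np.array([1.0, 0.0, 0.0]), np.array([0.0, 1.0, 0.0])]
weights   = [100, 1]

mean = np.mean([w * v for w, v in zip(weights, word_vecs)], axis=0)
tweet_vec = unitvec(mean).astype(np.float32)
print(tweet_vec)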
fga-gpp-mds/2017.2-Receituario-Medico
medical_prescription/exam/test/test_view_list_custom_exam.py
1
2151
# Django imports from django.test import TestCase from django.test.client import RequestFactory from django.contrib.auth.models import AnonymousUser from django.core.exceptions import PermissionDenied # Local Django imports from exam.views import ListCustomExams from user.models import User, Patient, HealthProfessional class ListExamsTest(TestCase): def setUp(self): self.factory = RequestFactory() self.health_professional = HealthProfessional.objects.create_user(email='[email protected]', password='senha12') self.patient = Patient.objects.create_user(email='[email protected]', password='senha12', CEP='72850735', UF='DF', city='Brasília', neighborhood='Asa sul', complement='Bloco 2 QD 701') self.user = User.objects.create_user(email='[email protected]', password='senha12') def teste_exam_get_exam_without_login(self): request = self.factory.get('/exam/list_custom_exams/') request.user = AnonymousUser() response = ListCustomExams.as_view()(request) self.assertEqual(response.status_code, 302) def teste_exam_get_exam_with_patient(self): request = self.factory.get('/exam/list_custom_exams/') request.user = self.patient with self.assertRaises(PermissionDenied): ListCustomExams.as_view()(request) def teste_exam_get_exam_with_user(self): request = self.factory.get('/exam/list_custom_exams/') request.user = self.user with self.assertRaises(PermissionDenied): ListCustomExams.as_view()(request) def teste_exam_get_exam_with_health_professional(self): request = self.factory.get('/exam/list_custom_exams/') request.user = self.health_professional response = ListCustomExams.as_view()(request) self.assertEqual(response.status_code, 200)
mit
-498,659,966,404,250,100
41.156863
120
0.605581
false
4.232283
true
false
false
mxcube/mxcube
mxcubeqt/widgets/optimisation_parameters_widget_layout.py
1
2158
# # Project: MXCuBE # https://github.com/mxcube # # This file is part of MXCuBE software. # # MXCuBE is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # MXCuBE is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with MXCuBE. If not, see <http://www.gnu.org/licenses/>. from mxcubeqt.utils import qt_import __credits__ = ["MXCuBE collaboration"] __license__ = "LGPLv3+" class OptimisationParametersWidgetLayout(qt_import.QWidget): """ Widget is used to define characterisation optimization parameters like maximum resolution, aimed multiplicity, and etc. """ def __init__(self, parent=None, name=None, window_flags=0): qt_import.QWidget.__init__(self, parent, qt_import.Qt.WindowFlags(window_flags)) if not name: self.setObjectName("OptimisationParametersWidgetLayout") # Hardware objects ---------------------------------------------------- # Internal variables -------------------------------------------------- # Graphic elements ---------------------------------------------------- self.opt_param_widget = qt_import.load_ui_file( "optimization_parameters_widget_layout.ui" ) # Layout -------------------------------------------------------------- _main_vlayout = qt_import.QVBoxLayout(self) _main_vlayout.addWidget(self.opt_param_widget) _main_vlayout.setSpacing(0) _main_vlayout.setContentsMargins(0, 0, 0, 0) # Size policies ------------------------------------------------------- # Other --------------------------------------------------------------- self.setAttribute(qt_import.Qt.WA_WState_Polished)
lgpl-3.0
7,412,875,325,961,287,000
36.859649
88
0.579703
false
4.231373
false
false
false
PowerHMC/HmcRestClient
src/managed_system/PowerOnManagedSystem.py
1
2162
# Copyright 2015, 2016 IBM Corp. # # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from src.utility import HTTPClient,HmcHeaders,HMCClientLogger import xml.etree.ElementTree as etree import os log = HMCClientLogger.HMCClientLogger(__name__) from src.common.JobStatus import * ROOT = 'ManagedSystem' CONTENT_TYPE ='application/vnd.ibm.powervm.web+xml; type=JobRequest' class PowerOnManagedSystem(JobStatus): """ Power On the Selected ManagedSystem if its not in operating state else shows error """ def __init__(self): """ initializes root and content-type """ self.content_type = CONTENT_TYPE self.root = ROOT def poweron_ManagedSystem(self, ip, managedsystem_uuid, x_api_session): """ Args: ip:ip address of hmc managedsystem_uuid:UUID of managedsystem x_api_session:session to be used """ super().__init__(ip, self.root, self.content_type, x_api_session) log.log_debug("power on managed system started") headers_object=HmcHeaders.HmcHeaders("web") namespace=headers_object.ns["xmlns"] directory = os.path.dirname(__file__) inputpayload=open(directory+"\data\poweron_managedsystem.xml","r") request_object=HTTPClient.HTTPClient('uom',ip,self.root,self.content_type,session_id=x_api_session) request_object.HTTPPut(payload=inputpayload,append=str(managedsystem_uuid)+"/do/PowerOn") log.log_debug(request_object.response) if request_object.response_b: self.get_job_status(request_object)
apache-2.0
6,232,945,339,774,236,000
37.607143
107
0.682701
false
3.813051
false
false
false
Nablaquabla/sns-analysis
ba-missing-files.py
1
5827
import os import time as tm import sys # Handles the creation of condor files for a given set of directories # ----------------------------------------------------------------------------- def createCondorFile(dataDir,outDir,run,day,time): # Condor submission file name convention: run-day-time.condor with open('/home/bjs66/CondorFiles/%s-%s-%s.condor'%(run,day,time),'w') as f: # Fixed program location' f.write('Executable = /home/bjs66/GitHub/sns-analysis/sns-analysis-v3\n') # Arguments passed to the exe: # Set main run directory, e.g. Run-15-10-02-27-32-23/151002 # Set current time to be analzyed (w/o .zip extension!), e.g. 184502 # Set output directory, eg Output/ Run-15-10-02-27-32-23/151002 f.write('Arguments = \"3 %s %s %s 1\"\n'%(dataDir,time,outDir)) # Standard cluster universe f.write('universe = vanilla\n') f.write('getenv = true\n') # Program needs at least 250 MB of free memory to hold unzipped data f.write('request_memory = 300\n') # Output, error and log name convention: run-day-time.log/out/err f.write('log = ../../Logs/%s-%s-%s.log\n'%(run,day,time)) f.write('Output = ../../Outs/%s-%s-%s.out\n'%(run,day,time)) f.write('Error = ../../Errs/%s-%ss-%s.err\n'%(run,day,time)) # Do not write any emails f.write('notification = never\n') f.write('+Department = Physics\n') f.write('should_transfer_files = NO\n') # Add single job to queue f.write('Queue') # Main function handling all internals # ----------------------------------------------------------------------------- def main(runMissing): # Choose main directory, i.e. ~/csi/beam_on_data/Run-15-06-25-xyz/ mainRunDir = '/var/phy/project/phil/grayson/COHERENT/CsI/' # Choose output directory, i.e. ~/output/Run-15-06-25-xyz/ mainOutDir = '/var/phy/project/phil/grayson/COHERENT/CsI/bjs-analysis/' # Choose run to analyze # runDirs = ['Run-15-03-27-12-42-26'] # run = 'Run-15-03-30-13-33-05' # run = 'Run-15-04-08-11-38-28' # run = 'Run-15-04-17-16-56-59' # run = 'Run-15-04-29-16-34-44' # runDirs = ['Run-15-05-05-16-09-12'] # run = 'Run-15-05-11-11-46-30' # run = 'Run-15-05-19-17-04-44' # run = 'Run-15-05-27-11-13-46' # runDirs = ['Run-15-05-05-16-09-12','Run-15-05-11-11-46-30','Run-15-05-19-17-04-44','Run-15-05-27-11-13-46'] runDirs = ['Run-15-03-27-12-42-26','Run-15-03-30-13-33-05','Run-15-04-08-11-38-28','Run-15-04-17-16-56-59','Run-15-04-29-16-34-44', 'Run-15-05-05-16-09-12','Run-15-05-11-11-46-30','Run-15-05-19-17-04-44','Run-15-05-27-11-13-46'] subdirs = {} days_in = {'Run-15-03-27-12-42-26': ['150327','150328','150329','150330'], 'Run-15-03-30-13-33-05': ['150330','150331','150401','150402','150403','150404','150405','150406','150407','150408'], 'Run-15-04-08-11-38-28': ['150408','150409','150410','150411','150412','150413','150414','150415','150416'], 'Run-15-04-17-16-56-59': ['150417','150418','150419','150420','150421','150422','150423','150424','150425','150426','150427','150428','150429'], 'Run-15-04-29-16-34-44': ['150429','150430','150501','150502','150503','150504','150505'], 'Run-15-05-05-16-09-12': ['150505','150506','150507','150508','150509','150510','150511'], 'Run-15-05-11-11-46-30': ['150512','150513','150514','150515','150516','150517','150518','150519'], 'Run-15-05-19-17-04-44': ['150519','150520','150521','150522','150523','150524','150525','150526','150527'], 'Run-15-05-27-11-13-46': ['150527','150528','150529','150530','150531','150601','150602','150603','150604','150605','150606','150607','150608','150609']} for run in runDirs: for day in days_in[run]: subdirs[run] = 'brillance_data' print run,day # Prepare paths 
for further processing dataRunDir = mainRunDir + '%s/%s/%s'%(subdirs[run],run,day) outDir = mainOutDir + '%s/%s'%(run,day) # Get all times within the day folder chosen inputList = [x.split('.')[0] for x in os.listdir(dataRunDir)] # Get all times within the day folder chosen outputList_B = [x.split('-')[1] for x in os.listdir(outDir) if 'B-' in x] outputList_S = [x.split('-')[1] for x in os.listdir(outDir) if 'S-' in x] outputList_I = [x.split('-')[1] for x in os.listdir(outDir) if 'I-' in x] # Check if there is a file missing in the day folder if len(inputList) != len(outputList_B) or len(inputList) != len(outputList_S) or len(inputList) != len(outputList_I): missingB = set(inputList) - set(outputList_B) missingI = set(inputList) - set(outputList_I) missingS = set(inputList) - set(outputList_S) missing = list((missingB | missingI) | missingS) if len(missing) > 0: print len(missing) if runMissing == '1': for m in missing: createCondorFile(dataRunDir,outDir,run,day,m) cmd = 'condor_submit /home/bjs66/CondorFiles/%s-%s-%s.condor'%(run,day,m) os.system(cmd) if __name__ == '__main__': main(sys.argv[1])
gpl-3.0
-2,322,044,558,256,752,000
42.485075
168
0.519478
false
3.00826
false
false
false
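Self-contained sketch (time stamps invented) of the set-difference bookkeeping the script above uses to find inputs that lack B-, S- or I- output files.

inputs    = {'184502', '184602', '184702'}
outputs_B = {'184502', '184702'}
outputs_S = {'184502'}
outputs_I = {'184502', '184602', '184702'}

# A time stamp is "missing" if any of the three output kinds is absent.
missing = (inputs - outputs_B) | (inputs - outputs_S) | (inputs - outputs_I)
print(sorted(missing))  # ['184602', '184702']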
iotaledger/iota.lib.py
iota/transaction/validator.py
1
9563
from typing import Generator, List, Optional, Type from iota.crypto.kerl import Kerl from iota.crypto.signing import validate_signature_fragments from iota.transaction.base import Bundle, Transaction __all__ = [ 'BundleValidator', ] # In very rare cases, the IOTA protocol may switch hash algorithms. # When this happens, the IOTA Foundation will create a snapshot, so # that all new objects on the Tangle use the new hash algorithm. # # However, the snapshot will still contain references to addresses # created using the legacy hash algorithm, so the bundle validator has # to be able to use that as a fallback when validation fails. SUPPORTED_SPONGE = Kerl LEGACY_SPONGE = None # Curl class BundleValidator(object): """ Checks a bundle and its transactions for problems. """ def __init__(self, bundle: Bundle) -> None: super(BundleValidator, self).__init__() self.bundle = bundle self._errors: Optional[List[str]] = [] self._validator = self._create_validator() @property def errors(self) -> List[str]: """ Returns all errors found with the bundle. """ try: self._errors.extend(self._validator) # type: List[str] except StopIteration: pass return self._errors def is_valid(self) -> bool: """ Returns whether the bundle is valid. """ if not self._errors: try: # We only have to check for a single error to determine # if the bundle is valid or not. self._errors.append(next(self._validator)) except StopIteration: pass return not self._errors def _create_validator(self) -> Generator[str, None, None]: """ Creates a generator that does all the work. """ # Group transactions by address to make it easier to iterate # over inputs. grouped_transactions = self.bundle.group_transactions() # Define a few expected values. bundle_hash = self.bundle.hash last_index = len(self.bundle) - 1 # Track a few others as we go along. balance = 0 # Check indices and balance first. # Note that we use a counter to keep track of the current index, # since at this point we can't trust that the transactions have # correct ``current_index`` values. counter = 0 for group in grouped_transactions: for txn in group: balance += txn.value if txn.bundle_hash != bundle_hash: yield 'Transaction {i} has invalid bundle hash.'.format( i=counter, ) if txn.current_index != counter: yield ( 'Transaction {i} has invalid current index value ' '(expected {i}, actual {actual}).'.format( actual=txn.current_index, i=counter, ) ) if txn.last_index != last_index: yield ( 'Transaction {i} has invalid last index value ' '(expected {expected}, actual {actual}).'.format( actual=txn.last_index, expected=last_index, i=counter, ) ) counter += 1 # Bundle must be balanced (spends must match inputs). if balance != 0: yield ( 'Bundle has invalid balance ' '(expected 0, actual {actual}).'.format( actual=balance, ) ) # Signature validation is only meaningful if the transactions # are otherwise valid. if not self._errors: signature_validation_queue: List[List[Transaction]] = [] for group in grouped_transactions: # Signature validation only applies to inputs. if group[0].value >= 0: continue validate_group_signature = True for j, txn in enumerate(group): if (j > 0) and (txn.value != 0): # Input is malformed; signature fragments after # the first should have zero value. yield ( 'Transaction {i} has invalid value ' '(expected 0, actual {actual}).'.format( actual=txn.value, # If we get to this point, we know that # the ``current_index`` value for each # transaction can be trusted. 
i=txn.current_index, ) ) # We won't be able to validate the signature, # but continue anyway, so that we can check that # the other transactions in the group have the # correct ``value``. validate_group_signature = False continue # After collecting the signature fragment from each # transaction in the group, queue them up to run through # the validator. # # We have to perform signature validation separately so # that we can try different algorithms (for # backwards-compatibility). # # References: # # - https://github.com/iotaledger/kerl#kerl-integration-in-iota if validate_group_signature: signature_validation_queue.append(group) # Once we've finished checking the attributes from each # transaction in the bundle, go back and validate # signatures. if signature_validation_queue: # ``yield from`` is an option here, but for # compatibility with Python 2 clients, we will do it the # old-fashioned way. for error in self._get_bundle_signature_errors( signature_validation_queue ): yield error def _get_bundle_signature_errors( self, groups: List[List[Transaction]] ) -> List[str]: """ Validates the signature fragments in the bundle. :return: List of error messages. If empty, signature fragments are valid. """ # Start with the currently-supported hash algo. current_pos = None current_errors = [] for current_pos, group in enumerate(groups): error = self._get_group_signature_error(group, SUPPORTED_SPONGE) if error: current_errors.append(error) # Pause and retry with the legacy algo. break # If validation failed, then go back and try with the legacy # algo (only applies if we are currently transitioning to a new # algo). if current_errors and LEGACY_SPONGE: for group in groups: if self._get_group_signature_error(group, LEGACY_SPONGE): # Legacy algo doesn't work, either; no point in # continuing. break else: # If we get here, then we were able to validate the # signature fragments successfully using the legacy # algorithm. return [] # If we get here, then validation also failed when using the # legacy algorithm. # At this point, we know that the bundle is invalid, but we will # continue validating with the supported algorithm anyway, so # that we can return an error message for every invalid input. current_errors.extend(filter(None, ( self._get_group_signature_error(group, SUPPORTED_SPONGE) for group in groups[current_pos + 1:] ))) return current_errors @staticmethod def _get_group_signature_error( group: List[Transaction], sponge_type: Type ) -> Optional[str]: """ Validates the signature fragments for a group of transactions using the specified sponge type. Note: this method assumes that the transactions in the group have already passed basic validation (see :py:meth:`_create_validator`). :return: - ``None``: Indicates that the signature fragments are valid. - ``str``: Error message indicating the fragments are invalid. """ validate_group_signature = validate_signature_fragments( fragments=[txn.signature_message_fragment for txn in group], hash_=group[0].bundle_hash, public_key=group[0].address, sponge_type=sponge_type, ) if validate_group_signature: return None return ( 'Transaction {i} has invalid signature ' '(using {fragments} fragments).'.format( fragments=len(group), i=group[0].current_index, ) )
mit
-1,442,031,086,814,618,400
35.361217
79
0.527136
false
5.146932
false
false
false
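Not from the library: a stripped-down, self-contained illustration of the cached-generator pattern BundleValidator uses, where is_valid() pulls at most one error and errors drains the rest; the toy checks below are invented.

from typing import Generator, List

class LazyValidator:
    """Toy stand-in showing the pattern, not the real bundle checks."""

    def __init__(self, values: List[int]) -> None:
        self._errors: List[str] = []
        self._validator = self._check(values)

    def _check(self, values: List[int]) -> Generator[str, None, None]:
        for i, v in enumerate(values):
            if v < 0:
                yield 'value {} is negative'.format(i)

    def is_valid(self) -> bool:
        # Pull at most one error; that is enough to answer yes/no cheaply.
        if not self._errors:
            try:
                self._errors.append(next(self._validator))
            except StopIteration:
                pass
        return not self._errors

    @property
    def errors(self) -> List[str]:
        # Drain the rest of the generator so every problem gets reported.
        self._errors.extend(self._validator)
        return self._errors

v = LazyValidator([1, -2, -3])
print(v.is_valid())  # False
print(v.errors)      # ['value 1 is negative', 'value 2 is negative']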
nilbody/h2o-3
h2o-py/tests/testdir_golden/pyunit_svd_1_golden.py
1
2402
from __future__ import print_function from builtins import zip import sys sys.path.insert(1,"../../") import h2o from tests import pyunit_utils def svd_1_golden(): print("Importing USArrests.csv data...") arrestsH2O = h2o.upload_file(pyunit_utils.locate("smalldata/pca_test/USArrests.csv")) print("Compare with SVD") fitH2O = h2o.svd(x=arrestsH2O[0:4], nv=4, transform="NONE", max_iterations=2000) print("Compare singular values (D)") h2o_d = fitH2O._model_json['output']['d'] r_d = [1419.06139509772, 194.825846110138, 45.6613376308754, 18.0695566224677] print("R Singular Values: {0}".format(r_d)) print("H2O Singular Values: {0}".format(h2o_d)) for r, h in zip(r_d, h2o_d): assert abs(r - h) < 1e-6, "H2O got {0}, but R got {1}".format(h, r) print("Compare right singular vectors (V)") h2o_v = h2o.as_list(h2o.get_frame(fitH2O._model_json['output']['v_key']['name']), use_pandas=False) h2o_v.pop(0) r_v = [[-0.04239181, 0.01616262, -0.06588426, 0.99679535], [-0.94395706, 0.32068580, 0.06655170, -0.04094568], [-0.30842767, -0.93845891, 0.15496743, 0.01234261], [-0.10963744, -0.12725666, -0.98347101, -0.06760284]] print("R Right Singular Vectors: {0}".format(r_v)) print("H2O Right Singular Vectors: {0}".format(h2o_v)) for rl, hl in zip(r_v, h2o_v): for r, h in zip(rl, hl): assert abs(abs(r) - abs(float(h))) < 1e-5, "H2O got {0}, but R got {1}".format(h, r) print("Compare left singular vectors (U)") h2o_u = h2o.as_list(h2o.get_frame(fitH2O._model_json['output']['u_key']['name']), use_pandas=False) h2o_u.pop(0) r_u = [[-0.1716251, 0.096325710, 0.06515480, 0.15369551], [-0.1891166, 0.173452566, -0.42665785, -0.17801438], [-0.2155930, 0.078998111, 0.02063740, -0.28070784], [-0.1390244, 0.059889811, 0.01392269, 0.01610418], [-0.2067788, -0.009812026, -0.17633244, -0.21867425], [-0.1558794, -0.064555293, -0.28288280, -0.11797419]] print("R Left Singular Vectors: {0}".format(r_u)) print("H2O Left Singular Vectors: {0}".format(h2o_u)) for rl, hl in zip(r_u, h2o_u): for r, h in zip(rl, hl): assert abs(abs(r) - abs(float(h))) < 1e-5, "H2O got {0}, but R got {1}".format(h, r) if __name__ == "__main__": pyunit_utils.standalone_test(svd_1_golden) else: svd_1_golden()
apache-2.0
-2,250,794,566,595,891,700
39.711864
117
0.61199
false
2.404404
false
false
false
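A numpy-only aside (random data, not the USArrests values) showing why the golden test above compares absolute values of U and V: singular vectors are only defined up to sign.

import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(6, 4))

U, d, Vt = np.linalg.svd(X, full_matrices=False)

# Flipping the sign of a matching column of U and row of Vt leaves X unchanged,
# so two correct SVD implementations may disagree in sign but not in magnitude.
U2, Vt2 = U.copy(), Vt.copy()
U2[:, 0] *= -1
Vt2[0, :] *= -1
print(np.allclose(U @ np.diag(d) @ Vt, U2 @ np.diag(d) @ Vt2))  # True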
coreymcdermott/artbot
artbot_scraper/spiders/arthouse_spider.py
1
2106
# -*- coding: utf-8 -*- import re from scrapy import Spider, Request from dateutil import parser from artbot_scraper.items import EventItem from pytz import timezone class ArthouseSpider(Spider): name = 'Arthouse Gallery' allowed_domains = ['www.arthousegallery.com.au'] start_urls = ['http://www.arthousegallery.com.au/exhibitions/'] def parse(self, response): for href in response.xpath('//div[contains(@id, "index")]//li//a/@href'): url = response.urljoin(href.extract()) yield Request(url, callback=self.parse_exhibition) def parse_exhibition(self, response): item = EventItem() item['url'] = response.url item['venue'] = self.name item['title'] = response.xpath('//div[contains(@id, "headerTitle")]//text()').extract_first().strip() \ + ' - ' \ + response.xpath('//div[contains(@id, "headerSubTitle")]//em/text()').extract_first().strip() item['description'] = ''.join(response.xpath('//div[contains(@id, "exhibition")]//hr/following-sibling::p//text()').extract()).strip() item['image'] = response.urljoin(response.xpath('//img//@src').extract_first()) season = ''.join(response.xpath('//div[contains(@id, "headerSubTitle")]//text()[not(ancestor::em)]').extract()).strip() match = re.match(u'(?P<start>^[\d\w\s]+)[\s\-\–]*(?P<end>[\d\w\s]+$)', season) if (match): tz = timezone('Australia/Sydney') start = tz.localize(parser.parse(match.group('start'), fuzzy = True)) end = tz.localize(parser.parse(match.group('end'), fuzzy = True)) if (re.match(u'^\d+$', match.group('start'))): start = start.replace(month=end.month, year=end.year) if (re.match(u'^\d+\s+\w+$', match.group('start'))): start = start.replace(year=end.year) item['start'] = start item['end'] = end yield item
mit
2,145,790,434,256,513,000
43.765957
142
0.545627
false
3.665505
false
false
false
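A standalone sketch (example season string invented) of the date-range parsing the spider above applies to exhibition seasons; it reuses the same regular expression and dateutil/pytz calls outside of Scrapy.

import re
from dateutil import parser
from pytz import timezone

SEASON_RE = re.compile(r'(?P<start>^[\d\w\s]+)[\s\-–]*(?P<end>[\d\w\s]+$)')

def parse_season(season):
    match = SEASON_RE.match(season)
    if not match:
        return None
    tz = timezone('Australia/Sydney')
    start = tz.localize(parser.parse(match.group('start'), fuzzy=True))
    end = tz.localize(parser.parse(match.group('end'), fuzzy=True))
    # A bare day number inherits month and year from the end date;
    # a "day month" start inherits only the year.
    if re.match(r'^\d+$', match.group('start')):
        start = start.replace(month=end.month, year=end.year)
    if re.match(r'^\d+\s+\w+$', match.group('start')):
        start = start.replace(year=end.year)
    return start, end

print(parse_season('12 March–9 April 2016'))  # Sydney-localized datetimes for 12 March 2016 and 9 April 2016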
mmb90/dftintegrate
dftintegrate/fourier/readdata.py
1
5536
""" Classes:: ReadData -- A collection of functions to collect extracted VASP/QE data into a json file. """ import json from copy import deepcopy from ast import literal_eval from collections import defaultdict class ReadData(object): """ A collection of functions to collect extracted VASP/QE data into a json file. Variables:: name -- A string containing the name to the extracted data. kmax -- A number that determines how many terms can be used in the fourier representation based on the density of the sample points. kgrid -- A list of lists. Each inner list is a triplet that represents a k-point. The outer list is the collection of the triplets or k-points and therefore represents the k-kgrid. Note these are the irreducible k-points. weights -- A list of floats. Since kgrid represents the irreducible wedge, each k-point has a weight that represents in a way how degenerate it is. These are in the same order as their corresponding k-points in kgrid. eigenvals -- A dictionary. At each k-point there is an eigenvalue (energy) for each band that was calculated. The keys are the band numbers and the values are a list of energies for that band at each k-point. symops -- A triple nested list. The outer list is a collection of matrices that represent the symmetry operators for the system calculated. The inner double nested lists are representations of the matrices. trans -- A list of lists. Each symmetry operator has a translation vector associated with it. We aren't sure what they are for but we have them so we can implement them if we figure it out. Functions:: _read_lines -- Read a file yielding one line at a time. Generator. read_kpts_eigenvals -- Read kpts_eigenvals.dat in as a list of k-points and a dictionary. read_symops_trans -- Read symops_trans.dat in as two lists. read_kmax -- Read kmax from kmax.dat in. For example one might run a calulation with a grid of 4 4 4, in this case k is 4. This is needed in the Fourier basis fit to ensure the highest frequency term doesn't exceed the Nyquist frequency. This means that the highest frequency can't exeed k/2, so if k is 4 then the highest frequency can't exeed 2. Since we are in 3D we have to consider sqrt(x^2+x^2+x^2) < k/2, thus x = kmax = ceil(k/(2sqrt(3)). serialize -- Serialize the data to a json file. """ def __init__(self, name_of_data_directory): """ Arguments:: name_of_data_directory -- A string containing the name to the VASP data. """ self.name = name_of_data_directory self.read_kpts_eigenvals() self.read_symops_trans() self.read_kmax() self.serialize() def _read_lines(self, path_to_file): """ Read file, yield line by line. Arguments:: path_to_file -- String containing the path to the file. """ with open(path_to_file) as inf: for line in inf: yield [literal_eval(x) for x in line.strip().split()] def read_kpts_eigenvals(self): """ Read in kpts_eigenvals.dat with _read_lines. Stores the k-pionts in kgrid, the weights in weights, and the band energy (eigenvalues) in eigenvals. See this class's (ReadData) docstring for more details on kgrid, weights, and eigenvals. """ name = self.name kgrid = [] weights = [] eigenvals = defaultdict(list) for line in self._read_lines(name + '/kpts_eigenvals.dat'): if len(line) == 4: kgrid.append(line[:3]) weights.append(line[-1]) elif len(line) == 2: eigenvals[line[0]].append(line[1]) self.kgrid = kgrid self.weights = weights self.eigenvals = eigenvals def read_symops_trans(self): """ Read in symops_trans.dat with _read_lines. Stores the symmetry operators in symops and the translations in trans. 
See this class's (ReadData) docstring for more details on symops and trans. """ name = self.name symops = [] symop = [] trans = [] lines = self._read_lines(name + '/symops_trans.dat') for line in lines: symop.append(line) symop.append(next(lines)) symop.append(next(lines)) next(lines) tran = next(lines) next(lines) symops.append(deepcopy(symop)) trans.append(tran) symop.clear() self.symops = symops self.trans = trans def read_kmax(self): """ Read in kmax.dat using _read_lines. Only the first line will be read. It will be assigned to self.kmax. """ name = self.name lines = self._read_lines(name + '/kmax.dat') self.kmax = next(lines)[0] def serialize(self): data_dict = {'kmax': self.kmax, 'kgrid': self.kgrid, 'weights': self.weights, 'eigenvals': self.eigenvals, 'symops': self.symops, 'trans': self.trans} with open(self.name + '/data.json', mode='w', encoding='utf-8') as outf: json.dump(data_dict, outf, indent=2)
mit
-6,101,243,481,345,637,000
34.716129
74
0.601156
false
4.052709
false
false
false
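A tiny arithmetic sketch of the Nyquist-style bound described in the read_kmax docstring above; the grid sizes are example values.

from math import ceil, sqrt

def kmax_from_grid(k):
    # Highest usable frequency in 3D: sqrt(3) * x <= k/2, so x = ceil(k / (2*sqrt(3))).
    return ceil(k / (2 * sqrt(3)))

for k in (4, 8, 12):
    print(k, kmax_from_grid(k))  # 4 -> 2, 8 -> 3, 12 -> 4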
nkmk/python-snippets
notebook/pypdf2_metadata_remove.py
1
4178
import PyPDF2 src_pdf = PyPDF2.PdfFileReader('data/src/pdf/sample1.pdf') dst_pdf = PyPDF2.PdfFileWriter() dst_pdf.cloneReaderDocumentRoot(src_pdf) with open('data/temp/sample1_no_meta.pdf', 'wb') as f: dst_pdf.write(f) print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo) # {'/Producer': 'PyPDF2'} dst_pdf.addMetadata({'/Producer': ''}) with open('data/temp/sample1_no_meta.pdf', 'wb') as f: dst_pdf.write(f) print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo) # {'/Producer': ''} def remove_all_metadata(src_path, dst_path, producer=''): src_pdf = PyPDF2.PdfFileReader(src_path) dst_pdf = PyPDF2.PdfFileWriter() dst_pdf.cloneReaderDocumentRoot(src_pdf) dst_pdf.addMetadata({'/Producer': producer}) with open(dst_path, 'wb') as f: dst_pdf.write(f) remove_all_metadata('data/src/pdf/sample1.pdf', 'data/temp/sample1_no_meta.pdf') print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo) # {'/Producer': ''} src_pdf = PyPDF2.PdfFileReader('data/src/pdf/sample1.pdf') dst_pdf = PyPDF2.PdfFileWriter() d = {key: src_pdf.documentInfo[key] for key in src_pdf.documentInfo.keys()} print(d) # {'/Title': 'sample1', '/Producer': 'macOS バージョン10.14.2(ビルド18C54) Quartz PDFContext', '/Creator': 'Keynote', '/CreationDate': "D:20190114072947Z00'00'", '/ModDate': "D:20190114072947Z00'00'"} d.pop('/Creator') d.pop('/Producer') print(d) # {'/Title': 'sample1', '/CreationDate': "D:20190114072947Z00'00'", '/ModDate': "D:20190114072947Z00'00'"} dst_pdf.addMetadata(d) with open('data/temp/sample1_remove_meta.pdf', 'wb') as f: dst_pdf.write(f) print(PyPDF2.PdfFileReader('data/temp/sample1_remove_meta.pdf').documentInfo) # {'/Producer': 'PyPDF2', '/Title': 'sample1', '/CreationDate': "D:20190114072947Z00'00'", '/ModDate': "D:20190114072947Z00'00'"} def remove_metadata(src_path, dst_path, *args, producer=''): src_pdf = PyPDF2.PdfFileReader(src_path) dst_pdf = PyPDF2.PdfFileWriter() dst_pdf.cloneReaderDocumentRoot(src_pdf) d = {key: src_pdf.documentInfo[key] for key in src_pdf.documentInfo.keys() if key not in args} d.setdefault('/Producer', producer) dst_pdf.addMetadata(d) with open(dst_path, 'wb') as f: dst_pdf.write(f) remove_metadata('data/src/pdf/sample1.pdf', 'data/temp/sample1_no_meta.pdf', '/Creator', '/ModDate', '/CreationDate') print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo) # {'/Producer': 'macOS バージョン10.14.2(ビルド18C54) Quartz PDFContext', '/Title': 'sample1'} remove_metadata('data/src/pdf/sample1.pdf', 'data/temp/sample1_no_meta.pdf', '/Creator', '/ModDate', '/CreationDate', '/Producer') print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo) # {'/Producer': '', '/Title': 'sample1'} remove_metadata('data/src/pdf/sample1.pdf', 'data/temp/sample1_no_meta.pdf', '/Creator', '/ModDate', '/CreationDate', '/Producer', producer='XXX') print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo) # {'/Producer': 'XXX', '/Title': 'sample1'} def select_metadata(src_path, dst_path, *args, producer=''): src_pdf = PyPDF2.PdfFileReader(src_path) dst_pdf = PyPDF2.PdfFileWriter() dst_pdf.cloneReaderDocumentRoot(src_pdf) d = {key: src_pdf.documentInfo[key] for key in src_pdf.documentInfo.keys() if key in args} d.setdefault('/Producer', producer) dst_pdf.addMetadata(d) with open(dst_path, 'wb') as f: dst_pdf.write(f) select_metadata('data/src/pdf/sample1.pdf', 'data/temp/sample1_no_meta.pdf', '/Title', '/ModDate') print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo) # {'/Producer': '', '/Title': 'sample1', 
'/ModDate': "D:20190114072947Z00'00'"} select_metadata('data/src/pdf/sample1.pdf', 'data/temp/sample1_no_meta.pdf', '/Title', '/Producer') print(PyPDF2.PdfFileReader('data/temp/sample1_no_meta.pdf').documentInfo) # {'/Producer': 'macOS バージョン10.14.2(ビルド18C54) Quartz PDFContext', '/Title': 'sample1'}
mit
287,211,407,982,853,760
37.485981
192
0.675085
false
2.679245
false
false
false
feureau/Small-Scripts
Blender/Blender config/2.91/scripts/addons/bricker_v2-2-1/lib/property_groups/created_model_properties.py
1
35831
# Copyright (C) 2020 Christopher Gearhart # [email protected] # http://bblanimation.com/ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # System imports # NONE! # Blender imports import bpy from bpy.props import * # Module imports from .boolean_properties import * from ...functions.property_callbacks import * # Create custom property group class CreatedModelProperties(bpy.types.PropertyGroup): # CMLIST ITEM SETTINGS name = StringProperty(update=uniquify_name) id = IntProperty() idx = IntProperty() # NAME OF SOURCE source_obj = PointerProperty( type=bpy.types.Object, poll=lambda self, object: object.type == "MESH", name="Source Object", description="Name of the source object to Brickify", update=set_default_obj_if_empty, ) # TRANSFORMATION SETTINGS model_loc = StringProperty(default="-1,-1,-1") model_rot = StringProperty(default="-1,-1,-1") model_scale = StringProperty(default="-1,-1,-1") transform_scale = FloatProperty( name="Scale", description="Scale of the brick model", update=update_model_scale, step=1, default=1.0, ) apply_to_source_object = BoolProperty( name="Apply to source", description="Apply transformations to source object when Brick Model is deleted", default=True, ) parent_obj = PointerProperty( type=bpy.types.Object, name="Parent Object", description="Name of the parent object used for model transformations", ) expose_parent = BoolProperty( name="Show Manipulator", description="Expose the parent object for this brick model for viewport manipulation", update=update_parent_exposure, default=False, ) # ANIMATION SETTINGS use_animation = BoolProperty( name="Use Animation", description="Create Brick Model for each frame, from start to stop frame (WARNING: Calculation takes time, and may result in large blend file)", default=False, ) start_frame = IntProperty( name="Start", description="First frame of the brick animation", update=dirty_anim, min=0, max=500000, default=1, ) stop_frame = IntProperty( name="End", description="Final frame of the brick animation", update=dirty_anim, min=0, max=500000, default=10, ) step_frame = IntProperty( name="Step", description="Number of frames to skip forward when generating the brick animation", update=dirty_anim, min=0, max=500000, default=1, ) # BASIC MODEL SETTINGS brick_height = FloatProperty( name="Brick Height", description="Height of the bricks in the final Brick Model (in meters, excluding the stud)", update=dirty_matrix, subtype="DISTANCE", step=1, precision=3, min = 0.000001, soft_min=0.001, soft_max=10, default=0.1, ) gap = FloatProperty( name="Gap Between Bricks", description="Distance between bricks (relative to brick height)", update=dirty_matrix, subtype="PERCENTAGE", step=1, precision=1, min=0.0, max=100.0, default=0.5, ) split_model = BoolProperty( name="Split Model", description="Split model into separate objects (slower)", update=dirty_model, default=False, ) random_loc = FloatProperty( name="Random Location", description="Max random location applied 
to each brick", update=dirty_model, step=1, precision=3, min=0, soft_max=1, default=0.01, ) random_rot = FloatProperty( name="Random Rotation", description="Max random rotation applied to each brick", update=dirty_model, step=1, precision=3, min=0, soft_max=1, default=0.025, ) shell_thickness = IntProperty( name="Shell Thickness", description="Thickness of the outer shell of bricks", update=dirty_build, min=1, max=50, default=1, ) # MERGE SETTINGS merge_type = EnumProperty( name="Merge Type", description="Type of algorithm used for merging bricks together", items=[ # ("NONE", "None (fast)", "Bricks are not merged"), ("GREEDY", "Greedy", "Creates fewest amount of bricks possible"), ("RANDOM", "Random", "Merges randomly for realistic build"), ], update=dirty_build, default="RANDOM", ) legal_bricks_only = BoolProperty( name="Legal Bricks Only", description="Construct model using only legal brick sizes", update=dirty_build, default=True, ) merge_seed = IntProperty( name="Seed", description="Random seed for brick merging calculations", update=dirty_build, min=0, default=1000, ) align_bricks = BoolProperty( name="Align Bricks Horizontally", description="Keep bricks aligned horizontally, and fill the gaps with plates", update=dirty_build, default=True, ) offset_brick_layers = IntProperty( name="Offset Brick Layers", description="Offset the layers that will be merged into bricks if possible", update=dirty_build, step=1, min=0, max=2, default=0, ) # SMOKE SETTINGS smoke_density = FloatProperty( name="Smoke Density", description="Density of brickified smoke (threshold for smoke: 1 - d)", update=dirty_matrix, min=0, max=1, default=0.9, ) smoke_quality = FloatProperty( name="Smoke Quality", description="Amount of data to analyze for density and color of brickified smoke", update=dirty_matrix, min=1, soft_max=100, default=1, ) smoke_brightness = FloatProperty( name="Smoke Brightness", description="Add brightness to smoke colors read from smoke data", update=dirty_matrix, soft_min=0, soft_max=100, default=1, ) smoke_saturation = FloatProperty( name="Smoke Saturation", description="Change saturation level of smoke colors read from smoke data", update=dirty_matrix, min=0, soft_max=100, default=1, ) flame_color = FloatVectorProperty( name="Hex Value", subtype="COLOR", update=dirty_matrix, default=[1.0, 0.63, 0.2], ) flame_intensity = FloatProperty( name="Flame Intensity", description="Intensity of the flames", update=dirty_matrix, min=1, soft_max=50, default=4, ) # BRICK TYPE SETTINGS brick_type = EnumProperty( name="Brick Type", description="Type of brick used to build the model", items=get_brick_type_items, update=update_brick_type, # default="BRICKS", ) max_width = IntProperty( name="Max Width", description="Maximum brick width in studs", update=dirty_build, step=1, min=1, soft_max=100, default=8, ) max_depth = IntProperty( name="Max Depth", description="Maximum brick depth in studs", update=dirty_build, step=1, min=1, soft_max=100, default=24, ) custom_object1 = PointerProperty( type=bpy.types.Object, poll=lambda self, object: object.type == "MESH" and object != self.source_obj and not object.name.startswith("Bricker_{}".format(self.source_obj.name)), name="Custom Object 1", description="Custom object to use as brick type", ) custom_mesh1 = PointerProperty( type=bpy.types.Mesh, name="Custom Mesh 1", description="Cached mesh from Custom Object 1 with materials applied/transform removed", ) custom_object2 = PointerProperty( type=bpy.types.Object, poll=lambda self, object: object.type == "MESH" and object 
!= self.source_obj and not object.name.startswith("Bricker_{}".format(self.source_obj.name)), name="Custom Object 2", description="Custom object to use as brick type", ) custom_mesh2 = PointerProperty( type=bpy.types.Mesh, name="Custom Mesh 2", description="Cached mesh from Custom Object 2 with materials applied/transform removed", ) custom_object3 = PointerProperty( type=bpy.types.Object, poll=lambda self, object: object.type == "MESH" and object != self.source_obj and not object.name.startswith("Bricker_{}".format(self.source_obj.name)), name="Custom Object 3", description="Custom object to use as brick type", ) custom_mesh3 = PointerProperty( type=bpy.types.Mesh, name="Custom Mesh 3", description="Cached mesh from Custom Object 3 with materials applied/transform removed", ) dist_offset = FloatVectorProperty( name="Offset Distance", description="Offset distance between custom bricks (1.0 = side-by-side)", update=dirty_matrix, step=1, precision=3, subtype="TRANSLATION", min=0.001, soft_max=1.0, default=(1, 1, 1), ) # BOOLEAN SETTINGS booleans = CollectionProperty(type=BooleanProperties) boolean_index = IntProperty(default=-1) # MATERIAL & COLOR SETTINGS material_type = EnumProperty( name="Material Type", description="Choose what materials will be applied to model", items=[ ("NONE", "None", "No material applied to bricks"), ("CUSTOM", "Single Material", "Choose one material to apply to all generated bricks"), ("RANDOM", "Random", "Apply a random material from Brick materials to each generated brick"), ("SOURCE", "Use Source Materials", "Apply material based on closest intersecting face"), ], update=dirty_build, default="SOURCE", ) custom_mat = PointerProperty( type=bpy.types.Material, name="Custom Material", description="Material to apply to all bricks", ) internal_mat = PointerProperty( type=bpy.types.Material, name="Internal Material", description="Material to apply to bricks inside material shell", update=dirty_material, ) mat_shell_depth = IntProperty( name="Shell Material Depth", description="Depth to which the outer materials should be applied (1 = Only exposed bricks)", step=1, min=1, max=50, update=dirty_matrix, default=1, ) merge_internals = EnumProperty( name="Merge Shell with Internals", description="Merge bricks on shell with internal bricks", items=[ ("NEITHER", "Neither", "Don't merge shell bricks with internals in either direction"), ("HORIZONTAL", "Horizontal", "Merge shell bricks with internals horizontally, but not vertically"), ("VERTICAL", "Vertical", "Merge shell bricks with internals vertically, but not horizontally"), ("BOTH", "Horizontal & Vertical", "Merge shell bricks with internals in both directions"), ], update=dirty_build, default="BOTH", ) random_mat_seed = IntProperty( name="Seed", description="Random seed for material assignment", min=0, update=dirty_material, default=1000, ) use_uv_map = BoolProperty( name="Use UV Map", description="Transfer colors from UV map (if disabled or no UV map found, brick color will be based on RGB of first shader node)", update=dirty_build, default=True, ) uv_image = PointerProperty( type=bpy.types.Image, name="UV Image", description="UV Image to use for UV Map color transfer (defaults to active UV if left blank)", update=dirty_build, ) color_snap = EnumProperty( name="Color Mapping", description="Method for mapping source material(s)/texture(s) to new materials", items=[ ("NONE", "None", "Use source material(s)"), ("RGB", "RGB", "Map RGB values to new materials (similar materials will merge into one material based on 
threshold)"), ("ABS", "ABS", "Map RGB values to nearest ABS Plastic Materials") ], update=dirty_build, default="RGB", ) color_depth = IntProperty( name="Color Depth", description="Number of colors to use in representing the UV texture (2^depth colors are created)", min=1, max=10, update=dirty_build, default=4, ) blur_radius = IntProperty( name="Blur Radius", description="Distance over which to blur the image before sampling", min=0, max=10, update=dirty_build, default=0, # 1 ) color_snap_specular = FloatProperty( name="Specular", description="Specular value for the created materials", subtype="FACTOR", precision=3, min=0.0, soft_max=1.0, update=dirty_material, default=0.5, ) color_snap_roughness = FloatProperty( name="Roughness", description="Roughness value for the created materials", subtype="FACTOR", precision=3, min=0.0, soft_max=1.0, update=dirty_material, default=0.5, ) color_snap_sss = FloatProperty( name="Subsurface Scattering", description="Subsurface scattering value for the created materials", subtype="FACTOR", precision=3, min=0.0, soft_max=1.0, update=dirty_material, default=0.0, ) color_snap_sss_saturation = FloatProperty( name="SSS Saturation", description="Saturation of the subsurface scattering for the created materials (relative to base color value)", subtype="FACTOR", precision=3, min=0.0, soft_max=1.0, update=dirty_material, default=1.0, ) color_snap_ior = FloatProperty( name="IOR", description="IOR value for the created materials", precision=3, min=0.0, soft_max=1000.0, update=dirty_material, default=1.45, ) color_snap_transmission = FloatProperty( name="Transmission", description="Transmission value for the created materials", subtype="FACTOR", precision=3, min=0.0, soft_max=1.0, update=dirty_material, default=0.0, ) color_snap_displacement = FloatProperty( name="Displacement", description="Displacement value for the created materials (overrides ABS Plastic displacement value)", subtype="FACTOR", precision=3, min=0.0, soft_max=1.0, update=dirty_material, default=0.04, ) use_abs_template = BoolProperty( name="Use ABS Template", description="Use the default ABS Plastic Material node tree to build the RGB materials", update=dirty_material, default=True, ) include_transparency = BoolProperty( name="Include Transparency", description="Include alpha value of original material color", update=dirty_build, default=True, ) transparent_weight = FloatProperty( name="Transparency Weight", description="How much the original material's alpha value affects the chosen ABS Plastic Material", precision=1, min=0, soft_max=2, update=dirty_material, default=1, ) target_material = PointerProperty( name="Target Material", type=bpy.types.Material, description="Add material to materials list", update=add_material_to_list, ) target_material_message = StringProperty( description="Message from target material chosen (warning or success)", default="", ) target_material_time = StringProperty( # stored as string because float cuts off digits description="'str(time.time())' from when the material message was created", default="0", ) # BRICK DETAIL SETTINGS stud_detail = EnumProperty( name="Stud Detailing", description="Choose where to draw brick studs", items=[ ("NONE", "None", "Don't include brick studs/logos on bricks"), ("EXPOSED", "Exposed Bricks", "Include brick studs/logos only on bricks with the top exposed"), ("ALL", "All Bricks", "Include brick studs/logos only on bricks with the top exposed"), ], update=dirty_bricks, default="EXPOSED", ) logo_type = EnumProperty( name="Logo Type", 
description="Choose logo type to draw on brick studs", items=get_logo_types, update=dirty_bricks, # default="NONE", ) logo_resolution = IntProperty( name="Resolution", description="Resolution of the brick logo", update=dirty_bricks, min=1, soft_max=10, default=2, ) logo_decimate = FloatProperty( name="Decimate", description="Decimate the brick logo (lower number for higher resolution)", update=dirty_bricks, precision=0, min=0, max=10, default=7.25, ) logo_object = PointerProperty( type=bpy.types.Object, poll=lambda self, object: object.type == "MESH" and object != self.source_obj and not object.name.startswith("Bricker_{}".format(self.source_obj.name)), name="Logo Object", description="Select a custom logo object to use on top of each stud", update=dirty_bricks, ) logo_scale = FloatProperty( name="Logo Scale", description="Logo scale relative to stud scale", subtype="PERCENTAGE", step=1, update=dirty_bricks, precision=1, min=0.0001, soft_max=100.0, default=78.0, ) logo_inset = FloatProperty( name="Logo Inset", description="How far to inset logo to stud", subtype="PERCENTAGE", step=1, update=dirty_bricks, precision=1, soft_min=0.0, soft_max=100.0, default=50.0, ) hidden_underside_detail = EnumProperty( name="Underside Detailing of Obstructed Bricks", description="Level of detail on underside of bricks with obstructed undersides", items=[ ("FLAT", "Flat", "Draw single face on brick underside", 0), ("LOW", "Low Detail", "Hollow out brick underside and draw tube supports", 1), ("HIGH", "High Detail", "Draw underside of bricks at full detail (support beams, ticks, inset tubes)", 3), ], update=dirty_bricks, default="FLAT", ) exposed_underside_detail = EnumProperty( name="Underside Detailing of Exposed Bricks", description="Level of detail on underside of bricks with exposed undersides", items=[ ("FLAT", "Flat", "Draw single face on brick underside", 0), ("LOW", "Low Detail", "Hollow out brick underside and draw tube supports", 1), ("HIGH", "High Detail", "Draw underside of bricks at full detail (support beams, ticks, inset tubes)", 3), ], update=dirty_bricks, default="FLAT", ) circle_verts = IntProperty( name="Vertices", description="Number of vertices in each circle in brick mesh", update=update_circle_verts, min=4, soft_max=64, default=16, ) # BEVEL SETTINGS bevel_added = BoolProperty( name="Bevel Bricks", description="Bevel brick edges and corners for added realism", default=False, ) bevel_show_render = BoolProperty( name="Render", description="Use modifier during render", update=update_bevel_render, default=True, ) bevel_show_viewport = BoolProperty( name="Realtime", description="Display modifier in viewport", update=update_bevel_viewport, default=True, ) bevel_show_edit_mode = BoolProperty( name="Edit Mode", description="Display modifier in Edit mode", update=update_bevel_edit_mode, default=True, ) bevel_width = FloatProperty( name="Bevel Width", description="Bevel amount (relative to Brick Height)", subtype="DISTANCE", step=1, min=0.0, soft_max=10, update=update_bevel, default=0.01, ) bevel_segments = IntProperty( name="Bevel Resolution", description="Number of segments for round edges/verts", step=1, min=1, max=100, update=update_bevel, default=1, ) bevel_profile = FloatProperty( name="Bevel Profile", description="The profile shape (0.5 = round)", subtype="FACTOR", step=1, min=0.0, max=1.0, update=update_bevel, default=0.7, ) # INTERNAL SUPPORTS SETTINGS internal_supports = EnumProperty( name="Internal Supports", description="Choose what type of brick support structure to use inside your 
model", items=[ ("NONE", "None", "No internal supports"), ("COLUMNS", "Columns", "Use columns inside model"), ("LATTICE", "Lattice", "Use latice inside model"), ], update=dirty_internal, default="NONE", ) lattice_step = IntProperty( name="Step", description="Distance between cross-beams", update=dirty_internal, step=1, min=2, soft_max=100, default=4, ) lattice_height = IntProperty( name="Height", description="Height of the cross-beams", update=dirty_internal, step=1, min=1, soft_max=100, default=1, ) alternate_xy = BoolProperty( name="Alternate X and Y", description="Alternate back-and-forth and side-to-side beams", update=dirty_internal, default=True, ) col_thickness = IntProperty( name="Thickness", description="Thickness of the columns", update=dirty_internal, min=1, soft_max=100, default=2, ) col_step = IntProperty( name="Step", description="Distance between columns", update=dirty_internal, step=1, min=1, soft_max=100, default=2, ) # ADVANCED SETTINGS insideness_ray_cast_dir = EnumProperty( name="Insideness Ray Cast Direction", description="Ray cast method for calculation of insideness", items=[ ("HIGH_EFFICIENCY", "High Efficiency", "Reuses single intersection ray cast for insideness calculation"), ("X", "X", "Cast rays along X axis for insideness calculations"), ("Y", "Y", "Cast rays along Y axis for insideness calculations"), ("Z", "Z", "Cast rays along Z axis for insideness calculations"), ("XYZ", "XYZ (Best Result)", "Cast rays in all axis directions for insideness calculation (slowest; uses result consistent for at least 2 of the 3 rays)"), ], update=dirty_matrix, default="HIGH_EFFICIENCY", ) brick_shell = EnumProperty( name="Brick Shell", description="Choose whether the outer shell of bricks will be inside or outside the source mesh", items=[ ("INSIDE", "Inside Mesh", "Draw brick shell inside source mesh (recommended)"), ("OUTSIDE", "Outside Mesh", "Draw brick shell outside source mesh"), ("CONSISTENT", "Consistent", "Draw brick shell on a consistent side of the source mesh topology (may fix noisy model if source mesh is not water-tight)"), ], update=update_brick_shell, default="INSIDE", ) calculation_axes = EnumProperty( name="Expanded Axes", description="The brick shell will be drawn on the outside in these directions", items=[ ("XYZ", "XYZ", "XYZ"), ("XY", "XY", "XY"), ("YZ", "YZ", "YZ"), ("XZ", "XZ", "XZ"), ("X", "X", "X"), ("Y", "Y", "Y"), ("Z", "Z", "Z"), ], update=dirty_matrix, default="XY", ) use_normals = BoolProperty( name="Use Normals", description="Use normals to calculate insideness of bricks (may improve the result if normals on source mesh are oriented correctly)", update=dirty_matrix, default=False, ) grid_offset = FloatVectorProperty( name="Grid Offset", description="Offset the brick grid along the volume of the source mesh (factor of brick dimensions)", subtype="XYZ", min=-1, max=1, update=dirty_matrix, default=(0, 0, 0), ) use_absolute_grid = BoolProperty( name="Absolute Grid Coords", description="Place bricks on a fixed grid that is consistent between all models", update=dirty_matrix, default=False, ) use_absolute_grid_anim = BoolProperty( name="Absolute Grid Coords", description="Place bricks on a fixed grid that is consistent between all models", update=dirty_matrix, default=True, ) calc_internals = BoolProperty( name="Calculate Internals", description="Calculate values for bricks inside shell (disable for faster calculation at the loss of the 'Shell Thickness' and 'Supports' features)", update=dirty_matrix, default=True, ) use_local_orient = BoolProperty( 
name="Use Local Orient", description="Generate bricks based on local orientation of source object", default=False, ) instance_method = EnumProperty( name="Instance Method", description="Method to use for instancing equivalent meshes to save on memory and render times", items=[ ("NONE", "None", "No object instancing"), ("LINK_DATA", "Link Data", "Link mesh data for like objects when 'Split Model' is enabled"), ("POINT_CLOUD", "Point Cloud (experimental)", "Instance a single mesh over a point cloud (this method does not support multiple materials or brick merging)"), ], update=dirty_build, default="LINK_DATA", ) # Deep Cache of bricksdict bfm_cache = StringProperty(default="") # Blender State for Undo Stack blender_undo_state = IntProperty(default=0) # Back-End UI Properties active_key = IntVectorProperty(subtype="XYZ", default=(-1,-1,-1)) # Internal Model Properties model_created = BoolProperty(default=False) brickifying_in_background = BoolProperty(default=False) job_progress = IntProperty( name="", description="", subtype="PERCENTAGE", default=0, soft_min=0, soft_max=100, ) linked_from_external = BoolProperty(default=False) num_animated_frames = IntProperty(default=0) completed_frames = StringProperty(default="") frames_to_animate = IntProperty(default=1) stop_background_process = BoolProperty(default=False) animated = BoolProperty(default=False) armature = BoolProperty(default=False) zstep = IntProperty(default=3) parent_obj = PointerProperty(type=bpy.types.Object) collection = PointerProperty(type=bpy.types.Collection if b280() else bpy.types.Group) mat_obj_abs = PointerProperty(type=bpy.types.Object) mat_obj_random = PointerProperty(type=bpy.types.Object) rgba_vals = StringProperty(default="789c8b8e0500011500b9") # result of `compress_str(json.dumps({}))` customized = BoolProperty(default=True) brick_sizes_used = StringProperty(default="") # list of brick_sizes used separated by | (e.g. '5,4,3|7,4,5|8,6,5') brick_types_used = StringProperty(default="") # list of brick_types used separated by | (e.g. 
'PLATE|BRICK|STUD') model_created_on_frame = IntProperty(default=-1) is_smoke = BoolProperty(default=False) has_custom_obj1 = BoolProperty(default=False) has_custom_obj2 = BoolProperty(default=False) has_custom_obj3 = BoolProperty(default=False) # model stats num_bricks_in_model = IntProperty(default=0) num_materials_in_model = IntProperty(default=0) model_weight = IntProperty(default=0) real_world_dimensions = FloatVectorProperty( name="Real World Dimensions", description="", subtype="XYZ", unit="LENGTH", precision=6, default=(0, 0, 0), ) # Properties for checking of model needs updating anim_is_dirty = BoolProperty(default=True) material_is_dirty = BoolProperty(default=True) model_is_dirty = BoolProperty(default=True) build_is_dirty = BoolProperty(default=False) bricks_are_dirty = BoolProperty(default=True) matrix_is_dirty = BoolProperty(default=True) matrix_lost = BoolProperty(default=False) internal_is_dirty = BoolProperty(default=True) last_logo_type = StringProperty(default="NONE") last_split_model = BoolProperty(default=False) last_start_frame = IntProperty( name="Last Start", description="Current start frame of brickified animation", default=-1, ) last_stop_frame = IntProperty( name="Last End", description="Current end frame of brickified animation", default=-1, ) last_step_frame = IntProperty( name="Last Step", description="Current number of frames to skip forward when generating brickified animation", default=-1, ) last_source_mid = StringProperty(default="-1,-1,-1") last_material_type = StringProperty(default="SOURCE") last_use_abs_template = BoolProperty(default=False) last_shell_thickness = IntProperty(default=1) last_internal_supports = StringProperty(default="NONE") last_brick_type = StringProperty(default="BRICKS") last_instance_method = StringProperty(default="LINK_DATA") last_matrix_settings = StringProperty(default="") last_legal_bricks_only = BoolProperty(default=False) last_mat_shell_depth = IntProperty(default=1) last_bevel_width = FloatProperty() last_bevel_segments = IntProperty() last_bevel_profile = IntProperty() last_is_smoke = BoolProperty() # Bricker Version of Model version = StringProperty(default="1.0.4") ### BACKWARDS COMPATIBILITY # v1.0 maxBrickScale1 = IntProperty() maxBrickScale2 = IntProperty() # v1.3 distOffsetX = FloatProperty() distOffsetY = FloatProperty() distOffsetZ = FloatProperty() # v1.4 logoDetail = StringProperty("NONE") # v1.5 source_name = StringProperty() parent_name = StringProperty() # v1.6 modelLoc = StringProperty(default="-1,-1,-1") modelRot = StringProperty(default="-1,-1,-1") modelScale = StringProperty(default="-1,-1,-1") transformScale = FloatProperty(default=1) applyToSourceObject = BoolProperty(default=True) exposeParent = BoolProperty(default=False) useAnimation = BoolProperty(default=False) startFrame = IntProperty(default=1) stopFrame = IntProperty(default=10) maxWorkers = IntProperty(default=5) backProcTimeout = FloatProperty(default=0) brickHeight = FloatProperty(default=0.1) mergeSeed = IntProperty(default=1000) connectThresh = IntProperty(default=1) smokeDensity = FloatProperty(default=0.9) smokeQuality = FloatProperty(default=1) smokeBrightness = FloatProperty(default=1) smokeSaturation = FloatProperty(default=1) flameColor = FloatVectorProperty(default=[1.0, 0.63, 0.2]) flameIntensity = FloatProperty(default=4) splitModel = BoolProperty(default=False) randomLoc = FloatProperty(default=0.01) randomRot = FloatProperty(default=0.025) brickShell = StringProperty(default="INSIDE") calculationAxes = 
StringProperty(default="XY") shellThickness = IntProperty(default=1) brickType = StringProperty(default="BRICKS") alignBricks = BoolProperty(default=True) offsetBrickLayers = IntProperty(default=0) maxWidth = IntProperty(default=2) maxDepth = IntProperty(default=10) mergeType = StringProperty(default="RANDOM") legalBricksOnly = BoolProperty(default=True) customObject1 = PointerProperty(type=bpy.types.Object) customObject2 = PointerProperty(type=bpy.types.Object) customObject3 = PointerProperty(type=bpy.types.Object) distOffset = FloatVectorProperty(default=(1, 1, 1)) paintbrushMat = PointerProperty(type=bpy.types.Material) materialType = StringProperty(default="NONE") customMat = PointerProperty(type=bpy.types.Material) internalMat = PointerProperty(type=bpy.types.Material) matShellDepth = IntProperty(default=1) mergeInternals = StringProperty(default="BOTH") randomMatSeed = IntProperty(default=1000) useUVMap = BoolProperty(default=True) uvImage = PointerProperty(type=bpy.types.Image) colorSnap = StringProperty(default="RGB") colorSnapAmount = FloatProperty(default=0.001) color_snap_amount = FloatProperty(default=0.001) colorSnapSpecular = FloatProperty(0.5) colorSnapRoughness = FloatProperty(0.5) colorSnapIOR = FloatProperty(1.45) colorSnapTransmission = FloatProperty(0.0) includeTransparency = BoolProperty(default=True) transparentWeight = FloatProperty(default=1) targetMaterial = StringProperty(default="") studDetail = StringProperty(default="EXPOSED") logoType = StringProperty(default="NONE") logoResolution = IntProperty(default=2) logoDecimate = FloatProperty(default=7.25) logoScale = FloatProperty(default=78.0) logoInset = FloatProperty(default=50.0) hiddenUndersideDetail = StringProperty(default="FLAT") exposedUndersideDetail = StringProperty(default="FLAT") circleVerts = IntProperty(default=16) bevelAdded = BoolProperty(default=False) bevelShowRender = BoolProperty(default=True) bevelShowViewport = BoolProperty(default=True) bevelShowEditmode = BoolProperty(default=True) bevelWidth = FloatProperty(default=0.01) bevelSegments = IntProperty(default=1) bevelProfile = FloatProperty(default=0.7) internalSupports = StringProperty(default="NONE") latticeStep = IntProperty(default=4) latticeHeight = IntProperty(default=1) alternateXY = BoolProperty(default=1) colThickness = IntProperty(default=2) colStep = IntProperty(default=2) insidenessRayCastDir = StringProperty(default="HIGH_EFFICIENCY") useNormals = BoolProperty(default=False) verifyExposure = BoolProperty(default=False) calcInternals = BoolProperty(default=True) useLocalOrient = BoolProperty(default=False) instanceBricks = BoolProperty(default=True) BFMCache = StringProperty(default="") modelCreated = BoolProperty(default=False) numAnimatedFrames = IntProperty(default=0) framesToAnimate = IntProperty(default=0) modelCreatedOnFrame = IntProperty(default=-1)
gpl-3.0
8,693,281,229,719,090,000
35.674514
170
0.634395
false
3.951367
false
false
false
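The Bricker record above relies on one pattern throughout: every exposed setting is a Blender *Property whose update callback flags part of the cached model as dirty. Below is a minimal sketch of that pattern, written with the same 2.7x-style property assignments the record uses; the class and property names here are illustrative and are not part of the addon itself.

import bpy
from bpy.props import BoolProperty, IntProperty

def dirty_build(self, context):
    # Mark the cached build data as stale so the next run regenerates it.
    self.build_is_dirty = True

class SimpleBrickSettings(bpy.types.PropertyGroup):
    build_is_dirty = BoolProperty(default=False)
    shell_thickness = IntProperty(
        name="Shell Thickness",
        description="Thickness of the outer shell of bricks",
        update=dirty_build,
        min=1, max=50,
        default=1,
    )

def register():
    bpy.utils.register_class(SimpleBrickSettings)
    bpy.types.Scene.simple_brick_settings = bpy.props.PointerProperty(type=SimpleBrickSettings)

def unregister():
    del bpy.types.Scene.simple_brick_settings
    bpy.utils.unregister_class(SimpleBrickSettings)

Changing shell_thickness from the UI then invokes dirty_build automatically, which is how the record's dirty_build/dirty_matrix/dirty_bricks callbacks are wired as well.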
xclxxl414/rqalpha
rqalpha/__main__.py
1
14210
# -*- coding: utf-8 -*- # # Copyright 2017 Ricequant, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import errno import sys import os import shutil import six import click from importlib import import_module from rqalpha.utils.click_helper import Date from rqalpha.utils.config import parse_config, dump_config CONTEXT_SETTINGS = { 'default_map': { 'run': { } } } @click.group(context_settings=CONTEXT_SETTINGS) @click.option('-v', '--verbose', count=True) @click.help_option('-h', '--help') @click.pass_context def cli(ctx, verbose): ctx.obj["VERBOSE"] = verbose def inject_mod_commands(): from rqalpha.utils.config import get_mod_conf from rqalpha.mod import SYSTEM_MOD_LIST from rqalpha.utils.package_helper import import_mod mod_config = get_mod_conf() for mod_name, config in six.iteritems(mod_config['mod']): if 'lib' in config: lib_name = config["lib"] else: lib_name = "rqalpha_mod_{}".format(mod_name) if not config['enabled']: continue try: if mod_name in SYSTEM_MOD_LIST: # inject system mod import_mod("rqalpha.mod." + lib_name) else: # inject third part mod import_mod(lib_name) except Exception as e: pass def entry_point(): inject_mod_commands() cli(obj={}) @cli.command() @click.option('-d', '--data-bundle-path', default=os.path.expanduser('~/.rqalpha'), type=click.Path(file_okay=False)) @click.option('--locale', 'locale', type=click.STRING, default="zh_Hans_CN") def update_bundle(data_bundle_path, locale): """ Sync Data Bundle """ from rqalpha import main main.update_bundle(data_bundle_path, locale) @cli.command() @click.help_option('-h', '--help') # -- Base Configuration @click.option('-d', '--data-bundle-path', 'base__data_bundle_path', type=click.Path(exists=True)) @click.option('-f', '--strategy-file', 'base__strategy_file', type=click.Path(exists=True)) @click.option('-s', '--start-date', 'base__start_date', type=Date()) @click.option('-e', '--end-date', 'base__end_date', type=Date()) @click.option('-bm', '--benchmark', 'base__benchmark', type=click.STRING, default=None) @click.option('-mm', '--margin-multiplier', 'base__margin_multiplier', type=click.FLOAT) @click.option('-a', '--account', 'base__accounts', nargs=2, multiple=True, help="set account type with starting cash") @click.option('--position', 'base__init_positions', type=click.STRING, help="set init position") @click.option('-fq', '--frequency', 'base__frequency', type=click.Choice(['1d', '1m', 'tick'])) @click.option('-rt', '--run-type', 'base__run_type', type=click.Choice(['b', 'p', 'r']), default="b") @click.option('-rp', '--round-price', 'base__round_price', is_flag=True) @click.option('-mk', '--market', 'base__market', type=click.Choice(['cn', 'hk']), default=None) @click.option('--resume', 'base__resume_mode', is_flag=True) @click.option('--source-code', 'base__source_code') # -- Extra Configuration @click.option('-l', '--log-level', 'extra__log_level', type=click.Choice(['verbose', 'debug', 'info', 'error', 'none'])) @click.option('--disable-user-system-log', 'extra__user_system_log_disabled', is_flag=True, help='disable user 
system log stdout') @click.option('--disable-user-log', 'extra__user_log_disabled', is_flag=True, help='disable user log stdout') @click.option('--logger', 'extra__logger', nargs=2, multiple=True, help='config logger, e.g. --logger system_log debug') @click.option('--locale', 'extra__locale', type=click.Choice(['cn', 'en']), default="cn") @click.option('--extra-vars', 'extra__context_vars', type=click.STRING, help="override context vars") @click.option("--enable-profiler", "extra__enable_profiler", is_flag=True, help="add line profiler to profile your strategy") @click.option('--config', 'config_path', type=click.STRING, help="config file path") # -- Mod Configuration @click.option('-mc', '--mod-config', 'mod_configs', nargs=2, multiple=True, type=click.STRING, help="mod extra config") def run(**kwargs): """ Start to run a strategy """ config_path = kwargs.get('config_path', None) if config_path is not None: config_path = os.path.abspath(config_path) kwargs.pop('config_path') if not kwargs.get('base__securities', None): kwargs.pop('base__securities', None) from rqalpha import main source_code = kwargs.get("base__source_code") cfg = parse_config(kwargs, config_path=config_path, click_type=True, source_code=source_code) source_code = cfg.base.source_code results = main.run(cfg, source_code=source_code) # store results into ipython when running in ipython from rqalpha.utils import is_run_from_ipython if results is not None and is_run_from_ipython(): import IPython from rqalpha.utils import RqAttrDict ipy = IPython.get_ipython() report = results.get("sys_analyser", {}) ipy.user_global_ns["results"] = results ipy.user_global_ns["report"] = RqAttrDict(report) if results is None: sys.exit(1) @cli.command() @click.option('-d', '--directory', default="./", type=click.Path(), required=True) def examples(directory): """ Generate example strategies to target folder """ source_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "examples") try: shutil.copytree(source_dir, os.path.join(directory, "examples")) except OSError as e: if e.errno == errno.EEXIST: six.print_("Folder examples is exists.") @cli.command() @click.option('-v', '--verbose', is_flag=True) def version(**kwargs): """ Output Version Info """ from rqalpha import version_info six.print_("Current Version: ", version_info) @cli.command() @click.option('-d', '--directory', default="./", type=click.Path(), required=True) def generate_config(directory): """ Generate default config file """ default_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "config.yml") target_config_path = os.path.abspath(os.path.join(directory, 'config.yml')) shutil.copy(default_config, target_config_path) six.print_("Config file has been generated in", target_config_path) # For Mod Cli @cli.command(context_settings=dict( ignore_unknown_options=True, )) @click.help_option('-h', '--help') @click.argument('cmd', nargs=1, type=click.Choice(['list', 'enable', 'disable', 'install', 'uninstall'])) @click.argument('params', nargs=-1) def mod(cmd, params): """ Mod management command rqalpha mod list \n rqalpha mod install xxx \n rqalpha mod uninstall xxx \n rqalpha mod enable xxx \n rqalpha mod disable xxx \n """ def list(params): """ List all mod configuration """ from tabulate import tabulate from rqalpha.utils.config import get_mod_conf mod_config = get_mod_conf() table = [] for mod_name, mod in six.iteritems(mod_config['mod']): table.append([ mod_name, ("enabled" if mod['enabled'] else "disabled") ]) headers = [ "name", "status" ] 
six.print_(tabulate(table, headers=headers, tablefmt="psql")) six.print_("You can use `rqalpha mod list/install/uninstall/enable/disable` to manage your mods") def install(params): """ Install third-party Mod """ try: from pip._internal import main as pip_main from pip._internal.commands.install import InstallCommand except ImportError: from pip import main as pip_main from pip.commands.install import InstallCommand params = [param for param in params] options, mod_list = InstallCommand().parse_args(params) mod_list = [mod_name for mod_name in mod_list if mod_name != "."] params = ["install"] + params for mod_name in mod_list: mod_name_index = params.index(mod_name) if mod_name.startswith("rqalpha_mod_sys_"): six.print_('System Mod can not be installed or uninstalled') return if "rqalpha_mod_" in mod_name: lib_name = mod_name else: lib_name = "rqalpha_mod_" + mod_name params[mod_name_index] = lib_name # Install Mod installed_result = pip_main(params) # Export config from rqalpha.utils.config import load_yaml, user_mod_conf_path user_conf = load_yaml(user_mod_conf_path()) if os.path.exists(user_mod_conf_path()) else {'mod': {}} if installed_result == 0: # an exit code of 0 means the installation succeeded if len(mod_list) == 0: """ This branch mainly supports debugging a Mod locally via `pip install -e .`; the following conditions must be met: 1. the `rqalpha mod install -e .` command is run from the root directory of the custom Mod 2. the Mod must contain a `setup.py` file (otherwise `pip install -e .` cannot install it either) 3. the Mod package name must follow the RQAlpha naming convention: * it must start with `rqalpha-mod-`, e.g. `rqalpha-mod-xxx-yyy` * the importable library name must start with `rqalpha_mod_` and match the rest of the package name, with `-` replaced by `_`, e.g. `rqalpha_mod_xxx_yyy` """ mod_name = _detect_package_name_from_dir(params) mod_name = mod_name.replace("-", "_").replace("rqalpha_mod_", "") mod_list.append(mod_name) for mod_name in mod_list: if "rqalpha_mod_" in mod_name: mod_name = mod_name.replace("rqalpha_mod_", "") if "==" in mod_name: mod_name = mod_name.split('==')[0] user_conf['mod'][mod_name] = {} user_conf['mod'][mod_name]['enabled'] = False dump_config(user_mod_conf_path(), user_conf) return installed_result def uninstall(params): """ Uninstall third-party Mod """ try: from pip._internal import main as pip_main from pip._internal.commands.uninstall import UninstallCommand except ImportError: # be compatible with pip < 10.0 from pip import main as pip_main from pip.commands.uninstall import UninstallCommand params = [param for param in params] options, mod_list = UninstallCommand().parse_args(params) params = ["uninstall"] + params for mod_name in mod_list: mod_name_index = params.index(mod_name) if mod_name.startswith("rqalpha_mod_sys_"): six.print_('System Mod can not be installed or uninstalled') return if "rqalpha_mod_" in mod_name: lib_name = mod_name else: lib_name = "rqalpha_mod_" + mod_name params[mod_name_index] = lib_name # Uninstall Mod uninstalled_result = pip_main(params) # Remove Mod Config from rqalpha.utils.config import user_mod_conf_path, load_yaml user_conf = load_yaml(user_mod_conf_path()) if os.path.exists(user_mod_conf_path()) else {'mod': {}} for mod_name in mod_list: if "rqalpha_mod_" in mod_name: mod_name = mod_name.replace("rqalpha_mod_", "") del user_conf['mod'][mod_name] dump_config(user_mod_conf_path(), user_conf) return uninstalled_result def enable(params): """ enable mod """ mod_name = params[0] if "rqalpha_mod_" in mod_name: mod_name = mod_name.replace("rqalpha_mod_", "") # check whether it is installed module_name = "rqalpha_mod_" + mod_name if module_name.startswith("rqalpha_mod_sys_"): module_name = "rqalpha.mod." 
+ module_name try: import_module(module_name) except ImportError: installed_result = install([module_name]) if installed_result != 0: return from rqalpha.utils.config import user_mod_conf_path, load_yaml user_conf = load_yaml(user_mod_conf_path()) if os.path.exists(user_mod_conf_path()) else {'mod': {}} try: user_conf['mod'][mod_name]['enabled'] = True except KeyError: user_conf['mod'][mod_name] = {'enabled': True} dump_config(user_mod_conf_path(), user_conf) def disable(params): """ disable mod """ mod_name = params[0] if "rqalpha_mod_" in mod_name: mod_name = mod_name.replace("rqalpha_mod_", "") from rqalpha.utils.config import user_mod_conf_path, load_yaml user_conf = load_yaml(user_mod_conf_path()) if os.path.exists(user_mod_conf_path()) else {'mod': {}} try: user_conf['mod'][mod_name]['enabled'] = False except KeyError: user_conf['mod'][mod_name] = {'enabled': False} dump_config(user_mod_conf_path(), user_conf) locals()[cmd](params) def _detect_package_name_from_dir(params): setup_path = os.path.join(os.path.abspath(params[-1]), 'setup.py') if not os.path.exists(setup_path): return None return os.path.split(os.path.dirname(setup_path))[1] if __name__ == '__main__': entry_point()
apache-2.0
1,797,748,913,422,962,200
34.845758
130
0.602266
false
3.486872
true
false
false
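In the rqalpha record above, the `mod` command accepts a sub-command name and forwards it to a nested helper with `locals()[cmd](params)`. A stripped-down sketch of that click dispatch pattern follows; the helper bodies are placeholders, not the real mod management logic.

import click

@click.command()
@click.argument("cmd", nargs=1, type=click.Choice(["list", "enable", "disable"]))
@click.argument("params", nargs=-1)
def mod(cmd, params):
    # Nested helpers become entries in locals(), so the chosen
    # sub-command can be dispatched by name.
    def list(params):
        click.echo("listing mods...")

    def enable(params):
        click.echo("enabling %s" % params[0])

    def disable(params):
        click.echo("disabling %s" % params[0])

    locals()[cmd](params)

if __name__ == "__main__":
    mod()

Invoked as, say, `python mod_cli.py enable sys_analyser` (a hypothetical script name), click validates the sub-command against the Choice list before the dispatch runs.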
kim135797531/opencog-python-blending
opencog_b/python/blending/util/blending_util.py
1
1884
# coding=utf-8 __author__ = 'DongMin Kim' from opencog.type_constructors import * # Choose atoms which are connected to a specific atom. def get_incoming_nodes(a, target): ret = [] xget_target_link = a.xget_atoms_by_target_atom(types.Link, target) for link in xget_target_link: xget_target_link_node = a.xget_outgoing(link.h) for node in xget_target_link_node: if node.h != target.h: ret.append(node) return ret def get_weighted_tv(atoms): """ Make a new TruthValue by evaluating the weighted average of the existing links' TruthValues. This implements an idea written by Ben Goertzel: https://groups.google.com/forum/#!topic/opencog/fa5c4yE8YdU :param list(EqualLinkKey) atoms: List of EqualLinkKey from which the weighted average TruthValue is computed. :rtype TruthValue: New truth value. """ if len(atoms) < 2: raise UserWarning( "Weighted TruthValue can't be evaluated with fewer than two atoms." ) mean_sum = 0 weighted_strength_sum = 0 confidence_sum = 0 link_count = 0 for atom in atoms: weighted_strength_sum += (atom.tv.confidence * atom.tv.mean) confidence_sum += atom.tv.confidence link_count += 1 try: new_strength = weighted_strength_sum / confidence_sum except ZeroDivisionError: # Fall back to the arithmetic mean; the given atoms may not have TruthValues. for atom in atoms: mean_sum += atom.tv.mean new_strength = mean_sum / link_count # TODO: Currently, the confidence value for the new blended node is just # the average of the old values. # The conflict-value correction should use a properly justified value rather than a simple average. new_confidence = confidence_sum / link_count return TruthValue(new_strength, new_confidence)
agpl-3.0
70,699,895,936,959,990
28.451613
77
0.652245
false
3.254902
false
false
false
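get_weighted_tv in the record above computes a confidence-weighted strength with an arithmetic-mean fallback. The following standalone restatement of that calculation uses a TV namedtuple purely as a stand-in for opencog's TruthValue.

from collections import namedtuple

TV = namedtuple("TV", ["mean", "confidence"])

def weighted_tv(tvs):
    if len(tvs) < 2:
        raise ValueError("need at least two truth values")
    conf_sum = sum(tv.confidence for tv in tvs)
    if conf_sum > 0:
        # Confidence-weighted strength, as in the record.
        strength = sum(tv.confidence * tv.mean for tv in tvs) / conf_sum
    else:
        # Fallback: plain arithmetic mean when no confidence is available.
        strength = sum(tv.mean for tv in tvs) / len(tvs)
    return TV(strength, conf_sum / len(tvs))

# Example: weighted_tv([TV(0.9, 0.8), TV(0.3, 0.2)]) -> TV(mean=0.78, confidence=0.5)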
elysium001/zamboni
sites/s3dev/settings_base.py
1
5495
"""private_base will be populated from puppet and placed in this directory""" import logging import os import dj_database_url from mkt.settings import (CACHE_PREFIX, ES_INDEXES, KNOWN_PROXIES, LOGGING, HOSTNAME) from .. import splitstrip import private_base as private ALLOWED_HOSTS = ['.allizom.org', '.mozflare.net'] ENGAGE_ROBOTS = False EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_HOST = private.EMAIL_HOST DEBUG = False TEMPLATE_DEBUG = DEBUG DEBUG_PROPAGATE_EXCEPTIONS = False SESSION_COOKIE_SECURE = True ADMINS = () DATABASES = {} DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL) DATABASES['default']['ENGINE'] = 'django.db.backends.mysql' DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'} DATABASES['default']['ATOMIC_REQUESTS'] = True DATABASES['default']['CONN_MAX_AGE'] = 5 * 60 # 5m for persistent connections. DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL) DATABASES['slave']['ENGINE'] = 'django.db.backends.mysql' DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'} DATABASES['slave']['sa_pool_key'] = 'slave' DATABASES['slave']['ATOMIC_REQUESTS'] = True DATABASES['slave']['CONN_MAX_AGE'] = 5 * 60 # 5m for persistent connections. SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL) SLAVE_DATABASES = ['slave'] CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION), 'TIMEOUT': 500, 'KEY_PREFIX': CACHE_PREFIX, } } SECRET_KEY = private.SECRET_KEY LOG_LEVEL = logging.DEBUG # Celery BROKER_URL = private.BROKER_URL CELERY_ALWAYS_EAGER = False CELERY_IGNORE_RESULT = True CELERY_DISABLE_RATE_LIMITS = True CELERYD_PREFETCH_MULTIPLIER = 1 NETAPP_STORAGE = private.NETAPP_STORAGE_ROOT + '/shared_storage' GUARDED_ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/guarded-addons' UPLOADS_PATH = NETAPP_STORAGE + '/uploads' ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons' WEBSITE_ICONS_PATH = UPLOADS_PATH + '/website_icons' FEATURED_APP_BG_PATH = UPLOADS_PATH + '/featured_app_background' FEED_COLLECTION_BG_PATH = UPLOADS_PATH + '/feed_collection_background' FEED_SHELF_BG_PATH = UPLOADS_PATH + '/feed_shelf_background' IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets' REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment' PREVIEWS_PATH = UPLOADS_PATH + '/previews' WEBAPP_PROMO_IMG_PATH = UPLOADS_PATH + '/webapp_promo_imgs' WEBSITE_PROMO_IMG_PATH = UPLOADS_PATH + '/website_promo_imgs' SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps' SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer' PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png' PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s' EXTENSIONS_PATH = NETAPP_STORAGE + '/extensions' SIGNED_EXTENSIONS_PATH = NETAPP_STORAGE + '/signed-extensions' LOGGING['loggers'].update({ 'amqp': {'level': logging.WARNING}, 'raven': {'level': logging.WARNING}, 'requests': {'level': logging.WARNING}, 'z.addons': {'level': logging.DEBUG}, 'z.elasticsearch': {'level': logging.DEBUG}, 'z.pool': {'level': logging.ERROR}, 'z.task': {'level': logging.DEBUG}, 'z.users': {'level': logging.DEBUG}, }) TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp') ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/files' SPIDERMONKEY = '/usr/bin/tracemonkey' csp = 'csp.middleware.CSPMiddleware' RESPONSYS_ID = private.RESPONSYS_ID CRONJOB_LOCK_PREFIX = 'mkt-s3dev' ES_DEFAULT_NUM_REPLICAS = 2 ES_HOSTS = splitstrip(private.ES_HOSTS) 
ES_URLS = ['http://%s' % h for h in ES_HOSTS] ES_INDEXES = dict((k, '%s_s3dev' % v) for k, v in ES_INDEXES.items()) STATSD_HOST = private.STATSD_HOST STATSD_PORT = private.STATSD_PORT STATSD_PREFIX = private.STATSD_PREFIX CEF_PRODUCT = STATSD_PREFIX ES_TIMEOUT = 60 EXPOSE_VALIDATOR_TRACEBACKS = False KNOWN_PROXIES += ['10.2.83.105', '10.2.83.106', '10.2.83.107', '10.8.83.200', '10.8.83.201', '10.8.83.202', '10.8.83.203', '10.8.83.204', '10.8.83.210', '10.8.83.211', '10.8.83.212', '10.8.83.213', '10.8.83.214', '10.8.83.215', '10.8.83.251', '10.8.83.252', '10.8.83.253', ] NEW_FEATURES = True CLEANCSS_BIN = 'cleancss' LESS_BIN = 'lessc' STYLUS_BIN = 'stylus' UGLIFY_BIN = 'uglifyjs' CELERYD_TASK_SOFT_TIME_LIMIT = 540 VALIDATOR_TIMEOUT = 180 LESS_PREPROCESS = True XSENDFILE = True ALLOW_SELF_REVIEWS = True GOOGLE_ANALYTICS_CREDENTIALS = private.GOOGLE_ANALYTICS_CREDENTIALS GOOGLE_API_CREDENTIALS = private.GOOGLE_API_CREDENTIALS MONOLITH_SERVER = 'https://monolith-dev.allizom.org' GEOIP_URL = 'https://geo-dev-marketplace.allizom.org' AWS_ACCESS_KEY_ID = private.AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY = private.AWS_SECRET_ACCESS_KEY AWS_STORAGE_BUCKET_NAME = private.AWS_STORAGE_BUCKET_NAME RAISE_ON_SIGNAL_ERROR = True API_THROTTLE = False NEWRELIC_ENABLED_LIST = ['dev1.addons.phx1.mozilla.com', 'dev2.addons.phx1.mozilla.com'] NEWRELIC_ENABLE = HOSTNAME in NEWRELIC_ENABLED_LIST AES_KEYS = private.AES_KEYS TASK_USER_ID = 4757633 SERVE_TMP_PATH = False
bsd-3-clause
6,948,503,791,888,337,000
28.86413
79
0.66697
false
2.949544
false
false
false
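The settings record above builds its DATABASES dict from URLs via dj_database_url and then patches engine-specific options on top. A minimal sketch of that assembly, using a made-up connection URL rather than the private value referenced in the record:

import dj_database_url

DATABASES = {}
DATABASES['default'] = dj_database_url.parse('mysql://app:secret@db.example.com:3306/marketplace')
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['default']['CONN_MAX_AGE'] = 5 * 60  # keep connections open for five minutes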
rackerlabs/qonos
qonos/openstack/common/rpc/dispatcher.py
1
5310
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Code for rpc message dispatching. Messages that come in have a version number associated with them. RPC API version numbers are in the form: Major.Minor For a given message with version X.Y, the receiver must be marked as able to handle messages of version A.B, where: A = X B >= Y The Major version number would be incremented for an almost completely new API. The Minor version number would be incremented for backwards compatible changes to an existing API. A backwards compatible change could be something like adding a new method, adding an argument to an existing method (but not requiring it), or changing the type for an existing argument (but still handling the old type as well). The conversion over to a versioned API must be done on both the client side and server side of the API at the same time. However, as the code stands today, there can be both versioned and unversioned APIs implemented in the same code base. EXAMPLES ======== Nova was the first project to use versioned rpc APIs. Consider the compute rpc API as an example. The client side is in nova/compute/rpcapi.py and the server side is in nova/compute/manager.py. Example 1) Adding a new method. ------------------------------- Adding a new method is a backwards compatible change. It should be added to nova/compute/manager.py, and RPC_API_VERSION should be bumped from X.Y to X.Y+1. On the client side, the new method in nova/compute/rpcapi.py should have a specific version specified to indicate the minimum API version that must be implemented for the method to be supported. For example:: def get_host_uptime(self, ctxt, host): topic = _compute_topic(self.topic, ctxt, host, None) return self.call(ctxt, self.make_msg('get_host_uptime'), topic, version='1.1') In this case, version '1.1' is the first version that supported the get_host_uptime() method. Example 2) Adding a new parameter. ---------------------------------- Adding a new parameter to an rpc method can be made backwards compatible. The RPC_API_VERSION on the server side (nova/compute/manager.py) should be bumped. The implementation of the method must not expect the parameter to be present.:: def some_remote_method(self, arg1, arg2, newarg=None): # The code needs to deal with newarg=None for cases # where an older client sends a message without it. pass On the client side, the same changes should be made as in example 1. The minimum version that supports the new parameter should be specified. """ from qonos.openstack.common.rpc import common as rpc_common class RpcDispatcher(object): """Dispatch rpc messages according to the requested API version. This class can be used as the top level 'manager' for a service. It contains a list of underlying managers that have an API_VERSION attribute. """ def __init__(self, callbacks): """Initialize the rpc dispatcher. :param callbacks: List of proxy objects that are an instance of a class with rpc methods exposed. 
Each proxy object should have an RPC_API_VERSION attribute. """ self.callbacks = callbacks super(RpcDispatcher, self).__init__() def dispatch(self, ctxt, version, method, **kwargs): """Dispatch a message based on a requested version. :param ctxt: The request context :param version: The requested API version from the incoming message :param method: The method requested to be called by the incoming message. :param kwargs: A dict of keyword arguments to be passed to the method. :returns: Whatever is returned by the underlying method that gets called. """ if not version: version = '1.0' had_compatible = False for proxyobj in self.callbacks: if hasattr(proxyobj, 'RPC_API_VERSION'): rpc_api_version = proxyobj.RPC_API_VERSION else: rpc_api_version = '1.0' is_compatible = rpc_common.version_is_compatible(rpc_api_version, version) had_compatible = had_compatible or is_compatible if not hasattr(proxyobj, method): continue if is_compatible: return getattr(proxyobj, method)(ctxt, **kwargs) if had_compatible: raise AttributeError("No such RPC function '%s'" % method) else: raise rpc_common.UnsupportedRpcVersion(version=version)
apache-2.0
3,698,796,998,904,787,500
37.478261
79
0.673446
false
4.366776
false
false
false
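The dispatcher record's module docstring states the compatibility rule: a receiver marked A.B can handle a message of version X.Y when A == X and B >= Y. The real check lives in rpc_common.version_is_compatible; the sketch below only restates that documented rule, it is not the library implementation.

def version_is_compatible(server_version, requested_version):
    # Same major version required; server minor must be >= requested minor.
    server_major, server_minor = (int(p) for p in server_version.split("."))
    req_major, req_minor = (int(p) for p in requested_version.split("."))
    return server_major == req_major and server_minor >= req_minor

assert version_is_compatible("1.3", "1.1")       # backwards compatible minor bump
assert not version_is_compatible("2.0", "1.1")   # major bump is incompatible
assert not version_is_compatible("1.0", "1.2")   # server too old for the request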
Mirantis/disk_perf_test_tool
wally/test_run_class.py
1
1980
from typing import List, Callable, Any, Dict, Optional, Set from concurrent.futures import ThreadPoolExecutor from cephlib.istorage import IStorage from cephlib.node import NodeInfo, IRPCNode from cephlib.ssh import ConnCreds from cephlib.storage_selectors import DevRolesConfig from .openstack_api import OSCreds, OSConnection from .config import Config from .result_classes import IWallyStorage class TestRun: """Test run information""" def __init__(self, config: Config, storage: IStorage, rstorage: IWallyStorage) -> None: # NodesInfo list self.nodes_info: Dict[str, NodeInfo] = {} self.ceph_master_node: Optional[IRPCNode] = None self.ceph_extra_args: Optional[str] = None # Nodes list self.nodes: List[IRPCNode] = [] self.build_meta: Dict[str,Any] = {} self.clear_calls_stack: List[Callable[['TestRun'], None]] = [] # openstack credentials self.os_creds: Optional[OSCreds] = None # type: ignore self.os_connection: Optional[OSConnection] = None # type: ignore self.rpc_code: bytes = None # type: ignore self.default_rpc_plugins: Dict[str, bytes] = None # type: ignore self.storage = storage self.rstorage = rstorage self.config = config self.sensors_run_on: Set[str] = set() self.os_spawned_nodes_ids: List[int] = None # type: ignore self.devs_locator: DevRolesConfig = [] def get_pool(self): return ThreadPoolExecutor(self.config.get('worker_pool_sz', 32)) def merge_node(self, creds: ConnCreds, roles: Set[str], **params) -> NodeInfo: info = NodeInfo(creds, roles, params) nid = info.node_id if nid in self.nodes_info: self.nodes_info[nid].roles.update(info.roles) self.nodes_info[nid].params.update(info.params) return self.nodes_info[nid] else: self.nodes_info[nid] = info return info
apache-2.0
7,912,155,741,855,254,000
35
91
0.64798
false
3.626374
true
false
false
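TestRun.merge_node in the record above keeps a single NodeInfo per node id and unions roles and params when the same node is added again. A self-contained restatement of that merge, with Node standing in for cephlib's NodeInfo:

class Node:
    def __init__(self, node_id, roles, params):
        self.node_id = node_id
        self.roles = set(roles)
        self.params = dict(params)

nodes_info = {}

def merge_node(node):
    # Union roles/params if the node is already known, otherwise register it.
    existing = nodes_info.get(node.node_id)
    if existing is not None:
        existing.roles.update(node.roles)
        existing.params.update(node.params)
        return existing
    nodes_info[node.node_id] = node
    return node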
vmahuli/contrail-controller
src/opserver/partition_handler.py
1
28794
#!/usr/bin/python from gevent import monkey monkey.patch_all() import logging import gevent from gevent.coros import BoundedSemaphore from kafka import KafkaClient, KeyedProducer, SimpleConsumer, common from uveserver import UVEServer import os import json import copy import traceback import uuid import struct import socket import discoveryclient.client as client from sandesh_common.vns.constants import ALARM_PARTITION_SERVICE_NAME from pysandesh.util import UTCTimestampUsec import select import redis from collections import namedtuple PartInfo = namedtuple("PartInfo",["ip_address","instance_id","acq_time","port"]) def sse_pack(d): """Pack data in SSE format""" buffer = '' for k in ['event','data']: if k in d.keys(): buffer += '%s: %s\n' % (k, d[k]) return buffer + '\n' class UveCacheProcessor(gevent.Greenlet): def __init__(self, logger, q, partitions): gevent.Greenlet.__init__(self) self._logger = logger self._q = q self._partkeys = {} for partno in range(0,partitions): self._partkeys[partno] = set() self._uvedb = {} def get_uve(self, key, filters=None, is_alarm=False): failures = False rsp = {} try: filters = filters or {} tfilter = filters.get('cfilt') ackfilter = filters.get('ackfilt') if is_alarm: tfilter = tfilter or {} # When returning only alarms, ignore non-alarm cfilt for k in tfilter.keys(): if k != "UVEAlarms": del tfilter[k] if len(tfilter) == 0: tfilter["UVEAlarms"] = set(["alarms"]) barekey = key.split(":",1)[1] table = key.split(":",1)[0] if table not in self._uvedb: return failures, rsp if barekey not in self._uvedb[table]: return failures, rsp for tkey,tval in self._uvedb[table][barekey].iteritems(): afilter_list = set() if tfilter is not None: if tkey not in tfilter: continue else: afilter_list = tfilter[tkey] if not tval: continue for akey, aval in tval.iteritems(): if len(afilter_list): if akey not in afilter_list: continue if ackfilter is not None and \ tkey == "UVEAlarms" and akey == "alarms": alarms = [] for alarm in aval: ack = "false" if "ack" in alarm: if alarm["ack"]: ack = "true" else: ack = "false" if ack == ackfilter: alarms.append(alarm) if not len(alarms): continue else: if not tkey in rsp: rsp[tkey] = {} rsp[tkey][akey] = alarms else: if not tkey in rsp: rsp[tkey] = {} rsp[tkey][akey] = aval except Exception as ex: template = "Exception {0} in uve cache proc. 
Arguments:\n{1!r}" messag = template.format(type(ex).__name__, ex.args) self._logger.error("%s : traceback %s" % \ (messag, traceback.format_exc())) return failures, rsp def _run(self): for telem in self._q: elem = telem['data'] if telem['event'] == 'clear': # remove all keys of this partition partno = elem['partition'] for key in self._partkeys[partno]: barekey = key.split(":",1)[1] table = key.split(":",1)[0] del self._uvedb[table][barekey] self._partkeys[partno].remove("%s:%s" % \ (table, barekey)) elif telem['event'] == 'sync' or telem['event'] == 'update': partno = elem['partition'] self._partkeys[partno].add(elem['key']) barekey = elem['key'].split(":",1)[1] table = elem['key'].split(":",1)[0] if table not in self._uvedb: self._uvedb[table] = {} if barekey not in self._uvedb[table]: self._uvedb[table][barekey] = {} if elem['type'] is None: # delete the entire UVE self._partkeys[partno].remove("%s:%s" % \ (table, barekey)) del self._uvedb[table][barekey] else: typ = elem['type'] if typ not in self._uvedb[table][barekey]: self._uvedb[table][barekey][typ] = None if elem['value'] is None: # remove one type of this UVE del self._uvedb[table][barekey][typ] else: self._uvedb[table][barekey][typ] = elem['value'] elif telem['event'] == 'stop': break else: pass class UveStreamPart(gevent.Greenlet): def __init__(self, partno, logger, q, pi, rpass, sse): gevent.Greenlet.__init__(self) self._logger = logger self._q = q self._pi = pi self._partno = partno self._rpass = rpass self._sse = sse def syncpart(self, redish): inst = self._pi.instance_id part = self._partno keys = list(redish.smembers("AGPARTKEYS:%s:%d" % (inst, part))) ppe = redish.pipeline() for key in keys: ppe.hgetall("AGPARTVALUES:%s:%d:%s" % (inst, part, key)) pperes = ppe.execute() idx=0 for res in pperes: for tk,tv in res.iteritems(): dt = {'partition':self._partno, 'key':keys[idx], 'type':tk, 'value':json.loads(tv)} if self._sse: msg = {'event': 'sync', 'data':json.dumps(dt)} self._q.put(sse_pack(msg)) else: msg = {'event': 'sync', 'data':dt} self._q.put(msg) idx += 1 def _run(self): lredis = None pb = None while True: try: lredis = redis.StrictRedis( host=self._pi.ip_address, port=self._pi.port, password=self._rpass, db=2) pb = lredis.pubsub() inst = self._pi.instance_id part = self._partno pb.subscribe('AGPARTPUB:%s:%d' % (inst, part)) self.syncpart(lredis) for message in pb.listen(): if message["type"] != "message": continue dataline = message["data"] try: elems = json.loads(dataline) except: self._logger.error("AggUVE Parsing failed: %s" % str(message)) continue else: self._logger.info("AggUVE loading: %s" % str(elems)) ppe = lredis.pipeline() for elem in elems: # This UVE was deleted if elem["type"] is None: ppe.exists("AGPARTVALUES:%s:%d:%s" % \ (inst, part, elem["key"])) else: ppe.hget("AGPARTVALUES:%s:%d:%s" % \ (inst, part, elem["key"]), elem["type"]) pperes = ppe.execute() idx = 0 for elem in elems: if elem["type"] is None: dt = {'partition':part, 'key':elem["key"], 'type':None} else: vjson = pperes[idx] if vjson is None: vdata = None else: vdata = json.loads(vjson) dt = {'partition':part, 'key':elem["key"], 'type':elem["type"], 'value':vdata} if self._sse: msg = {'event': 'update', 'data':json.dumps(dt)} self._q.put(sse_pack(msg)) else: msg = {'event': 'update', 'data':dt} self._q.put(msg) idx += 1 except gevent.GreenletExit: break except Exception as ex: template = "Exception {0} in uve stream proc. 
Arguments:\n{1!r}" messag = template.format(type(ex).__name__, ex.args) self._logger.error("%s : traceback %s" % \ (messag, traceback.format_exc())) lredis = None if pb is not None: pb.close() pb = None gevent.sleep(2) return None class UveStreamer(gevent.Greenlet): def __init__(self, logger, q, rfile, agp_cb, partitions, rpass): gevent.Greenlet.__init__(self) self._logger = logger self._q = q self._rfile = rfile self._agp_cb = agp_cb self._agp = {} self._parts = {} self._partitions = partitions self._rpass = rpass self._sse = True if self._rfile is None: self._sse = False def _run(self): inputs = [ self._rfile ] outputs = [ ] if self._sse: msg = {'event': 'init', 'data':\ json.dumps({'partitions':self._partitions})} self._q.put(sse_pack(msg)) else: msg = {'event': 'init', 'data':\ {'partitions':self._partitions}} self._q.put(msg) while True: try: if self._rfile is not None: readable, writable, exceptional = \ select.select(inputs, outputs, inputs, 1) if (readable or writable or exceptional): break else: gevent.sleep(1) newagp = self._agp_cb() set_new, set_old = set(newagp.keys()), set(self._agp.keys()) intersect = set_new.intersection(set_old) # deleted parts for elem in set_old - intersect: self.partition_stop(elem) # new parts for elem in set_new - intersect: self.partition_start(elem, newagp[elem]) # changed parts for elem in intersect: if self._agp[elem] != newagp[elem]: self.partition_stop(elem) self.partition_start(elem, newagp[elem]) self._agp = newagp except gevent.GreenletExit: break for part, pi in self._agp.iteritems(): self.partition_stop(part) if self._sse: msg = {'event': 'stop', 'data':json.dumps(None)} self._q.put(sse_pack(msg)) else: msg = {'event': 'stop', 'data':None} self._q.put(msg) def partition_start(self, partno, pi): self._logger.error("Starting agguve part %d using %s" %( partno, pi)) if self._sse: msg = {'event': 'clear', 'data':\ json.dumps({'partition':partno, 'acq_time':pi.acq_time})} self._q.put(sse_pack(msg)) else: msg = {'event': 'clear', 'data':\ {'partition':partno, 'acq_time':pi.acq_time}} self._q.put(msg) self._parts[partno] = UveStreamPart(partno, self._logger, self._q, pi, self._rpass, self._sse) self._parts[partno].start() def partition_stop(self, partno): self._logger.error("Stopping agguve part %d" % partno) self._parts[partno].kill() self._parts[partno].get() del self._parts[partno] class PartitionHandler(gevent.Greenlet): def __init__(self, brokers, group, topic, logger, limit): gevent.Greenlet.__init__(self) self._brokers = brokers self._group = group self._topic = topic self._logger = logger self._limit = limit self._uvedb = {} self._partoffset = 0 self._kfk = None def msg_handler(self, mlist): self._logger.info("%s Reading %s" % (self._topic, str(mlist))) return True def _run(self): pcount = 0 while True: try: self._logger.error("New KafkaClient %s" % self._topic) self._kfk = KafkaClient(self._brokers , "kc-" + self._topic) try: consumer = SimpleConsumer(self._kfk, self._group, self._topic, buffer_size = 4096*4, max_buffer_size=4096*32) #except: except Exception as ex: template = "Consumer Failure {0} occured. 
Arguments:\n{1!r}" messag = template.format(type(ex).__name__, ex.args) self._logger.error("Error: %s trace %s" % \ (messag, traceback.format_exc())) raise RuntimeError(messag) self._logger.error("Starting %s" % self._topic) # Find the offset of the last message that has been queued consumer.seek(-1,2) try: mi = consumer.get_message(timeout=0.1) consumer.commit() except common.OffsetOutOfRangeError: mi = None #import pdb; pdb.set_trace() self._logger.info("Last Queued for %s is %s" % \ (self._topic,str(mi))) # start reading from last previously processed message if mi != None: consumer.seek(0,1) else: consumer.seek(0,0) if self._limit: raise gevent.GreenletExit while True: try: mlist = consumer.get_messages(10,timeout=0.5) if not self.msg_handler(mlist): raise gevent.GreenletExit consumer.commit() pcount += len(mlist) except TypeError as ex: self._logger.error("Type Error: %s trace %s" % \ (str(ex.args), traceback.format_exc())) gevent.sleep(0.1) except common.FailedPayloadsError as ex: self._logger.error("Payload Error: %s" % str(ex.args)) gevent.sleep(0.1) except gevent.GreenletExit: break except AssertionError as ex: self._partoffset = ex break except Exception as ex: template = "An exception of type {0} occured. Arguments:\n{1!r}" messag = template.format(type(ex).__name__, ex.args) self._logger.error("%s : traceback %s" % \ (messag, traceback.format_exc())) self.stop_partition() gevent.sleep(2) self._logger.error("Stopping %s pcount %d" % (self._topic, pcount)) partdb = self.stop_partition() return self._partoffset, partdb class UveStreamProc(PartitionHandler): # Arguments: # # brokers : broker list for kafka bootstrap # partition : partition number # uve_topic : Topic to consume # logger : logging object to use # callback : Callback function for reporting the set of the UVEs # that may have changed for a given notification # rsc : Callback function to check on collector status # and get sync contents for new collectors # aginst : instance_id of alarmgen # rport : redis server port # disc : discovery client to publish to def __init__(self, brokers, partition, uve_topic, logger, callback, host_ip, rsc, aginst, rport, disc = None): super(UveStreamProc, self).__init__(brokers, "workers", uve_topic, logger, False) self._uvedb = {} self._uvein = {} self._callback = callback self._partno = partition self._host_ip = host_ip self._ip_code, = struct.unpack('>I', socket.inet_pton( socket.AF_INET, host_ip)) self.disc_rset = set() self._resource_cb = rsc self._aginst = aginst self._disc = disc self._acq_time = UTCTimestampUsec() self._rport = rport def acq_time(self): return self._acq_time def resource_check(self, msgs): ''' This function compares the known collectors with the list from discovery, and syncs UVE keys accordingly ''' newset , coll_delete, chg_res = self._resource_cb(self._partno, self.disc_rset, msgs) for coll in coll_delete: self._logger.error("Part %d lost collector %s" % (self._partno, coll)) self.stop_partition(coll) if len(chg_res): self.start_partition(chg_res) self.disc_rset = newset if self._disc: data = { 'instance-id' : self._aginst, 'partition' : str(self._partno), 'ip-address': self._host_ip, 'acq-time': str(self._acq_time), 'port':str(self._rport)} self._disc.publish(ALARM_PARTITION_SERVICE_NAME, data) def stop_partition(self, kcoll=None): clist = [] if not kcoll: clist = self._uvedb.keys() # If all collectors are being cleared, clear resoures too self.disc_rset = set() if self._disc: # TODO: Unpublish instead of setting acq-time to 0 data = { 'instance-id' : 
self._aginst, 'partition' : str(self._partno), 'ip-address': self._host_ip, 'acq-time': "0", 'port':str(self._rport)} self._disc.publish(ALARM_PARTITION_SERVICE_NAME, data) else: clist = [kcoll] self._logger.error("Stopping part %d collectors %s" % \ (self._partno,clist)) partdb = {} chg = {} for coll in clist: partdb[coll] = {} for gen in self._uvedb[coll].keys(): partdb[coll][gen] = {} for tab in self._uvedb[coll][gen].keys(): for rkey in self._uvedb[coll][gen][tab].keys(): uk = tab + ":" + rkey chg[uk] = None partdb[coll][gen][uk] = \ set(self._uvedb[coll][gen][tab][rkey].keys()) del self._uvedb[coll] self._logger.error("Stopping part %d UVEs %s" % \ (self._partno,str(chg.keys()))) self._callback(self._partno, chg) return partdb def start_partition(self, cbdb): ''' This function loads the initial UVE database. for the partition ''' self._logger.error("Starting part %d collectors %s" % \ (self._partno, str(cbdb.keys()))) uves = {} for kcoll,coll in cbdb.iteritems(): self._uvedb[kcoll] = {} for kgen,gen in coll.iteritems(): self._uvedb[kcoll][kgen] = {} for kk in gen.keys(): tabl = kk.split(":",1) tab = tabl[0] rkey = tabl[1] if not tab in self._uvedb[kcoll][kgen]: self._uvedb[kcoll][kgen][tab] = {} self._uvedb[kcoll][kgen][tab][rkey] = {} uves[kk] = {} for typ, contents in gen[kk].iteritems(): self._uvedb[kcoll][kgen][tab][rkey][typ] = {} self._uvedb[kcoll][kgen][tab][rkey][typ]["c"] = 0 self._uvedb[kcoll][kgen][tab][rkey][typ]["u"] = \ uuid.uuid1(self._ip_code) uves[kk][typ] = contents self._logger.error("Starting part %d UVEs %s" % \ (self._partno, str(uves.keys()))) self._callback(self._partno, uves) def contents(self): return self._uvedb def stats(self): ''' Return the UVE incoming stats collected over the last time period for this partition Also, the stats should be cleared to prepare for the next period of collection. ''' ret_in = copy.deepcopy(self._uvein) self._uvein = {} return ret_in def msg_handler(self, mlist): self.resource_check(mlist) for mm in mlist: if mm is None: continue self._logger.debug("%s Reading offset %d" % \ (self._topic, mm.offset)) if not self.msg_handler_single(mm): self._logger.info("%s could not handle %s" % \ (self._topic, str(mm))) return False return True def msg_handler_single(self, om): self._partoffset = om.offset chg = {} try: uv = json.loads(om.message.value) coll = uv["coll"] gen = uv["gen"] if not self._uvedb.has_key(coll): # This partition is not synced yet. # Ignore this message self._logger.debug("%s Ignoring UVE %s" % (self._topic, str(om))) return True if not self._uvedb[coll].has_key(gen): self._uvedb[coll][gen] = {} if (uv["message"] == "UVEUpdate"): tabl = uv["key"].split(":",1) tab = tabl[0] rkey = tabl[1] if tab not in self._uvedb[coll][gen]: self._uvedb[coll][gen][tab] = {} if not rkey in self._uvedb[coll][gen][tab]: self._uvedb[coll][gen][tab][rkey] = {} removed = False # uv["type"] and uv["value"] can be decoded as follows: # uv["type"] can be one of the following: # - None # All Types under this UVE are deleted # uv["value"] will not be present # (this option is only for agg UVE updates) # - "<Struct>" # uv["value"] refers to this struct # uv["value"] can be one of the following: # - None # This Type has been deleted. # - {} # The Type has a value, which is # not available in this message. 
# (this option is only for raw UVE updates) # - {<Value>} # The Value of the Type # (this option is only for agg UVE updates) if uv["type"] is None: # TODO: Handling of delete UVE case return False if uv["value"] is None: if uv["type"] in self._uvedb[coll][gen][tab][rkey]: del self._uvedb[coll][gen][tab][rkey][uv["type"]] if not len(self._uvedb[coll][gen][tab][rkey]): del self._uvedb[coll][gen][tab][rkey] removed = True if not removed: if uv["type"] in self._uvedb[coll][gen][tab][rkey]: self._uvedb[coll][gen][tab][rkey][uv["type"]]["c"] +=1 else: self._uvedb[coll][gen][tab][rkey][uv["type"]] = {} self._uvedb[coll][gen][tab][rkey][uv["type"]]["c"] = 1 self._uvedb[coll][gen][tab][rkey][uv["type"]]["u"] = \ uuid.uuid1(self._ip_code) chg[uv["key"]] = { uv["type"] : uv["value"] } # Record stats on the input UVE Notifications if not self._uvein.has_key(tab): self._uvein[tab] = {} if not self._uvein[tab].has_key(coll): self._uvein[tab][coll] = {} if not self._uvein[tab][coll].has_key(gen): self._uvein[tab][coll][gen] = {} if not self._uvein[tab][coll][gen].has_key(uv["type"]): self._uvein[tab][coll][gen][uv["type"]] = 1 else: self._uvein[tab][coll][gen][uv["type"]] += 1 else: # Record stats on UVE Keys being processed for tab in self._uvedb[coll][gen].keys(): for rkey in self._uvedb[coll][gen][tab].keys(): uk = tab + ":" + rkey # when a generator is delelted, we need to # notify for *ALL* its UVEs chg[uk] = None del self._uvedb[coll][gen] except Exception as ex: template = "An exception of type {0} in uve proc . Arguments:\n{1!r}" messag = template.format(type(ex).__name__, ex.args) self._logger.info("%s" % messag) return False else: self._callback(self._partno, chg) return True if __name__ == '__main__': logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)s %(message)s') workers = {} brokers = "localhost:9092,localhost:9093,localhost:9094" group = "workers" kafka = KafkaClient(brokers,str(os.getpid())) cons = SimpleConsumer(kafka, group, "ctrl") cons.provide_partition_info() print "Starting control" end_ready = False while end_ready == False: try: while True: part, mmm = cons.get_message(timeout=None) mm = mmm.message print "Consumed ctrl " + str(mm) if mm.value == "start": if workers.has_key(mm.key): print "Dup partition %s" % mm.key raise ValueError else: ph = UveStreamProc(brokers, int(mm.key), "uve-" + mm.key, "alarm-x" + mm.key, logging) ph.start() workers[int(mm.key)] = ph elif mm.value == "stop": #import pdb; pdb.set_trace() if workers.has_key(int(mm.key)): ph = workers[int(mm.key)] gevent.kill(ph) res,db = ph.get() print "Returned " + str(res) print "State :" for k,v in db.iteritems(): print "%s -> %s" % (k,str(v)) del workers[int(mm.key)] else: end_ready = True cons.commit() gevent.sleep(2) break except TypeError: gevent.sleep(0.1) except common.FailedPayloadsError as ex: print "Payload Error: " + str(ex.args) gevent.sleep(0.1) lw=[] for key, value in workers.iteritems(): gevent.kill(value) lw.append(value) gevent.joinall(lw) print "Ending Consumers"
apache-2.0
-5,221,414,559,799,287,000
38.661157
129
0.44992
false
4.194319
false
false
false
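The partition handlers in the entry above (UveStreamPart, UveStreamProc, UveStreamer) all treat a UVE key as "<table>:<barekey>" and split it with split(":", 1). A minimal, self-contained sketch of that convention follows; the helper name and the sample key are illustrative assumptions, not taken from the repository.

# Sketch only: mirrors the key.split(":", 1) bookkeeping used by the
# UVE stream code above; sample key is invented.
def split_uve_key(key):
    # e.g. "ObjectVNTable:default-domain:admin:vn1"
    table, barekey = key.split(":", 1)
    return table, barekey

print(split_uve_key("ObjectVNTable:default-domain:admin:vn1"))
# -> ('ObjectVNTable', 'default-domain:admin:vn1')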
edesky/text_validator
lib/validate.py
1
1509
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import argparse
import json
import math

parser = argparse.ArgumentParser()
parser.add_argument("--text", required=True, help="Input text")
parser.add_argument("--short", action='store_true')
args = parser.parse_args()
input_text = args.text.decode('utf8')

VALID_CHARS = u'qwertzuiopasdfghjklyxcvbnm1234567890QWERTZUIOPASDFGHJKLYXCVBNM ?:.,;-=/+ěščřžýáíéĚŠČŘŽÝÁÍÉůúŇ'

def chars_score(text):
    known_cnt = 0
    for char in text:
        if char in VALID_CHARS:
            known_cnt += 1
    return float(known_cnt) / max(len(text), 1)

def length_score(text):
    cnt = 0
    for char in text:
        if char in VALID_CHARS:
            cnt += 1
    return min(math.log(max(cnt, 1), 1000), 1.0)

# TODO: add more scoring functions here
# def xyz_score(text):
#     text magic here
#     return 0.5

def compute(scores):
    total = 0
    cnt = 0
    for score in scores:
        cnt += 1
        total = (total * (cnt - 1) + score['value']) / cnt
    return {'score': total, 'parts': scores}

scores = []
scores.append({'name': 'chars_score', 'value': chars_score(input_text)})
scores.append({'name': 'length_score', 'value': length_score(input_text)})
# TODO: add more scoring functions here
# scores.append({'name': 'xyz_score', 'value': xyz_score(args.text)})

total_score = compute(scores)
if args.short:
    print(total_score['score'])
else:
    print(json.dumps(total_score, sort_keys=True, indent=4))
mit
4,563,589,001,996,884,000
25.571429
110
0.649866
false
2.95825
false
false
false
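The compute() function in lib/validate.py above combines the per-check scores with a running mean. A small standalone sketch of the same aggregation, using made-up sample values:

# Running-mean aggregation as in compute() above; the sample scores are invented.
def combine(scores):
    total = 0.0
    for i, score in enumerate(scores, start=1):
        total = (total * (i - 1) + score["value"]) / i
    return {"score": total, "parts": scores}

print(combine([{"name": "chars_score", "value": 0.9},
               {"name": "length_score", "value": 0.5}])["score"])   # 0.7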
Alberto-Beralix/Beralix
i386-squashfs-root/usr/share/system-config-printer/errordialogs.py
1
3111
#!/usr/bin/python ## system-config-printer ## Copyright (C) 2006, 2007, 2008, 2010 Red Hat, Inc. ## Authors: ## Florian Festi <[email protected]> ## Tim Waugh <[email protected]> ## This program is free software; you can redistribute it and/or modify ## it under the terms of the GNU General Public License as published by ## the Free Software Foundation; either version 2 of the License, or ## (at your option) any later version. ## This program is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ## GNU General Public License for more details. ## You should have received a copy of the GNU General Public License ## along with this program; if not, write to the Free Software ## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. import cups import gtk from gettext import gettext as _ def show_dialog (title, text, type, parent=None): dialog = gtk.MessageDialog (parent, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, type, gtk.BUTTONS_OK, title) dialog.format_secondary_text (text) dialog.run () dialog.destroy () def show_info_dialog (title, text, parent=None): return show_dialog (title, text, gtk.MESSAGE_INFO, parent=parent) def show_error_dialog (title, text, parent=None): return show_dialog (title, text, gtk.MESSAGE_ERROR, parent=parent) def show_IPP_Error(exception, message, parent=None): if exception == 0: # In this case, the user has canceled an authentication dialog. return elif exception == cups.IPP_SERVICE_UNAVAILABLE: # In this case, the user has canceled a retry dialog. return else: title = _("CUPS server error") text = (_("There was an error during the CUPS " "operation: '%s'.")) % message show_error_dialog (title, text, parent) def show_HTTP_Error(status, parent=None): if (status == cups.HTTP_UNAUTHORIZED or status == cups.HTTP_FORBIDDEN): title = _('Not authorized') text = (_('The password may be incorrect, or the ' 'server may be configured to deny ' 'remote administration.')) else: title = _('CUPS server error') if status == cups.HTTP_BAD_REQUEST: msg = _("Bad request") elif status == cups.HTTP_NOT_FOUND: msg = _("Not found") elif status == cups.HTTP_REQUEST_TIMEOUT: msg = _("Request timeout") elif status == cups.HTTP_UPGRADE_REQUIRED: msg = _("Upgrade required") elif status == cups.HTTP_SERVER_ERROR: msg = _("Server error") elif status == -1: msg = _("Not connected") else: msg = _("status %s") % status text = _("There was an HTTP error: %s.") % msg show_error_dialog (title, text, parent)
gpl-3.0
8,152,813,570,208,546,000
35.6
82
0.6072
false
3.998715
false
false
false
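The show_HTTP_Error function in errordialogs.py above is essentially a status-to-message lookup. A hedged sketch of the same mapping expressed as a dict, using only the cups constants the file itself already references (requires pycups); the helper name is an assumption:

# Sketch only: dict form of the status branches in show_HTTP_Error above.
import cups

HTTP_MESSAGES = {
    cups.HTTP_BAD_REQUEST: "Bad request",
    cups.HTTP_NOT_FOUND: "Not found",
    cups.HTTP_REQUEST_TIMEOUT: "Request timeout",
    cups.HTTP_UPGRADE_REQUIRED: "Upgrade required",
    cups.HTTP_SERVER_ERROR: "Server error",
    -1: "Not connected",
}

def http_status_message(status):
    return HTTP_MESSAGES.get(status, "status %s" % status)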
drelu/SAGA-Hadoop
hadoop1/launcher.py
1
2591
#!/usr/bin/env python

import time
import saga
import os, sys
import subprocess
import pdb
import logging
logging.basicConfig(level=logging.ERROR)

def main():
    try:
        # create a job service for Futuregrid's 'india' PBS cluster
        js = saga.job.Service("pbs+ssh://india")
        #js = saga.job.Service("fork://localhost")

        # describe our job
        jd = saga.job.Description()

        # resource requirements
        jd.total_cpu_count = 16

        # environment, executable & arguments
        executable = os.path.join(os.getcwd(), "bootstrap_hadoop.py")
        logging.debug("Run %s" % executable)
        jd.executable = executable
        jd.arguments = []

        # output options
        jd.output = "hadoop_job.stdout"
        jd.error = "hadoop_job.stderr"
        jd.working_directory = os.getcwd()

        # create the job (state: New)
        myjob = js.create_job(jd)

        print "Starting Hadoop bootstrap job...\n"
        # run the job (submit the job to PBS)
        myjob.run()

        jobid = myjob.get_id()
        print "**** Job ID : %s" % (jobid)
        print "**** Job State : %s" % (myjob.get_state())

        while True:
            state = myjob.get_state()
            if state == "Running":
                if os.path.exists("work/started"):
                    get_hadoop_config_data(str(jobid))
                    break
            time.sleep(3)
    except Exception, ex:
        print "An error occurred: %s" % (str(ex))

def get_hadoop_config_data(jobid):
    pbs_id = jobid[jobid.find("-")+2:len(jobid)-1]
    nodes = subprocess.check_output(["qstat", "-f", pbs_id])
    hosts = "empty"
    for i in nodes.split("\n"):
        if i.find("exec_host") > 0:
            hosts = i[i.find("=")+1:].strip()
    hadoop_home = os.path.join(os.getcwd(), "work/hadoop-1.0.0")
    print "HADOOP installation directory: %s" % hadoop_home
    print "Allocated Resources for Hadoop cluster: " + hosts
    print "HDFS Web Interface: http://%s:50070" % hosts[:hosts.find("/")]
    print "\nTo use Hadoop set HADOOP_CONF_DIR: "
    print "export HADOOP_CONF_DIR=%s" % (os.path.join(os.getcwd(), "work", get_most_current_job(), "conf"))
    print "%s/bin/hadoop dfsadmin -report" % hadoop_home
    print ""

def get_most_current_job():
    dir = "work"
    files = os.listdir(dir)
    max = None
    for i in files:
        if i.startswith("hadoop-conf"):
            t = os.path.getctime(os.path.join(dir, i))
            if max == None or t > max[0]:
                max = (t, i)
    return max[1]

if __name__ == "__main__":
    main()
apache-2.0
944,839,436,700,455,200
29.482353
106
0.564261
false
3.351876
false
false
false
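get_hadoop_config_data in launcher.py above scrapes the exec_host line out of "qstat -f" output. A minimal sketch of that extraction on an invented sample block (the sample text is not real qstat output):

# Sketch only: the exec_host parsing used above, run against made-up output.
sample = """Job Id: 12345.india
    exec_host = i55/0+i55/1+i56/0+i56/1
    queue = batch"""

hosts = "empty"
for line in sample.split("\n"):
    if line.find("exec_host") > 0:
        hosts = line[line.find("=") + 1:].strip()

print(hosts)                      # i55/0+i55/1+i56/0+i56/1
print(hosts[:hosts.find("/")])    # i55, the host used in the HDFS web URL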
heilaaks/snippy
tests/lib/helper.py
1
7104
# -*- coding: utf-8 -*- # # SPDX-License-Identifier: AGPL-3.0-or-later # # snippy - software development and maintenance notes manager. # Copyright 2017-2020 Heikki J. Laaksonen <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """helper: Generic helpers testing.""" from __future__ import print_function import io import os.path import re import sys import json import pkg_resources from jsonschema import Draft7Validator, RefResolver class Helper(object): """Generic helpers testing. This class intentionally copies some of the implementation from the production code. The purpose is to avoid dependencies in this module to be able to import this module anywhere. """ EXPORT_TIME = '2018-02-02T02:02:02.000001+00:00' IMPORT_TIME = '2018-03-02T02:02:02.000001+00:00' EXPORT_TEMPLATE = '2017-10-14T19:56:31.000001+00:00' DB_SQLITE = 'sqlite' DB_POSTGRESQL = 'postgresql' DB_COCKROACHDB = 'cockroachdb' DB_IN_MEMORY = 'in-memory' STORAGES = (DB_SQLITE, DB_POSTGRESQL, DB_COCKROACHDB, DB_IN_MEMORY) COLOR_OK = '\033[32m' COLOR_END = '\033[0m' # All resource attributes that can be sent in HTTP request. REQUEST_ATTRIBUTES = ( 'data', 'brief', 'description', 'name', 'groups', 'tags', 'links', 'source', 'versions', 'languages', 'filename' ) RE_MATCH_ANSI_ESCAPE_SEQUENCES = re.compile(r''' \x1b[^m]*m # Match all ANSI escape sequences. ''', re.VERBOSE) RE_MATCH_LEADING_WHITEPSACES = re.compile(r''' \n\s+ # Match newline and all leading whitespaces after it. ''', re.VERBOSE) @classmethod def read_template(cls, filename): """Get default content template in text format. The returned template must be in the same format where external editor like vi gets the default template. This means that all the tags are removed and the group tag is replaced with 'default' group. Args: filename (str): Template filename as stored in data/templates. Returns: str: Empty template in the same format as for external editor. """ template = cls._read_resource('data/templates', filename) template = re.sub(r''' <groups> # Match groups tag. ''', 'default', template, flags=re.VERBOSE) template = re.sub(r''' [<]\S+[>] # Match any tag in the template. ''', '', template, flags=re.VERBOSE) # In case of the solution template, there is a <data> tag that leaves # empty fist line. Since all templates start from the first line, the # whitespaces can be removed from left of the string. template = template.lstrip() return template @classmethod def read_completion(cls, filename): """Get shell completion script. Args filename (str): Name of the shell completion file. """ return cls._read_resource('data/completion', filename) @staticmethod def remove_ansi(message): """Remove all ANSI escape codes from given string. Args: message (str): Message which ANSI escape codes are removed. Returns: str: Same message but without ANSI escape sequences. 
""" return Helper.RE_MATCH_ANSI_ESCAPE_SEQUENCES.sub('', message) @classmethod def get_schema_validator(cls): """Get JSON schema validator for REST API response. Returns: obj: Jsonschema draft7 validator. """ response_resource = json.loads(cls._read_resource('data/server/openapi/schema', 'responseresource.json')) response_collection_get = json.loads(cls._read_resource('data/server/openapi/schema', 'responsecollectionget.json')) response_collection_post = json.loads(cls._read_resource('data/server/openapi/schema', 'responsecollectionpost.json')) response_groups = json.loads(cls._read_resource('data/server/openapi/schema', 'responsegroups.json')) response_tags = json.loads(cls._read_resource('data/server/openapi/schema', 'responsetags.json')) response_errors = json.loads(cls._read_resource('data/server/openapi/schema', 'responseerrors.json')) response_hello = json.loads(cls._read_resource('data/server/openapi/schema', 'responsehello.json')) schema = { 'oneOf': [ response_collection_get, response_collection_post, response_groups, response_tags, response_errors, response_hello, response_resource ] } filepath = pkg_resources.resource_filename('snippy', 'data/server/openapi/schema/') if not os.path.isdir(filepath): print('NOK: cannot run test because server api response schema base uri is not accessible: {}'.format(filepath)) sys.exit(1) server_schema_base_uri = 'file:' + filepath Draft7Validator.check_schema(schema) resolver = RefResolver(base_uri=server_schema_base_uri, referrer=schema) validator = Draft7Validator(schema, resolver=resolver, format_checker=None) return validator @staticmethod def _read_resource(path, filename): """Read resource file. Args: path (str): Relative path under snippy project. filename (str): Resource filename. Returns: str: File read into a string. """ filename = os.path.join(pkg_resources.resource_filename('snippy', path), filename) if not os.path.isfile(filename): print('NOK: cannot run tests because snippy resource file is not accessible: {}'.format(filename)) sys.exit(1) resource_file = '' with io.open(filename, encoding='utf-8') as infile: resource_file = infile.read() return resource_file class Classproperty(object): # pylint: disable=too-few-public-methods """Implement classproperty. Implement decorator that mimics object property. See [1] for more details. [1] https://stackoverflow.com/a/3203659 """ def __init__(self, getter): self._getter = getter def __get__(self, _, owner): """Get property of a class.""" return self._getter(owner)
agpl-3.0
-3,998,784,855,587,372,000
32.668246
126
0.634713
false
4.047863
false
false
false
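tests/lib/helper.py above ends with a small Classproperty descriptor. A usage sketch follows; the Example class and its attribute are hypothetical, and the descriptor is copied here only so the snippet runs standalone.

# Sketch only: how the Classproperty descriptor defined above is meant to be used.
class Classproperty(object):
    def __init__(self, getter):
        self._getter = getter

    def __get__(self, _, owner):
        return self._getter(owner)

class Example(object):
    _name = "snippy"

    @Classproperty
    def name(cls):
        # Accessed like a property, but resolved against the class itself.
        return cls._name

print(Example.name)   # snippy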
leshchevds/ganeti
lib/ssh.py
1
43027
# # # Copyright (C) 2006, 2007, 2010, 2011 Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED # TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Module encapsulating ssh functionality. """ import logging import os import shutil import tempfile from collections import namedtuple from functools import partial from ganeti import utils from ganeti import errors from ganeti import constants from ganeti import netutils from ganeti import pathutils from ganeti import vcluster from ganeti import compat from ganeti import serializer from ganeti import ssconf def GetUserFiles(user, mkdir=False, dircheck=True, kind=constants.SSHK_DSA, _homedir_fn=None): """Return the paths of a user's SSH files. @type user: string @param user: Username @type mkdir: bool @param mkdir: Whether to create ".ssh" directory if it doesn't exist @type dircheck: bool @param dircheck: Whether to check if ".ssh" directory exists @type kind: string @param kind: One of L{constants.SSHK_ALL} @rtype: tuple; (string, string, string) @return: Tuple containing three file system paths; the private SSH key file, the public SSH key file and the user's C{authorized_keys} file @raise errors.OpExecError: When home directory of the user can not be determined @raise errors.OpExecError: Regardless of the C{mkdir} parameters, this exception is raised if C{~$user/.ssh} is not a directory and C{dircheck} is set to C{True} """ if _homedir_fn is None: _homedir_fn = utils.GetHomeDir user_dir = _homedir_fn(user) if not user_dir: raise errors.OpExecError("Cannot resolve home of user '%s'" % user) if kind == constants.SSHK_DSA: suffix = "dsa" elif kind == constants.SSHK_RSA: suffix = "rsa" elif kind == constants.SSHK_ECDSA: suffix = "ecdsa" else: raise errors.ProgrammerError("Unknown SSH key kind '%s'" % kind) ssh_dir = utils.PathJoin(user_dir, ".ssh") if mkdir: utils.EnsureDirs([(ssh_dir, constants.SECURE_DIR_MODE)]) elif dircheck and not os.path.isdir(ssh_dir): raise errors.OpExecError("Path %s is not a directory" % ssh_dir) return [utils.PathJoin(ssh_dir, base) for base in ["id_%s" % suffix, "id_%s.pub" % suffix, "authorized_keys"]] def GetAllUserFiles(user, mkdir=False, dircheck=True, _homedir_fn=None): """Wrapper over L{GetUserFiles} to retrieve files for all SSH key types. See L{GetUserFiles} for details. 
@rtype: tuple; (string, dict with string as key, tuple of (string, string) as value) """ helper = compat.partial(GetUserFiles, user, mkdir=mkdir, dircheck=dircheck, _homedir_fn=_homedir_fn) result = [(kind, helper(kind=kind)) for kind in constants.SSHK_ALL] authorized_keys = [i for (_, (_, _, i)) in result] assert len(frozenset(authorized_keys)) == 1, \ "Different paths for authorized_keys were returned" return (authorized_keys[0], dict((kind, (privkey, pubkey)) for (kind, (privkey, pubkey, _)) in result)) def _SplitSshKey(key): """Splits a line for SSH's C{authorized_keys} file. If the line has no options (e.g. no C{command="..."}), only the significant parts, the key type and its hash, are used. Otherwise the whole line is used (split at whitespace). @type key: string @param key: Key line @rtype: tuple """ parts = key.split() if parts and parts[0] in constants.SSHAK_ALL: # If the key has no options in front of it, we only want the significant # fields return (False, parts[:2]) else: # Can't properly split the line, so use everything return (True, parts) def AddAuthorizedKeys(file_obj, keys): """Adds a list of SSH public key to an authorized_keys file. @type file_obj: str or file handle @param file_obj: path to authorized_keys file @type keys: list of str @param keys: list of strings containing keys """ key_field_list = [(key, _SplitSshKey(key)) for key in keys] if isinstance(file_obj, basestring): f = open(file_obj, "a+") else: f = file_obj try: nl = True for line in f: # Ignore whitespace changes line_key = _SplitSshKey(line) key_field_list[:] = [(key, split_key) for (key, split_key) in key_field_list if split_key != line_key] nl = line.endswith("\n") else: if not nl: f.write("\n") for (key, _) in key_field_list: f.write(key.rstrip("\r\n")) f.write("\n") f.flush() finally: f.close() def HasAuthorizedKey(file_obj, key): """Check if a particular key is in the 'authorized_keys' file. @type file_obj: str or file handle @param file_obj: path to authorized_keys file @type key: str @param key: string containing key """ key_fields = _SplitSshKey(key) if isinstance(file_obj, basestring): f = open(file_obj, "r") else: f = file_obj try: for line in f: # Ignore whitespace changes line_key = _SplitSshKey(line) if line_key == key_fields: return True finally: f.close() return False def CheckForMultipleKeys(file_obj, node_names): """Check if there is at most one key per host in 'authorized_keys' file. @type file_obj: str or file handle @param file_obj: path to authorized_keys file @type node_names: list of str @param node_names: list of names of nodes of the cluster @returns: a dictionary with hostnames which occur more than once """ if isinstance(file_obj, basestring): f = open(file_obj, "r") else: f = file_obj occurrences = {} try: index = 0 for line in f: index += 1 if line.startswith("#"): continue chunks = line.split() # find the chunk with user@hostname user_hostname = [chunk.strip() for chunk in chunks if "@" in chunk][0] if not user_hostname in occurrences: occurrences[user_hostname] = [] occurrences[user_hostname].append(index) finally: f.close() bad_occurrences = {} for user_hostname, occ in occurrences.items(): _, hostname = user_hostname.split("@") if hostname in node_names and len(occ) > 1: bad_occurrences[user_hostname] = occ return bad_occurrences def AddAuthorizedKey(file_obj, key): """Adds an SSH public key to an authorized_keys file. 
@type file_obj: str or file handle @param file_obj: path to authorized_keys file @type key: str @param key: string containing key """ AddAuthorizedKeys(file_obj, [key]) def RemoveAuthorizedKeys(file_name, keys): """Removes public SSH keys from an authorized_keys file. @type file_name: str @param file_name: path to authorized_keys file @type keys: list of str @param keys: list of strings containing keys """ key_field_list = [_SplitSshKey(key) for key in keys] fd, tmpname = tempfile.mkstemp(dir=os.path.dirname(file_name)) try: out = os.fdopen(fd, "w") try: f = open(file_name, "r") try: for line in f: # Ignore whitespace changes while comparing lines if _SplitSshKey(line) not in key_field_list: out.write(line) out.flush() os.rename(tmpname, file_name) finally: f.close() finally: out.close() except: utils.RemoveFile(tmpname) raise def RemoveAuthorizedKey(file_name, key): """Removes an SSH public key from an authorized_keys file. @type file_name: str @param file_name: path to authorized_keys file @type key: str @param key: string containing key """ RemoveAuthorizedKeys(file_name, [key]) def _AddPublicKeyProcessLine(new_uuid, new_key, line_uuid, line_key, found): """Processes one line of the public key file when adding a key. This is a sub function that can be called within the C{_ManipulatePublicKeyFile} function. It processes one line of the public key file, checks if this line contains the key to add already and if so, notes the occurrence in the return value. @type new_uuid: string @param new_uuid: the node UUID of the node whose key is added @type new_key: string @param new_key: the SSH key to be added @type line_uuid: the UUID of the node whose line in the public key file is processed in this function call @param line_key: the SSH key of the node whose line in the public key file is processed in this function call @type found: boolean @param found: whether or not the (UUID, key) pair of the node whose key is being added was found in the public key file already. @rtype: (boolean, string) @return: a possibly updated value of C{found} and the processed line """ if line_uuid == new_uuid and line_key == new_key: logging.debug("SSH key of node '%s' already in key file.", new_uuid) found = True return (found, "%s %s\n" % (line_uuid, line_key)) def _AddPublicKeyElse(new_uuid, new_key): """Adds a new SSH key to the key file if it did not exist already. This is an auxiliary function for C{_ManipulatePublicKeyFile} which is carried out when a new key is added to the public key file and after processing the whole file, we found out that the key does not exist in the file yet but needs to be appended at the end. @type new_uuid: string @param new_uuid: the UUID of the node whose key is added @type new_key: string @param new_key: the SSH key to be added @rtype: string @return: a new line to be added to the file """ return "%s %s\n" % (new_uuid, new_key) def _RemovePublicKeyProcessLine( target_uuid, _target_key, line_uuid, line_key, found): """Processes a line in the public key file when aiming for removing a key. This is an auxiliary function for C{_ManipulatePublicKeyFile} when we are removing a key from the public key file. This particular function only checks if the current line contains the UUID of the node in question and writes the line to the temporary file otherwise. 
@type target_uuid: string @param target_uuid: UUID of the node whose key is being removed @type _target_key: string @param _target_key: SSH key of the node (not used) @type line_uuid: string @param line_uuid: UUID of the node whose line is processed in this call @type line_key: string @param line_key: SSH key of the nodes whose line is processed in this call @type found: boolean @param found: whether or not the UUID was already found. @rtype: (boolean, string) @return: a tuple, indicating if the target line was found and the processed line; the line is 'None', if the original line is removed """ if line_uuid != target_uuid: return (found, "%s %s\n" % (line_uuid, line_key)) else: return (True, None) def _RemovePublicKeyElse( target_uuid, _target_key): """Logs when we tried to remove a key that does not exist. This is an auxiliary function for C{_ManipulatePublicKeyFile} which is run after we have processed the complete public key file and did not find the key to be removed. @type target_uuid: string @param target_uuid: the UUID of the node whose key was supposed to be removed @type _target_key: string @param _target_key: the key of the node which was supposed to be removed (not used) @rtype: string @return: in this case, always None """ logging.debug("Trying to remove key of node '%s' which is not in list" " of public keys.", target_uuid) return None def _ReplaceNameByUuidProcessLine( node_name, _key, line_identifier, line_key, found, node_uuid=None): """Replaces a node's name with its UUID on a matching line in the key file. This is an auxiliary function for C{_ManipulatePublicKeyFile} which processes a line of the ganeti public key file. If the line in question matches the node's name, the name will be replaced by the node's UUID. @type node_name: string @param node_name: name of the node to be replaced by the UUID @type _key: string @param _key: SSH key of the node (not used) @type line_identifier: string @param line_identifier: an identifier of a node in a line of the public key file. This can be either a node name or a node UUID, depending on if it got replaced already or not. @type line_key: string @param line_key: SSH key of the node whose line is processed @type found: boolean @param found: whether or not the line matches the node's name @type node_uuid: string @param node_uuid: the node's UUID which will replace the node name @rtype: (boolean, string) @return: a tuple indicating whether the target line was found and the processed line """ if node_name == line_identifier: return (True, "%s %s\n" % (node_uuid, line_key)) else: return (found, "%s %s\n" % (line_identifier, line_key)) def _ReplaceNameByUuidElse( node_uuid, node_name, _key): """Logs a debug message when we try to replace a key that is not there. This is an implementation of the auxiliary C{process_else_fn} function for the C{_ManipulatePubKeyFile} function when we use it to replace a line in the public key file that is indexed by the node's name instead of the node's UUID. @type node_uuid: string @param node_uuid: the node's UUID @type node_name: string @param node_name: the node's UUID @type _key: string (not used) @param _key: the node's SSH key (not used) @rtype: string @return: in this case, always None """ logging.debug("Trying to replace node name '%s' with UUID '%s', but" " no line with that name was found.", node_name, node_uuid) return None def _ParseKeyLine(line, error_fn): """Parses a line of the public key file. 
@type line: string @param line: line of the public key file @type error_fn: function @param error_fn: function to process error messages @rtype: tuple (string, string) @return: a tuple containing the UUID of the node and a string containing the SSH key and possible more parameters for the key """ if len(line.rstrip()) == 0: return (None, None) chunks = line.split(" ") if len(chunks) < 2: raise error_fn("Error parsing public SSH key file. Line: '%s'" % line) uuid = chunks[0] key = " ".join(chunks[1:]).rstrip() return (uuid, key) def _ManipulatePubKeyFile(target_identifier, target_key, key_file=pathutils.SSH_PUB_KEYS, error_fn=errors.ProgrammerError, process_line_fn=None, process_else_fn=None): """Manipulates the list of public SSH keys of the cluster. This is a general function to manipulate the public key file. It needs two auxiliary functions C{process_line_fn} and C{process_else_fn} to work. Generally, the public key file is processed as follows: 1) The function processes each line of the original ganeti public key file, applies the C{process_line_fn} function on it, which returns a possibly manipulated line and an indicator whether the line in question was found. If a line is returned, it is added to a list of lines for later writing to the file. 2) If all lines are processed and the 'found' variable is False, the seconds auxiliary function C{process_else_fn} is called to possibly add more lines to the list of lines. 3) Finally, the list of lines is assembled to a string and written atomically to the public key file, thereby overriding it. If the public key file does not exist, we create it. This is necessary for a smooth transition after an upgrade. @type target_identifier: str @param target_identifier: identifier of the node whose key is added; in most cases this is the node's UUID, but in some it is the node's host name @type target_key: str @param target_key: string containing a public SSH key (a complete line possibly including more parameters than just the key) @type key_file: str @param key_file: filename of the file of public node keys (optional parameter for testing) @type error_fn: function @param error_fn: Function that returns an exception, used to customize exception types depending on the calling context @type process_line_fn: function @param process_line_fn: function to process one line of the public key file @type process_else_fn: function @param process_else_fn: function to be called if no line of the key file matches the target uuid """ assert process_else_fn is not None assert process_line_fn is not None old_lines = [] f_orig = None if os.path.exists(key_file): try: f_orig = open(key_file, "r") old_lines = f_orig.readlines() finally: f_orig.close() else: try: f_orig = open(key_file, "w") f_orig.close() except IOError as e: raise errors.SshUpdateError("Cannot create public key file: %s" % e) found = False new_lines = [] for line in old_lines: (uuid, key) = _ParseKeyLine(line, error_fn) if not uuid: continue (new_found, new_line) = process_line_fn(target_identifier, target_key, uuid, key, found) if new_found: found = True if new_line is not None: new_lines.append(new_line) if not found: new_line = process_else_fn(target_identifier, target_key) if new_line is not None: new_lines.append(new_line) new_file_content = "".join(new_lines) utils.WriteFile(key_file, data=new_file_content) def AddPublicKey(new_uuid, new_key, key_file=pathutils.SSH_PUB_KEYS, error_fn=errors.ProgrammerError): """Adds a new key to the list of public keys. 
@see: _ManipulatePubKeyFile for parameter descriptions. """ _ManipulatePubKeyFile(new_uuid, new_key, key_file=key_file, process_line_fn=_AddPublicKeyProcessLine, process_else_fn=_AddPublicKeyElse, error_fn=error_fn) def RemovePublicKey(target_uuid, key_file=pathutils.SSH_PUB_KEYS, error_fn=errors.ProgrammerError): """Removes a key from the list of public keys. @see: _ManipulatePubKeyFile for parameter descriptions. """ _ManipulatePubKeyFile(target_uuid, None, key_file=key_file, process_line_fn=_RemovePublicKeyProcessLine, process_else_fn=_RemovePublicKeyElse, error_fn=error_fn) def ReplaceNameByUuid(node_uuid, node_name, key_file=pathutils.SSH_PUB_KEYS, error_fn=errors.ProgrammerError): """Replaces a host name with the node's corresponding UUID. When a node is added to the cluster, we don't know it's UUID yet. So first its SSH key gets added to the public key file and in a second step, the node's name gets replaced with the node's UUID as soon as we know the UUID. @type node_uuid: string @param node_uuid: the node's UUID to replace the node's name @type node_name: string @param node_name: the node's name to be replaced by the node's UUID @see: _ManipulatePubKeyFile for the other parameter descriptions. """ process_line_fn = partial(_ReplaceNameByUuidProcessLine, node_uuid=node_uuid) process_else_fn = partial(_ReplaceNameByUuidElse, node_uuid=node_uuid) _ManipulatePubKeyFile(node_name, None, key_file=key_file, process_line_fn=process_line_fn, process_else_fn=process_else_fn, error_fn=error_fn) def ClearPubKeyFile(key_file=pathutils.SSH_PUB_KEYS, mode=0600): """Resets the content of the public key file. """ utils.WriteFile(key_file, data="", mode=mode) def OverridePubKeyFile(key_map, key_file=pathutils.SSH_PUB_KEYS): """Overrides the public key file with a list of given keys. @type key_map: dict from str to list of str @param key_map: dictionary mapping uuids to lists of SSH keys """ new_lines = [] for (uuid, keys) in key_map.items(): for key in keys: new_lines.append("%s %s\n" % (uuid, key)) new_file_content = "".join(new_lines) utils.WriteFile(key_file, data=new_file_content) def QueryPubKeyFile(target_uuids, key_file=pathutils.SSH_PUB_KEYS, error_fn=errors.ProgrammerError): """Retrieves a map of keys for the requested node UUIDs. @type target_uuids: str or list of str @param target_uuids: UUID of the node to retrieve the key for or a list of UUIDs of nodes to retrieve the keys for @type key_file: str @param key_file: filename of the file of public node keys (optional parameter for testing) @type error_fn: function @param error_fn: Function that returns an exception, used to customize exception types depending on the calling context @rtype: dict mapping strings to list of strings @return: dictionary mapping node uuids to their ssh keys """ all_keys = target_uuids is None if isinstance(target_uuids, str): target_uuids = [target_uuids] result = {} f = open(key_file, "r") try: for line in f: (uuid, key) = _ParseKeyLine(line, error_fn) if not uuid: continue if all_keys or (uuid in target_uuids): if uuid not in result: result[uuid] = [] result[uuid].append(key) finally: f.close() return result def InitSSHSetup(key_type, key_bits, error_fn=errors.OpPrereqError, _homedir_fn=None, _suffix=""): """Setup the SSH configuration for the node. This generates a dsa keypair for root, adds the pub key to the permitted hosts and adds the hostkey to its own known hosts. 
@param key_type: the type of SSH keypair to be generated @param key_bits: the key length, in bits, to be used """ priv_key, _, auth_keys = GetUserFiles(constants.SSH_LOGIN_USER, kind=key_type, mkdir=True, _homedir_fn=_homedir_fn) new_priv_key_name = priv_key + _suffix new_pub_key_name = priv_key + _suffix + ".pub" for name in new_priv_key_name, new_pub_key_name: if os.path.exists(name): utils.CreateBackup(name) utils.RemoveFile(name) result = utils.RunCmd(["ssh-keygen", "-b", str(key_bits), "-t", key_type, "-f", new_priv_key_name, "-q", "-N", ""]) if result.failed: raise error_fn("Could not generate ssh keypair, error %s" % result.output) AddAuthorizedKey(auth_keys, utils.ReadFile(new_pub_key_name)) def InitPubKeyFile(master_uuid, key_type, key_file=pathutils.SSH_PUB_KEYS): """Creates the public key file and adds the master node's SSH key. @type master_uuid: str @param master_uuid: the master node's UUID @type key_type: one of L{constants.SSHK_ALL} @param key_type: the type of ssh key to be used @type key_file: str @param key_file: name of the file containing the public keys """ _, pub_key, _ = GetUserFiles(constants.SSH_LOGIN_USER, kind=key_type) ClearPubKeyFile(key_file=key_file) key = utils.ReadFile(pub_key) AddPublicKey(master_uuid, key, key_file=key_file) class SshRunner: """Wrapper for SSH commands. """ def __init__(self, cluster_name): """Initializes this class. @type cluster_name: str @param cluster_name: name of the cluster """ self.cluster_name = cluster_name family = ssconf.SimpleStore().GetPrimaryIPFamily() self.ipv6 = (family == netutils.IP6Address.family) def _BuildSshOptions(self, batch, ask_key, use_cluster_key, strict_host_check, private_key=None, quiet=True, port=None): """Builds a list with needed SSH options. @param batch: same as ssh's batch option @param ask_key: allows ssh to ask for key confirmation; this parameter conflicts with the batch one @param use_cluster_key: if True, use the cluster name as the HostKeyAlias name @param strict_host_check: this makes the host key checking strict @param private_key: use this private key instead of the default @param quiet: whether to enable -q to ssh @param port: the SSH port to use, or None to use the default @rtype: list @return: the list of options ready to use in L{utils.process.RunCmd} """ options = [ "-oEscapeChar=none", "-oHashKnownHosts=no", "-oGlobalKnownHostsFile=%s" % pathutils.SSH_KNOWN_HOSTS_FILE, "-oUserKnownHostsFile=/dev/null", "-oCheckHostIp=no", ] if use_cluster_key: options.append("-oHostKeyAlias=%s" % self.cluster_name) if quiet: options.append("-q") if private_key: options.append("-i%s" % private_key) if port: options.append("-oPort=%d" % port) # TODO: Too many boolean options, maybe convert them to more descriptive # constants. 
# Note: ask_key conflicts with batch mode if batch: if ask_key: raise errors.ProgrammerError("SSH call requested conflicting options") options.append("-oBatchMode=yes") if strict_host_check: options.append("-oStrictHostKeyChecking=yes") else: options.append("-oStrictHostKeyChecking=no") else: # non-batch mode if ask_key: options.append("-oStrictHostKeyChecking=ask") elif strict_host_check: options.append("-oStrictHostKeyChecking=yes") else: options.append("-oStrictHostKeyChecking=no") if self.ipv6: options.append("-6") else: options.append("-4") return options def BuildCmd(self, hostname, user, command, batch=True, ask_key=False, tty=False, use_cluster_key=True, strict_host_check=True, private_key=None, quiet=True, port=None): """Build an ssh command to execute a command on a remote node. @param hostname: the target host, string @param user: user to auth as @param command: the command @param batch: if true, ssh will run in batch mode with no prompting @param ask_key: if true, ssh will run with StrictHostKeyChecking=ask, so that we can connect to an unknown host (not valid in batch mode) @param use_cluster_key: whether to expect and use the cluster-global SSH key @param strict_host_check: whether to check the host's SSH key at all @param private_key: use this private key instead of the default @param quiet: whether to enable -q to ssh @param port: the SSH port on which the node's daemon is running @return: the ssh call to run 'command' on the remote host. """ argv = [constants.SSH] argv.extend(self._BuildSshOptions(batch, ask_key, use_cluster_key, strict_host_check, private_key, quiet=quiet, port=port)) if tty: argv.extend(["-t", "-t"]) argv.append("%s@%s" % (user, hostname)) # Insert variables for virtual nodes argv.extend("export %s=%s;" % (utils.ShellQuote(name), utils.ShellQuote(value)) for (name, value) in vcluster.EnvironmentForHost(hostname).items()) argv.append(command) return argv def Run(self, *args, **kwargs): """Runs a command on a remote node. This method has the same return value as `utils.RunCmd()`, which it uses to launch ssh. Args: see SshRunner.BuildCmd. @rtype: L{utils.process.RunResult} @return: the result as from L{utils.process.RunCmd()} """ return utils.RunCmd(self.BuildCmd(*args, **kwargs)) def CopyFileToNode(self, node, port, filename): """Copy a file to another node with scp. @param node: node in the cluster @param filename: absolute pathname of a local file @rtype: boolean @return: the success of the operation """ if not os.path.isabs(filename): logging.error("File %s must be an absolute path", filename) return False if not os.path.isfile(filename): logging.error("File %s does not exist", filename) return False command = [constants.SCP, "-p"] command.extend(self._BuildSshOptions(True, False, True, True, port=port)) command.append(filename) if netutils.IP6Address.IsValid(node): node = netutils.FormatAddress((node, None)) command.append("%s:%s" % (node, vcluster.ExchangeNodeRoot(node, filename))) result = utils.RunCmd(command) if result.failed: logging.error("Copy to node %s failed (%s) error '%s'," " command was '%s'", node, result.fail_reason, result.output, result.cmd) return not result.failed def VerifyNodeHostname(self, node, ssh_port): """Verify hostname consistency via SSH. This functions connects via ssh to a node and compares the hostname reported by the node to the name with have (the one that we connected to). 
This is used to detect problems in ssh known_hosts files (conflicting known hosts) and inconsistencies between dns/hosts entries and local machine names @param node: nodename of a host to check; can be short or full qualified hostname @param ssh_port: the port of a SSH daemon running on the node @return: (success, detail), where: - success: True/False - detail: string with details """ cmd = ("if test -z \"$GANETI_HOSTNAME\"; then" " hostname --fqdn;" "else" " echo \"$GANETI_HOSTNAME\";" "fi") retval = self.Run(node, constants.SSH_LOGIN_USER, cmd, quiet=False, port=ssh_port) if retval.failed: msg = "ssh problem" output = retval.output if output: msg += ": %s" % output else: msg += ": %s (no output)" % retval.fail_reason logging.error("Command %s failed: %s", retval.cmd, msg) return False, msg remotehostname = retval.stdout.strip() if not remotehostname or remotehostname != node: if node.startswith(remotehostname + "."): msg = "hostname not FQDN" else: msg = "hostname mismatch" return False, ("%s: expected %s but got %s" % (msg, node, remotehostname)) return True, "host matches" def WriteKnownHostsFile(cfg, file_name): """Writes the cluster-wide equally known_hosts file. """ data = "" if cfg.GetRsaHostKey(): data += "%s ssh-rsa %s\n" % (cfg.GetClusterName(), cfg.GetRsaHostKey()) if cfg.GetDsaHostKey(): data += "%s ssh-dss %s\n" % (cfg.GetClusterName(), cfg.GetDsaHostKey()) utils.WriteFile(file_name, mode=0600, data=data) def _EnsureCorrectGanetiVersion(cmd): """Ensured the correct Ganeti version before running a command via SSH. Before a command is run on a node via SSH, it makes sense in some situations to ensure that this node is indeed running the correct version of Ganeti like the rest of the cluster. @type cmd: string @param cmd: string @rtype: list of strings @return: a list of commands with the newly added ones at the beginning """ logging.debug("Ensure correct Ganeti version: %s", cmd) version = constants.DIR_VERSION all_cmds = [["test", "-d", os.path.join(pathutils.PKGLIBDIR, version)]] if constants.HAS_GNU_LN: all_cmds.extend([["ln", "-s", "-f", "-T", os.path.join(pathutils.PKGLIBDIR, version), os.path.join(pathutils.SYSCONFDIR, "ganeti/lib")], ["ln", "-s", "-f", "-T", os.path.join(pathutils.SHAREDIR, version), os.path.join(pathutils.SYSCONFDIR, "ganeti/share")]]) else: all_cmds.extend([["rm", "-f", os.path.join(pathutils.SYSCONFDIR, "ganeti/lib")], ["ln", "-s", "-f", os.path.join(pathutils.PKGLIBDIR, version), os.path.join(pathutils.SYSCONFDIR, "ganeti/lib")], ["rm", "-f", os.path.join(pathutils.SYSCONFDIR, "ganeti/share")], ["ln", "-s", "-f", os.path.join(pathutils.SHAREDIR, version), os.path.join(pathutils.SYSCONFDIR, "ganeti/share")]]) all_cmds.append(cmd) return all_cmds def RunSshCmdWithStdin(cluster_name, node, basecmd, port, data, debug=False, verbose=False, use_cluster_key=False, ask_key=False, strict_host_check=False, ensure_version=False): """Runs a command on a remote machine via SSH and provides input in stdin. 
@type cluster_name: string @param cluster_name: Cluster name @type node: string @param node: Node name @type basecmd: string @param basecmd: Base command (path on the remote machine) @type port: int @param port: The SSH port of the remote machine or None for the default @param data: JSON-serializable input data for script (passed to stdin) @type debug: bool @param debug: Enable debug output @type verbose: bool @param verbose: Enable verbose output @type use_cluster_key: bool @param use_cluster_key: See L{ssh.SshRunner.BuildCmd} @type ask_key: bool @param ask_key: See L{ssh.SshRunner.BuildCmd} @type strict_host_check: bool @param strict_host_check: See L{ssh.SshRunner.BuildCmd} """ cmd = [basecmd] # Pass --debug/--verbose to the external script if set on our invocation if debug: cmd.append("--debug") if verbose: cmd.append("--verbose") if ensure_version: all_cmds = _EnsureCorrectGanetiVersion(cmd) else: all_cmds = [cmd] if port is None: port = netutils.GetDaemonPort(constants.SSH) srun = SshRunner(cluster_name) scmd = srun.BuildCmd(node, constants.SSH_LOGIN_USER, utils.ShellQuoteArgs( utils.ShellCombineCommands(all_cmds)), batch=False, ask_key=ask_key, quiet=False, strict_host_check=strict_host_check, use_cluster_key=use_cluster_key, port=port) tempfh = tempfile.TemporaryFile() try: tempfh.write(serializer.DumpJson(data)) tempfh.seek(0) result = utils.RunCmd(scmd, interactive=True, input_fd=tempfh) finally: tempfh.close() if result.failed: raise errors.OpExecError("Command '%s' failed: %s" % (result.cmd, result.fail_reason)) def ReadRemoteSshPubKey(pub_key_file, node, cluster_name, port, ask_key, strict_host_check): """Fetches a public SSH key from a node via SSH. @type pub_key_file: string @param pub_key_file: a tuple consisting of the file name of the public DSA key """ ssh_runner = SshRunner(cluster_name) cmd = ["cat", pub_key_file] ssh_cmd = ssh_runner.BuildCmd(node, constants.SSH_LOGIN_USER, utils.ShellQuoteArgs(cmd), batch=False, ask_key=ask_key, quiet=False, strict_host_check=strict_host_check, use_cluster_key=False, port=port) result = utils.RunCmd(ssh_cmd) if result.failed: raise errors.OpPrereqError("Could not fetch a public SSH key (%s) from node" " '%s': ran command '%s', failure reason: '%s'." % (pub_key_file, node, cmd, result.fail_reason), errors.ECODE_INVAL) return result.stdout def GetSshKeyFilenames(key_type, suffix=""): """Get filenames of the SSH key pair of the given type. @type key_type: string @param key_type: type of SSH key, must be element of C{constants.SSHK_ALL} @type suffix: string @param suffix: optional suffix for the key filenames @rtype: tuple of (string, string) @returns: a tuple containing the name of the private key file and the public key file. """ if key_type not in constants.SSHK_ALL: raise errors.SshUpdateError("Unsupported key type '%s'. Supported key types" " are: %s." % (key_type, constants.SSHK_ALL)) (_, root_keyfiles) = \ GetAllUserFiles(constants.SSH_LOGIN_USER, mkdir=False, dircheck=False) if not key_type in root_keyfiles.keys(): raise errors.SshUpdateError("No keyfile for key type '%s' available." % key_type) key_filenames = root_keyfiles[key_type] if suffix: key_filenames = [_ComputeKeyFilePathWithSuffix(key_filename, suffix) for key_filename in key_filenames] return key_filenames def GetSshPubKeyFilename(key_type, suffix=""): """Get filename of the public SSH key of the given type. 
@type key_type: string @param key_type: type of SSH key, must be element of C{constants.SSHK_ALL} @type suffix: string @param suffix: optional suffix for the key filenames @rtype: string @returns: file name of the public key file """ return GetSshKeyFilenames(key_type, suffix=suffix)[1] def _ComputeKeyFilePathWithSuffix(key_filepath, suffix): """Converts the given key filename to a key filename with a suffix. @type key_filepath: string @param key_filepath: path of the key file @type suffix: string @param suffix: suffix to be appended to the basename of the file """ path = os.path.dirname(key_filepath) ext = os.path.splitext(os.path.basename(key_filepath))[1] basename = os.path.splitext(os.path.basename(key_filepath))[0] return os.path.join(path, basename + suffix + ext) def ReplaceSshKeys(src_key_type, dest_key_type, src_key_suffix="", dest_key_suffix=""): """Replaces an SSH key pair by another SSH key pair. Note that both parts, the private and the public key, are replaced. @type src_key_type: string @param src_key_type: key type of key pair that is replacing the other key pair @type dest_key_type: string @param dest_key_type: key type of the key pair that is being replaced by the source key pair @type src_key_suffix: string @param src_key_suffix: optional suffix of the key files of the source key pair @type dest_key_suffix: string @param dest_key_suffix: optional suffix of the keey files of the destination key pair """ (src_priv_filename, src_pub_filename) = GetSshKeyFilenames( src_key_type, suffix=src_key_suffix) (dest_priv_filename, dest_pub_filename) = GetSshKeyFilenames( dest_key_type, suffix=dest_key_suffix) if not (os.path.exists(src_priv_filename) and os.path.exists(src_pub_filename)): raise errors.SshUpdateError( "At least one of the source key files is missing: %s", ", ".join([src_priv_filename, src_pub_filename])) for dest_file in [dest_priv_filename, dest_pub_filename]: if os.path.exists(dest_file): utils.CreateBackup(dest_file) utils.RemoveFile(dest_file) shutil.move(src_priv_filename, dest_priv_filename) shutil.move(src_pub_filename, dest_pub_filename) def ReadLocalSshPubKeys(key_types, suffix=""): """Reads the local root user SSH key. @type key_types: list of string @param key_types: types of SSH keys. Must be subset of constants.SSHK_ALL. If 'None' or [], all available keys are returned. @type suffix: string @param suffix: optional suffix to be attached to key names when reading them. Used for temporary key files. @rtype: list of string @return: list of public keys """ fetch_key_types = [] if key_types: fetch_key_types += key_types else: fetch_key_types = constants.SSHK_ALL (_, root_keyfiles) = \ GetAllUserFiles(constants.SSH_LOGIN_USER, mkdir=False, dircheck=False) result_keys = [] for (public_key_type, (_, public_key_file)) in root_keyfiles.items(): if public_key_type not in fetch_key_types: continue public_key_dir = os.path.dirname(public_key_file) public_key_filename = "" if suffix: public_key_filename = \ os.path.splitext(os.path.basename(public_key_file))[0] \ + suffix + ".pub" else: public_key_filename = public_key_file public_key_path = os.path.join(public_key_dir, public_key_filename) if not os.path.exists(public_key_path): raise errors.SshUpdateError("Cannot find SSH public key of type '%s'." % public_key_type) else: key = utils.ReadFile(public_key_path) result_keys.append(key) return result_keys # Update gnt-cluster.rst when changing which combinations are valid. 
KeyBitInfo = namedtuple('KeyBitInfo', ['default', 'validation_fn']) SSH_KEY_VALID_BITS = { constants.SSHK_DSA: KeyBitInfo(1024, lambda b: b == 1024), constants.SSHK_RSA: KeyBitInfo(2048, lambda b: b >= 768), constants.SSHK_ECDSA: KeyBitInfo(384, lambda b: b in [256, 384, 521]), } def DetermineKeyBits(key_type, key_bits, old_key_type, old_key_bits): """Checks the key bits to be used for a given key type, or provides defaults. @type key_type: one of L{constants.SSHK_ALL} @param key_type: The key type to use. @type key_bits: positive int or None @param key_bits: The number of bits to use, if supplied by user. @type old_key_type: one of L{constants.SSHK_ALL} or None @param old_key_type: The previously used key type, if any. @type old_key_bits: positive int or None @param old_key_bits: The previously used number of bits, if any. @rtype: positive int @return: The number of bits to use. """ if key_bits is None: if old_key_type is not None and old_key_type == key_type: key_bits = old_key_bits else: key_bits = SSH_KEY_VALID_BITS[key_type].default if not SSH_KEY_VALID_BITS[key_type].validation_fn(key_bits): raise errors.OpPrereqError("Invalid key type and bit size combination:" " %s with %s bits" % (key_type, key_bits), errors.ECODE_INVAL) return key_bits
bsd-2-clause
-2,877,672,071,644,923,400
32.432012
80
0.659144
false
3.685712
false
false
false
vwflow/raws-python
raws_json/__init__.py
1
28752
#!/usr/bin/python # -*- coding: utf-8 -*- # # This is a modified version of '__init__.py' (version 1.1.1), part of the 'atom' module # from the gdata-python-client project (http://code.google.com/p/gdata-python-client/) by Google Inc. # Copyright (C) 2006, 2007, 2008 Google Inc. # # It has been modified to support json formatted data instead of atom. # Copyright (C) 2012 rambla.eu # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.import os __author__ = 'api.jscudder (Jeffrey Scudder)' """JsonService provides CRUD ops. in line with the Atom Publishing Protocol. JsonService: Encapsulates the ability to perform insert, update and delete operations with the Atom Publishing Protocol on which GData is based. An instance can perform query, insertion, deletion, and update. HttpRequest: Function that performs a GET, POST, PUT, or DELETE HTTP request to the specified end point. An JsonService object or a subclass can be used to specify information about the request. """ import os import httplib import urllib import re import base64 import socket URL_REGEX = re.compile('http(s)?\://([\w\.-]*)(\:(\d+))?(/.*)?') class JsonService(object): """Performs Atom Publishing Protocol CRUD operations. The JsonService contains methods to perform HTTP CRUD operations. """ # Default values for members -> disabled, also works without them # port = 80 # ssl = False # If debug is True, the HTTPConnection will display debug information debug = False def __init__(self, server=None, additional_headers=None): """Creates a new JsonService client. Args: server: string (optional) The start of a URL for the server to which all operations should be directed. Example: 'www.google.com' additional_headers: dict (optional) Any additional HTTP headers which should be included with CRUD operations. """ self.server = server self.additional_headers = additional_headers or {} self.additional_headers['User-Agent'] = 'Python Google Data Client Lib' def _ProcessUrl(self, url, for_proxy=False): """Processes a passed URL. If the URL does not begin with https?, then the default value for self.server is used""" return ProcessUrl(self, url, for_proxy=for_proxy) def UseBasicAuth(self, username, password, for_proxy=False): """Sets an Authenticaiton: Basic HTTP header containing plaintext. The username and password are base64 encoded and added to an HTTP header which will be included in each request. Note that your username and password are sent in plaintext. Args: username: str password: str """ UseBasicAuth(self, username, password, for_proxy=for_proxy) def PrepareConnection(self, full_uri): """Opens a connection to the server based on the full URI. Examines the target URI and the proxy settings, which are set as environment variables, to open a connection with the server. This connection is used to make an HTTP request. Args: full_uri: str Which is the target relative (lacks protocol and host) or absolute URL to be opened. Example: 'https://www.google.com/accounts/ClientLogin' or 'base/feeds/snippets' where the server is set to www.google.com. 
Returns: A tuple containing the httplib.HTTPConnection and the full_uri for the request. """ return PrepareConnection(self, full_uri) # Alias the old name for the above method to preserve backwards # compatibility. _PrepareConnection = PrepareConnection # CRUD operations def Get(self, uri, extra_headers=None, url_params=None, escape_params=True): """Query the APP server with the given URI The uri is the portion of the URI after the server value (server example: 'www.google.com'). Example use: To perform a query against Google Base, set the server to 'base.google.com' and set the uri to '/base/feeds/...', where ... is your query. For example, to find snippets for all digital cameras uri should be set to: '/base/feeds/snippets?bq=digital+camera' Args: uri: string The query in the form of a URI. Example: '/base/feeds/snippets?bq=digital+camera'. extra_headers: dicty (optional) Extra HTTP headers to be included in the GET request. These headers are in addition to those stored in the client's additional_headers property. The client automatically sets the Content-Type and Authorization headers. url_params: dict (optional) Additional URL parameters to be included in the query. These are translated into query arguments in the form '&dict_key=value&...'. Example: {'max-results': '250'} becomes &max-results=250 escape_params: boolean (optional) If false, the calling code has already ensured that the query will form a valid URL (all reserved characters have been escaped). If true, this method will escape the query and any URL parameters provided. Returns: httplib.HTTPResponse The server's response to the GET request. """ extra_headers.update({'Accept': 'application/json', 'Content-Type': 'application/json'}) return HttpRequest(self, 'GET', None, uri, extra_headers=extra_headers, url_params=url_params, escape_params=escape_params) def Post(self, data, uri, extra_headers=None, url_params=None, escape_params=True, content_type='application/atom+xml'): """Insert data into an APP server at the given URI. Args: data: string, ElementTree._Element, or something with a __str__ method The XML to be sent to the uri. uri: string The location (feed) to which the data should be inserted. Example: '/base/feeds/items'. extra_headers: dict (optional) HTTP headers which are to be included. The client automatically sets the Content-Type, Authorization, and Content-Length headers. url_params: dict (optional) Additional URL parameters to be included in the URI. These are translated into query arguments in the form '&dict_key=value&...'. Example: {'max-results': '250'} becomes &max-results=250 escape_params: boolean (optional) If false, the calling code has already ensured that the query will form a valid URL (all reserved characters have been escaped). If true, this method will escape the query and any URL parameters provided. Returns: httplib.HTTPResponse Server's response to the POST request. """ extra_headers.update({'Accept': 'application/json'}) return HttpRequest(self, 'POST', data, uri, extra_headers=extra_headers, url_params=url_params, escape_params=escape_params, content_type=content_type) def Put(self, data, uri, extra_headers=None, url_params=None, escape_params=True, content_type='application/atom+xml'): """Updates an entry at the given URI. Args: data: string, ElementTree._Element, or xml_wrapper.ElementWrapper The XML containing the updated data. uri: string A URI indicating entry to which the update will be applied. 
Example: '/base/feeds/items/ITEM-ID' extra_headers: dict (optional) HTTP headers which are to be included. The client automatically sets the Content-Type, Authorization, and Content-Length headers. url_params: dict (optional) Additional URL parameters to be included in the URI. These are translated into query arguments in the form '&dict_key=value&...'. Example: {'max-results': '250'} becomes &max-results=250 escape_params: boolean (optional) If false, the calling code has already ensured that the query will form a valid URL (all reserved characters have been escaped). If true, this method will escape the query and any URL parameters provided. Returns: httplib.HTTPResponse Server's response to the PUT request. """ extra_headers.update({'Accept': 'application/json'}) return HttpRequest(self, 'PUT', data, uri, extra_headers=extra_headers, url_params=url_params, escape_params=escape_params, content_type=content_type) def Delete(self, uri, extra_headers=None, url_params=None, escape_params=True): """Deletes the entry at the given URI. Args: uri: string The URI of the entry to be deleted. Example: '/base/feeds/items/ITEM-ID' extra_headers: dict (optional) HTTP headers which are to be included. The client automatically sets the Content-Type and Authorization headers. url_params: dict (optional) Additional URL parameters to be included in the URI. These are translated into query arguments in the form '&dict_key=value&...'. Example: {'max-results': '250'} becomes &max-results=250 escape_params: boolean (optional) If false, the calling code has already ensured that the query will form a valid URL (all reserved characters have been escaped). If true, this method will escape the query and any URL parameters provided. Returns: httplib.HTTPResponse Server's response to the DELETE request. """ return HttpRequest(self, 'DELETE', None, uri, extra_headers=extra_headers, url_params=url_params, escape_params=escape_params) def Head(self, uri, extra_headers=None, url_params=None, escape_params=True): """Send a HEAD request to the APP server with the given URI The uri is the portion of the URI after the server value (server example: 'www.google.com'). Example use: To perform a query against Google Base, set the server to 'base.google.com' and set the uri to '/base/feeds/...', where ... is your query. For example, to find snippets for all digital cameras uri should be set to: '/base/feeds/snippets?bq=digital+camera' Args: uri: string The query in the form of a URI. Example: '/base/feeds/snippets?bq=digital+camera'. extra_headers: dicty (optional) Extra HTTP headers to be included in the GET request. These headers are in addition to those stored in the client's additional_headers property. The client automatically sets the Content-Type and Authorization headers. url_params: dict (optional) Additional URL parameters to be included in the query. These are translated into query arguments in the form '&dict_key=value&...'. Example: {'max-results': '250'} becomes &max-results=250 escape_params: boolean (optional) If false, the calling code has already ensured that the query will form a valid URL (all reserved characters have been escaped). If true, this method will escape the query and any URL parameters provided. Returns: httplib.HTTPResponse The server's response to the GET request. 
""" return HttpRequest(self, 'HEAD', None, uri, extra_headers=extra_headers, url_params=url_params, escape_params=escape_params) def HttpRequest(service, operation, data, uri, extra_headers=None, url_params=None, escape_params=True, content_type='application/atom+xml'): """Performs an HTTP call to the server, supports GET, POST, PUT, and DELETE. Usage example, perform and HTTP GET on http://www.google.com/: import atom.service client = atom.service.JsonService() http_response = client.Get('http://www.google.com/') or you could set the client.server to 'www.google.com' and use the following: client.server = 'www.google.com' http_response = client.Get('/') Args: service: atom.JsonService object which contains some of the parameters needed to make the request. The following members are used to construct the HTTP call: server (str), additional_headers (dict), port (int), and ssl (bool). operation: str The HTTP operation to be performed. This is usually one of 'GET', 'POST', 'PUT', or 'DELETE' data: ElementTree, filestream, list of parts, or other object which can be converted to a string. Should be set to None when performing a GET or PUT. If data is a file-like object which can be read, this method will read a chunk of 100K bytes at a time and send them. If the data is a list of parts to be sent, each part will be evaluated and sent. uri: The beginning of the URL to which the request should be sent. Examples: '/', '/base/feeds/snippets', '/m8/feeds/contacts/default/base' extra_headers: dict of strings. HTTP headers which should be sent in the request. These headers are in addition to those stored in service.additional_headers. url_params: dict of strings. Key value pairs to be added to the URL as URL parameters. For example {'foo':'bar', 'test':'param'} will become ?foo=bar&test=param. escape_params: bool default True. If true, the keys and values in url_params will be URL escaped when the form is constructed (Special characters converted to %XX form.) content_type: str The MIME type for the data being sent. Defaults to 'application/atom+xml', this is only used if data is set. """ full_uri = BuildUri(uri, url_params, escape_params) (connection, full_uri) = PrepareConnection(service, full_uri) if extra_headers is None: extra_headers = {} # Turn on debug mode if the debug member is set. if service.debug: connection.debuglevel = 1 connection.putrequest(operation, full_uri) # If the list of headers does not include a Content-Length, attempt to # calculate it based on the data object. if (data and not service.additional_headers.has_key('Content-Length') and not extra_headers.has_key('Content-Length')): content_length = __CalculateDataLength(data) if content_length: extra_headers['Content-Length'] = str(content_length) else: extra_headers['Content-Length'] = "0" if content_type: extra_headers['Content-Type'] = content_type # Send the HTTP headers. if isinstance(service.additional_headers, dict): for header in service.additional_headers: connection.putheader(header, service.additional_headers[header]) if isinstance(extra_headers, dict): for header in extra_headers: connection.putheader(header, extra_headers[header]) connection.endheaders() # If there is data, send it in the request. if data: if isinstance(data, list): for data_part in data: __SendDataPart(data_part, connection) else: __SendDataPart(data, connection) # Return the HTTP Response from the server. 
return connection.getresponse() def __SendDataPart(data, connection): if isinstance(data, str): connection.send(data) return elif isinstance(data, unicode): # unicode string must be converted into 8-bit string version (otherwise httplib will raise UnicodeDecodeError) connection.send(data.encode('utf-8')) return # NEXT SECTION COMMENTED OUT, replace by json.decode() if desired # elif ElementTree.iselement(data): # connection.send(ElementTree.tostring(data)) # return # Check to see if data is a file-like object that has a read method. elif hasattr(data, 'read'): # Read the file and send it a chunk at a time. while 1: binarydata = data.read(100000) if binarydata == '': break connection.send(binarydata) return else: # The data object was not a file. # Try to convert to a string and send the data. connection.send(str(data)) return def __CalculateDataLength(data): """Attempts to determine the length of the data to send. This method will respond with a length only if the data is a string or and ElementTree element. Args: data: object If this is not a string or ElementTree element this funtion will return None. """ if isinstance(data, str): return len(data) elif isinstance(data, unicode): return len(data.decode('utf-8')) elif isinstance(data, list): return None # elif ElementTree.iselement(data): # return len(ElementTree.tostring(data)) elif hasattr(data, 'read'): # If this is a file-like object, don't try to guess the length. return None else: return len(str(data)) def PrepareConnection(service, full_uri): """Opens a connection to the server based on the full URI. Examines the target URI and the proxy settings, which are set as environment variables, to open a connection with the server. This connection is used to make an HTTP request. Args: service: atom.JsonService or a subclass. It must have a server string which represents the server host to which the request should be made. It may also have a dictionary of additional_headers to send in the HTTP request. full_uri: str Which is the target relative (lacks protocol and host) or absolute URL to be opened. Example: 'https://www.google.com/accounts/ClientLogin' or 'base/feeds/snippets' where the server is set to www.google.com. Returns: A tuple containing the httplib.HTTPConnection and the full_uri for the request. """ (server, port, ssl, partial_uri) = ProcessUrl(service, full_uri) if ssl: # destination is https proxy = os.environ.get('https_proxy') if proxy: (p_server, p_port, p_ssl, p_uri) = ProcessUrl(service, proxy, True) proxy_username = os.environ.get('proxy-username') if not proxy_username: proxy_username = os.environ.get('proxy_username') proxy_password = os.environ.get('proxy-password') if not proxy_password: proxy_password = os.environ.get('proxy_password') if proxy_username: user_auth = base64.encodestring('%s:%s' % (proxy_username, proxy_password)) proxy_authorization = ('Proxy-authorization: Basic %s\r\n' % ( user_auth.strip())) else: proxy_authorization = '' proxy_connect = 'CONNECT %s:%s HTTP/1.0\r\n' % (server, port) user_agent = 'User-Agent: %s\r\n' % ( service.additional_headers['User-Agent']) proxy_pieces = (proxy_connect + proxy_authorization + user_agent + '\r\n') #now connect, very simple recv and error checking p_sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM) p_sock.connect((p_server,p_port)) p_sock.sendall(proxy_pieces) response = '' # Wait for the full response. 
while response.find("\r\n\r\n") == -1: response += p_sock.recv(8192) p_status=response.split()[1] if p_status!=str(200): raise 'Error status=',str(p_status) # Trivial setup for ssl socket. ssl = socket.ssl(p_sock, None, None) fake_sock = httplib.FakeSocket(p_sock, ssl) # Initalize httplib and replace with the proxy socket. connection = httplib.HTTPConnection(server) connection.sock=fake_sock full_uri = partial_uri else: connection = httplib.HTTPSConnection(server, port) full_uri = partial_uri else: # destination is http proxy = os.environ.get('http_proxy') if proxy: (p_server, p_port, p_ssl, p_uri) = ProcessUrl(service.server, proxy, True) proxy_username = os.environ.get('proxy-username') if not proxy_username: proxy_username = os.environ.get('proxy_username') proxy_password = os.environ.get('proxy-password') if not proxy_password: proxy_password = os.environ.get('proxy_password') if proxy_username: UseBasicAuth(service, proxy_username, proxy_password, True) connection = httplib.HTTPConnection(p_server, p_port) if not full_uri.startswith("http://"): if full_uri.startswith("/"): full_uri = "http://%s%s" % (service.server, full_uri) else: full_uri = "http://%s/%s" % (service.server, full_uri) else: connection = httplib.HTTPConnection(server, port) full_uri = partial_uri return (connection, full_uri) def UseBasicAuth(service, username, password, for_proxy=False): """Sets an Authenticaiton: Basic HTTP header containing plaintext. The username and password are base64 encoded and added to an HTTP header which will be included in each request. Note that your username and password are sent in plaintext. The auth header is added to the additional_headers dictionary in the service object. Args: service: atom.JsonService or a subclass which has an additional_headers dict as a member. username: str password: str """ base_64_string = base64.encodestring('%s:%s' % (username, password)) base_64_string = base_64_string.strip() if for_proxy: header_name = 'Proxy-Authorization' else: header_name = 'Authorization' service.additional_headers[header_name] = 'Basic %s' % (base_64_string,) def ProcessUrl(service, url, for_proxy=False): """Processes a passed URL. If the URL does not begin with https?, then the default value for server is used""" server = None port = 80 ssl = False if hasattr(service, 'server'): server = service.server else: server = service if not for_proxy: if hasattr(service, 'port'): port = service.port if hasattr(service, 'ssl'): ssl = service.ssl uri = url m = URL_REGEX.match(url) if m is None: return (server, port, ssl, uri) else: if m.group(1) is not None: port = 443 ssl = True if m.group(3) is None: server = m.group(2) else: server = m.group(2) port = int(m.group(4)) if m.group(5) is not None: uri = m.group(5) else: uri = '/' return (server, port, ssl, uri) def DictionaryToParamList(url_parameters, escape_params=True): """Convert a dictionary of URL arguments into a URL parameter string. Args: url_parameters: The dictionaty of key-value pairs which will be converted into URL parameters. For example, {'dry-run': 'true', 'foo': 'bar'} will become ['dry-run=true', 'foo=bar']. Returns: A list which contains a string for each key-value pair. The strings are ready to be incorporated into a URL by using '&'.join([] + parameter_list) """ # Choose which function to use when modifying the query and parameters. # Use quote_plus when escape_params is true. 
transform_op = [str, urllib.quote_plus][bool(escape_params)] # Create a list of tuples containing the escaped version of the # parameter-value pairs. parameter_tuples = [(transform_op(param), transform_op(value)) for param, value in (url_parameters or {}).items()] # Turn parameter-value tuples into a list of strings in the form # 'PARAMETER=VALUE'. return ['='.join(x) for x in parameter_tuples] def BuildUri(uri, url_params=None, escape_params=True): """Converts a uri string and a collection of parameters into a URI. Args: uri: string url_params: dict (optional) escape_params: boolean (optional) uri: string The start of the desired URI. This string can alrady contain URL parameters. Examples: '/base/feeds/snippets', '/base/feeds/snippets?bq=digital+camera' url_parameters: dict (optional) Additional URL parameters to be included in the query. These are translated into query arguments in the form '&dict_key=value&...'. Example: {'max-results': '250'} becomes &max-results=250 escape_params: boolean (optional) If false, the calling code has already ensured that the query will form a valid URL (all reserved characters have been escaped). If true, this method will escape the query and any URL parameters provided. Returns: string The URI consisting of the escaped URL parameters appended to the initial uri string. """ # Prepare URL parameters for inclusion into the GET request. parameter_list = DictionaryToParamList(url_params, escape_params) # Append the URL parameters to the URL. if parameter_list: if uri.find('?') != -1: # If there are already URL parameters in the uri string, add the # parameters after a new & character. full_uri = '&'.join([uri] + parameter_list) else: # The uri string did not have any URL parameters (no ? character) # so put a ? between the uri and URL parameters. full_uri = '%s%s' % (uri, '?%s' % ('&'.join([] + parameter_list))) else: full_uri = uri return full_uri class MediaSource(object): """Raws Entries can refer to media sources, so this class provides a place to store references to these objects along with some metadata. """ def __init__(self, file_handle=None, content_type=None, content_length=None, file_path=None, file_name=None, svr_filename = None): """Creates an object of type MediaSource. Args: file_handle: A file handle pointing to the file to be encapsulated in the MediaSource content_type: string The MIME type of the file. Required if a file_handle is given. content_length: int The size of the file. Required if a file_handle is given. file_path: string (optional) A full path name to the file. Used in place of a file_handle. file_name: string The name of the file without any path information. Required if a file_handle is given. """ self.file_handle = file_handle self.content_type = content_type self.content_length = content_length self.file_name = file_name self.svr_filename = svr_filename self.file_path = file_path if (file_handle is None and file_path is not None): self.setFile(file_path, content_type) if not self.svr_filename: self.svr_filename = self.file_name def setFile(self, file_name, content_type): """A helper function which can create a file handle from a given filename and set the content type and length all at once. 
Args: file_name: string The path and file name to the file containing the media content_type: string A MIME type representing the type of the media """ self.file_handle = open(file_name, 'rb') self.content_type = content_type self.content_length = os.path.getsize(file_name) self.file_name = os.path.basename(file_name) def writeFile(self, file_path): # can not write if no path and handle if not file_path or not self.file_handle: return False self.file_path = file_path fd = open(file_path, 'wb') fd.write(self.file_handle.read()) fd.close() return True
apache-2.0
4,799,143,421,886,727,000
40.015692
116
0.643399
false
4.255773
false
false
false
google/google-ctf
2019/finals/web-gphotos-finals/app/gallery/middleware.py
1
1230
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from django.utils.text import slugify import hashlib import os class UserMiddleware: def __init__(self, get_response): self.get_response = get_response def __call__(self, request): user_id = request.COOKIES.get('user') if not user_id: user_id = hashlib.md5(os.urandom(16)).hexdigest() else: user_id = slugify(user_id) user_dir = os.path.join('media', user_id) if not os.path.exists(user_dir): os.makedirs(user_dir) os.makedirs(os.path.join(user_dir, 'thumbs')) request.user_id = user_id response = self.get_response(request) response.set_cookie('user', user_id) return response
apache-2.0
8,556,968,117,111,451,000
28.285714
74
0.704065
false
3.649852
false
false
false
UKPLab/emnlp2017-claim-identification
src/main/python/process_data_se_WithDevel.py
1
4976
import cPickle import numpy as np import pandas as pd import re import sys from collections import defaultdict def build_data_cv(data_folder, cv=10, clean_string=True): """ Loads data. """ revs = [] pos_file = data_folder[0] # train file neg_file = data_folder[1] # test file devel_file = data_folder[2] vocab = defaultdict(float) for (mysplit,myfile) in [(0,pos_file),(1,neg_file),(2,devel_file)]: with open(myfile, "rb") as f: for line in f: rev = [] strippedLine = line.strip() try: lline,label = strippedLine.split("\t") except ValueError: lline = "" label = strippedLine rev.append(lline.strip()) if clean_string: orig_rev = clean_str(" ".join(rev)) else: orig_rev = " ".join(rev).lower() words = set(orig_rev.split()) for word in words: vocab[word] += 1 datum = {"y":int(label), "text": orig_rev, "num_words": len(orig_rev.split()), "split": mysplit} revs.append(datum) #print revs return revs, vocab def get_W(word_vecs, k=300): """ Get word matrix. W[i] is the vector for word indexed by i """ vocab_size = len(word_vecs) word_idx_map = dict() W = np.zeros(shape=(vocab_size+1, k), dtype='float32') W[0] = np.zeros(k, dtype='float32') i = 1 for word in word_vecs: W[i] = word_vecs[word] word_idx_map[word] = i i += 1 return W, word_idx_map def load_bin_vec(fname, vocab): """ Loads 300x1 word vecs from Google (Mikolov) word2vec """ word_vecs = {} with open(fname, "rb") as f: header = f.readline() vocab_size, layer1_size = map(int, header.split()) binary_len = np.dtype('float32').itemsize * layer1_size for line in xrange(vocab_size): word = [] while True: ch = f.read(1) if ch == ' ': word = ''.join(word) break if ch != '\n': word.append(ch) if word in vocab: word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32') else: f.read(binary_len) return word_vecs def add_unknown_words(word_vecs, vocab, min_df=1, k=300): """ For words that occur in at least min_df documents, create a separate word vector. 0.25 is chosen so the unknown vectors have (approximately) same variance as pre-trained ones """ for word in vocab: if word not in word_vecs and vocab[word] >= min_df: word_vecs[word] = np.random.uniform(-0.25,0.25,k) def clean_str(string, TREC=False): """ Tokenization/string cleaning for all datasets except for SST. Every dataset is lower cased except for TREC """ string = re.sub(r"[^A-Za-z0-9(),!?\'\`]", " ", string) string = re.sub(r"\'s", " \'s", string) string = re.sub(r"\'ve", " \'ve", string) string = re.sub(r"n\'t", " n\'t", string) string = re.sub(r"\'re", " \'re", string) string = re.sub(r"\'d", " \'d", string) string = re.sub(r"\'ll", " \'ll", string) string = re.sub(r",", " , ", string) string = re.sub(r"!", " ! ", string) string = re.sub(r"\(", " \( ", string) string = re.sub(r"\)", " \) ", string) string = re.sub(r"\?", " \? ", string) string = re.sub(r"\s{2,}", " ", string) return string.strip() if TREC else string.strip().lower() def clean_str_sst(string): """ Tokenization/string cleaning for the SST dataset """ string = re.sub(r"[^A-Za-z0-9(),!?\'\`]", " ", string) string = re.sub(r"\s{2,}", " ", string) return string.strip().lower() if __name__=="__main__": w2v_file = sys.argv[1] trainFile = sys.argv[2] testFile = sys.argv[3] develFile = sys.argv[4] saveFile = sys.argv[5] data_folder = [trainFile,testFile,develFile] print "loading data...", revs, vocab = build_data_cv(data_folder, cv=10, clean_string=True) max_l = np.max(pd.DataFrame(revs)["num_words"]) print "data loaded!" 
print "number of sentences: " + str(len(revs)) print "vocab size: " + str(len(vocab)) print "max sentence length: " + str(max_l) print "loading word2vec vectors...", sys.stdout.flush() w2v = load_bin_vec(w2v_file, vocab) print "word2vec loaded!" print "num words already in word2vec: " + str(len(w2v)) add_unknown_words(w2v, vocab) W, word_idx_map = get_W(w2v) rand_vecs = {} add_unknown_words(rand_vecs, vocab) W2, _ = get_W(rand_vecs) cPickle.dump([revs, W, W2, word_idx_map, vocab], open(saveFile, "wb")) print "dataset created!" #sys.exit(1) # SE
apache-2.0
-3,612,380,496,805,541,400
32.85034
96
0.530748
false
3.275839
false
false
false
bcgov/gwells
app/backend/gwells/views/bulk.py
1
18358
""" Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import math import logging from decimal import Decimal from rest_framework.views import APIView from rest_framework import status from rest_framework.response import Response from drf_yasg.utils import swagger_auto_schema from django.db import transaction from django.utils import timezone from django.contrib.gis.geos import Point from aquifers.constants import AQUIFER_ID_FOR_UNCORRELATED_WELLS from aquifers.models import Aquifer, VerticalAquiferExtent, VerticalAquiferExtentsHistory from wells.models import Well from gwells.models.bulk import BulkWellAquiferCorrelationHistory from gwells.permissions import ( HasBulkWellAquiferCorrelationUploadRole, HasBulkVerticalAquiferExtentsUploadRole ) logger = logging.getLogger(__name__) class BulkWellAquiferCorrelation(APIView): """ Changes multiple aquifers well correlations all at once """ permission_classes = (HasBulkWellAquiferCorrelationUploadRole, ) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.change_log = [] self.create_date = timezone.now() self.unknown_well_tag_numbers = set() self.unknown_aquifer_ids = set() self.wells_outside_aquifer = dict() self.no_geom_aquifers = set() self.retired_aquifers = set() self.unpublished_aquifers = set() self.unpublished_wells = set() @swagger_auto_schema(auto_schema=None) @transaction.atomic def post(self, request, **kwargs): aquifers = request.data changes = {} wells_to_update = [] # check for a ?commit querystring parameter for this /bulk API # this flag will actually perform the bulk_update() on the DB # without it will just check for errors and return the changes # that would have been made update_db = 'commit' in request.GET incoming_well_tag_numbers = {wtn for aquifer in aquifers for wtn in aquifer['wellTagNumbers']} incoming_aquifer_ids = {aquifer['aquiferId'] for aquifer in aquifers} existing_wells = self.lookup_existing_wells(incoming_well_tag_numbers) existing_aquifers = self.lookup_existing_aquifers(incoming_aquifer_ids) if self.has_errors(): return self.return_errors({}) for aquifer in aquifers: aquifer_id = int(aquifer['aquiferId']) well_tag_numbers = aquifer['wellTagNumbers'] # capture errors about any unknown aquifers aquifer = existing_aquifers[aquifer_id] wells = [well for wtn, well in existing_wells.items() if wtn in well_tag_numbers] # now figure out what has changed for each well for well in wells: well_tag_number = well.well_tag_number existing_aquifer_id = well.aquifer_id if well.aquifer_id else None # We need to skip aquifer 1143 as it is the aquifer without geom that wells are # assigned to when they are not correlated at the time of interpretation. if aquifer_id != AQUIFER_ID_FOR_UNCORRELATED_WELLS: # If the correlation is changing — check if the well is inside the aquifer self.check_well_in_aquifer(well, aquifer) if existing_aquifer_id == aquifer_id: # this well correlation is unchanged change = { 'action': 'same' } else: if existing_aquifer_id is None: # No existing aquifer for this well? 
Must be a new correlation self.append_to_change_log(well_tag_number, aquifer_id, None) change = { 'action': 'new', 'aquiferId': aquifer_id } wells_to_update.append(well) elif existing_aquifer_id != aquifer_id: # existing ids don't match - must be a change self.append_to_change_log(well_tag_number, aquifer_id, existing_aquifer_id) change = { 'action': 'update', 'existingAquiferId': existing_aquifer_id, 'newAquiferId': aquifer_id } wells_to_update.append(well) if change: changes[well_tag_number] = change if update_db: # change all well's to point to the new aquifer for well in wells: well.aquifer = aquifer if update_db: # no errors then updated the DB (if ?commit is passed in) self.update_wells(wells_to_update) elif self.has_warnings(): return self.return_errors(changes) # no errors then we return the changes that were (or could be) performed http_status = status.HTTP_200_OK if update_db else status.HTTP_202_ACCEPTED return Response(changes, status=http_status) def has_errors(self): has_errors = ( len(self.unknown_well_tag_numbers) > 0 or len(self.unknown_aquifer_ids) > 0 ) return has_errors def has_warnings(self): has_warnings = ( len(self.wells_outside_aquifer) > 0 or len(self.no_geom_aquifers) > 0 or len(self.unpublished_wells) > 0 or len(self.unpublished_aquifers) > 0 or len(self.retired_aquifers) > 0 ) return has_warnings def lookup_existing_wells(self, well_tag_numbers): wells = Well.objects.filter(pk__in=well_tag_numbers) keyed_wells = {well.well_tag_number: well for well in wells} known_well_tag_numbers = set(keyed_wells.keys()) self.unknown_well_tag_numbers = well_tag_numbers - known_well_tag_numbers self.unpublished_wells = [well.well_tag_number for well in wells if well.well_publication_status_id != 'Published'] return keyed_wells def lookup_existing_aquifers(self, aquifer_ids): aquifers = Aquifer.objects.filter(pk__in=aquifer_ids).defer('geom') # we are not using geom keyed_aquifers = {aquifer.aquifer_id: aquifer for aquifer in aquifers} known_aquifer_ids = set(keyed_aquifers.keys()) self.unknown_aquifer_ids = aquifer_ids - known_aquifer_ids self.retired_aquifers = [a.aquifer_id for a in aquifers if a.status_retired] self.unpublished_aquifers = [a.aquifer_id for a in aquifers if not a.status_published] return keyed_aquifers def check_well_in_aquifer(self, well, aquifer): if aquifer.geom is None: self.no_geom_aquifers.add(aquifer.aquifer_id) return None if aquifer.geom_simplified is None: raise Exception(f"Aquifer {aquifer.aquifer_id} has no geom_simplified") # Expand simplified polygon by ~1000m in WGS-84 (srid 4326) aquifer_geom = aquifer.geom_simplified.buffer(0.01) if not aquifer_geom.contains(well.geom): well_3005_geom = well.geom.transform(3005, clone=True) distance = aquifer.geom.distance(well_3005_geom) # NOTE: 3005 projection's `.distance()` returns almost-meters self.wells_outside_aquifer[well.well_tag_number] = {'distance': distance, 'units': 'meters'} return False return True def return_errors(self, changes): # roll back the transaction as the bulk_update could have run for one # aquifer but errored on another. 
Best to abort the whole thing and warn the user transaction.set_rollback(True) errors = { 'unknownAquifers': self.unknown_aquifer_ids, 'unknownWells': self.unknown_well_tag_numbers, 'wellsNotInAquifer': self.wells_outside_aquifer, 'aquiferHasNoGeom': self.no_geom_aquifers, 'retiredAquifers': self.retired_aquifers, 'unpublishedAquifers': self.unpublished_aquifers, 'unpublishedWells': self.unpublished_wells, 'changes': changes # always return the list of changes even if there are unknowns } return Response(errors, status=status.HTTP_400_BAD_REQUEST) def update_wells(self, wells): logger.info("Bulk updating %d wells", len(wells)) # bulk update using efficient SQL for any well aquifer correlations that have changed Well.objects.bulk_update(wells, ['aquifer']) # save the BulkWellAquiferCorrelation records BulkWellAquiferCorrelationHistory.objects.bulk_create(self.change_log) def append_to_change_log(self, well_tag_number, to_aquifer_id, from_aquifer_id): bulk_history_item = BulkWellAquiferCorrelationHistory( well_id=well_tag_number, update_to_aquifer_id=to_aquifer_id, update_from_aquifer_id=from_aquifer_id, create_user=self.request.user.profile.username, create_date=self.create_date ) self.change_log.append(bulk_history_item) class BulkVerticalAquiferExtents(APIView): """ Changes multiple vertical aquifer extents all at once """ permission_classes = (HasBulkVerticalAquiferExtentsUploadRole, ) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.conflicts = [] self.change_log = [] self.create_date = timezone.now() self.unknown_well_tag_numbers = set() self.unknown_aquifer_ids = set() @swagger_auto_schema(auto_schema=None) @transaction.atomic def post(self, request, **kwargs): vertical_aquifer_extents = request.data new_vae_models = [] # check for a ?commit querystring parameter for this /bulk API # this flag will actually perform the bulk_update() on the DB # without it will just check for errors and return the changes # that would have been made update_db = 'commit' in request.GET # create a dict of the extents keyed by well_tag_number incoming_vae_data = self.as_wells(vertical_aquifer_extents) incoming_well_tag_numbers = incoming_vae_data.keys() existing_wells = self.lookup_existing_wells(incoming_well_tag_numbers) incoming_aquifer_ids = set(row['aquiferId'] for row in vertical_aquifer_extents) existing_aquifers = self.lookup_existing_aquifers(incoming_aquifer_ids) if len(self.unknown_well_tag_numbers) > 0 or len(self.unknown_aquifer_ids) > 0: return self.return_errors() # loop through every well in this bulk update for well_tag_number, data in incoming_vae_data.items(): well = existing_wells[well_tag_number] existing_data = VerticalAquiferExtent.objects \ .filter(well_id=well_tag_number) \ .order_by('start')[:] existing_aquifer_ids = [item.aquifer_id for item in existing_data] extents = [{'start': item.start, 'end': item.end} for item in existing_data] # record the current extents at this well so we know the complete state at this time for existing_vae in existing_data: self.append_to_history_log(existing_vae) # loop through all incoming extents and see if they overlap with any existing or new extents max_depth = float('-inf') data.sort(key=lambda item: item['fromDepth']) for vae in data: aquifer_id = vae['aquiferId'] from_depth = Decimal(format(vae['fromDepth'], '.2f')) if vae['fromDepth'] is not None else None to_depth = Decimal(format(vae['toDepth'], '.2f')) if vae['toDepth'] is not None else Decimal('Infinity') if aquifer_id in existing_aquifer_ids: 
self.add_conflict(vae, 'Aquifer %s already defined for well' % aquifer_id) continue if from_depth < 0: self.add_conflict(vae, 'From depth can not be less then zero') continue if to_depth < 0: self.add_conflict(vae, 'To depth can not be less then zero') continue if to_depth < from_depth: self.add_conflict(vae, 'From depth must be below to depth') continue aquifer = existing_aquifers[aquifer_id] if self.check_extent_overlaps(from_depth, to_depth, extents): self.add_conflict(vae, 'Overlaps with an existing vertical aquifer extent') continue if from_depth < max_depth: self.add_conflict(vae, 'Overlaps with another vertical aquifer extent in the CSV') continue max_depth = to_depth if update_db: vae_model = self.build_vertical_aquifer_extent_model(well, aquifer, from_depth, to_depth) new_vae_models.append(vae_model) self.append_to_history_log(vae_model) # if there are any unknown aquifers or wells then we want to return errors if len(self.conflicts) > 0: return self.return_errors() if update_db: # no errors then updated the DB (if ?commit is passed in) self.create_vertical_aquifer_extents(new_vae_models) # no errors then we return the changes that were (or could be) performed http_status = status.HTTP_200_OK if update_db else status.HTTP_202_ACCEPTED return Response({}, status=http_status) def as_wells(self, vertical_aquifer_extents): """ Returns extents as a dict keyed by well_tag_number """ wells = {} for record in vertical_aquifer_extents: wells.setdefault(record['wellTagNumber'], []).append(record) return wells def lookup_existing_wells(self, well_tag_numbers): """ Returns a dict keyed by well_tag_number of existing wells """ wells = Well.objects.filter(pk__in=well_tag_numbers) keyed_wells = {well.well_tag_number: well for well in wells} known_well_tag_numbers = set(keyed_wells.keys()) self.unknown_well_tag_numbers = well_tag_numbers - known_well_tag_numbers return keyed_wells def lookup_existing_aquifers(self, aquifer_ids): """ Returns a dict keyed by aquifer_id of existing aquifers """ aquifers = Aquifer.objects.filter(pk__in=aquifer_ids) keyed_aquifers = {aquifer.aquifer_id: aquifer for aquifer in aquifers} known_aquifer_ids = set(keyed_aquifers.keys()) self.unknown_aquifer_ids = aquifer_ids - known_aquifer_ids return keyed_aquifers def add_conflict(self, data, msg): """ Logs a conflict to be returned as a list of conflicts """ self.conflicts.append({ **data, 'message': msg, }) def build_vertical_aquifer_extent_model(self, well, aquifer, from_depth, to_depth): """ A new VerticalAquiferExtentModel which uses the well's geom """ if well.geom: longitude = well.geom.x latitude = well.geom.y point = Point(-abs(float(longitude)), float(latitude), srid=4326) return VerticalAquiferExtent( well=well, aquifer=aquifer, geom=point, start=from_depth, end=None if math.isinf(to_depth) else to_depth, create_user=self.request.user.profile.username, create_date=self.create_date ) def check_extent_overlaps(self, from_depth, to_depth, existing_extents): """ Checks an extent against a list of existing extents """ if len(existing_extents) == 0: return False max_depth = float('-inf') for extent in existing_extents: start = extent['start'] end = extent['end'] if extent['end'] is not None else Decimal('Infinity') if from_depth >= max_depth and to_depth <= start: return False max_depth = end return from_depth < max_depth # check the bottom of all extents def return_errors(self): # roll back the transaction as the bulk_update could have run for one # aquifer but errored on another. 
Best to abort the whole thing and warn the user transaction.set_rollback(True) errors = { 'unknownAquifers': self.unknown_aquifer_ids, 'unknownWells': self.unknown_well_tag_numbers, 'conflicts': self.conflicts } return Response(errors, status=status.HTTP_400_BAD_REQUEST) def create_vertical_aquifer_extents(self, models): """ Creates all the vertical aquifer extents and history log items all at once """ logger.info("Bulk updating %d VerticalAquiferExtents", len(models)) # bulk update using efficient SQL for any well aquifer correlations that have changed VerticalAquiferExtent.objects.bulk_create(models) # save the BulkWellAquiferCorrelation records VerticalAquiferExtentsHistory.objects.bulk_create(self.change_log) def append_to_history_log(self, model): """ Adds a vertical aquifer extent's data to the history log """ bulk_history_item = VerticalAquiferExtentsHistory( well_tag_number=model.well_id, aquifer_id=model.aquifer_id, geom=model.geom, start=model.start, end=model.end, create_user=self.request.user.profile.username, create_date=self.create_date ) self.change_log.append(bulk_history_item)
apache-2.0
-6,203,309,790,408,724,000
41.787879
123
0.620288
false
3.633413
false
false
false
live-clones/dolfin-adjoint
tests_dolfin/optimization_scalar/optimization_scalar.py
1
1880
from __future__ import print_function from dolfin import * from dolfin_adjoint import * import sys dolfin.set_log_level(ERROR) n = 10 mesh = UnitIntervalMesh(n) V = FunctionSpace(mesh, "CG", 2) ic = project(Expression("sin(2*pi*x[0])", degree=1), V) u = ic.copy(deepcopy=True) def main(nu): u_next = Function(V) v = TestFunction(V) timestep = Constant(1.0/n, name="Timestep") F = ((u_next - u)/timestep*v + u_next*u_next.dx(0)*v + nu*u_next.dx(0)*v.dx(0))*dx bc = DirichletBC(V, 0.0, "on_boundary") t = 0.0 end = 0.1 while (t <= end): solve(F == 0, u_next, bc) u.assign(u_next) t += float(timestep) adj_inc_timestep() def eval_cb(j, m): print("j = %f, m = %f." % (j, float(m))) def derivative_cb(j, dj, m): print("j = %f, dj = %f, m = %f." % (j, dj, float(m))) def replay_cb(var, data, m): #print "Got data for variable %s at m = %f." % (var, float(m)) pass if __name__ == "__main__": nu = Constant(0.0001, name="Nu") # Run the forward model once to have the annotation main(nu) J = Functional(inner(u, u)*dx*dt[FINISH_TIME]) # Run the optimisation reduced_functional = ReducedFunctional(J, ConstantControl("Nu"), eval_cb_post= eval_cb, derivative_cb_post=derivative_cb, replay_cb=replay_cb, scale=2.0) try: nu_opt = minimize(reduced_functional, 'SLSQP') tol = 1e-4 if reduced_functional(nu_opt) > tol: print('Test failed: Optimised functional value exceeds tolerance: ', reduced_functional(nu_opt), ' > ', tol, '.') sys.exit(1) except ImportError: info_red("No suitable scipy version found. Aborting test.")
lgpl-3.0
-1,891,839,056,240,876,000
27.923077
125
0.534574
false
3.122924
false
false
false
Eternali/synk
synk-pre/synk-pre.py
1
5279
'''
Synk - Sublime Text Plugin

'''

import os
import socket
import sublime
import sublime_plugin
from threading import Thread, Timer

# variables for storing user defined settings
settings_filename = "synk_pre.sublime-settings"
enabled_field = "enabled"
server_ips_field = "project_server_ips"  # NOTE: add feature to have more than one server later
uplink_ports_field = "uplink_ports"
downlink_ports_field = "downlink_ports"
all_files_field = "synk_all_files"
current_file_field = "synk_current_file"
delay_field = "delay_in_seconds"

# module-level server connection shared by the command and the listener
serv_conn = None


# Object for connecting to the server
class ServerConnection(object):

    def __init__(self, attempts=5):
        self.settings = sublime.load_settings(settings_filename)
        self.delay = self.settings.get(delay_field)
        self.server = self.settings.get(server_ips_field)
        self.up_port = self.settings.get(uplink_ports_field)
        self.down_port = self.settings.get(downlink_ports_field)
        self.current_file = self.settings.get(current_file_field)
        self.upsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.downsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.file_locked = False
        for a in range(attempts):
            try:
                self.upsock.connect((self.server, self.up_port))
                self.downsock.connect((self.server, self.down_port))
                return
            except Exception:
                continue
        sublime.status_message("An error occurred while attempting to connect to the server.")

    def recv_data(self, conn):
        # read from the socket until a short (or empty) chunk signals the end of the message
        received = ""
        while True:
            data = conn.recv(4096)
            received += data.decode("utf-8")
            if len(data) < 4096:
                break
        return received

    def write_file(self, fname, data, mode="w"):
        # data may be a single string or a list of lines
        with open(fname, mode) as f:
            if isinstance(data, str):
                f.write(data)
            else:
                f.write('\n'.join(data) + '\n')

    def push_changes(self, filename=None, attempts=30):
        # default arguments cannot reference self, so resolve the current file here
        if filename is None:
            filename = self.current_file
        view = sublime.active_window().active_view()
        for a in range(attempts):
            if not self.file_locked:
                self.file_locked = True
                data = filename + '\n' + view.substr(sublime.Region(0, view.size()))
                self.upsock.send(data.encode("utf-8"))
                self.file_locked = False
                break

    def get_changes(self):
        #change_thread = Thread(target=self.get_changes_thread)
        #change_thread.start()
        Timer(self.delay, self.get_changes_thread).start()

    def get_changes_thread(self):
        while True:
            self.recved_data = self.recv_data(self.downsock)
            view = sublime.active_window().active_view()
            if len(self.recved_data) and not self.file_locked and view is not None and not view.is_loading():
                self.file_locked = True
                self.write_file(self.current_file, self.recved_data)
                self.file_locked = False


class SynkPreListener(sublime_plugin.EventListener):

    save_queue = []

    @staticmethod
    def generate_backup_filename(filename):
        dirname, basename = [os.path.dirname(filename), os.path.basename(filename).split('.')]
        if len(basename) > 1:
            basename.insert(-1, 'bak')
        else:
            basename.append('bak')
        return dirname + '/' + '.'.join(basename)

    def on_modified(self, view):
        settings = sublime.load_settings(settings_filename)
        if not (view.file_name() and view.is_dirty()):
            return

        delay = settings.get(delay_field)
        all_files = settings.get(all_files_field)
        current_file = settings.get(current_file_field)

        if not all_files and current_file != view.file_name():
            return

        def callback():
            settings = sublime.load_settings(settings_filename)
            current_file = settings.get(current_file_field)
            if view.is_dirty() and not view.is_loading():
                view.run_command("save")
                if serv_conn is not None:
                    serv_conn.push_changes(filename=current_file)
            else:
                content = view.substr(sublime.Region(0, view.size()))
                try:
                    with open(SynkPreListener.generate_backup_filename(view.file_name()), 'w', encoding="utf-8") as f:
                        f.write(content)
                except Exception as e:
                    sublime.status_message(str(e))
                    raise e

        # run the synk callback after the configured delay
        Timer(delay, callback).start()


class SynkPreCommand(sublime_plugin.TextCommand):

    def run(self, edit, **kwargs):
        # TextCommand.run receives an Edit object as its first argument
        global serv_conn

        enable = kwargs.get("enable", None)
        all_files = kwargs.get("all_files", False)
        settings = sublime.load_settings(settings_filename)

        if enable is None:
            enable = not settings.get(enabled_field)
        if not enable:
            message = "Autosynk is turned off."

        settings.set(enabled_field, enable)
        settings.set(all_files_field, all_files)
        filename = sublime.active_window().active_view().file_name()
        settings.set(current_file_field, filename)

        if enable:
            message = "Autosynk is turned on."
            if not all_files:
                message += " for: " + os.path.basename(filename)
            serv_conn = ServerConnection()
            serv_conn.get_changes()

        sublime.status_message(message)
gpl-3.0
-5,355,001,853,979,260,000
33.730263
113
0.602387
false
3.867399
false
false
false
PostTenebrasLab/DrinkingBuddyServer
drinkingBuddyDB_declarative.py
1
3467
#!/usr/bin/python3 import os import sys from sqlalchemy import Column, ForeignKey, Integer, String, DateTime, Boolean from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import relationship from sqlalchemy import create_engine from marshmallow import Schema, fields from flask_sqlalchemy import SQLAlchemy from flask_marshmallow import Marshmallow Base = declarative_base() class Category(Base): __tablename__ = 'categories' id = Column(Integer, primary_key=True) name = Column(String(50), nullable=False) class Terminal(Base): __tablename__ = 'terminals' id = Column(Integer, primary_key=True) name = Column(String(50), nullable=False) key = Column(String(64), nullable=False) class Functionality(Base): __tablename__ = 'functionalities' id = Column(Integer, primary_key=True) category_id = Column(Integer, ForeignKey('categories.id')) category = relationship(Category) terminal_id = Column(Integer, ForeignKey('terminals.id')) terminal = relationship(Terminal) class Item(Base): __tablename__ = 'items' id = Column(Integer, primary_key=True) name = Column(String(50), nullable=False) quantity = Column(Integer) minquantity = Column(Integer) price = Column(Integer) barcode = Column(String(32), nullable=True) pictureURL = Column(String(512), nullable=True) category_id = Column(Integer, ForeignKey('categories.id')) category = relationship(Category) class User(Base): __tablename__ = 'users' id = Column(Integer, primary_key=True) name = Column(String(50)) balance = Column(Integer) type = Column(Integer) class Card(Base): __tablename__ = 'cards' id = Column(Integer, primary_key=True) user_id = Column(Integer, ForeignKey('users.id')) user = relationship(User) class Locker(Base): __tablename__ = 'locker' id = Column(Integer, primary_key=True) user_id = Column(Integer, ForeignKey('users.id')) user = relationship(User) class Transaction(Base): __tablename__ = 'transactions' id = Column(Integer, primary_key=True) date = Column(DateTime) value = Column(Integer) user_id = Column(Integer, ForeignKey('users.id')) user = relationship(User) class TransactionItem(Base): __tablename__ = 'transaction_items' id = Column(Integer, primary_key=True) date = Column(DateTime) quantity = Column(Integer) price_per_item = Column(Integer) canceled = Column(Boolean, default=False) canceled_date = Column(DateTime) element_id = Column(Integer, ForeignKey('items.id')) element = relationship(Item) transaction_id = Column(Integer, ForeignKey('transactions.id')) transaction = relationship(Transaction) class UserSchema(Schema): class Meta: fields = ("id", "name", "balance") class ItemSchema(Schema): class Meta: fields = ("id", "name") class TransactionItemSchema(Schema): element = fields.Nested(ItemSchema) class Meta: fields = ("id", "date", "value", "element_id", "element") class TransactionSchema(Schema): user = fields.Nested(UserSchema) transactionItems = fields.Nested(TransactionItemSchema, many=True) class Meta: fields = ("id", "date", "value", "user_id", "user", "transactionItems") # Create Database # engine = create_engine("sqlite:///db.db", echo=True) # Base.metadata.create_all(engine)
mit
974,896,659,334,482,000
27.418033
87
0.677531
false
3.847947
false
false
false
ifzing/ceilometer-extended-monitor
network/lbaas.py
1
2469
# -*- encoding: utf-8 -*- from ceilometer.openstack.common import log from ceilometer.openstack.common import timeutils from ceilometer.central import plugin from ceilometer import sample from ceilometer import neutron_client class LbaasInBytesPollster(plugin.CentralPollster): LOG = log.getLogger(__name__ + '.LBaaS') def _get_lb_in_bytes(self): in_bytes = [] nt = neutron_client.Client() for pool in nt._list_pools(): bytes = nt._get_lb_in_bytes(pool['id']) in_bytes.append({'id': pool['id'], 'bytes': bytes}) return in_bytes def _iter_pool_stats(self, cache): if 'in_bytes' not in cache: cache['in_bytes'] = list(self._get_lb_in_bytes()) return iter(cache['in_bytes']) def get_samples(self, manager, cache): for pool in self._iter_pool_stats(cache): self.LOG.info("LBAAS POOL: %s" % pool['id']) yield sample.Sample( name='network.lb.in.bytes', type=sample.TYPE_CUMULATIVE, unit='byte', volume=pool['bytes'], user_id=None, project_id=None, resource_id=pool['id'], timestamp=timeutils.utcnow().isoformat(), resource_metadata={}) class LbaasOutBytesPollster(plugin.CentralPollster): LOG = log.getLogger(__name__ + '.LBaaS') def _get_lb_out_bytes(self): in_bytes = [] nt = neutron_client.Client() for pool in nt._list_pools(): bytes = nt._get_lb_out_bytes(pool['id']) in_bytes.append({'id': pool['id'], 'bytes': bytes}) return in_bytes def _iter_pool_stats(self, cache): if 'out_bytes' not in cache: cache['out_bytes'] = list(self._get_lb_out_bytes()) return iter(cache['out_bytes']) def get_samples(self, manager, cache): for pool in self._iter_pool_stats(cache): self.LOG.info("LBAAS POOL: %s" % pool['id']) yield sample.Sample( name='network.lb.out.bytes', type=sample.TYPE_CUMULATIVE, unit='byte', volume=pool['bytes'], user_id=None, project_id=None, resource_id=pool['id'], timestamp=timeutils.utcnow().isoformat(), resource_metadata={})
apache-2.0
8,639,770,541,434,874,000
31.486842
63
0.534629
false
3.882075
false
false
false
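For context, a hedged sketch of how a single polling cycle might drive the two pollsters in the record above. The central agent constructs each pollster and passes one cache dict per cycle, which is the pattern the _iter_pool_stats helpers rely on so a pollster queries each pool only once per cycle. It assumes a reachable neutron endpoint behind neutron_client and that the manager argument is unused, as it is in the code above.

# Hypothetical driver loop; 'manager' is ignored by these pollsters, so None is passed.
cache = {}
for pollster in (LbaasInBytesPollster(), LbaasOutBytesPollster()):
    for s in pollster.get_samples(manager=None, cache=cache):
        print(s.name, s.resource_id, s.volume)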
bromjiri/Presto
predictor/predictor_new.py
1
8137
import settings import pandas as pd import numpy as np import os from datetime import datetime from datetime import timedelta import predictor.predictor_classifier as cls import predictor.predictor_statistic as stat import random import nltk class Stock: def __init__(self, subject): input_file = settings.PREDICTOR_STOCK + "/" + subject + ".csv" self.stock_df = pd.read_csv(input_file, sep=',', index_col='Date') def create_dict(self, from_date, to_date): self.stock_ser = self.stock_df['Diff'].loc[from_date:to_date] # binning self.stock_ser = self.stock_ser.apply(binning_none) self.stock_dict = self.stock_ser.dropna().astype(int).to_dict() def get_dict(self): return self.stock_dict def get_stock_dates(self): return self.stock_ser.index.values class Sent: def __init__(self, subject, source): input_file = settings.PREDICTOR_SENTIMENT + "/" + source + "/" + source + "-sent-" + subject + ".csv" self.sent_df = pd.read_csv(input_file, sep=',', index_col='Date') def get_weekend(self, col_name, stock_dates): weekend_df = np.round(self.sent_df, 2) aggreg = 0 days = 1 for idx, row in weekend_df.iterrows(): value = row[col_name] date = pd.to_datetime(idx) date_plus = date + timedelta(days=1) if str(date_plus.date()) not in stock_dates: # print("weekend") value += aggreg aggreg = value days += 1 else: total = value + aggreg mean = total / days aggreg = 0 days = 1 weekend_df.set_value(idx, col_name, mean) # print(date.date(), row[col_name], value) return np.round(weekend_df[col_name].diff().loc[stock_dates], 2) def create_dict(self, precision, method, from_date, to_date, stock_dates, binning): sentiment_col = "Sent" + precision sent_ser = self.sent_df[sentiment_col] if method == "Natural": sent_ser = sent_ser.diff().loc[from_date:to_date] elif method == "Friday": sent_ser = sent_ser.loc[stock_dates].diff() elif method == "Sunday": sent_ser = sent_ser.diff().loc[stock_dates] elif method == "Weekend": sent_ser = self.get_weekend(sentiment_col, stock_dates) # binning std_dev1 = sent_ser.std() / 4 std_dev2 = sent_ser.std() if binning == 'none': sent_ser_new = sent_ser.apply(binning_none) elif binning == 'low': sent_ser_new = sent_ser.apply(binning_low, args=(std_dev1,)) else: sent_ser_new = sent_ser.apply(binning_high, args=(std_dev1, std_dev2,)) # print(pd.concat([sent_ser, sent_ser_new], axis=1)) self.sent_dict = sent_ser_new.dropna().astype(int).to_dict() self.key_list = sorted(self.sent_dict.keys()) def get_dict(self): return self.sent_dict def get_features(self, key): index = self.key_list.index(key) features = dict() features['d1'] = self.sent_dict[self.key_list[index-3]] features['d2'] = self.sent_dict[self.key_list[index-2]] features['d3'] = self.sent_dict[self.key_list[index-1]] return features def binning_none(row): if row > 0: return 4 elif row < 0: return 0 else: return row def binning_low(row, std_dev1): if row > std_dev1: return 4 elif row < std_dev1 and row > -std_dev1: return 2 elif row < -std_dev1: return 0 else: return row def binning_high(row, std_dev1, std_dev2): if row > std_dev2: return 4 elif row < std_dev2 and row > std_dev1: return 3 elif row < std_dev1 and row > -std_dev1: return 2 elif row < -std_dev1 and row > -std_dev2: return 1 elif row < -std_dev2: return 0 else: return row def run_one(source, subject, precision, method, from_date, to_date, binning, filename_nltk, filename_skl): # stock dataframe stock = Stock(subject) stock.create_dict(from_date, to_date) stock_dict = stock.get_dict() # print(sorted(stock_dict.items())) indexes = ["djia", "snp", "nasdaq"] # if subject in 
indexes: # subject = "the" # sentiment dataframe sent = Sent(subject, source) sent.create_dict(precision, method, from_date, to_date, stock.get_stock_dates(), binning) # print(sorted(sent.get_dict().items())) # features features_list = list() for key in sorted(stock_dict)[3:]: features = sent.get_features(key) features_list.append([features, stock_dict[key]]) # print([key, sorted(features.items()), stock_dict[key]]) features_list_pos = list() features_list_neg = list() for feature in features_list: if feature[1] == 0: features_list_neg.append(feature) else: features_list_pos.append(feature) statistic = stat.Statistic(source, subject, precision, method, binning) # print(len(features_list), len(features_list_pos), len(features_list_neg)) max_half = min(len(features_list_pos), len(features_list_neg)) train_border = int(max_half * 4 / 5) # print(train_border, max_half) # exit() cycles = 50 for x in range(0, cycles): random.shuffle(features_list_pos) random.shuffle(features_list_neg) # random.shuffle(features_list) trainfeats = features_list_pos[:train_border] + features_list_neg[:train_border] testfeats = features_list_pos[train_border:max_half] + features_list_neg[train_border:max_half] # print(len(trainfeats), len(testfeats)) # trainfeats = features_list[:170] # testfeats = features_list[170:] nlt_output, skl_output = cls.train(trainfeats, testfeats, nlt=nltk_run, skl=sklearn_run) # print(nlt_output['most1']) # exit() if nltk_run: statistic.add_nltk(nlt_output) if sklearn_run: statistic.add_skl(skl_output) if nltk_run: statistic.mean_nltk(cycles) statistic.print_nltk() # statistic.write_nltk(filename_nltk) if sklearn_run: statistic.mean_skl(cycles) statistic.print_skl() statistic.print_stddev() # statistic.write_skl(filename_skl) nltk_run = True sklearn_run = True from_date = '2016-11-01' to_date = '2017-08-31' source = "stwits-comb" binnings = ['none', 'low', 'high'] # subjects = ["snp", "djia", "nasdaq"] subjects = ["djia", "snp", "nasdaq"] # subjects = ["microsoft"] precisions = ["0.6", "0.8", "1.0"] # precisions = ["0.6"] methods = ["Friday", "Natural", "Weekend", "Sunday"] # methods = ["Friday"] for subject in subjects: folder = settings.PREDICTOR_PREDICTION + '/' + source + '/' + subject + '/' os.makedirs(folder, exist_ok=True) filename_nltk = folder + source + '-prediction-' + subject + "-nltk.csv" filename_skl = folder + source + '-prediction-' + subject + "-skl.csv" # if nltk_run: # open(filename_nltk, 'w').close() # # if sklearn_run: # open(filename_skl, 'w').close() for method in methods: # if nltk_run: # f = open(filename_nltk, 'a') # f.write(source + ", " + subject + ", " + method + ", NLTK\n") # f.write("precision, binning, accuracy, pos_prec, neg_prec, pos_rec, neg_rec, d1, d2, d3\n") # f.close() # # if sklearn_run: # f = open(filename_skl, 'a') # f.write(source + ", " + subject + ", " + method + ", SKL\n") # f.write("precision, binning, mnb, bnb, lr, lsvc, nsvc, voted\n") # f.close() for precision in precisions: for binning in binnings: # print(source, subject, precision, method) run_one(source, subject, precision, method, from_date, to_date, binning, filename_nltk, filename_skl)
mit
-6,080,565,351,509,108,000
28.915441
117
0.577731
false
3.313111
false
false
false
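To make the binning step in the record above concrete, here is a small illustrative example of binning_high applied to a toy diff series, mirroring what Sent.create_dict does. The numbers are invented, and it assumes the binning functions above are in scope.

import pandas as pd

diffs = pd.Series([0.9, 0.2, -0.05, -0.4, -1.2])
std_dev1 = diffs.std() / 4      # ~0.19
std_dev2 = diffs.std()          # ~0.77

binned = diffs.apply(binning_high, args=(std_dev1, std_dev2))
print(binned.astype(int).tolist())   # [4, 3, 2, 1, 0] -- strongest rise down to strongest drop
# Values that land exactly on a threshold fall through and are returned unbinned,
# which is why the original code drops NaNs before casting to int.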
imgos/asterisk-scripts
script/googlecontacts.py
1
2701
#!/usr/bin/python3 """Get Google Contacts Usage: google_contacts.py [--noauth_local_webserver] Options: --noauth_local_webserver passed on to google auth """ import docopt import httplib2 import subprocess import unidecode from apiclient import discovery from argparse import Namespace from oauth2client import client from oauth2client import tools from oauth2client.file import Storage ### APPLICATION_NAME = "Asterisk DB Updater" # If modifying these scopes, delete your previously saved credentials SCOPES = [ "https://www.googleapis.com/auth/contacts.readonly", "https://www.googleapis.com/auth/people.readonly", ] OAUTH_CONFIG_FILE = "/etc/asterisk-scripts/asterisk_client_secrets.json" OAUTH_TOKEN_FILE = "/etc/asterisk-scripts/asterisk_script_tokens.json" ### def get_credentials(flags): """Gets valid user credentials from storage. If nothing has been stored, or if the stored credentials are invalid, the OAuth2 flow is completed to obtain the new credentials. :param flags: oauth flags :return: the obtained credentials """ store = Storage(OAUTH_TOKEN_FILE) credentials = store.get() if not credentials or credentials.invalid: flow = client.flow_from_clientsecrets(OAUTH_CONFIG_FILE, SCOPES) flow.user_agent = APPLICATION_NAME credentials = tools.run_flow(flow, store, flags) print(f"Storing credentials to: {OAUTH_TOKEN_FILE}") return credentials def main(): """Update asterisk db from google contacts""" opts = docopt.docopt(__doc__) flags = Namespace( auth_host_name="localhost", auth_host_port=[8080, 8090], logging_level="ERROR", noauth_local_webserver=opts["--noauth_local_webserver"], ) credentials = get_credentials(flags) http = credentials.authorize(httplib2.Http()) if opts["--noauth_local_webserver"]: return service = discovery.build("people", "v1", http=http) contacts_response = ( service.people() .connections() .list( resourceName="people/me", personFields="names,phoneNumbers", sortOrder="LAST_NAME_ASCENDING", ) .execute() ) for i, contact in enumerate(contacts_response["connections"]): display_name = ( contact["names"][0]["displayName"] if len(contact["names"]) > 0 else "Unknown" ) for phone in contact["phoneNumbers"]: ast_cmd = f'database put cidname {phone["canonicalForm"]} "{display_name}"' subprocess.run(["asterisk", "-rx", unidecode.unidecode(ast_cmd)]) if __name__ == "__main__": main()
mit
-8,137,995,456,563,472,000
26.561224
87
0.654202
false
3.920174
false
false
false
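As an illustration of the Asterisk CLI command the script above builds per phone number (the contact data here is invented, and the dict mirrors the assumed shape of a People API phone entry):

phone = {"canonicalForm": "+15551234567"}   # assumed People API phone entry shape
display_name = "Ada Lovelace"
ast_cmd = f'database put cidname {phone["canonicalForm"]} "{display_name}"'
print(ast_cmd)   # database put cidname +15551234567 "Ada Lovelace"
# The script then hands this string to the CLI: asterisk -rx '<ast_cmd>'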
dingmingliu/quanttrade
bt/core.py
1
37660
""" Contains the core building blocks of the framework. """ import math from copy import deepcopy import pandas as pd import numpy as np import cython as cy class Node(object): """ The Node is the main building block in bt's tree structure design. Both StrategyBase and SecurityBase inherit Node. It contains the core functionality of a tree node. Args: * name (str): The Node name * parent (Node): The parent Node * children (dict, list): A collection of children. If dict, the format is {name: child}, if list then list of children. Attributes: * name (str): Node name * parent (Node): Node parent * root (Node): Root node of the tree (topmost node) * children (dict): Node's children * now (datetime): Used when backtesting to store current date * stale (bool): Flag used to determine if Node is stale and need updating * prices (TimeSeries): Prices of the Node. Prices for a security will be the security's price, for a strategy it will be an index that reflects the value of the strategy over time. * price (float): last price * value (float): last value * weight (float): weight in parent * full_name (str): Name including parents' names * members (list): Current Node + node's children """ _price = cy.declare(cy.double) _value = cy.declare(cy.double) _weight = cy.declare(cy.double) _issec = cy.declare(cy.bint) _has_strat_children = cy.declare(cy.bint) def __init__(self, name, parent=None, children=None): self.name = name # strategy children helpers self._has_strat_children = False self._strat_children = [] # if children is not None, we assume that we want to limit the # available children space to the provided list. if children is not None: if isinstance(children, list): # if all strings - just save as universe_filter if all(isinstance(x, str) for x in children): self._universe_tickers = children # empty dict - don't want to uselessly create # tons of children when they might not be needed children = {} else: # this will be case if we pass in children # (say a bunch of sub-strategies) tmp = {} ut = [] for c in children: if type(c) == str: tmp[c] = SecurityBase(c) ut.append(c) else: # deepcopy object for possible later reuse tmp[c.name] = deepcopy(c) # if strategy, turn on flag and add name to list # strategy children have special treatment if isinstance(c, StrategyBase): self._has_strat_children = True self._strat_children.append(c.name) # if not strategy, then we will want to add this to # universe_tickers to filter on setup else: ut.append(c.name) children = tmp # we want to keep whole universe in this case # so set to None self._universe_tickers = ut if parent is None: self.parent = self self.root = self else: self.parent = parent self.root = parent.root parent._add_child(self) # default children if children is None: children = {} self._universe_tickers = None self.children = children self._childrenv = children.values() for c in self._childrenv: c.parent = self c.root = self.root # set default value for now self.now = 0 # make sure root has stale flag # used to avoid unncessary update # sometimes we change values in the tree and we know that we will need # to update if another node tries to access a given value (say weight). # This avoid calling the update until it is actually needed. self.root.stale = False # helper vars self._price = 0 self._value = 0 self._weight = 0 # is security flag - used to avoid updating 0 pos securities self._issec = False def __getitem__(self, key): return self.children[key] @property def prices(self): """ A TimeSeries of the Node's price. 
""" # can optimize depending on type - # securities don't need to check stale to # return latest prices, whereas strategies do... raise NotImplementedError() @property def price(self): """ Current price of the Node """ # can optimize depending on type - # securities don't need to check stale to # return latest prices, whereas strategies do... raise NotImplementedError() @property def value(self): """ Current value of the Node """ if self.root.stale: self.root.update(self.root.now, None) return self._value @property def weight(self): """ Current weight of the Node (with respect to the parent). """ if self.root.stale: self.root.update(self.root.now, None) return self._weight def setup(self, dates): """ Setup method used to initialize a Node with a set of dates. """ raise NotImplementedError() def _add_child(self, child): child.parent = self child.root = self.root if self.children is None: self.children = {child.name: child} else: self.children[child.name] = child self._childrenv = self.children.values() def update(self, date, data=None, inow=None): """ Update Node with latest date, and optionally some data. """ raise NotImplementedError() def adjust(self, amount, update=True, isflow=True): """ Adjust Node value by amount. """ raise NotImplementedError() def allocate(self, amount, update=True): """ Allocate capital to Node. """ raise NotImplementedError() @property def members(self): """ Node members. Members include current node as well as Node's children. """ res = [self] for c in self.children.values(): res.extend(c.members) return res @property def full_name(self): if self.parent == self: return self.name else: return '%s>%s' % (self.parent.full_name, self.name) class StrategyBase(Node): """ Strategy Node. Used to define strategy logic within a tree. A Strategy's role is to allocate capital to it's children based on a function. Args: * name (str): Strategy name * children (dict, list): A collection of children. If dict, the format is {name: child}, if list then list of children. Children can be any type of Node. * parent (Node): The parent Node Attributes: * name (str): Strategy name * parent (Strategy): Strategy parent * root (Strategy): Root node of the tree (topmost node) * children (dict): Strategy's children * now (datetime): Used when backtesting to store current date * stale (bool): Flag used to determine if Strategy is stale and need updating * prices (TimeSeries): Prices of the Strategy - basically an index that reflects the value of the strategy over time. * price (float): last price * value (float): last value * weight (float): weight in parent * full_name (str): Name including parents' names * members (list): Current Strategy + strategy's children * commission_fn (fn(quantity, price)): A function used to determine the commission (transaction fee) amount. Could be used to model slippage (implementation shortfall). Note that often fees are symmetric for buy and sell and absolute value of quantity should be used for calculation. * capital (float): Capital amount in Strategy - cash * universe (DataFrame): Data universe available at the current time. Universe contains the data passed in when creating a Backtest. Use this data to determine strategy logic. 
""" _capital = cy.declare(cy.double) _net_flows = cy.declare(cy.double) _last_value = cy.declare(cy.double) _last_price = cy.declare(cy.double) _last_fee = cy.declare(cy.double) _paper_trade = cy.declare(cy.bint) bankrupt = cy.declare(cy.bint) def __init__(self, name, children=None, parent=None): Node.__init__(self, name, children=children, parent=parent) self._capital = 0 self._weight = 1 self._value = 0 self._price = 100 # helper vars self._net_flows = 0 self._last_value = 0 self._last_price = 100 self._last_fee = 0 # default commission function self.commission_fn = self._dflt_comm_fn self._paper_trade = False self._positions = None self.bankrupt = False @property def price(self): """ Current price. """ if self.root.stale: self.root.update(self.now, None) return self._price @property def prices(self): """ TimeSeries of prices. """ if self.root.stale: self.root.update(self.now, None) return self._prices.ix[:self.now] @property def values(self): """ TimeSeries of values. """ if self.root.stale: self.root.update(self.now, None) return self._values.ix[:self.now] @property def capital(self): """ Current capital - amount of unallocated capital left in strategy. """ # no stale check needed return self._capital @property def cash(self): """ TimeSeries of unallocated capital. """ # no stale check needed return self._cash @property def fees(self): """ TimeSeries of fees. """ # no stale check needed return self._fees @property def universe(self): """ Data universe available at the current time. Universe contains the data passed in when creating a Backtest. Use this data to determine strategy logic. """ # avoid windowing every time # if calling and on same date return # cached value if self.now == self._last_chk: return self._funiverse else: self._last_chk = self.now self._funiverse = self._universe.ix[:self.now] return self._funiverse @property def positions(self): """ TimeSeries of positions. """ # if accessing and stale - update first if self.root.stale: self.root.update(self.root.now, None) if self._positions is not None: return self._positions else: vals = pd.DataFrame({x.name: x.positions for x in self.members if isinstance(x, SecurityBase)}) self._positions = vals return vals def setup(self, universe): """ Setup strategy with universe. This will speed up future calculations and updates. 
""" # save full universe in case we need it self._original_data = universe # determine if needs paper trading # and setup if so if self is not self.parent: self._paper_trade = True self._paper_amount = 1000000 paper = deepcopy(self) paper.parent = paper paper.root = paper paper._paper_trade = False paper.setup(self._original_data) paper.adjust(self._paper_amount) self._paper = paper # setup universe funiverse = universe if self._universe_tickers is not None: # if we have universe_tickers defined, limit universe to # those tickers valid_filter = list(set(universe.columns) .intersection(self._universe_tickers)) funiverse = universe[valid_filter].copy() # if we have strat children, we will need to create their columns # in the new universe if self._has_strat_children: for c in self._strat_children: funiverse[c] = np.nan # must create to avoid pandas warning funiverse = pd.DataFrame(funiverse) self._universe = funiverse # holds filtered universe self._funiverse = funiverse self._last_chk = None # We're not bankrupt yet self.bankrupt = False # setup internal data self.data = pd.DataFrame(index=funiverse.index, columns=['price', 'value', 'cash', 'fees'], data=0.0) self._prices = self.data['price'] self._values = self.data['value'] self._cash = self.data['cash'] self._fees = self.data['fees'] # setup children as well - use original universe here - don't want to # pollute with potential strategy children in funiverse if self.children is not None: [c.setup(universe) for c in self._childrenv] @cy.locals(newpt=cy.bint, val=cy.double, ret=cy.double) def update(self, date, data=None, inow=None): """ Update strategy. Updates prices, values, weight, etc. """ # resolve stale state self.root.stale = False # update helpers on date change # also set newpt flag newpt = False if self.now == 0: newpt = True elif date != self.now: self._net_flows = 0 self._last_price = self._price self._last_value = self._value self._last_fee = 0.0 newpt = True # update now self.now = date if inow is None: if self.now == 0: inow = 0 else: inow = self.data.index.get_loc(date) # update children if any and calculate value val = self._capital # default if no children if self.children is not None: for c in self._childrenv: # avoid useless update call if c._issec and not c._needupdate: continue c.update(date, data, inow) val += c.value if self.root == self: if (val < 0) and not self.bankrupt: # Declare a bankruptcy self.bankrupt = True self.flatten() # update data if this value is different or # if now has changed - avoid all this if not since it # won't change if newpt or self._value != val: self._value = val self._values.values[inow] = val try: ret = self._value / (self._last_value + self._net_flows) - 1 except ZeroDivisionError: if self._value == 0: ret = 0 else: raise ZeroDivisionError( 'Could not update %s. Last value ' 'was %s and net flows were %s. Current' 'value is %s. Therefore, ' 'we are dividing by zero to obtain the return ' 'for the period.' 
% (self.name, self._last_value, self._net_flows, self._value)) self._price = self._last_price * (1 + ret) self._prices.values[inow] = self._price # update children weights if self.children is not None: for c in self._childrenv: # avoid useless update call if c._issec and not c._needupdate: continue try: c._weight = c.value / val except ZeroDivisionError: c._weight = 0.0 # if we have strategy children, we will need to update them in universe if self._has_strat_children: for c in self._strat_children: # TODO: optimize ".loc" here as well self._universe.loc[date, c] = self.children[c].price # Cash should track the unallocated capital at the end of the day, so # we should update it every time we call "update". # Same for fees self._cash.values[inow] = self._capital self._fees.values[inow] = self._last_fee # update paper trade if necessary if newpt and self._paper_trade: self._paper.update(date) self._paper.run() self._paper.update(date) # update price self._price = self._paper.price self._prices.values[inow] = self._price @cy.locals(amount=cy.double, update=cy.bint, flow=cy.bint, fees=cy.double) def adjust(self, amount, update=True, flow=True, fee=0.0): """ Adjust capital - used to inject capital to a Strategy. This injection of capital will have no effect on the children. Args: * amount (float): Amount to adjust by. * update (bool): Force update? * flow (bool): Is this adjustment a flow? Basically a flow will have an impact on the price index. Examples of flows are commissions. """ # adjust capital self._capital += amount self._last_fee += fee # if flow - increment net_flows - this will not affect # performance. Commissions and other fees are not flows since # they have a performance impact if flow: self._net_flows += amount if update: # indicates that data is now stale and must # be updated before access self.root.stale = True @cy.locals(amount=cy.double, update=cy.bint) def allocate(self, amount, child=None, update=True): """ Allocate capital to Strategy. By default, capital is allocated recursively down the children, proportionally to the children's weights. If a child is specified, capital will be allocated to that specific child. Allocation also have a side-effect. They will deduct the same amount from the parent's "account" to offset the allocation. If there is remaining capital after allocation, it will remain in Strategy. Args: * amount (float): Amount to allocate. * child (str): If specified, allocation will be directed to child only. Specified by name. * update (bool): Force update. 
""" # allocate to child if child is not None: if child not in self.children: c = SecurityBase(child) c.setup(self._universe) # update to bring up to speed c.update(self.now) # add child to tree self._add_child(c) # allocate to child self.children[child].allocate(amount) # allocate to self else: # adjust parent's capital # no need to update now - avoids repetition if self.parent == self: self.parent.adjust(-amount, update=False, flow=True) else: # do NOT set as flow - parent will be another strategy # and therefore should not incur flow self.parent.adjust(-amount, update=False, flow=False) # adjust self's capital self.adjust(amount, update=False, flow=True) # push allocation down to children if any # use _weight to avoid triggering an update if self.children is not None: [c.allocate(amount * c._weight, update=False) for c in self._childrenv] # mark as stale if update requested if update: self.root.stale = True @cy.locals(delta=cy.double, weight=cy.double, base=cy.double) def rebalance(self, weight, child, base=np.nan, update=True): """ Rebalance a child to a given weight. This is a helper method to simplify code logic. This method is used when we want to se the weight of a particular child to a set amount. It is similar to allocate, but it calculates the appropriate allocation based on the current weight. Args: * weight (float): The target weight. Usually between -1.0 and 1.0. * child (str): child to allocate to - specified by name. * base (float): If specified, this is the base amount all weight delta calculations will be based off of. This is useful when we determine a set of weights and want to rebalance each child given these new weights. However, as we iterate through each child and call this method, the base (which is by default the current value) will change. Therefore, we can set this base to the original value before the iteration to ensure the proper allocations are made. * update (bool): Force update? """ # if weight is 0 - we want to close child if weight == 0: if child in self.children: return self.close(child) else: return # if no base specified use self's value if np.isnan(base): base = self.value # else make sure we have child if child not in self.children: c = SecurityBase(child) c.setup(self._universe) # update child to bring up to speed c.update(self.now) self._add_child(c) # allocate to child # figure out weight delta c = self.children[child] delta = weight - c.weight c.allocate(delta * base) def close(self, child): """ Close a child position - alias for rebalance(0, child). This will also flatten (close out all) the child's children. Args: * child (str): Child, specified by name. """ c = self.children[child] # flatten if children not None if c.children is not None and len(c.children) != 0: c.flatten() c.allocate(-c.value) def flatten(self): """ Close all child positions. """ # go right to base alloc [c.allocate(-c.value) for c in self._childrenv if c.value != 0] def run(self): """ This is the main logic method. Override this method to provide some algorithm to execute on each date change. This method is called by backtester. """ pass def set_commissions(self, fn): """ Set commission (transaction fee) function. Args: fn (fn(quantity, price)): Function used to determine commission amount. """ self.commission_fn = fn for c in self._childrenv: if isinstance(c, StrategyBase): c.set_commissions(fn) @cy.locals(q=cy.double, p=cy.double) def _dflt_comm_fn(self, q, p): return max(1, abs(q) * 0.01) class SecurityBase(Node): """ Security Node. 
Used to define a security within a tree. A Security's has no children. It simply models an asset that can be bought or sold. Args: * name (str): Security name * multiplier (float): security multiplier - typically used for derivatives. Attributes: * name (str): Security name * parent (Security): Security parent * root (Security): Root node of the tree (topmost node) * now (datetime): Used when backtesting to store current date * stale (bool): Flag used to determine if Security is stale and need updating * prices (TimeSeries): Security prices. * price (float): last price * value (float): last value - basically position * price * multiplier * weight (float): weight in parent * full_name (str): Name including parents' names * members (list): Current Security + strategy's children * position (float): Current position (quantity). """ _last_pos = cy.declare(cy.double) _position = cy.declare(cy.double) multiplier = cy.declare(cy.double) _prices_set = cy.declare(cy.bint) _needupdate = cy.declare(cy.bint) @cy.locals(multiplier=cy.double) def __init__(self, name, multiplier=1): Node.__init__(self, name, parent=None, children=None) self._value = 0 self._price = 0 self._weight = 0 self._position = 0 self.multiplier = multiplier # opt self._last_pos = 0 self._issec = True self._needupdate = True @property def price(self): """ Current price. """ # if accessing and stale - update first if self._needupdate or self.now != self.parent.now: self.update(self.root.now) return self._price @property def prices(self): """ TimeSeries of prices. """ # if accessing and stale - update first if self._needupdate or self.now != self.parent.now: self.update(self.root.now) return self._prices.ix[:self.now] @property def values(self): """ TimeSeries of values. """ # if accessing and stale - update first if self._needupdate or self.now != self.parent.now: self.update(self.root.now) if self.root.stale: self.root.update(self.root.now, None) return self._values.ix[:self.now] @property def position(self): """ Current position """ # no stale check needed return self._position @property def positions(self): """ TimeSeries of positions. """ # if accessing and stale - update first if self._needupdate: self.update(self.root.now) if self.root.stale: self.root.update(self.root.now, None) return self._positions.ix[:self.now] def setup(self, universe): """ Setup Security with universe. Speeds up future runs. Args: * universe (DataFrame): DataFrame of prices with security's name as one of the columns. """ # if we already have all the prices, we will store them to speed up # future udpates try: prices = universe[self.name] except KeyError: prices = None # setup internal data if prices is not None: self._prices = prices self.data = pd.DataFrame(index=universe.index, columns=['value', 'position'], data=0.0) self._prices_set = True else: self.data = pd.DataFrame(index=universe.index, columns=['price', 'value', 'position']) self._prices = self.data['price'] self._prices_set = False self._values = self.data['value'] self._positions = self.data['position'] @cy.locals(prc=cy.double) def update(self, date, data=None, inow=None): """ Update security with a given date and optionally, some data. This will update price, value, weight, etc. """ # filter for internal calls when position has not changed - nothing to # do. Internal calls (stale root calls) have None data. Also want to # make sure date has not changed, because then we do indeed want to # update. 
if date == self.now and self._last_pos == self._position: return if inow is None: if date == 0: inow = 0 else: inow = self.data.index.get_loc(date) # date change - update price if date != self.now: # update now self.now = date if self._prices_set: self._price = self._prices.values[inow] # traditional data update elif data is not None: prc = data[self.name] self._price = prc self._prices.values[inow] = prc self._positions.values[inow] = self._position self._last_pos = self._position self._value = self._position * self._price * self.multiplier self._values.values[inow] = self._value if self._weight == 0 and self._position == 0: self._needupdate = False @cy.locals(amount=cy.double, update=cy.bint, q=cy.double, outlay=cy.double) def allocate(self, amount, update=True): """ This allocates capital to the Security. This is the method used to buy/sell the security. A given amount of shares will be determined on the current price, a commisison will be calculated based on the parent's commission fn, and any remaining capital will be passed back up to parent as an adjustment. Args: * amount (float): Amount of adjustment. * update (bool): Force update? """ # will need to update if this has been idle for a while... # update if needupdate or if now is stale # fetch parent's now since our now is stale if self._needupdate or self.now != self.parent.now: self.update(self.parent.now) # ignore 0 alloc # Note that if the price of security has dropped to zero, then it should # never be selected by SelectAll, SelectN etc. I.e. we should not open # the position at zero price. At the same time, we are able to close # it at zero price, because at that point amount=0. # Note also that we don't erase the position in an asset which price has # dropped to zero (though the weight will indeed be = 0) if amount == 0: return if self.parent is self or self.parent is None: raise Exception( 'Cannot allocate capital to a parentless security') if self._price == 0 or np.isnan(self._price): raise Exception( 'Cannot allocate capital to ' '%s because price is 0 or nan as of %s' % (self.name, self.parent.now)) # buy/sell # determine quantity - must also factor in commission # closing out? if amount == -self._value: q = -self._position else: if (self._position > 0) or ((self._position == 0) and (amount > 0)): # if we're going long or changing long position q = math.floor(amount / (self._price * self.multiplier)) else: # if we're going short or changing short position q = math.ceil(amount / (self._price * self.multiplier)) # if q is 0 nothing to do if q == 0 or np.isnan(q): return # this security will need an update, even if pos is 0 (for example if # we close the positions, value and pos is 0, but still need to do that # last update) self._needupdate = True # adjust position & value self._position += q # calculate proper adjustment for parent # parent passed down amount so we want to pass # -outlay back up to parent to adjust for capital # used outlay, fee = self.outlay(q) # call parent self.parent.adjust(-outlay, update=update, flow=False, fee=fee) @cy.locals(q=cy.double, p=cy.double) def commission(self, q, p): """ Calculates the commission (transaction fee) based on quantity and price. Uses the parent's commission_fn. Args: * q (float): quantity * p (float): price """ return self.parent.commission_fn(q, p) @cy.locals(q=cy.double) def outlay(self, q): """ Determines the complete cash outlay (including commission) necessary given a quantity q. Second returning parameter is a commission itself. 
Args: * q (float): quantity """ fee = self.commission(q, self._price * self.multiplier) full_outlay = q * self._price * self.multiplier + fee return full_outlay, fee def run(self): """ Does nothing - securities have nothing to do on run. """ pass class Algo(object): """ Algos are used to modularize strategy logic so that strategy logic becomes modular, composable, more testable and less error prone. Basically, the Algo should follow the unix philosophy - do one thing well. In practice, algos are simply a function that receives one argument, the Strategy (refered to as target) and are expected to return a bool. When some state preservation is necessary between calls, the Algo object can be used (this object). The __call___ method should be implemented and logic defined therein to mimic a function call. A simple function may also be used if no state preservation is neceesary. Args: * name (str): Algo name """ def __init__(self, name=None): self._name = name @property def name(self): """ Algo name. """ if self._name is None: self._name = self.__class__.__name__ return self._name def __call__(self, target): raise NotImplementedError("%s not implemented!" % self.name) class AlgoStack(Algo): """ An AlgoStack derives from Algo runs multiple Algos until a failure is encountered. The purpose of an AlgoStack is to group a logic set of Algos together. Each Algo in the stack is run. Execution stops if one Algo returns False. Args: * algos (list): List of algos. """ def __init__(self, *algos): super(AlgoStack, self).__init__() self.algos = algos self.check_run_always = any(hasattr(x, 'run_always') for x in self.algos) def __call__(self, target): # normal runing mode if not self.check_run_always: for algo in self.algos: if not algo(target): return False return True # run mode when at least one algo has a run_always attribute else: # store result in res # allows continuation to check for and run # algos that have run_always set to True res = True for algo in self.algos: if res: res = algo(target) elif hasattr(algo, 'run_always'): if algo.run_always: algo(target) return res class Strategy(StrategyBase): """ Strategy expands on the StrategyBase and incorporates Algos. Basically, a Strategy is built by passing in a set of algos. These algos will be placed in an Algo stack and the run function will call the stack. Furthermore, two class attributes are created to pass data between algos. perm for permanent data, temp for temporary data. Args: * name (str): Strategy name * algos (list): List of Algos to be passed into an AlgoStack * children (dict, list): Children - useful when you want to create strategies of strategies Attributes: * stack (AlgoStack): The stack * temp (dict): A dict containing temporary data - cleared on each call to run. This can be used to pass info to other algos. * perm (dict): Permanent data used to pass info from one algo to another. Not cleared on each pass. """ def __init__(self, name, algos=[], children=None): super(Strategy, self).__init__(name, children=children) self.stack = AlgoStack(*algos) self.temp = {} self.perm = {} def run(self): # clear out temp data self.temp = {} # run algo stack self.stack(self) # run children for c in self.children.values(): c.run()
apache-2.0
-3,770,879,272,697,670,000
32.151408
80
0.553877
false
4.469499
false
false
false
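A small, hedged walk-through of the tree mechanics defined in the record above (StrategyBase plus an implicitly created SecurityBase child): set up a strategy over a toy price universe, inject capital, and allocate to one security. The price data and amounts are invented; it assumes the classes above are in scope and that the module's own dependencies (pandas, numpy, cython) are importable.

import pandas as pd

data = pd.DataFrame(index=pd.date_range('2015-01-01', periods=3),
                    columns=['a', 'b'], data=100.0)

s = StrategyBase('parent', ['a', 'b'])   # children given as ticker names
s.setup(data)
s.update(data.index[0])

s.adjust(1000)        # inject 1000 of capital
s.allocate(500, 'a')  # buys floor(500 / 100) = 5 shares, plus the default max(1, |q|*0.01) = 1 commission

print(s.capital)                  # 499 -- cash left after the 501 outlay
print(s.children['a'].position)   # 5 shares
print(s.value)                    # 999.0 -- the 1-unit commission is the only loss so far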
adamkovics/atmosphere
atmosphere/gas_opacity.py
1
8280
""" Add gas opacities to model based on the composition and vertical structure. """ import numpy as np import logging logger = logging.getLogger() def interpolate_kc(p, T, kc, verbose=False): """Linearly interpolate k-coefficients at a particular pressure and temperature, using the input k-coefficent grid, kc. The standard structure of k-coefficient data array is: [wavelengths,pressures,temperatures,g-nodes] where the g-node are the Legendre-Gauss quadrature nodes or "g-ordinate". Returned array of coefficients corresponds to: [wavelengths,g-nodes] """ pressures = np.array(kc['pressures']) temperatures = np.array(kc['temperatures']) ind_p = np.where(pressures < p) ind_T = np.where(temperatures < T) i = (np.max(ind_p) if np.size(ind_p) else np.array(0)).clip(0,len(pressures)-2) j = (np.max(ind_T) if np.size(ind_T) else np.array(0)).clip(0,len(temperatures)-2) L11 = np.log(kc['kc'][:,i,j,:]) L12 = np.log(kc['kc'][:,i+1,j,:]) L21 = np.log(kc['kc'][:,i,j+1,:]) L22 = np.log(kc['kc'][:,i+1,j+1,:]) L1T = L11 + (L12-L11)*(T-temperatures[j])/(temperatures[j+1]-temperatures[j]) L2T = L21 + (L22-L21)*(T-temperatures[j])/(temperatures[j+1]-temperatures[j]) LPT = L1T + (L2T-L1T)*((np.log(p)-np.log(pressures[i]))/ (np.log(pressures[i+1])-np.log(pressures[i]))) kc_interp = np.exp(LPT) return kc_interp def append_kc_to_layers(model, kc, species): """Set k-coefficients for each layer by interpolating to appropriate temperature and pressure and update the data structure for the amtosphere.""" kc_shape = (model['nlay'], model['nlam'], kc['ng']) model['layers'].update({'kc':{species:np.ndarray(kc_shape), 'ng':kc['ng'], 'g':kc['g'], 'w':kc['w'], }}) for i in range(model['nlay']): model['layers']['kc'][species][i,:,:] = interpolate_kc(model['layers']['p'][i], model['layers']['T'][i], kc) return # There are additional k-coefficients for C2H2, C2H6, and CO. # These are currently calculated on the VIMS grid as they are applicable in # roughly the 2.7--3um wavelength region that is inacccessible from the ground. # # Here we revise the gas opacity in the model to include multiple k-coefficient files. # # It is a reasonable estimate to sum k-coefficients after interpolating each onto the # same pressure and temperature, however, a minimal amount of error-checking should confirm # that the same wavelength grid and g-ordinates are being used. # # Overview of revisions to the code: # # (1) Generalization of the set_methane() to other species. # (2) Error-checking for wavelength (and P,T) grid # (3) back-compatibility for set_methane() method. # (4) Some thought to CH3D abundance variability. # def set_methane(model, kc_file, CH3D_scale=None, verbose=False): """Set methane opacities in atmosphere structure, model, by interpolatating k-coefficents from the specied kc_file, using the temperatures and pressures for each layer. 
""" if CH3D_scale: if len(kc_file) != 2: logger.debug('two k-coefficient files needed for set_methane_opacity()') return None kc = np.load(kc_file[0]).item() kc_CH3D = np.load(kc_file[1]).item() kc['kc'] = kc['kc']+CH3D_scale*kc_CH3D['kc'] model.update({'wavelength':kc['wavelength']['mu'], 'nlam':kc['wavelength']['nlam'], }) append_kc_to_layers(model, kc, 'CH4') tau_CH4 = model['layers']['kc']['CH4'] * np.reshape(model['layers']['N_CH4'], (model['nlay'],1,1)) if 'tau' not in model['layers']: model['layers'].update({'tau':{}}) model['layers']['tau'].update({'CH4':tau_CH4}) return if kc_file.endswith('.npy'): kc = np.load(kc_file).item() model.update({'wavelength':kc['wavelength']['mu'], 'nlam':kc['wavelength']['nlam'], }) append_kc_to_layers(model, kc, 'CH4') tau_CH4 = model['layers']['kc']['CH4'] * np.reshape(model['layers']['N_CH4'], (model['nlay'],1,1)) if 'tau' not in model['layers']: model['layers'].update({'tau':{}}) model['layers']['tau'].update({'CH4':tau_CH4}) return if kc_file.endswith('.fits'): import pyfits hdu = pyfits.open(kc_file) kc = {'kc': hdu[0].data, 'pressures':hdu[2].data['pressures'], 'temperatures':hdu[3].data['temperatures'], 'g': hdu[4].data['g'], 'w': hdu[5].data['w'], 'ng': hdu[0].header['NG'], } model.update({'wavelength':hdu[1].data['wavelength'], 'nlam':len(hdu[1].data['wavelength']), }) hdu.close() append_kc_to_layers(model, kc, 'CH4') tau_CH4 = model['layers']['kc']['CH4'] * np.reshape(model['layers']['N_CH4'], (model['nlay'],1,1)) if 'tau' not in model['layers']: model['layers'].update({'tau':{}}) model['layers']['tau'].update({'CH4':tau_CH4}) return def print_atmosphere_details(model): logger.debug('model dictionary data structure:') for item in model.keys(): logger.debug("{0:7s} - type: {2} - shape: {1}".format( item, np.shape(model[item]), type(model[item]))) logger.debug("\natmosphere['layers'] dictionary data structure:") for item in model['layers'].keys(): logger.debug("{0:7s} - type: {2} - shape: {1}".format( item, np.shape(model['layers'][item]), type(model['layers'][item]))) def set_cia(model, scale=4.0, show_figure=False): """Append collision-induced-absorption opacity for N2-N2 and H2-N2 (in the near-IR) to the atmosphere data structure, model.""" import pyfits import os fits = pyfits.open(os.path.join(os.getenv('RTDATAPATH'), 'gas_opacity/CIA/N2_N2.fits')) k_N2N2 = fits[0].data fits.close() fits = pyfits.open(os.path.join(os.getenv('RTDATAPATH'), 'gas_opacity/CIA/H2_N2.fits')) k_H2N2 = fits[0].data fits.close() if 'wavelength' not in model.keys(): logger.warning('Set wavelength scale first (e.g., with CH4 opacity.)') return None tau_H2N2 = np.empty((model['nlay'],model['nlam'])) tau_N2N2 = np.empty((model['nlay'],model['nlam'])) layers = model['layers'] N0 = 2.686e19 # Loschmidt number for i in range(model['nlay']): k_H2N2_interp = np.interp(model['wavelength'], (1e4/k_H2N2[::-1,0]), scale*k_H2N2[::-1,1],) k_N2N2_interp = np.interp(model['wavelength'], (1e4/k_N2N2[::-1,0]), scale*k_N2N2[::-1,1],) tau_H2N2[i,:] = k_H2N2_interp*layers['kmamg'][i] * \ layers['n'][i]/N0 * \ layers['m_N2'][i]*layers['m_H2'] tau_N2N2[i,:] = k_N2N2_interp*layers['kmamg'][i] * \ layers['n'][i]/N0 * \ layers['m_N2'][i]*layers['m_N2'][i] layers['tau'].update({'H2_N2':tau_H2N2, 'N2_N2':tau_N2N2,}) if show_figure: fig, ax = subplots(figsize=(16,4)) ax.plot(k_H2N2[:,0], k_H2N2[:,1], 'k', drawstyle='steps-mid') ax.set_xlabel('wavenumber (cm$^{-1}$)') ax.set_ylabel(r'km$^{-1}$ amagat$^{-2}$') ax.set_xlim(4000,5000) fig, ax = subplots(figsize=(16,4)) ax.plot(k_N2N2[:,0], 
                k_N2N2[:,1], 'k', drawstyle='steps-mid')
        ax.set_xlabel('wavenumber (cm$^{-1}$)')
        ax.set_ylabel(r'km$^{-1}$ amagat$^{-2}$')
        ax.set_xlim(4000,5000)

gpl-2.0
4,651,276,065,316,480,000
39.004831
91
0.533333
false
3.189522
false
false
false
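To illustrate the interpolation entry point in the record above, a toy call to interpolate_kc on a synthetic 2-pressure by 2-temperature grid. All numbers are invented and serve only to show the expected input and output shapes; it assumes the function above is in scope.

import numpy as np

kc = {
    'pressures': [0.1, 1.0],           # bar (assumed units)
    'temperatures': [80.0, 120.0],     # K
    'kc': np.full((3, 2, 2, 4), 1e-3),  # [wavelengths, pressures, temperatures, g-nodes]
}
kc['kc'][:, 1, :, :] *= 10.0           # stronger absorption at the higher pressure

k = interpolate_kc(0.3, 100.0, kc)
print(k.shape)   # (3, 4): one row of g-node coefficients per wavelength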
ArcherSys/ArcherSys
node_modules/npm/node_modules/node-gyp/gyp/buildbot/buildbot_run.py
1
18134
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Argument-less script to select what to run on the buildbots."""

import os
import shutil
import subprocess
import sys

if sys.platform in ['win32', 'cygwin']:
  EXE_SUFFIX = '.exe'
else:
  EXE_SUFFIX = ''

BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__))
TRUNK_DIR = os.path.dirname(BUILDBOT_DIR)
ROOT_DIR = os.path.dirname(TRUNK_DIR)
ANDROID_DIR = os.path.join(ROOT_DIR, 'android')
CMAKE_DIR = os.path.join(ROOT_DIR, 'cmake')
CMAKE_BIN_DIR = os.path.join(CMAKE_DIR, 'bin')
OUT_DIR = os.path.join(TRUNK_DIR, 'out')


def CallSubProcess(*args, **kwargs):
  """Wrapper around subprocess.call which treats errors as build exceptions."""
  retcode = subprocess.call(*args, **kwargs)
  if retcode != 0:
    print '@@@STEP_EXCEPTION@@@'
    sys.exit(1)


def PrepareCmake():
  """Build CMake 2.8.8 since the version in Precise is 2.8.7."""
  if os.environ['BUILDBOT_CLOBBER'] == '1':
    print '@@@BUILD_STEP Clobber CMake checkout@@@'
    shutil.rmtree(CMAKE_DIR)

  # We always build CMake 2.8.8, so no need to do anything
  # if the directory already exists.
  if os.path.isdir(CMAKE_DIR):
    return

  print '@@@BUILD_STEP Initialize CMake checkout@@@'
  os.mkdir(CMAKE_DIR)
  CallSubProcess(['git', 'config', '--global', 'user.name', 'trybot'])
  CallSubProcess(['git', 'config', '--global',
                  'user.email', '[email protected]'])
  CallSubProcess(['git', 'config', '--global', 'color.ui', 'false'])

  print '@@@BUILD_STEP Sync CMake@@@'
  CallSubProcess(
      ['git', 'clone',
       '--depth', '1',
       '--single-branch',
       '--branch', 'v2.8.8',
       '--',
       'git://cmake.org/cmake.git',
       CMAKE_DIR],
      cwd=CMAKE_DIR)

  print '@@@BUILD_STEP Build CMake@@@'
  CallSubProcess(
      ['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR],
      cwd=CMAKE_DIR)
  CallSubProcess(['make', 'cmake'], cwd=CMAKE_DIR)


def PrepareAndroidTree():
  """Prepare an Android tree to run 'android' format tests."""
  if os.environ['BUILDBOT_CLOBBER'] == '1':
    print '@@@BUILD_STEP Clobber Android checkout@@@'
    shutil.rmtree(ANDROID_DIR)

  # The release of Android we use is static, so there's no need to do anything
  # if the directory already exists.
  if os.path.isdir(ANDROID_DIR):
    return

  print '@@@BUILD_STEP Initialize Android checkout@@@'
  os.mkdir(ANDROID_DIR)
  CallSubProcess(['git', 'config', '--global', 'user.name', 'trybot'])
  CallSubProcess(['git', 'config', '--global',
                  'user.email', '[email protected]'])
  CallSubProcess(['git', 'config', '--global', 'color.ui', 'false'])
  CallSubProcess(
      ['repo', 'init',
       '-u', 'https://android.googlesource.com/platform/manifest',
       '-b', 'android-4.2.1_r1',
       '-g', 'all,-notdefault,-device,-darwin,-mips,-x86'],
      cwd=ANDROID_DIR)

  print '@@@BUILD_STEP Sync Android@@@'
  CallSubProcess(['repo', 'sync', '-j4'], cwd=ANDROID_DIR)

  print '@@@BUILD_STEP Build Android@@@'
  CallSubProcess(
      ['/bin/bash', '-c',
       'source build/envsetup.sh && lunch full-eng && make -j4'],
      cwd=ANDROID_DIR)


def GypTestFormat(title, format=None, msvs_version=None):
  """Run the gyp tests for a given format, emitting annotator tags.

  See annotator docs at:
    https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations
  Args:
    format: gyp format to test.
  Returns:
    0 for success, 1 for failure.
  """
  if not format:
    format = title

  print '@@@BUILD_STEP ' + title + '@@@'
  sys.stdout.flush()
  env = os.environ.copy()
  if msvs_version:
    env['GYP_MSVS_VERSION'] = msvs_version
  command = ' '.join(
      [sys.executable, 'trunk/gyptest.py',
       '--all',
       '--passed',
       '--format', format,
       '--path', CMAKE_BIN_DIR,
       '--chdir', 'trunk'])
  if format == 'android':
    # gyptest needs the environment setup from envsetup/lunch in order to build
    # using the 'android' backend, so this is done in a single shell.
    retcode = subprocess.call(
        ['/bin/bash',
         '-c', 'source build/envsetup.sh && lunch full-eng && cd %s && %s'
         % (ROOT_DIR, command)],
        cwd=ANDROID_DIR, env=env)
  else:
    retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True)
  if retcode:
    # Emit failure tag, and keep going.
    print '@@@STEP_FAILURE@@@'
    return 1
  return 0


def GypBuild():
  # Dump out/ directory.
  print '@@@BUILD_STEP cleanup@@@'
  print 'Removing %s...' % OUT_DIR
  shutil.rmtree(OUT_DIR, ignore_errors=True)
  print 'Done.'

  retcode = 0
  # The Android gyp bot runs on linux so this must be tested first.
  if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-android':
    PrepareAndroidTree()
    retcode += GypTestFormat('android')
  elif sys.platform.startswith('linux'):
    retcode += GypTestFormat('ninja')
    retcode += GypTestFormat('make')
    PrepareCmake()
    retcode += GypTestFormat('cmake')
  elif sys.platform == 'darwin':
    retcode += GypTestFormat('ninja')
    retcode += GypTestFormat('xcode')
    retcode += GypTestFormat('make')
  elif sys.platform == 'win32':
    retcode += GypTestFormat('ninja')
    if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
      retcode += GypTestFormat('msvs-2010', format='msvs', msvs_version='2010')
      retcode += GypTestFormat('msvs-2012', format='msvs', msvs_version='2012')
  else:
    raise Exception('Unknown platform')
  if retcode:
    # TODO(bradnelson): once the annotator supports a postscript (section for
    # after the build proper that could be used for cumulative failures),
    # use that instead of this. This isolates the final return value so
    # that it isn't misattributed to the last stage.
    print '@@@BUILD_STEP failures@@@'
    sys.exit(retcode)


if __name__ == '__main__':
  GypBuild()
4,987,594,992,532,851,000
30.482639
117
0.635657
false
3.427977
true
false
false
dawsonjon/Chips-2.0
chips/compiler/tokens.py
1
9314
__author__ = "Jon Dawson" __copyright__ = "Copyright (C) 2012, Jonathan P Dawson" __version__ = "0.1" import os.path import subprocess from chips.compiler.exceptions import C2CHIPError operators = [ "!", "~", "+", "-", "*", "/", "//", "%", "=", "==", "<", ">", "<=", ">=", "!=", "|", "&", "^", "||", "&&", "(", ")", "{", "}", "[", "]", ";", "<<", ">>", ",", "+=", "-=", "*=", "/=", "%=", "&=", "|=", "<<=", ">>=", "^=", "++", "--", "?", ":", ".", "->", ] class Tokens: """Break the input file into a stream of tokens, provide functions to traverse the stream.""" def __init__(self, filename, parameters={}): self.tokens = [] self.definitions = [] self.filename = None self.lineno = None self.scan( os.path.join(os.path.dirname(__file__), "builtins.h"), external_preprocessor=False) self.scan(os.path.abspath(filename)) tokens = [] for token in self.tokens: f, l, t = token if t in parameters: tokens.append((f, l, str(parameters[t]))) else: tokens.append(token) self.tokens = tokens def scan(self, filename, input_file=None, parameters={}, external_preprocessor=True): """Convert the test file into tokens""" self.filename = filename if external_preprocessor: directory = os.path.abspath(__file__) directory = os.path.dirname(directory) directory = os.path.join(directory, "include") cpp_commands = [ "cpp", "-nostdinc", "-isystem", directory, filename] pipe = subprocess.Popen(cpp_commands, stdout=subprocess.PIPE) input_file = pipe.stdout else: if input_file is None: try: input_file = open(self.filename) except IOError: raise C2CHIPError("Cannot open file: " + self.filename) token = [] tokens = [] self.lineno = 1 jump = False for line in input_file: # include files line = line + " " if jump: if line.strip().startswith("#endif"): jump = False if line.strip().startswith("#else"): jump = False self.lineno += 1 continue elif external_preprocessor and line.strip().startswith("#"): l = line.strip() l = l.lstrip("#") l = l.split('"') lineno = int(l[0].strip()) self.lineno = lineno filename = l[1].strip().strip('"') self.filename = filename continue elif line.strip().startswith("#include"): filename = self.filename lineno = self.lineno self.tokens.extend(tokens) if line.strip().endswith(">"): directory = os.path.abspath(__file__) directory = os.path.dirname(directory) directory = os.path.join(directory, "include") else: directory = os.path.abspath(self.filename) directory = os.path.dirname(directory) self.filename = line.strip().replace( "#include", "").strip(' ><"') self.filename = os.path.join(directory, self.filename) self.scan(self.filename) self.lineno = lineno self.filename = filename tokens = [] self.lineno += 1 continue elif line.strip().startswith("#define"): definition = line.strip().split(" ")[1] self.definitions.append(definition) self.lineno += 1 continue elif line.strip().startswith("#undef"): definition = line.strip().split(" ")[1] self.definitions.remove(definition) self.lineno += 1 continue elif line.strip().startswith("#ifdef"): definition = line.strip().split(" ")[1] if definition not in self.definitions: jump = True self.lineno += 1 continue elif line.strip().startswith("#ifndef"): definition = line.strip().split(" ")[1] if definition in self.definitions: jump = True self.lineno += 1 continue elif line.strip().startswith("#else"): jump = True self.lineno += 1 continue elif line.strip().startswith("#endif"): self.lineno += 1 continue newline = True for char in line: if not token: token = char # c style comment elif (token + char).startswith("/*"): if (token + char).endswith("*/"): token = "" else: 
token += char # c++ style comment elif token.startswith("//"): if newline: token = char else: token += char # identifier elif token[0].isalpha(): if char.isalnum() or char == "_": token += char else: tokens.append((self.filename, self.lineno, token)) token = char # number elif token[0].isdigit(): if char.upper() in "0123456789ABCDEFXUL.": token += char elif token.upper().endswith("E") and char in ["+", "-"]: token += char else: tokens.append((self.filename, self.lineno, token)) token = char # string literal elif token.startswith('"'): if char == '"' and previous_char != "\\": token += char tokens.append((self.filename, self.lineno, token)) token = "" else: # remove dummy space from the end of a line if newline: token = token[:-1] previous_char = char token += char # character literal elif token.startswith("'"): if char == "'": token += char tokens.append((self.filename, self.lineno, token)) token = "" else: token += char # operator elif token in operators: if token + char in operators: token += char else: tokens.append((self.filename, self.lineno, token)) token = char else: token = char newline = False self.lineno += 1 self.tokens.extend(tokens) def error(self, string): """ Generate an error message (including the filename and line number) """ raise C2CHIPError(string + "\n", self.filename, self.lineno) def peek(self): """ Return the next token in the stream, but don't consume it. """ if self.tokens: return self.tokens[0][2] else: return "" def peek_next(self): """ Return the next next token in the stream, but don't consume it. """ if len(self.tokens) > 1: return self.tokens[1][2] else: return "" def get(self): """Return the next token in the stream, and consume it.""" if self.tokens: self.lineno = self.tokens[0][1] self.filename = self.tokens[0][0] try: filename, lineno, token = self.tokens.pop(0) except IndexError: self.error("Unexpected end of file") return token def end(self): """Return True if all the tokens have been consumed.""" return not self.tokens def expect(self, expected): """Consume the next token in the stream, generate an error if it is not as expected.""" try: filename, lineno, actual = self.tokens.pop(0) except IndexError: self.error("Unexpected end of file") if self.tokens: self.lineno = self.tokens[0][1] self.filename = self.tokens[0][0] if actual == expected: return else: self.error("Expected: %s, got: %s" % (expected, actual))
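Editor's illustration (not part of the original record): a minimal sketch of driving the Tokens scanner defined above. The file name example.c and the parameters dict are made up, and the sketch assumes the external cpp preprocessor is on PATH, since Tokens.__init__ pipes the user file through it.

from chips.compiler.tokens import Tokens

# Scan a hypothetical C file; entries in `parameters` replace matching
# identifier tokens with their stringified values.
tokens = Tokens("example.c", parameters={"WIDTH": 8})

while not tokens.end():
    if tokens.peek() == "int":
        tokens.expect("int")   # consume the keyword or raise C2CHIPError
        name = tokens.get()    # consume and return the identifier token
        print("declaration of %s" % name)
    else:
        tokens.get()           # consume and discard everything else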
mit
3,926,535,980,926,046,000
31.340278
77
0.432467
false
5.045504
false
false
false
OnroerendErfgoed/skosprovider_heritagedata
skosprovider_heritagedata/utils.py
1
5488
# -*- coding: utf-8 -*- ''' Utility functions for :mod:`skosprovider_heritagedata`. ''' import requests from skosprovider.skos import ( Concept, Label, Note, ConceptScheme) from skosprovider.exceptions import ProviderUnavailableException import logging import sys import requests log = logging.getLogger(__name__) PY3 = sys.version_info[0] == 3 if PY3: # pragma: no cover binary_type = bytes else: # pragma: no cover binary_type = str import rdflib from rdflib.term import URIRef from rdflib.namespace import RDF, SKOS, DC, DCTERMS, RDFS PROV = rdflib.Namespace('http://www.w3.org/ns/prov#') def conceptscheme_from_uri(conceptscheme_uri, **kwargs): ''' Read a SKOS Conceptscheme from a :term:`URI` :param string conceptscheme_uri: URI of the conceptscheme. :rtype: skosprovider.skos.ConceptScheme ''' s = kwargs.get('session', requests.Session()) graph = uri_to_graph('%s.rdf' % (conceptscheme_uri), session=s) notes = [] labels = [] if graph is not False: for s, p, o in graph.triples((URIRef(conceptscheme_uri), RDFS.label, None)): label = Label(o.toPython(), "prefLabel", 'en') labels.append(label) for s, p, o in graph.triples((URIRef(conceptscheme_uri), DCTERMS.description, None)): note = Note(o.toPython(), "scopeNote", 'en') notes.append(note) # get the conceptscheme conceptscheme = ConceptScheme( conceptscheme_uri, labels=labels, notes=notes ) return conceptscheme def things_from_graph(graph, concept_scheme): ''' Read concepts and collections from a graph. :param rdflib.Graph graph: Graph to read from. :param skosprovider.skos.ConceptScheme concept_scheme: Conceptscheme the concepts and collections belong to. :rtype: :class:`list` ''' clist = [] for sub, pred, obj in graph.triples((None, RDF.type, SKOS.Concept)): uri = str(sub) con = Concept( id=_split_uri(uri, 1), uri=uri, concept_scheme = concept_scheme, labels = _create_from_subject_typelist(graph, sub, Label.valid_types), notes = _create_from_subject_typelist(graph, sub, Note.valid_types), broader = _create_from_subject_predicate(graph, sub, SKOS.broader), narrower = _create_from_subject_predicate(graph, sub, SKOS.narrower), related = _create_from_subject_predicate(graph, sub, SKOS.related), subordinate_arrays = [] ) clist.append(con) # at this moment, Heritagedata does not support SKOS.Collection # for sub, pred, obj in graph.triples((None, RDF.type, SKOS.Collection)): # uri = str(sub) # col = Collection(_split_uri(uri, 1), uri=uri) # col.members = _create_from_subject_predicate(sub, SKOS.member) # col.labels = _create_from_subject_typelist(sub, Label.valid_types) # col.notes = _create_from_subject_typelist(sub, Note.valid_types) # clist.append(col) return clist def _create_from_subject_typelist(graph, subject, typelist): list = [] for p in typelist: term = SKOS.term(p) list.extend(_create_from_subject_predicate(graph, subject, term)) return list def _create_from_subject_predicate(graph, subject, predicate): list = [] for s, p, o in graph.triples((subject, predicate, None)): type = predicate.split('#')[-1] if Label.is_valid_type(type): o = _create_label(o, type) elif Note.is_valid_type(type): o = _create_note(o, type) else: o = _split_uri(o, 1) if o: list.append(o) return list def _create_label(literal, type): language = literal.language if language is None: return 'und' # return undefined code when no language return Label(literal.toPython(), type, language) def _create_note(literal, type): if not Note.is_valid_type(type): raise ValueError('Type of Note is not valid.') return Note(text_(literal.value, encoding="utf-8"), type, 
_get_language_from_literal(literal)) def _get_language_from_literal(data): if data.language is None: return 'und' # return undefined code when no language return text_(data.language, encoding="utf-8") def _split_uri(uri, index): return uri.strip('/').rsplit('/', 1)[index] def uri_to_graph(uri, **kwargs): ''' :param string uri: :term:`URI` where the RDF data can be found. :rtype: rdflib.Graph :raises skosprovider.exceptions.ProviderUnavailableException: if the heritagedata.org services are down ''' s = kwargs.get('session', requests.Session()) graph = rdflib.Graph() try: res = s.get(uri) except requests.ConnectionError as e: raise ProviderUnavailableException("URI not available: %s" % uri) if res.status_code == 404: return False graph.parse(data=res.content) # heritagedata.org returns an empty page/graph when a resource does not exist (status_code 200). For this reason we return False if the graph is empty if len(graph) == 0: return False return graph def text_(s, encoding='latin-1', errors='strict'): """ If ``s`` is an instance of ``binary_type``, return ``s.decode(encoding, errors)``, otherwise return ``s``""" if isinstance(s, binary_type): return s.decode(encoding, errors) return s
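Editor's illustration (not part of the original record): a sketch wiring the helpers above together. The scheme URI is only an example, the calls need live access to heritagedata.org, and uri_to_graph returns False for missing or empty resources, so the result is checked before use.

import requests

from skosprovider_heritagedata.utils import (
    conceptscheme_from_uri, things_from_graph, uri_to_graph)

session = requests.Session()
scheme_uri = 'http://purl.org/heritagedata/schemes/eh_period'  # example URI, assumed

# Build the ConceptScheme, then read whatever concepts the same graph exposes.
scheme = conceptscheme_from_uri(scheme_uri, session=session)
graph = uri_to_graph('%s.rdf' % scheme_uri, session=session)
if graph is not False:
    for concept in things_from_graph(graph, scheme):
        print(concept.id, concept.uri)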
mit
6,342,921,371,170,018,000
30.54023
152
0.636662
false
3.538362
false
false
false
mr-ping/WebTesting
main.py
1
5059
#!/usr/bin/python import os import sys import argparse from log import Log from chat import Trend from chat import plot_trend as pl from settings import * def parse_args(): """ Parsing shell command arguments, and override appropriate params from setting module :return: None """ parser = argparse.ArgumentParser(version=VERSION) parser.add_argument('-u', action='store', dest='url') parser.add_argument('-f', action='store', dest='url_file') parser.add_argument('-t', action='store', dest='target_log_file') parser.add_argument('-l', action='store', dest='log_file') parser.add_argument('-p', action='store_true', dest='plotting', default=True) parser.add_argument('-m', action='store', dest='max_allowed_concurrent', type=int) parser.add_argument('-b', action='store', dest='base_concurrent', type=int) parser.add_argument('-s', action='store', dest='step_concurrent', type=int) result = parser.parse_args() if result.url: global url url = result.url if result.url_file: global url_file url_file = result.url_file if result.target_log_file: global target_file target_file = result.target_log_file if result.log_file: global log_file log_file = result.log_file if result.plotting: global plotting plotting = result.plotting if result.max_allowed_concurrent: global max_concurrent max_concurrent = result.max_allowed_concurrent if result.base_concurrent: global base_concurrent base_concurrent = result.base_concurrent if result.step_concurrent: global step_concurrent step_concurrent = result.step_concurrent def check_url_source(): """ Check out Obtaining url from commend line or urls file. :return: A flag that represent the source of urls. String' """ global plotting if not url_file and not url: plotting = False sys.stderr.write('You should figure out the url source.') elif url_file and url: plotting = False sys.stderr.write('Url source come from either url address or url file') elif url_file: exist = os.path.exists(url_file) if exist: return 'file' else: plotting = False sys.stderr.write('No such urls file.') elif url: return 'address' def test(base_concurrent): """ Main method to do the Testing. Looping siege tool until some conditions satisfied, and generate a new log file from siege log file. :param base_concurrent: number concurrent :return: None """ url_source = check_url_source() while True: for i in range(num_samples): if url_source == 'address': #os.system('siege -c {concurrent} -t {duration} -l {address}'\ os.system('siege -c {concurrent} -r {repeat} -l {address}'\ .format(address=url, concurrent=base_concurrent, #duration=duration)) repeat=repeat)) elif url_source == 'file': #os.system('siege -c {concurrent} -t {duration} -f {url_file} -l'\ os.system('siege -c {concurrent} -r {repeat} -f {url_file} -l'\ .format(url_file=url_file, concurrent=base_concurrent, #duration=duration)) repeat=repeat)) last = Log.get_last_logs(log_file, siege_log_line_length, 1,\ base_concurrent) Log.add_new_log(target_file, last) base_concurrent += step_concurrent log = Log(target_file) if log.get_last_arrive_rate(num_samples) < (1-fails_allowed) \ or base_concurrent > max_concurrent: break def plot(): """ Plotting chat using the data that analyzed from testing log. 
:return: None """ log = Log(target_file) trans_rate_dict = log.get_steps_trans_rate() arrive_rate_dict = log.get_steps_arrive_rate() resp_time_dict = log.get_steps_resp_time() trans_trend = Trend('Transaction Rate', 'simulated users', 'trans rate (trans/sec)', 'g', 1, 'bar', step_concurrent/2) trans_trend.get_points(trans_rate_dict) arrive_trend = Trend('Arrive Rate', 'simulated users', 'arrive rate', 'g', 2, 'line') arrive_trend.get_points(arrive_rate_dict) resp_trend = Trend('Resp Time', 'simulated users', 'time(sec)', 'r', 2, 'line') resp_trend.get_points(resp_time_dict) pl(trans_trend, resp_trend, arrive_trend) if __name__ == '__main__': parse_args() test(base_concurrent) if plotting: plot()
mit
379,554,074,709,667,840
31.63871
86
0.567108
false
3.999209
false
false
false
rickerc/neutron_audit
neutron/openstack/common/rpc/impl_kombu.py
1
32063
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2011 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools import itertools import socket import ssl import sys import time import uuid import eventlet import greenlet import kombu import kombu.connection import kombu.entity import kombu.messaging from oslo.config import cfg from neutron.openstack.common import excutils from neutron.openstack.common.gettextutils import _ from neutron.openstack.common import network_utils from neutron.openstack.common.rpc import amqp as rpc_amqp from neutron.openstack.common.rpc import common as rpc_common kombu_opts = [ cfg.StrOpt('kombu_ssl_version', default='', help='SSL version to use (valid only if SSL enabled)'), cfg.StrOpt('kombu_ssl_keyfile', default='', help='SSL key file (valid only if SSL enabled)'), cfg.StrOpt('kombu_ssl_certfile', default='', help='SSL cert file (valid only if SSL enabled)'), cfg.StrOpt('kombu_ssl_ca_certs', default='', help=('SSL certification authority file ' '(valid only if SSL enabled)')), cfg.StrOpt('rabbit_host', default='localhost', help='The RabbitMQ broker address where a single node is used'), cfg.IntOpt('rabbit_port', default=5672, help='The RabbitMQ broker port where a single node is used'), cfg.ListOpt('rabbit_hosts', default=['$rabbit_host:$rabbit_port'], help='RabbitMQ HA cluster host:port pairs'), cfg.BoolOpt('rabbit_use_ssl', default=False, help='connect over SSL for RabbitMQ'), cfg.StrOpt('rabbit_userid', default='guest', help='the RabbitMQ userid'), cfg.StrOpt('rabbit_password', default='guest', help='the RabbitMQ password', secret=True), cfg.StrOpt('rabbit_virtual_host', default='/', help='the RabbitMQ virtual host'), cfg.IntOpt('rabbit_retry_interval', default=1, help='how frequently to retry connecting with RabbitMQ'), cfg.IntOpt('rabbit_retry_backoff', default=2, help='how long to backoff for between retries when connecting ' 'to RabbitMQ'), cfg.IntOpt('rabbit_max_retries', default=0, help='maximum retries with trying to connect to RabbitMQ ' '(the default of 0 implies an infinite retry count)'), cfg.BoolOpt('rabbit_ha_queues', default=False, help='use H/A queues in RabbitMQ (x-ha-policy: all).' 'You need to wipe RabbitMQ database when ' 'changing this option.'), ] cfg.CONF.register_opts(kombu_opts) LOG = rpc_common.LOG def _get_queue_arguments(conf): """Construct the arguments for declaring a queue. If the rabbit_ha_queues option is set, we declare a mirrored queue as described here: http://www.rabbitmq.com/ha.html Setting x-ha-policy to all means that the queue will be mirrored to all nodes in the cluster. """ return {'x-ha-policy': 'all'} if conf.rabbit_ha_queues else {} class ConsumerBase(object): """Consumer base class.""" def __init__(self, channel, callback, tag, **kwargs): """Declare a queue on an amqp channel. 
'channel' is the amqp channel to use 'callback' is the callback to call when messages are received 'tag' is a unique ID for the consumer on the channel queue name, exchange name, and other kombu options are passed in here as a dictionary. """ self.callback = callback self.tag = str(tag) self.kwargs = kwargs self.queue = None self.reconnect(channel) def reconnect(self, channel): """Re-declare the queue after a rabbit reconnect.""" self.channel = channel self.kwargs['channel'] = channel self.queue = kombu.entity.Queue(**self.kwargs) self.queue.declare() def consume(self, *args, **kwargs): """Actually declare the consumer on the amqp channel. This will start the flow of messages from the queue. Using the Connection.iterconsume() iterator will process the messages, calling the appropriate callback. If a callback is specified in kwargs, use that. Otherwise, use the callback passed during __init__() If kwargs['nowait'] is True, then this call will block until a message is read. Messages will automatically be acked if the callback doesn't raise an exception """ options = {'consumer_tag': self.tag} options['nowait'] = kwargs.get('nowait', False) callback = kwargs.get('callback', self.callback) if not callback: raise ValueError("No callback defined") def _callback(raw_message): message = self.channel.message_to_python(raw_message) try: msg = rpc_common.deserialize_msg(message.payload) callback(msg) except Exception: LOG.exception(_("Failed to process message... skipping it.")) finally: message.ack() self.queue.consume(*args, callback=_callback, **options) def cancel(self): """Cancel the consuming from the queue, if it has started.""" try: self.queue.cancel(self.tag) except KeyError as e: # NOTE(comstud): Kludge to get around a amqplib bug if str(e) != "u'%s'" % self.tag: raise self.queue = None class DirectConsumer(ConsumerBase): """Queue/consumer class for 'direct'.""" def __init__(self, conf, channel, msg_id, callback, tag, **kwargs): """Init a 'direct' queue. 'channel' is the amqp channel to use 'msg_id' is the msg_id to listen on 'callback' is the callback to call when messages are received 'tag' is a unique ID for the consumer on the channel Other kombu options may be passed """ # Default options options = {'durable': False, 'queue_arguments': _get_queue_arguments(conf), 'auto_delete': True, 'exclusive': False} options.update(kwargs) exchange = kombu.entity.Exchange(name=msg_id, type='direct', durable=options['durable'], auto_delete=options['auto_delete']) super(DirectConsumer, self).__init__(channel, callback, tag, name=msg_id, exchange=exchange, routing_key=msg_id, **options) class TopicConsumer(ConsumerBase): """Consumer class for 'topic'.""" def __init__(self, conf, channel, topic, callback, tag, name=None, exchange_name=None, **kwargs): """Init a 'topic' queue. 
:param channel: the amqp channel to use :param topic: the topic to listen on :paramtype topic: str :param callback: the callback to call when messages are received :param tag: a unique ID for the consumer on the channel :param name: optional queue name, defaults to topic :paramtype name: str Other kombu options may be passed as keyword arguments """ # Default options options = {'durable': conf.amqp_durable_queues, 'queue_arguments': _get_queue_arguments(conf), 'auto_delete': conf.amqp_auto_delete, 'exclusive': False} options.update(kwargs) exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf) exchange = kombu.entity.Exchange(name=exchange_name, type='topic', durable=options['durable'], auto_delete=options['auto_delete']) super(TopicConsumer, self).__init__(channel, callback, tag, name=name or topic, exchange=exchange, routing_key=topic, **options) class FanoutConsumer(ConsumerBase): """Consumer class for 'fanout'.""" def __init__(self, conf, channel, topic, callback, tag, **kwargs): """Init a 'fanout' queue. 'channel' is the amqp channel to use 'topic' is the topic to listen on 'callback' is the callback to call when messages are received 'tag' is a unique ID for the consumer on the channel Other kombu options may be passed """ unique = uuid.uuid4().hex exchange_name = '%s_fanout' % topic queue_name = '%s_fanout_%s' % (topic, unique) # Default options options = {'durable': False, 'queue_arguments': _get_queue_arguments(conf), 'auto_delete': True, 'exclusive': False} options.update(kwargs) exchange = kombu.entity.Exchange(name=exchange_name, type='fanout', durable=options['durable'], auto_delete=options['auto_delete']) super(FanoutConsumer, self).__init__(channel, callback, tag, name=queue_name, exchange=exchange, routing_key=topic, **options) class Publisher(object): """Base Publisher class.""" def __init__(self, channel, exchange_name, routing_key, **kwargs): """Init the Publisher class with the exchange_name, routing_key, and other options """ self.exchange_name = exchange_name self.routing_key = routing_key self.kwargs = kwargs self.reconnect(channel) def reconnect(self, channel): """Re-establish the Producer after a rabbit reconnection.""" self.exchange = kombu.entity.Exchange(name=self.exchange_name, **self.kwargs) self.producer = kombu.messaging.Producer(exchange=self.exchange, channel=channel, routing_key=self.routing_key) def send(self, msg, timeout=None): """Send a message.""" if timeout: # # AMQP TTL is in milliseconds when set in the header. # self.producer.publish(msg, headers={'ttl': (timeout * 1000)}) else: self.producer.publish(msg) class DirectPublisher(Publisher): """Publisher class for 'direct'.""" def __init__(self, conf, channel, msg_id, **kwargs): """init a 'direct' publisher. Kombu options may be passed as keyword args to override defaults """ options = {'durable': False, 'auto_delete': True, 'exclusive': False} options.update(kwargs) super(DirectPublisher, self).__init__(channel, msg_id, msg_id, type='direct', **options) class TopicPublisher(Publisher): """Publisher class for 'topic'.""" def __init__(self, conf, channel, topic, **kwargs): """init a 'topic' publisher. 
Kombu options may be passed as keyword args to override defaults """ options = {'durable': conf.amqp_durable_queues, 'auto_delete': conf.amqp_auto_delete, 'exclusive': False} options.update(kwargs) exchange_name = rpc_amqp.get_control_exchange(conf) super(TopicPublisher, self).__init__(channel, exchange_name, topic, type='topic', **options) class FanoutPublisher(Publisher): """Publisher class for 'fanout'.""" def __init__(self, conf, channel, topic, **kwargs): """init a 'fanout' publisher. Kombu options may be passed as keyword args to override defaults """ options = {'durable': False, 'auto_delete': True, 'exclusive': False} options.update(kwargs) super(FanoutPublisher, self).__init__(channel, '%s_fanout' % topic, None, type='fanout', **options) class NotifyPublisher(TopicPublisher): """Publisher class for 'notify'.""" def __init__(self, conf, channel, topic, **kwargs): self.durable = kwargs.pop('durable', conf.amqp_durable_queues) self.queue_arguments = _get_queue_arguments(conf) super(NotifyPublisher, self).__init__(conf, channel, topic, **kwargs) def reconnect(self, channel): super(NotifyPublisher, self).reconnect(channel) # NOTE(jerdfelt): Normally the consumer would create the queue, but # we do this to ensure that messages don't get dropped if the # consumer is started after we do queue = kombu.entity.Queue(channel=channel, exchange=self.exchange, durable=self.durable, name=self.routing_key, routing_key=self.routing_key, queue_arguments=self.queue_arguments) queue.declare() class Connection(object): """Connection object.""" pool = None def __init__(self, conf, server_params=None): self.consumers = [] self.consumer_thread = None self.proxy_callbacks = [] self.conf = conf self.max_retries = self.conf.rabbit_max_retries # Try forever? if self.max_retries <= 0: self.max_retries = None self.interval_start = self.conf.rabbit_retry_interval self.interval_stepping = self.conf.rabbit_retry_backoff # max retry-interval = 30 seconds self.interval_max = 30 self.memory_transport = False if server_params is None: server_params = {} # Keys to translate from server_params to kombu params server_params_to_kombu_params = {'username': 'userid'} ssl_params = self._fetch_ssl_params() params_list = [] for adr in self.conf.rabbit_hosts: hostname, port = network_utils.parse_host_port( adr, default_port=self.conf.rabbit_port) params = { 'hostname': hostname, 'port': port, 'userid': self.conf.rabbit_userid, 'password': self.conf.rabbit_password, 'virtual_host': self.conf.rabbit_virtual_host, } for sp_key, value in server_params.iteritems(): p_key = server_params_to_kombu_params.get(sp_key, sp_key) params[p_key] = value if self.conf.fake_rabbit: params['transport'] = 'memory' if self.conf.rabbit_use_ssl: params['ssl'] = ssl_params params_list.append(params) self.params_list = params_list brokers_count = len(self.params_list) self.next_broker_indices = itertools.cycle(range(brokers_count)) self.memory_transport = self.conf.fake_rabbit self.connection = None self.reconnect() def _fetch_ssl_params(self): """Handles fetching what ssl params should be used for the connection (if any). 
""" ssl_params = dict() # http://docs.python.org/library/ssl.html - ssl.wrap_socket if self.conf.kombu_ssl_version: ssl_params['ssl_version'] = self.conf.kombu_ssl_version if self.conf.kombu_ssl_keyfile: ssl_params['keyfile'] = self.conf.kombu_ssl_keyfile if self.conf.kombu_ssl_certfile: ssl_params['certfile'] = self.conf.kombu_ssl_certfile if self.conf.kombu_ssl_ca_certs: ssl_params['ca_certs'] = self.conf.kombu_ssl_ca_certs # We might want to allow variations in the # future with this? ssl_params['cert_reqs'] = ssl.CERT_REQUIRED if not ssl_params: # Just have the default behavior return True else: # Return the extended behavior return ssl_params def _connect(self, params): """Connect to rabbit. Re-establish any queues that may have been declared before if we are reconnecting. Exceptions should be handled by the caller. """ if self.connection: LOG.info(_("Reconnecting to AMQP server on " "%(hostname)s:%(port)d") % params) try: self.connection.release() except self.connection_errors: pass # Setting this in case the next statement fails, though # it shouldn't be doing any network operations, yet. self.connection = None self.connection = kombu.connection.BrokerConnection(**params) self.connection_errors = self.connection.connection_errors if self.memory_transport: # Kludge to speed up tests. self.connection.transport.polling_interval = 0.0 self.consumer_num = itertools.count(1) self.connection.connect() self.channel = self.connection.channel() # work around 'memory' transport bug in 1.1.3 if self.memory_transport: self.channel._new_queue('ae.undeliver') for consumer in self.consumers: consumer.reconnect(self.channel) LOG.info(_('Connected to AMQP server on %(hostname)s:%(port)d') % params) def reconnect(self): """Handles reconnecting and re-establishing queues. Will retry up to self.max_retries number of times. self.max_retries = 0 means to retry forever. Sleep between tries, starting at self.interval_start seconds, backing off self.interval_stepping number of seconds each attempt. """ attempt = 0 while True: params = self.params_list[next(self.next_broker_indices)] attempt += 1 try: self._connect(params) return except (IOError, self.connection_errors) as e: pass except Exception as e: # NOTE(comstud): Unfortunately it's possible for amqplib # to return an error not covered by its transport # connection_errors in the case of a timeout waiting for # a protocol response. (See paste link in LP888621) # So, we check all exceptions for 'timeout' in them # and try to reconnect in this case. if 'timeout' not in str(e): raise log_info = {} log_info['err_str'] = str(e) log_info['max_retries'] = self.max_retries log_info.update(params) if self.max_retries and attempt == self.max_retries: LOG.error(_('Unable to connect to AMQP server on ' '%(hostname)s:%(port)d after %(max_retries)d ' 'tries: %(err_str)s') % log_info) # NOTE(comstud): Copied from original code. There's # really no better recourse because if this was a queue we # need to consume on, we have no way to consume anymore. sys.exit(1) if attempt == 1: sleep_time = self.interval_start or 1 elif attempt > 1: sleep_time += self.interval_stepping if self.interval_max: sleep_time = min(sleep_time, self.interval_max) log_info['sleep_time'] = sleep_time LOG.error(_('AMQP server on %(hostname)s:%(port)d is ' 'unreachable: %(err_str)s. 
Trying again in ' '%(sleep_time)d seconds.') % log_info) time.sleep(sleep_time) def ensure(self, error_callback, method, *args, **kwargs): while True: try: return method(*args, **kwargs) except (self.connection_errors, socket.timeout, IOError) as e: if error_callback: error_callback(e) except Exception as e: # NOTE(comstud): Unfortunately it's possible for amqplib # to return an error not covered by its transport # connection_errors in the case of a timeout waiting for # a protocol response. (See paste link in LP888621) # So, we check all exceptions for 'timeout' in them # and try to reconnect in this case. if 'timeout' not in str(e): raise if error_callback: error_callback(e) self.reconnect() def get_channel(self): """Convenience call for bin/clear_rabbit_queues.""" return self.channel def close(self): """Close/release this connection.""" self.cancel_consumer_thread() self.wait_on_proxy_callbacks() self.connection.release() self.connection = None def reset(self): """Reset a connection so it can be used again.""" self.cancel_consumer_thread() self.wait_on_proxy_callbacks() self.channel.close() self.channel = self.connection.channel() # work around 'memory' transport bug in 1.1.3 if self.memory_transport: self.channel._new_queue('ae.undeliver') self.consumers = [] def declare_consumer(self, consumer_cls, topic, callback): """Create a Consumer using the class that was passed in and add it to our list of consumers """ def _connect_error(exc): log_info = {'topic': topic, 'err_str': str(exc)} LOG.error(_("Failed to declare consumer for topic '%(topic)s': " "%(err_str)s") % log_info) def _declare_consumer(): consumer = consumer_cls(self.conf, self.channel, topic, callback, self.consumer_num.next()) self.consumers.append(consumer) return consumer return self.ensure(_connect_error, _declare_consumer) def iterconsume(self, limit=None, timeout=None): """Return an iterator that will consume from all queues/consumers.""" info = {'do_consume': True} def _error_callback(exc): if isinstance(exc, socket.timeout): LOG.debug(_('Timed out waiting for RPC response: %s') % str(exc)) raise rpc_common.Timeout() else: LOG.exception(_('Failed to consume message from queue: %s') % str(exc)) info['do_consume'] = True def _consume(): if info['do_consume']: queues_head = self.consumers[:-1] queues_tail = self.consumers[-1] for queue in queues_head: queue.consume(nowait=True) queues_tail.consume(nowait=False) info['do_consume'] = False return self.connection.drain_events(timeout=timeout) for iteration in itertools.count(0): if limit and iteration >= limit: raise StopIteration yield self.ensure(_error_callback, _consume) def cancel_consumer_thread(self): """Cancel a consumer thread.""" if self.consumer_thread is not None: self.consumer_thread.kill() try: self.consumer_thread.wait() except greenlet.GreenletExit: pass self.consumer_thread = None def wait_on_proxy_callbacks(self): """Wait for all proxy callback threads to exit.""" for proxy_cb in self.proxy_callbacks: proxy_cb.wait() def publisher_send(self, cls, topic, msg, timeout=None, **kwargs): """Send to a publisher based on the publisher class.""" def _error_callback(exc): log_info = {'topic': topic, 'err_str': str(exc)} LOG.exception(_("Failed to publish message to topic " "'%(topic)s': %(err_str)s") % log_info) def _publish(): publisher = cls(self.conf, self.channel, topic, **kwargs) publisher.send(msg, timeout) self.ensure(_error_callback, _publish) def declare_direct_consumer(self, topic, callback): """Create a 'direct' queue. 
In nova's use, this is generally a msg_id queue used for responses for call/multicall """ self.declare_consumer(DirectConsumer, topic, callback) def declare_topic_consumer(self, topic, callback=None, queue_name=None, exchange_name=None): """Create a 'topic' consumer.""" self.declare_consumer(functools.partial(TopicConsumer, name=queue_name, exchange_name=exchange_name, ), topic, callback) def declare_fanout_consumer(self, topic, callback): """Create a 'fanout' consumer.""" self.declare_consumer(FanoutConsumer, topic, callback) def direct_send(self, msg_id, msg): """Send a 'direct' message.""" self.publisher_send(DirectPublisher, msg_id, msg) def topic_send(self, topic, msg, timeout=None): """Send a 'topic' message.""" self.publisher_send(TopicPublisher, topic, msg, timeout) def fanout_send(self, topic, msg): """Send a 'fanout' message.""" self.publisher_send(FanoutPublisher, topic, msg) def notify_send(self, topic, msg, **kwargs): """Send a notify message on a topic.""" self.publisher_send(NotifyPublisher, topic, msg, None, **kwargs) def consume(self, limit=None): """Consume from all queues/consumers.""" it = self.iterconsume(limit=limit) while True: try: it.next() except StopIteration: return def consume_in_thread(self): """Consumer from all queues/consumers in a greenthread.""" @excutils.forever_retry_uncaught_exceptions def _consumer_thread(): try: self.consume() except greenlet.GreenletExit: return if self.consumer_thread is None: self.consumer_thread = eventlet.spawn(_consumer_thread) return self.consumer_thread def create_consumer(self, topic, proxy, fanout=False): """Create a consumer that calls a method in a proxy object.""" proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, rpc_amqp.get_connection_pool(self.conf, Connection)) self.proxy_callbacks.append(proxy_cb) if fanout: self.declare_fanout_consumer(topic, proxy_cb) else: self.declare_topic_consumer(topic, proxy_cb) def create_worker(self, topic, proxy, pool_name): """Create a worker that calls a method in a proxy object.""" proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, rpc_amqp.get_connection_pool(self.conf, Connection)) self.proxy_callbacks.append(proxy_cb) self.declare_topic_consumer(topic, proxy_cb, pool_name) def join_consumer_pool(self, callback, pool_name, topic, exchange_name=None): """Register as a member of a group of consumers for a given topic from the specified exchange. Exactly one member of a given pool will receive each message. A message will be delivered to multiple pools, if more than one is created. 
""" callback_wrapper = rpc_amqp.CallbackWrapper( conf=self.conf, callback=callback, connection_pool=rpc_amqp.get_connection_pool(self.conf, Connection), ) self.proxy_callbacks.append(callback_wrapper) self.declare_topic_consumer( queue_name=pool_name, topic=topic, exchange_name=exchange_name, callback=callback_wrapper, ) def create_connection(conf, new=True): """Create a connection.""" return rpc_amqp.create_connection( conf, new, rpc_amqp.get_connection_pool(conf, Connection)) def multicall(conf, context, topic, msg, timeout=None): """Make a call that returns multiple times.""" return rpc_amqp.multicall( conf, context, topic, msg, timeout, rpc_amqp.get_connection_pool(conf, Connection)) def call(conf, context, topic, msg, timeout=None): """Sends a message on a topic and wait for a response.""" return rpc_amqp.call( conf, context, topic, msg, timeout, rpc_amqp.get_connection_pool(conf, Connection)) def cast(conf, context, topic, msg): """Sends a message on a topic without waiting for a response.""" return rpc_amqp.cast( conf, context, topic, msg, rpc_amqp.get_connection_pool(conf, Connection)) def fanout_cast(conf, context, topic, msg): """Sends a message on a fanout exchange without waiting for a response.""" return rpc_amqp.fanout_cast( conf, context, topic, msg, rpc_amqp.get_connection_pool(conf, Connection)) def cast_to_server(conf, context, server_params, topic, msg): """Sends a message on a topic to a specific server.""" return rpc_amqp.cast_to_server( conf, context, server_params, topic, msg, rpc_amqp.get_connection_pool(conf, Connection)) def fanout_cast_to_server(conf, context, server_params, topic, msg): """Sends a message on a fanout exchange to a specific server.""" return rpc_amqp.fanout_cast_to_server( conf, context, server_params, topic, msg, rpc_amqp.get_connection_pool(conf, Connection)) def notify(conf, context, topic, msg, envelope): """Sends a notification event on a topic.""" return rpc_amqp.notify( conf, context, topic, msg, rpc_amqp.get_connection_pool(conf, Connection), envelope) def cleanup(): return rpc_amqp.cleanup(Connection.pool)
apache-2.0
-1,269,175,068,643,578,600
37.124851
79
0.558993
false
4.416391
false
false
false
raytung/Slice
account/hooks.py
1
2621
import hashlib import random from django.core.mail import send_mail from django.template.loader import render_to_string from account.conf import settings from django.contrib.auth.models import User from django.core.exceptions import ObjectDoesNotExist class AccountDefaultHookSet(object): def send_invitation_email(self, to, ctx): subject = render_to_string("account/email/invite_user_subject.txt", ctx) message = render_to_string("account/email/invite_user.txt", ctx) send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, to) def send_confirmation_email(self, to, ctx): subject = render_to_string("account/email/email_confirmation_subject.txt", ctx) subject = "".join(subject.splitlines()) # remove superfluous line breaks message = render_to_string("account/email/email_confirmation_message.txt", ctx) send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, to) def send_password_change_email(self, to, ctx): subject = render_to_string("account/email/password_change_subject.txt", ctx) subject = "".join(subject.splitlines()) message = render_to_string("account/email/password_change.txt", ctx) send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, to) def send_password_reset_email(self, to, ctx): subject = render_to_string("account/email/password_reset_subject.txt", ctx) subject = "".join(subject.splitlines()) message = render_to_string("account/email/password_reset.txt", ctx) send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, to) def generate_random_token(self, extra=None, hash_func=hashlib.sha256): if extra is None: extra = [] bits = extra + [str(random.SystemRandom().getrandbits(512))] return hash_func("".join(bits).encode("utf-8")).hexdigest() def generate_signup_code_token(self, email=None): return self.generate_random_token([email]) def generate_email_confirmation_token(self, email): return self.generate_random_token([email]) def get_user_credentials(self, form, identifier_field): try: username = User.objects.get(email=form.cleaned_data[identifier_field]) username = username.username except ObjectDoesNotExist: username = form.cleaned_data[identifier_field] return { "username": username, "password": form.cleaned_data["password"], } class HookProxy(object): def __getattr__(self, attr): return getattr(settings.ACCOUNT_HOOKSET, attr) hookset = HookProxy()
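Editor's illustration (not part of the original record): a small sketch of the token helpers on the default hookset above. The e-mail address is made up, and instantiating AccountDefaultHookSet assumes a configured Django settings module, since account.hooks imports django.core.mail and the auth User model at import time.

from account.hooks import AccountDefaultHookSet

hooks = AccountDefaultHookSet()

# Tokens are hex SHA-256 digests over the e-mail plus 512 random bits,
# so each one is 64 characters long.
signup_code = hooks.generate_signup_code_token("[email protected]")
confirm_token = hooks.generate_email_confirmation_token("[email protected]")
print(len(signup_code), len(confirm_token))  # 64 64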
mit
-1,658,567,781,542,435,800
38.119403
87
0.679893
false
3.917788
false
false
false
anselal/antminer-monitor
antminermonitor/blueprints/user/models.py
1
1102
from flask_login.mixins import UserMixin from werkzeug.security import generate_password_hash, check_password_hash from sqlalchemy import Column, Integer, VARCHAR from antminermonitor.database import Base class User(UserMixin, Base): __tablename__ = 'user' id = Column(Integer, primary_key=True) username = Column(VARCHAR(64), index=True, unique=True) email = Column(VARCHAR(120), index=True, unique=True) password_hash = Column(VARCHAR(128)) surname = Column(VARCHAR(100)) firstname = Column(VARCHAR(100)) active = Column(Integer, default=1) @property def serialize(self): return { 'id': self.id, 'username': self.username, 'firstname': self.firstname, 'surname': self.surname, 'email': self.email } def __repr__(self): return '<User {}>'.format(self.username) def set_password(self, password): self.password_hash = generate_password_hash(password) def check_password(self, password): return check_password_hash(self.password_hash, password)
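Editor's illustration (not part of the original record): a short sketch of the password helpers on the User model above. The field values are invented, and actually persisting the object would need a SQLAlchemy session bound to antminermonitor.database.Base, which this sketch does not set up.

from antminermonitor.blueprints.user.models import User

user = User(username='ada', email='[email protected]',
            firstname='Ada', surname='Lovelace')
user.set_password('correct horse battery staple')   # stores only the hash

assert user.check_password('correct horse battery staple')
assert not user.check_password('wrong password')
print(user.serialize)   # id/username/firstname/surname/email as a dict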
gpl-3.0
8,158,068,241,093,129,000
30.485714
73
0.653358
false
3.978339
false
false
false
wurstmineberg/alltheitems.wurstmineberg.de
alltheitems/cloud.py
1
67724
import alltheitems.__main__ as ati import bottle import collections import contextlib import datetime import itertools import json import pathlib import random import re import xml.sax.saxutils import alltheitems.item import alltheitems.util import alltheitems.world class FillLevel: def __init__(self, stack_size, total_items, max_slots, *, is_smart_chest=True): self.stack_size = stack_size self.total_items = total_items self.max_slots = max_slots self.is_smart_chest = is_smart_chest def __str__(self): if self.total_items == 0: return '{} is empty.'.format('SmartChest' if self.is_smart_chest else 'Chest') elif self.total_items == self.max_items: return '{} is full.'.format('SmartChest' if self.is_smart_chest else 'Chest') else: stacks, items = self.stacks return '{} is filled {}% ({} {stack}{}{} out of {} {stack}s).'.format('SmartChest' if self.is_smart_chest else 'Chest', int(100 * self.fraction), stacks, '' if stacks == 1 else 's', ' and {} item{}'.format(items, '' if items == 1 else 's') if items > 0 else '', self.max_slots, stack='item' if self.stack_size == 1 else 'stack') @property def fraction(self): return self.total_items / self.max_items def is_empty(self): return self.total_items == 0 def is_full(self): return self.total_items == self.max_items @property def max_items(self): return self.max_slots * self.stack_size @property def stacks(self): return divmod(self.total_items, self.stack_size) CONTAINERS = [ # layer coords of all counted container blocks in a SmartChest (3, -7, 3), (3, -7, 4), (4, -7, 4), (5, -7, 3), (5, -7, 4), (2, -6, 3), (3, -6, 2), (3, -6, 3), (2, -5, 2), (2, -5, 3), (3, -5, 3), (2, -4, 3), (3, -4, 2), (3, -4, 3), (3, -3, 2), (4, -3, 2), (5, -3, 2), (6, -3, 2), (5, -2, 2), (6, -2, 2), (5, 0, 2), (5, 0, 3) ] STONE_VARIANTS = { 0: 'stone', 1: 'granite', 2: 'polished granite', 3: 'diorite', 4: 'polished diorite', 5: 'andesite', 6: 'polished andesite' } HOPPER_FACINGS = { 0: 'down', 1: 'up', #for droppers 2: 'north', 3: 'south', 4: 'west', 5: 'east' } TORCH_FACINGS = { 1: 'to its west', 2: 'to its east', 3: 'to its north', 4: 'to its south', 5: 'below' } HTML_COLORS = { 'cyan': '#0ff', 'cyan2': '#0ff', 'gray': '#777', 'red': '#f00', 'orange': '#f70', 'yellow': '#ff0', 'white': '#fff', 'white2': '#fff', None: 'transparent' } def hopper_chain_connected(start_coords, end_coords, *, world=None, chunk_cache=None, block_at=None): if world is None: world = alltheitems.world.World() if chunk_cache is None: chunk_cache = {} if block_at is None: block_at=world.block_at visited_coords = set() x, y, z = start_coords while (x, y, z) != end_coords: if (x, y, z) in visited_coords: return False, 'hopper chain points into itself at {} {} {}'.format(x, y, z) visited_coords.add((x, y, z)) block = block_at(x, y, z, chunk_cache=chunk_cache) if block['id'] != 'minecraft:hopper': return False, 'block at {} {} {} is not a <a href="/block/minecraft/hopper">hopper</a>'.format(x, y, z, *end_coords) if block['damage'] & 0x7 == 0: y -= 1 # down elif block['damage'] & 0x7 == 2: z -= 1 # north elif block['damage'] & 0x7 == 3: z += 1 # south elif block['damage'] & 0x7 == 4: x -= 1 # west elif block['damage'] & 0x7 == 5: x += 1 # east else: raise ValueError('Unknown hopper facing {} at {}'.format(block['damage'] & 0x7, (x, y, z))) return True, None def smart_chest_schematic(document_root=ati.document_root): layers = {} with (document_root / 'static' / 'smartchest.txt').open() as smart_chest_layers: current_y = None current_layer = None for line in smart_chest_layers: if line == '\n': continue match 
= re.fullmatch('layer (-?[0-9]+)\n', line) if match: # new layer if current_y is not None: layers[current_y] = current_layer current_y = int(match.group(1)) current_layer = [] else: current_layer.append(line.rstrip('\r\n')) if current_y is not None: layers[current_y] = current_layer return sorted(layers.items()) def chest_iter(): """Returns an iterator yielding tuples (x, corridor, y, floor, z, chest).""" with (ati.assets_root / 'json' / 'cloud.json').open() as cloud_json: cloud_data = json.load(cloud_json) for y, floor in enumerate(cloud_data): for x, corridor in sorted(((int(x), corridor) for x, corridor in floor.items()), key=lambda tup: tup[0]): for z, chest in enumerate(corridor): yield x, corridor, y, floor, z, chest def chest_coords(item, *, include_meta=False): if not isinstance(item, alltheitems.item.Item): item = alltheitems.item.Item(item) for x, corridor, y, _, z, chest in chest_iter(): if item == chest: if include_meta: return (x, y, z), len(corridor), None if isinstance(chest, str) else chest.get('name'), None if isinstance(chest, str) else chest.get('sorter') else: return x, y, z if include_meta: return None, 0, None, None def global_error_checks(*, chunk_cache=None, block_at=alltheitems.world.World().block_at): cache_path = ati.cache_root / 'cloud-globals.json' max_age = datetime.timedelta(hours=1, minutes=random.randrange(0, 60)) # use a random value between 1 and 2 hours for the cache expiration if cache_path.exists() and datetime.datetime.utcfromtimestamp(cache_path.stat().st_mtime) > datetime.datetime.utcnow() - max_age: # cached check results are recent enough with cache_path.open() as cache_f: cache = json.load(cache_f) return cache # cached check results are too old, recheck if chunk_cache is None: chunk_cache = {} # error check: input hopper chain start = 14, 61, 32 # the first hopper after the buffer elevator end = -1, 25, 52 # the half of the uppermost overflow chest into which the hopper chain is pointing is_connected, message = hopper_chain_connected(start, end, chunk_cache=chunk_cache, block_at=block_at) if not is_connected: return 'Input hopper chain at {} is not connected to the unsorted overflow at {}: {}.'.format(start, end, message) if ati.cache_root.exists(): with cache_path.open('w') as cache_f: json.dump(message, cache_f, sort_keys=True, indent=4) def chest_error_checks(x, y, z, base_x, base_y, base_z, item, item_name, exists, stackable, durability, has_smart_chest, has_sorter, has_overflow, filler_item, sorting_hopper, missing_overflow_hoppers, north_half, south_half, corridor_length, pre_sorter, layer_coords, block_at, items_data, chunk_cache, document_root): if stackable and has_sorter: # error check: overflow exists if not has_overflow: if len(missing_overflow_hoppers) == 3: return 'Missing overflow hoppers.' elif len(missing_overflow_hoppers) > 1: return 'Overflow hoppers at x={} do not exist.'.format(missing_overflow_hoppers) elif len(missing_overflow_hoppers) == 1: return 'Overflow hopper at x={} does not exist, is {}.'.format(next(iter(missing_overflow_hoppers)), block_at(next(iter(missing_overflow_hoppers)), base_y - 7, base_z - 1)['id']) else: return 'Missing overflow.' # error check: pre-sorter for lower floors if y > 4: if pre_sorter is None: return 'Preliminary sorter coordinate missing from cloud.json.' 
pre_sorting_hopper = block_at(pre_sorter, 30, 52, chunk_cache=chunk_cache) if pre_sorting_hopper['id'] != 'minecraft:hopper': return 'Preliminary sorter is missing (should be at {} 30 52).'.format(pre_sorter) if pre_sorting_hopper['damage'] != 3: return 'Preliminary sorting hopper ({} 30 52) should be pointing south, but is facing {}.'.format(pre_sorter, HOPPER_FACINGS[pre_sorting_hopper['damage']]) empty_slots = set(range(5)) for slot in pre_sorting_hopper['tileEntity']['Items']: empty_slots.remove(slot['Slot']) if slot['Slot'] == 0: if not item.matches_slot(slot): return 'Preliminary sorting hopper is sorting the wrong item: {}.'.format(alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text()) else: if not filler_item.matches_slot(slot): return 'Preliminary sorting hopper has wrong filler item in slot {}: {} (should be {}).'.format(slot['Slot'], alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text(), filler_item.link_text()) if slot['Count'] > 1: return 'Preliminary sorting hopper: too much {} in slot {}.'.format(filler_item.link_text(), slot['Slot']) if len(empty_slots) > 0: if len(empty_slots) == 5: return 'Preliminary sorting hopper is empty.' elif len(empty_slots) == 1: return 'Slot {} of the preliminary sorting hopper is empty.'.format(next(iter(empty_slots))) else: return 'Some slots in the preliminary sorting hopper are empty: {}.'.format(alltheitems.util.join(empty_slots)) if has_sorter: # error check: sorting hopper if sorting_hopper['damage'] != 2: return 'Sorting hopper ({} {} {}) should be pointing north, but is facing {}.'.format(base_x - 2 if z % 2 == 0 else base_x + 2, base_y - 3, base_z, HOPPER_FACINGS[sorting_hopper['damage']]) empty_slots = set(range(5)) for slot in sorting_hopper['tileEntity']['Items']: empty_slots.remove(slot['Slot']) if slot['Slot'] == 0 and stackable: if not item.matches_slot(slot) and not filler_item.matches_slot(slot): return 'Sorting hopper is sorting the wrong item: {}.'.format(alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text()) else: if not filler_item.matches_slot(slot): return 'Sorting hopper has wrong filler item in slot {}: {} (should be {}).'.format(slot['Slot'], alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text(), filler_item.link_text()) if slot['Count'] > 1: return 'Sorting hopper: too much {} in slot {}.'.format(filler_item.link_text(), slot['Slot']) if len(empty_slots) > 0: if len(empty_slots) == 5: return 'Sorting hopper is empty.' elif len(empty_slots) == 1: return 'Slot {} of the sorting hopper is empty.'.format(next(iter(empty_slots))) else: return 'Some slots in the sorting hopper are empty: {}.'.format(alltheitems.util.join(empty_slots)) if exists: # error check: wrong items in access chest for slot in itertools.chain(north_half['tileEntity']['Items'], south_half['tileEntity']['Items']): if not item.matches_slot(slot): return 'Access chest contains items of the wrong kind: {}.'.format(alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text()) # error check: wrong name on sign sign = block_at(base_x - 1 if z % 2 == 0 else base_x + 1, base_y + 1, base_z + 1, chunk_cache=chunk_cache) if sign['id'] != 'minecraft:wall_sign': return 'Sign is missing.' 
text = [] for line in range(1, 5): line_text = json.loads(sign['tileEntity']['Text{}'.format(line)])['text'].translate(dict.fromkeys(range(0xf700, 0xf704), None)) if len(line_text) > 0: text.append(line_text) text = ' '.join(text) if text != item_name.translate({0x2161: 'II'}): return 'Sign has wrong text: should be {!r}, is {!r}.'.format(xml.sax.saxutils.escape(item_name), xml.sax.saxutils.escape(text)) if has_overflow: # error check: overflow hopper chain start = base_x + 5 if z % 2 == 0 else base_x - 5, base_y - 7, base_z - 1 end = -35, 6, 38 # position of the dropper leading into the Smelting Center's item elevator is_connected, message = hopper_chain_connected(start, end, chunk_cache=chunk_cache, block_at=block_at) if not is_connected: return 'Overflow hopper chain at {} is not connected to the Smelting Center item elevator at {}: {}.'.format(start, end, message) if exists and has_smart_chest: # error check: all blocks for layer_y, layer in smart_chest_schematic(document_root=document_root): for layer_x, row in enumerate(layer): for layer_z, block_symbol in enumerate(row): # determine the coordinate of the current block exact_x, exact_y, exact_z = layer_coords(layer_x, layer_y, layer_z) # determine current block block = block_at(exact_x, exact_y, exact_z, chunk_cache=chunk_cache) # check against schematic if block_symbol == ' ': # air if block['id'] != 'minecraft:air': return 'Block at {} {} {} should be air, is {}.'.format(exact_x, exact_y, exact_z, block['id']) elif block_symbol == '!': # sign if block['id'] != 'minecraft:wall_sign': return 'Block at {} {} {} should be a sign, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] != (4 if z % 2 == 0 else 5): return 'Sign at {} {} {} is facing the wrong way.'.format(exact_x, exact_y, exact_z) elif block_symbol == '#': # chest if block['id'] != 'minecraft:chest': return 'Block at {} {} {} should be a chest, is {}.'.format(exact_x, exact_y, exact_z, block['id']) for slot in block['tileEntity']['Items']: if not item.matches_slot(slot): return 'Storage chest at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text()) elif block_symbol == '<': # hopper facing south if block['id'] != 'minecraft:hopper': return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] & 0x7 != 3: # south return 'Hopper at {} {} {} should be pointing south, is {}.'.format(exact_x, exact_y, exact_z, HOPPER_FACINGS[block['damage']]) storage_hoppers = { (5, -7, 4), (6, -5, 4) } if (layer_x, layer_y, layer_z) in storage_hoppers: for slot in block['tileEntity']['Items']: if not item.matches_slot(slot): return 'Storage hopper at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text()) elif block_symbol == '>': # hopper facing north if layer_y == -7 and layer_x == 0 and z < 8: # the first few chests get ignored because their overflow points in the opposite direction pass #TODO introduce special checks for them else: if block['id'] != 'minecraft:hopper': return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] & 0x7 != 2: # north return 'Hopper at {} {} {} should be pointing north, is {}.'.format(exact_x, exact_y, exact_z, HOPPER_FACINGS[block['damage']]) storage_hoppers = { (3, -7, 3), (3, -4, 2) } if (layer_x, layer_y, layer_z) in 
storage_hoppers: for slot in block['tileEntity']['Items']: if not item.matches_slot(slot): return 'Storage hopper at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text()) elif block_symbol == '?': # any block pass elif block_symbol == 'C': # comparator if block['id'] != 'minecraft:unpowered_comparator': return 'Block at {} {} {} should be a comparator, is {}.'.format(exact_x, exact_y, exact_z, block['id']) known_facings = { (5, -7, 2): 0x2, # south (5, -5, 2): 0x2, # south (7, -3, 4): 0x0, # north (0, -1, 1): 0x0, # north (1, -1, 2): 0x0, # north (2, 0, 2): 0x1 if z % 2 == 0 else 0x3, # east / west (2, 0, 3): 0x2, # south (4, 0, 2): 0x1 if z % 2 == 0 else 0x3, # east / west (4, 0, 3): 0x2 # south } facing = block['damage'] & 0x3 if (layer_x, layer_y, layer_z) in known_facings: if known_facings[layer_x, layer_y, layer_z] != facing: return 'Comparator at {} {} {} is facing the wrong way.'.format(exact_x, exact_y, exact_z) else: return 'Direction check for comparator at {} {} {} (relative coords: {} {} {}) not yet implemented.'.format(exact_x, exact_y, exact_z, layer_x, layer_y, layer_z) known_modes = { (5, -7, 2): False, # compare (5, -5, 2): False, # compare (7, -3, 4): False, # compare (0, -1, 1): False, # compare (1, -1, 2): True, # subtract (2, 0, 2): True, # subtract (2, 0, 3): False, # compare (4, 0, 2): True, #subtract (4, 0, 3): False # compare } mode = (block['damage'] & 0x4) == 0x4 if (layer_x, layer_y, layer_z) in known_modes: if known_modes[layer_x, layer_y, layer_z] != mode: return 'Comparator at {} {} {} is in {} mode, should be in {} mode.'.format(exact_x, exact_y, exact_z, 'subtraction' if mode else 'comparison', 'subtraction' if known_modes[layer_x, layer_y, layer_z] else 'comparison') else: return 'Mode check for comparator at {} {} {} (relative coords: {} {} {}) not yet implemented.'.format(exact_x, exact_y, exact_z, layer_x, layer_y, layer_z) elif block_symbol == 'D': # dropper facing up if block['id'] != 'minecraft:dropper': return 'Block at {} {} {} should be a dropper, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] & 0x7 != 1: # up return 'Dropper at {} {} {} should be facing up, is {}.'.format(exact_x, exact_y, exact_z, HOPPER_FACINGS[block['damage']]) for slot in block['tileEntity']['Items']: if not item.matches_slot(slot): return 'Dropper at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text()) elif block_symbol == 'F': # furnace if layer_y == -6 and layer_x == 0 and z < 2: # the first few chests get ignored because their overflow points in the opposite direction pass #TODO introduce special checks for them elif layer_y == -1 and layer_x == 7 and layer_z == 1 and (z == corridor_length - 1 or z == corridor_length - 2 and z % 2 == 0): # the floor ends with a quartz slab instead of a furnace here if block['id'] != 'minecraft:stone_slab': return 'Block at {} {} {} should be a quartz slab, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] & 0x7 != 0x7: slab_variant = { 0: 'stone', 1: 'sandstone', 2: 'fake wood', 3: 'cobblestone', 4: 'brick', 5: 'stone brick', 6: 'Nether brick', 7: 'quartz' }[block['damage'] & 0x7] return 'Block at {} {} {} should be a <a href="/block/minecraft/stone_slab/7">quartz slab</a>, is a <a href="/block/minecraft/stone_slab/{}">{} slab</a>.'.format(exact_x, exact_y, exact_z, block['damage'] & 
0x7, slab_variant) if block['damage'] & 0x8 != 0x8: return 'Quartz slab at {} {} {} should be a top slab, is a bottom slab.'.format(exact_x, exact_y, exact_z) elif x == 0 and y == 6 and layer_y == -1 and layer_x == 7: # the central corridor on the 6th floor uses stone bricks instead of furnaces for the floor if block['id'] != 'minecraft:stonebrick': return 'Block at {} {} {} should be stone bricks, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] != 0: stonebrick_variant = { 0: 'regular', 1: 'mossy', 2: 'cracked', 3: 'chiseled' }[block['damage']] return 'Block at {} {} {} should be <a href="/block/minecraft/stonebrick/0">regular stone bricks</a>, is <a href="/block/minecraft/stonebrick/{}">{} stone bricks</a>.'.format(exact_x, exact_y, exact_z, block['damage'], stonebrick_variant) else: if block['id'] != 'minecraft:furnace': return 'Block at {} {} {} should be a furnace, is {}.'.format(exact_x, exact_y, exact_z, block['id']) known_signals = { (0, -6, 4): 0, (0, -6, 5): 0, (0, -6, 6): 0, (0, -6, 7): 0, (0, -1, 0): 8, (7, -1, 1): 0, (7, -1, 2): 0, (7, -1, 3): 0, (7, -1, 4): 0, (2, 0, 4): 1, (4, 0, 4): 5 } signal = alltheitems.item.comparator_signal(block, items_data=items_data) if (layer_x, layer_y, layer_z) in known_signals: if known_signals[layer_x, layer_y, layer_z] != signal: return 'Furnace at {} {} {} has a fill level of {}, should be {}.'.format(exact_x, exact_y, exact_z, signal, known_signals[layer_x, layer_y, layer_z]) else: return 'Fill level check for furnace at {} {} {} (relative coords: {} {} {}) not yet implemented.'.format(exact_x, exact_y, exact_z, layer_x, layer_y, layer_z) elif block_symbol == 'G': # glowstone if block['id'] != 'minecraft:glowstone': return 'Block at {} {} {} should be glowstone, is {}.'.format(exact_x, exact_y, exact_z, block['id']) elif block_symbol == 'H': # hopper, any facing if block['id'] != 'minecraft:hopper': return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id']) elif block_symbol == 'N': # overflow hopper chain pointing north if y > 1 and (z == 0 or z == 1): if block['id'] == 'minecraft:hopper': if block['damage'] != 2: # north return 'Overflow hopper at {} {} {} should be pointing north, is {}.'.format(exact_x, exact_y, exact_z, HOPPER_FACINGS[block['damage']]) elif block['id'] == 'minecraft:air': pass # also allow air because some overflow hopper chains don't start on the first floor else: return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id']) else: if block['id'] != 'minecraft:air': return 'Block at {} {} {} should be air, is {}.'.format(exact_x, exact_y, exact_z, block['id']) elif block_symbol == 'P': # upside-down oak stairs if block['id'] != 'minecraft:oak_stairs': return 'Block at {} {} {} should be oak stairs, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] & 0x3 != (0x1 if z % 2 == 0 else 0x0): stairs_facings = { 0: 'west', 1: 'east', 2: 'south', 3: 'north' } return 'Stairs at {} {} {} should be facing {}, is {}.'.format(exact_x, exact_y, exact_z, stairs_facings[0x1 if z % 2 == 0 else 0x0], stairs_facings[block['damage'] & 0x3]) if block['damage'] & 0x4 != 0x4: return 'Stairs at {} {} {} should be upside-down.'.format(exact_x, exact_y, exact_z) elif block_symbol == 'Q': # quartz top slab if block['id'] != 'minecraft:stone_slab': return 'Block at {} {} {} should be a quartz slab, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] & 0x7 != 0x7: slab_variant = { 0: 'stone', 1: 
'sandstone', 2: 'fake wood', 3: 'cobblestone', 4: 'brick', 5: 'stone brick', 6: 'Nether brick', 7: 'quartz' }[block['damage'] & 0x7] return 'Block at {} {} {} should be a <a href="/block/minecraft/stone_slab/7">quartz slab</a>, is a <a href="/block/minecraft/stone_slab/{}">{} slab</a>.'.format(exact_x, exact_y, exact_z, block['damage'] & 0x7, slab_variant) if block['damage'] & 0x8 != 0x8: return 'Quartz slab at {} {} {} should be a top slab, is a bottom slab.'.format(exact_x, exact_y, exact_z) elif block_symbol == 'R': # repeater if block['id'] not in ('minecraft:unpowered_repeater', 'minecraft:powered_repeater'): return 'Block at {} {} {} should be a repeater, is {}.'.format(exact_x, exact_y, exact_z, block['id']) known_facings = { (1, -8, 2): 0x0, # north (3, -8, 3): 0x3 if z % 2 == 0 else 0x1, # west / east (6, -6, 2): 0x0, # north (7, -5, 5): 0x2, # south (3, -3, 1): 0x1 if z % 2 == 0 else 0x3 # east / west } facing = block['damage'] & 0x3 if (layer_x, layer_y, layer_z) in known_facings: if known_facings[layer_x, layer_y, layer_z] != facing: return 'Repeater at {} {} {} is facing the wrong way.'.format(exact_x, exact_y, exact_z) else: return 'Direction check for repeater at {} {} {} (relative coords: {} {} {}) not yet implemented.'.format(exact_x, exact_y, exact_z, layer_x, layer_y, layer_z) known_delays = { # in game ticks (1, -8, 2): 4, (3, -8, 3): 2, (6, -6, 2): 2, (7, -5, 5): 2, (3, -3, 1): 2 } delay_ticks = 2 * (block['damage'] >> 2) + 2 if (layer_x, layer_y, layer_z) in known_delays: if known_delays[layer_x, layer_y, layer_z] != delay_ticks: return 'Repeater at {} {} {} has a delay of {} game tick{}, should be {}.'.format(exact_x, exact_y, exact_z, delay_ticks, '' if delay_ticks == 1 else 's', known_delays[layer_x, layer_y, layer_z]) else: return 'Delay check for repeater at {} {} {} (relative coords: {} {} {}) not yet implemented.'.format(exact_x, exact_y, exact_z, layer_x, layer_y, layer_z) elif block_symbol == 'S': # stone top slab if block['id'] != 'minecraft:stone_slab': return 'Block at {} {} {} should be a stone slab, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] & 0x7 != 0x0: slab_variant = { 0: 'stone', 1: 'sandstone', 2: 'fake wood', 3: 'cobblestone', 4: 'brick', 5: 'stone brick', 6: 'Nether brick', 7: 'quartz' }[block['damage'] & 0x7] return 'Block at {} {} {} should be a <a href="/block/minecraft/stone_slab/0">stone slab</a>, is a <a href="/block/minecraft/stone_slab/{}">{} slab</a>.'.format(exact_x, exact_y, exact_z, block['damage'] & 0x7, slab_variant) if block['damage'] & 0x8 != 0x8: return 'Quartz slab at {} {} {} should be a top slab.'.format(exact_x, exact_y, exact_z) elif block_symbol == 'T': # redstone torch attached to the side of a block if block['id'] not in ('minecraft:unlit_redstone_torch', 'minecraft:redstone_torch'): return 'Block at {} {} {} should be a redstone torch, is {}.'.format(exact_x, exact_y, exact_z, block['id']) known_facings = { (3, -8, 1): 1 if z % 2 == 0 else 2, # west / east (2, -7, 1): 3, # north (4, -6, 1): 2 if z % 2 == 0 else 1, # east / west (4, -6, 2): 3, # north (4, -5, 1): 1 if z % 2 == 0 else 2, # west / east (4, -5, 3): 4, # south (7, -5, 3): 3, # north (1, -4, 2): 4, # south (1, -3, 3): 3, # north (1, -1, 4): 4, # south (5, -1, 1): 2 if z % 2 == 0 else 1, # east / west (3, 0, 3): 4 # south } if (layer_x, layer_y, layer_z) in known_facings: if known_facings[layer_x, layer_y, layer_z] != block['damage']: return 'Redstone torch at {} {} {} attached to the block {}, should be attached to the block 
{}.'.format(exact_x, exact_y, exact_z, TORCH_FACINGS[block['damage']], TORCH_FACINGS[known_facings[layer_x, layer_y, layer_z]]) else: return 'Facing check for redstone torch at {} {} {} (relative coords: {} {} {}) not yet implemented.'.format(exact_x, exact_y, exact_z, layer_x, layer_y, layer_z) elif block_symbol == 'W': # back wall if z == corridor_length - 1 or z == corridor_length - 2 and z % 2 == 0: if block['id'] != 'minecraft:stone': return 'Block at {} {} {} should be stone, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] != 0: stone_variant = { 0: 'stone', 1: 'granite', 2: 'polished granite', 3: 'diorite', 4: 'polished diorite', 5: 'andesite', 6: 'polished andesite' }[block['damage']] return 'Block at {} {} {} should be <a href="/block/minecraft/stone/0">regular stone</a>, is <a href="/block/minecraft/stone/{}">{}</a>.'.format(exact_x, exact_y, exact_z, block['damage'], stone_variant) elif block_symbol == 'X': # overflow hopper chain pointing down if layer_y < -7 and y < 6 and (z == 4 or z == 5) or layer_y > -7 and y > 1 and (z == 0 or z == 1): if block['id'] == 'minecraft:hopper': if block['damage'] != 0: # down return 'Overflow hopper at {} {} {} should be pointing down, is {}.'.format(exact_x, exact_y, exact_z, HOPPER_FACINGS[block['damage']]) elif block['id'] == 'minecraft:air': pass # also allow air because some overflow hopper chains don't start on the first floor else: return 'Block at {} {} {} should be air or a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id']) else: if block['id'] != 'minecraft:air': return 'Block at {} {} {} should be air, is {}.'.format(exact_x, exact_y, exact_z, block['id']) elif block_symbol == '^': # hopper facing outward if block['id'] != 'minecraft:hopper': return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] & 0x7 != (5 if z % 2 == 0 else 4): # east / west return 'Hopper at {} {} {} should be pointing {}, is {}.'.format(exact_x, exact_y, exact_z, 'east' if z % 2 == 0 else 'west', HOPPER_FACINGS[block['damage']]) storage_hoppers = { (3, -5, 3), (6, -5, 3), (7, -4, 3), (5, -3, 2), (6, -3, 2) } if (layer_x, layer_y, layer_z) in storage_hoppers: for slot in block['tileEntity']['Items']: if not item.matches_slot(slot): return 'Storage hopper at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text()) elif block_symbol == 'c': # crafting table if layer_y == -7 and (y == 6 or z < 4 or z < 6 and layer_z > 1): if block['id'] != 'minecraft:stone': return 'Block at {} {} {} should be stone, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] != 0: stone_variant = STONE_VARIANTS[block['damage']] return 'Block at {} {} {} should be <a href="/block/minecraft/stone/0">regular stone</a>, is <a href="/block/minecraft/stone/{}">{}</a>.'.format(exact_x, exact_y, exact_z, block['damage'], stone_variant) else: if block['id'] != 'minecraft:crafting_table': return 'Block at {} {} {} should be a crafting table, is {}.'.format(exact_x, exact_y, exact_z, block['id']) elif block_symbol == 'i': # torch attached to the top of a block if block['id'] != 'minecraft:torch': return 'Block at {} {} {} should be a torch, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] != 5: # attached to the block below return 'Torch at {} {} {} should be attached to the block below, is attached to the block {}'.format(exact_x, exact_y, exact_z, 
TORCH_FACINGS[block['damage']]) elif block_symbol == 'p': # oak planks if layer_y == -8 and (y == 6 or z < 4 or z < 6 and layer_z > 1): if block['id'] != 'minecraft:stone': return 'Block at {} {} {} should be stone, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] != 0: stone_variant = STONE_VARIANTS[block['damage']] return 'Block at {} {} {} should be <a href="/block/minecraft/stone/0">regular stone</a>, is <a href="/block/minecraft/stone/{}">{}</a>.'.format(exact_x, exact_y, exact_z, block['damage'], stone_variant) else: if block['id'] != 'minecraft:planks': return 'Block at {} {} {} should be oak planks, is {}.'.format(exact_x, exact_y, exact_z, block['id']) pass #TODO check material elif block_symbol == 'r': # redstone dust if block['id'] != 'minecraft:redstone_wire': return 'Block at {} {} {} should be redstone, is {}.'.format(exact_x, exact_y, exact_z, block['id']) elif block_symbol == 's': # stone if block['id'] != 'minecraft:stone': if exact_y < 5: if block['id'] != 'minecraft:bedrock': return 'Block at {} {} {} should be stone or bedrock, is {}.'.format(exact_x, exact_y, exact_z, block['id']) else: return 'Block at {} {} {} should be stone, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] != 0: stone_variant = STONE_VARIANTS[block['damage']] return 'Block at {} {} {} should be <a href="/block/minecraft/stone/0">regular stone</a>, is <a href="/block/minecraft/stone/{}">{}</a>.'.format(exact_x, exact_y, exact_z, block['damage'], stone_variant) elif block_symbol == 't': # redstone torch attached to the top of a block if block['id'] not in ('minecraft:unlit_redstone_torch', 'minecraft:redstone_torch'): return 'Block at {} {} {} should be a redstone torch, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] != 5: # attached to the block below return 'Redstone torch at {} {} {} should be attached to the block below, is attached to the block {}'.format(exact_x, exact_y, exact_z, TORCH_FACINGS[block['damage']]) elif block_symbol == 'v': # hopper facing inwards if block['id'] != 'minecraft:hopper': return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] & 0x7 != (4 if z % 2 == 0 else 5): # west / east return 'Hopper at {} {} {} should be pointing {}, is {}.'.format(exact_x, exact_y, exact_z, 'west' if z % 2 == 0 else 'east', HOPPER_FACINGS[block['damage']]) storage_hoppers = { (3, -7, 4), (4, -7, 4), (2, -6, 3) } if (layer_x, layer_y, layer_z) in storage_hoppers: for slot in block['tileEntity']['Items']: if not item.matches_slot(slot): return 'Storage hopper at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text()) elif block_symbol == 'x': # hopper facing down if block['id'] != 'minecraft:hopper': return 'Block at {} {} {} should be a hopper, is {}.'.format(exact_x, exact_y, exact_z, block['id']) if block['damage'] & 0x7 != 0: # down return 'Hopper at {} {} {} should be pointing down, is {}.'.format(exact_x, exact_y, exact_z, HOPPER_FACINGS[block['damage']]) storage_hoppers = { (5, -1, 2) } if (layer_x, layer_y, layer_z) in storage_hoppers: for slot in block['tileEntity']['Items']: if not item.matches_slot(slot): return 'Storage hopper at {} {} {} contains items of the wrong kind: {}.'.format(exact_x, exact_y, exact_z, alltheitems.item.Item.from_slot(slot, items_data=items_data).link_text()) elif block_symbol == '~': # hopper chain if block['id'] == 
'minecraft:hopper': pass #TODO check facing pass #TODO check alignment elif block['id'] == 'minecraft:air': pass #TODO check alignment else: return 'Block at {} {} {} should be a hopper or air, is {}.'.format(exact_x, exact_y, exact_z, block['id']) pass #TODO check hopper chain integrity else: return 'Not yet implemented: block at {} {} {} should be {}.'.format(exact_x, exact_y, exact_z, block_symbol) # error check: items in storage chests but not in access chest access_chest_fill_level = alltheitems.item.comparator_signal(north_half, south_half) bottom_dropper_fill_level = alltheitems.item.comparator_signal(block_at(*layer_coords(5, -7, 3), chunk_cache=chunk_cache)) if access_chest_fill_level < 2 and bottom_dropper_fill_level > 2: return 'Access chest is {}empty but there are items stuck in the storage dropper at {} {} {}.'.format('' if access_chest_fill_level == 0 else 'almost ', *layer_coords(5, -7, 3)) if durability and has_smart_chest: # error check: damaged or enchanted tools in storage chests storage_containers = set(CONTAINERS) - {(5, 0, 2), (5, 0, 3)} for container in storage_containers: for slot in block_at(*layer_coords(*container), chunk_cache=chunk_cache)['tileEntity']['Items']: if slot.get('Damage', 0) > 0: return 'Item in storage container at {} {} {} is damaged.'.format(*layer_coords(*container)) if len(slot.get('tag', {}).get('ench', [])) > 0: return 'Item in storage container at {} {} {} is enchanted.'.format(*layer_coords(*container)) def chest_state(coords, item_stub, corridor_length, item_name=None, pre_sorter=None, *, items_data=None, block_at=alltheitems.world.World().block_at, document_root=ati.document_root, chunk_cache=None, cache=None, allow_cache=True): if items_data is None: with (ati.assets_root / 'json' / 'items.json').open() as items_file: items_data = json.load(items_file) if chunk_cache is None: chunk_cache = {} if isinstance(item_stub, str): item_stub = {'id': item_stub} item = alltheitems.item.Item(item_stub, items_data=items_data) if item_name is None: item_name = item.info()['name'] state = None, 'This SmartChest is in perfect state.', None x, y, z = coords # determine the base coordinate, i.e. the position of the north half of the access chest if z % 2 == 0: # left wall base_x = 15 * x + 2 else: # right wall base_x = 15 * x - 3 base_y = 73 - 10 * y base_z = 28 + 10 * y + 4 * (z // 2) def layer_coords(layer_x, layer_y, layer_z): if z % 2 == 0: # left wall exact_x = base_x + 5 - layer_x else: # right wall exact_x = base_x - 5 + layer_x exact_y = base_y + layer_y exact_z = base_z + 3 - layer_z return exact_x, exact_y, exact_z # does the access chest exist? exists = False north_half = block_at(base_x, base_y, base_z, chunk_cache=chunk_cache) south_half = block_at(base_x, base_y, base_z + 1, chunk_cache=chunk_cache) if north_half['id'] != 'minecraft:chest' and south_half['id'] != 'minecraft:chest': state = 'gray', 'Access chest does not exist.', None elif north_half['id'] != 'minecraft:chest': state = 'gray', 'North half of access chest does not exist.', None elif south_half['id'] != 'minecraft:chest': state = 'gray', 'South half of access chest does not exist.', None else: exists = True # does it have a SmartChest? 
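# Worked example of the coordinate scheme above (the values follow directly from
# the formulas; the Ender-pearl chest at (x, y, z) == (1, 1, 0) is the one
# referenced further down for the global error checks):
#     base_x = 15 * 1 + 2                  = 17   (z is even, so left wall)
#     base_y = 73 - 10 * 1                 = 63
#     base_z = 28 + 10 * 1 + 4 * (0 // 2)  = 38
# i.e. the north half of that access chest is expected at (17, 63, 38), and
# layer_coords(5, -7, 3) -- the bottom storage dropper used elsewhere in this
# module -- resolves to (17 + 5 - 5, 63 - 7, 38 + 3 - 3) == (17, 56, 38).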
has_smart_chest = False missing_droppers = set() for dropper_y in range(base_y - 7, base_y): dropper = block_at(base_x, dropper_y, base_z, chunk_cache=chunk_cache) if dropper['id'] != 'minecraft:dropper': missing_droppers.add(dropper_y) if len(missing_droppers) == 7: if state[0] is None: state = 'orange', 'SmartChest droppers do not exist.', None elif len(missing_droppers) > 1: if state[0] is None: state = 'orange', 'SmartChest droppers at y={} do not exist.'.format(', y='.join(str(dropper) for dropper in missing_droppers)), None elif len(missing_droppers) == 1: if state[0] is None: state = 'orange', 'SmartChest dropper at y={} does not exist, is {}.'.format(next(iter(missing_droppers)), block_at(base_x, next(iter(missing_droppers)), base_z, chunk_cache=chunk_cache)['id']), None else: has_smart_chest = True # is it stackable? stackable = item.info().get('stackable', True) if not stackable and state[0] is None: state = 'cyan', "This SmartChest is in perfect state (but the item is not stackable, so it can't be sorted).", None # does it have a durability bar? durability = 'durability' in item.info() # does it have a sorter? has_sorter = False if item == 'minecraft:crafting_table' or stackable and item.max_stack_size < 64: filler_item = alltheitems.item.Item('minecraft:crafting_table', items_data=items_data) else: filler_item = alltheitems.item.Item('minecraft:ender_pearl', items_data=items_data) sorting_hopper = block_at(base_x - 2 if z % 2 == 0 else base_x + 2, base_y - 3, base_z, chunk_cache=chunk_cache) if sorting_hopper['id'] != 'minecraft:hopper': if state[0] is None: state = 'yellow', 'Sorting hopper does not exist, is {}.'.format(sorting_hopper['id']), None else: for slot in sorting_hopper['tileEntity']['Items']: if slot['Slot'] == 0 and stackable and not item.matches_slot(slot) and filler_item.matches_slot(slot): if state[0] is None or state[0] == 'cyan': state = 'yellow', 'Sorting hopper is full of {}, but the sorted item is stackable, so the first slot should contain the item.'.format(filler_item.link_text()), None break else: has_sorter = True # does it have an overflow?
has_overflow = False missing_overflow_hoppers = set() for overflow_x in range(base_x + 3 if z % 2 == 0 else base_x - 3, base_x + 6 if z % 2 == 0 else base_x - 6, 1 if z % 2 == 0 else -1): overflow_hopper = block_at(overflow_x, base_y - 7, base_z - 1, chunk_cache=chunk_cache) if overflow_hopper['id'] != 'minecraft:hopper': missing_overflow_hoppers.add(overflow_x) if len(missing_overflow_hoppers) == 0: has_overflow = True # state determined, check for errors if coords == (1, 1, 0): # Ender pearls message = global_error_checks(chunk_cache=chunk_cache, block_at=block_at) if message is not None: return 'red', message, None cache_path = ati.cache_root / 'cloud-chests.json' if cache is None: if cache_path.exists(): with cache_path.open() as cache_f: cache = json.load(cache_f) else: cache = {} max_age = datetime.timedelta(hours=1, minutes=random.randrange(0, 60)) # use a random value between 1 and 2 hours for the cache expiration if allow_cache and str(y) in cache and str(x) in cache[str(y)] and str(z) in cache[str(y)][str(x)] and cache[str(y)][str(x)][str(z)]['errorMessage'] is None and datetime.datetime.strptime(cache[str(y)][str(x)][str(z)]['timestamp'], '%Y-%m-%d %H:%M:%S') > datetime.datetime.utcnow() - max_age: message = cache[str(y)][str(x)][str(z)]['errorMessage'] pass # cached check results are recent enough else: # cached check results are too old, recheck message = chest_error_checks(x, y, z, base_x, base_y, base_z, item, item_name, exists, stackable, durability, has_smart_chest, has_sorter, has_overflow, filler_item, sorting_hopper, missing_overflow_hoppers, north_half, south_half, corridor_length, pre_sorter, layer_coords, block_at, items_data, chunk_cache, document_root) if ati.cache_root.exists(): if str(y) not in cache: cache[str(y)] = {} if str(x) not in cache[str(y)]: cache[str(y)][str(x)] = {} cache[str(y)][str(x)][str(z)] = { 'errorMessage': message, 'timestamp': datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') } with cache_path.open('w') as cache_f: json.dump(cache, cache_f, sort_keys=True, indent=4) if message is not None: return 'red', message, None # no errors, determine fill level if state[0] in (None, 'cyan', 'orange', 'yellow'): try: containers = CONTAINERS if state[0] in (None, 'cyan') else [ # layer coords of the access chest (5, 0, 2), (5, 0, 3) ] total_items = sum(max(0, sum(slot['Count'] for slot in block_at(*layer_coords(*container), chunk_cache=chunk_cache)['tileEntity']['Items'] if slot.get('Damage', 0) == 0 or not durability) - (4 * item.max_stack_size if container == (5, -7, 3) else 0)) for container in containers) # Don't count the 4 stacks of items that are stuck in the bottom dropper. Don't count damaged tools. 
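# FillLevel is defined elsewhere in this module; based on how it is used below
# (FillLevel(stack_size, total_items, max_slots, is_smart_chest=...), .fraction,
# .is_empty(), .is_full()), a minimal equivalent would look roughly like:
#     class FillLevel:
#         def __init__(self, stack_size, total_items, max_slots, *, is_smart_chest=True):
#             self.fraction = total_items / (max_slots * stack_size)
#         def is_empty(self): return self.fraction == 0
#         def is_full(self): return self.fraction >= 1
# This is only a sketch of the assumed interface; the real class may handle
# unstackable items and the is_smart_chest flag differently.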
max_slots = sum(alltheitems.item.NUM_SLOTS[block_at(*layer_coords(*container), chunk_cache=chunk_cache)['id']] for container in containers) - (0 if state[0] == 'orange' else 4) return state[0], state[1], FillLevel(item.max_stack_size, total_items, max_slots, is_smart_chest=state[0] in (None, 'cyan')) except: # something went wrong determining fill level, re-check errors message = chest_error_checks(x, y, z, base_x, base_y, base_z, item, item_name, exists, stackable, durability, has_smart_chest, has_sorter, has_overflow, filler_item, sorting_hopper, missing_overflow_hoppers, north_half, south_half, corridor_length, pre_sorter, layer_coords, block_at, items_data, chunk_cache, document_root) if ati.cache_root.exists(): if str(y) not in cache: cache[str(y)] = {} if str(x) not in cache[str(y)]: cache[str(y)][str(x)] = {} cache[str(y)][str(x)][str(z)] = { 'errorMessage': message, 'timestamp': datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') } with cache_path.open('w') as cache_f: json.dump(cache, cache_f, sort_keys=True, indent=4) if message is None: raise else: return 'red', message, None return state def cell_from_chest(coords, item_stub, corridor_length, item_name=None, pre_sorter=None, *, chunk_cache=None, items_data=None, colors_to_explain=None, cache=None, allow_cache=True): color, state_message, fill_level = chest_state(coords, item_stub, corridor_length, item_name, pre_sorter, items_data=items_data, chunk_cache=chunk_cache, cache=cache, allow_cache=allow_cache) if colors_to_explain is not None: colors_to_explain.add(color) if fill_level is None or fill_level.is_full(): return '<td style="background-color: {};">{}</td>'.format(HTML_COLORS[color], alltheitems.item.Item(item_stub, items_data=items_data).image()) else: return '<td style="background-color: {};">{}<div class="durability"><div style="background-color: #f0f; width: {}px;"></div></div></td>'.format(HTML_COLORS[color], alltheitems.item.Item(item_stub, items_data=items_data).image(), 0 if fill_level.is_empty() else 2 + int(fill_level.fraction * 13) * 2) def index(allow_cache=True): yield ati.header(title='Cloud') def body(): yield '<p>The <a href="//wiki.{host}/Cloud">Cloud</a> is the public item storage on <a href="//{host}/">Wurstmineberg</a>, consisting of 6 underground floors with <a href="//wiki.{host}/SmartChest">SmartChests</a> in them.</p>'.format(host=ati.host) yield """<style type="text/css"> .item-table td { box-sizing: content-box; height: 32px; width: 32px; position: relative; } .item-table .left-sep { border-left: 1px solid gray; } .durability { z-index: 1; } </style>""" chunk_cache = {} with (ati.assets_root / 'json' / 'items.json').open() as items_file: items_data = json.load(items_file) cache_path = ati.cache_root / 'cloud-chests.json' if cache_path.exists(): try: with cache_path.open() as cache_f: cache = json.load(cache_f) except ValueError: # cache JSON is corrupted, probably because of a full disk, try without cache cache_path.unlink() cache = None else: cache = None colors_to_explain = set() floors = {} for x, corridor, y, floor, z, chest in chest_iter(): if y not in floors: floors[y] = floor for y, floor in sorted(floors.items(), key=lambda tup: tup[0]): def cell(coords, item_stub, corridor): if isinstance(item_stub, str): item_stub = {'id': item_stub} item_name = None pre_sorter = None else: item_stub = item_stub.copy() if 'name' in item_stub: item_name = item_stub['name'] del item_stub['name'] else: item_name = None if 'sorter' in item_stub: pre_sorter = item_stub['sorter'] del item_stub['sorter'] else:
pre_sorter = None return cell_from_chest(coords, item_stub, len(corridor), item_name, pre_sorter, chunk_cache=chunk_cache, colors_to_explain=colors_to_explain, items_data=items_data, cache=cache, allow_cache=allow_cache) yield bottle.template(""" %import itertools <h2 id="floor{{y}}">{{y}}{{ordinal(y)}} floor (y={{73 - 10 * y}})</h2> <table class="item-table" style="margin-left: auto; margin-right: auto;"> %for x in range(-3, 4): %if x > -3: <colgroup class="left-sep"> <col /> <col /> </colgroup> %else: <colgroup> <col /> <col /> </colgroup> %end %end <tbody> %for z_left, z_right in zip(itertools.count(step=2), itertools.count(start=1, step=2)): %found = False <tr> %for x in range(-3, 4): %if str(x) not in floor: <td></td> <td></td> %continue %end %corridor = floor[str(x)] %if len(corridor) > z_right: {{!cell((x, y, z_right), corridor[z_right], corridor)}} %else: <td></td> %end %if len(corridor) > z_left: {{!cell((x, y, z_left), corridor[z_left], corridor)}} %found = True %else: <td></td> %end %end </tr> %if not found: %break %end %end </tbody> </table> """, ordinal=alltheitems.util.ordinal, cell=cell, floor=floor, y=y) color_explanations = collections.OrderedDict([ ('red', '<p>A red background means that there is something wrong with the chest. See the item info page for details.</p>'), ('gray', "<p>A gray background means that the chest hasn't been built yet or is still located somewhere else.</p>"), ('orange', "<p>An orange background means that the chest doesn't have a SmartChest yet. It can only store 54 stacks.</p>"), ('yellow', "<p>A yellow background means that the chest doesn't have a sorter yet.</p>"), ('cyan', '<p>A cyan background means that the chest has no sorter because it stores an unstackable item. These items should not be automatically <a href="//wiki.wurstmineberg.de/Soup#Cloud">sent</a> to the Cloud.</p>'), (None, '<p>A white background means that everything is okay: the chest has a SmartChest, a sorter, and overflow protection.</p>') ]) for chest_color in sorted(colors_to_explain, key=list(color_explanations.keys()).index): if chest_color is not None or len(colors_to_explain) > 1: yield color_explanations[chest_color] yield from ati.html_exceptions(body()) yield ati.footer(linkify_headers=True) def todo(): yield ati.header(title='Cloud by priority') def body(): yield """<style type="text/css"> .todo-table td { text-align: left; vertical-align: middle !important; } .todo-table .coord { width: 3em; text-align: right; } .todo-table .item-image { box-sizing: content-box; width: 32px; } .todo-table .item-name { width: 24em; } </style>""" headers = collections.OrderedDict([ ('red', 'Build errors'), ('gray', 'Missing chests'), ('orange', 'Missing SmartChests'), ('yellow', 'Missing sorters'), ('cyan', 'Empty SmartChests (unstackable)'), ('white', 'Empty SmartChests (stackable)'), ('cyan2', 'Missing items (unstackable)'), ('white2', 'Missing items (stackable)') ]) header_indexes = {color: i for i, color in enumerate(headers.keys())} def priority(pair): coords, state = pair x, y, z = coords color, _, fill_level, _ = state return header_indexes[color], None if fill_level is None else fill_level.fraction * (-1 if color == 'orange' else 1), y * (-1 if color == 'orange' else 1), x if y % 2 == 0 else -x, z chunk_cache = {} with (ati.assets_root / 'json' / 'items.json').open() as items_file: items_data = json.load(items_file) cache_path = ati.cache_root / 'cloud-chests.json' if cache_path.exists(): try: with cache_path.open() as cache_f: cache = json.load(cache_f) except ValueError: 
# cache JSON is corrupted, probably because of a full disk, try without cache cache_path.unlink() cache = None else: cache = None states = {} current_color = None for x, corridor, y, _, z, item_stub in chest_iter(): if isinstance(item_stub, str): item_stub = {'id': item_stub} item_name = None pre_sorter = None else: item_stub = item_stub.copy() if 'name' in item_stub: item_name = item_stub['name'] del item_stub['name'] else: item_name = None if 'sorter' in item_stub: pre_sorter = item_stub['sorter'] del item_stub['sorter'] else: pre_sorter = None color, state_message, fill_level = chest_state((x, y, z), item_stub, len(corridor), item_name, pre_sorter, items_data=items_data, chunk_cache=chunk_cache, cache=cache) if color is None: color = 'white' if color in ('cyan', 'white') and not fill_level.is_empty(): color += '2' if fill_level is None or not fill_level.is_full() or color not in ('cyan', 'white', 'cyan2', 'white2'): states[x, y, z] = color, state_message, fill_level, alltheitems.item.Item(item_stub, items_data=items_data) for coords, state in sorted(states.items(), key=priority): x, y, z = coords color, state_message, fill_level, item = state if color != current_color: if current_color is not None: yield '</tbody></table>' yield bottle.template('<h2 id="{{color}}">{{header}}</h2>', color=color, header=headers[color]) yield '<table class="todo-table table table-responsive"><thead><tr><th class="coord">X</th><th class="coord">Y</th><th class="coord">Z</th><th class="item-image">&nbsp;</th><th class="item-name">Item</th><th>{}</th></tr></thead><tbody>'.format('Fill Level' if color in ('cyan', 'white', 'cyan2', 'white2') else 'Info') current_color = color yield bottle.template(""" <tr> <td class="coord">{{x}}</td> <td class="coord">{{y}}</td> <td class="coord">{{z}}</td> <td class="item-image">{{!item.image()}}</td> <td class="item-name">{{!item.link_text()}}</td> <td style="background-color: {{color}}">{{!fill_level if color in ('#0ff', '#fff') else state_message}}</td> </tr> """, x=x, y=y, z=z, item=item, color=HTML_COLORS[color], fill_level=fill_level, state_message=state_message) yield '</tbody></table>' yield from ati.html_exceptions(body()) yield ati.footer(linkify_headers=True)
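# The fill-level logic above (known_signals for furnaces, access_chest_fill_level,
# bottom_dropper_fill_level) relies on alltheitems.item.comparator_signal().  A
# minimal stand-alone sketch, assuming it implements the standard vanilla container
# formula (signal 0 when empty, otherwise floor(1 + 14 * fullness)); the real helper
# also resolves per-item stack sizes from items_data and handles double chests:
import math

def comparator_signal_sketch(slots, num_slots, max_stack_size=64):
    """Redstone comparator strength (0-15) for a container's item list."""
    fullness = sum(slot['Count'] / max_stack_size for slot in slots) / num_slots
    return 0 if fullness == 0 else int(math.floor(1 + fullness * 14))

# For example, a double chest (54 slots) holding two full 64-item stacks gives
# floor(1 + (2 / 54) * 14) == 1, which is why the access-chest check above treats a
# signal below 2 as empty or almost empty.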
mit
6,210,669,962,743,115,000
59.09228
392
0.481912
false
3.874814
false
false
false
amirgeva/coide
mainwindow.py
1
54237
from PyQt4 import QtCore from PyQt4 import QtGui import os import re import stat import qutepart from workspace import WorkSpace import output from consts import FileRole from gdbwrapper import GDBWrapper from watchestree import WatchesTree from breakpoints import BreakpointsDB, BreakpointDialog from properties import Properties from functools import partial from globals import is_src_ext import utils import genmake import uis import plugins import dwarf class MainWindow(QtGui.QMainWindow): """ Main IDE Window Contains the main code view, along with docking panes for: source files, watches, call stack, and output """ LIBRARY_SCAN = "Scanning Libraries" def __init__(self,rootDir,parent=None): """ Initialize. rootDir indicates where data files are located """ super(MainWindow,self).__init__(parent) s=QtCore.QSettings() self.recent_ws=[d for d in s.value('recent_ws','').toString().split(':') if d] self.symbolScan=s.value('symbol_scan',True).toBool() self.setMinimumSize(QtCore.QSize(1024,768)) self.currentLine=0 self.currentFile='' self.rootDir=rootDir utils.setIconsDir(os.path.join(rootDir,"icons")) self.debugger=None self.breakpoints=BreakpointsDB() self.findDetails=None self.scm_mods=[] self.setWindowIcon(utils.loadIcon('coide')) self.setWindowTitle("Coide") self.generateQueue=set() self.editors={} self.file_times={} self.central=QtGui.QTabWidget() self.setCentralWidget(self.central) self.central.setTabsClosable(True) self.central.tabCloseRequested.connect(self.closeTab) self.central.currentChanged.connect(self.tabChanged) self.tabOrder=[] self.plugins=plugins.PluginsManager() self.setupMenu() self.setupContextMenuItems() self.setupToolbar(rootDir) self.showWorkspacePane() self.showOutputPane() self.showWatchesPane() self.showLocalsPane() self.showCallStackPane() self.buildProcess=None self.timerCall=None self.config=s.value("config").toString() if self.config=='': self.config="Debug" self.configCombo.setCurrentIndex(0 if self.config=='Debug' else 1) self.workspaceTree.setConfig(self.config) self.setAllFonts() self.loadWindowSettings() # Debugger timer that is supposed to periodically check # if the program has stopped at a breakpoint self.timer=QtCore.QTimer(self) self.timer.timeout.connect(self.update) self.runningWidget=None self.asyncPollTimer=QtCore.QTimer(self) self.asyncPollTimer.timeout.connect(self.pollAsync) self.generateTimer=QtCore.QTimer() self.generateTimer.timeout.connect(self.timer1000) self.generateTimer.start(1000) self.lowFreqTimer=QtCore.QTimer() self.lowFreqTimer.timeout.connect(self.timer5000) self.lowFreqTimer.start(5000) #self.showStatus("Generating All Makefiles") #self.timerCall=self.generateAllInThread self.timerCall=None self.paneWatches.hide() self.paneLocals.hide() self.paneStack.hide() #self.sc=QtGui.QShortcut("Ctrl+F8",self) #self.sc.activated.connect(self.prtsc) def closeEvent(self, event): """ Called before the application window closes Informs sub-windows to prepare and saves window settings to allow future sessions to look the same """ self.workspaceTree.onClose() self.workspaceTree.saveTabs(self.central) while self.central.count()>0: if not self.closeFile(): event.ignore() return self.timer.stop() self.generateTimer.stop() if self.debugger: self.debugger.closingApp() settings = QtCore.QSettings() settings.setValue("geometry", self.saveGeometry()) settings.setValue("windowState", self.saveState()) settings.sync() self.removeTempScripts() super(MainWindow,self).closeEvent(event) def saveDebugWindowState(self): """ Save the state of the tool docks, like 
watches and call stack """ settings = QtCore.QSettings() settings.setValue("debugWindowState", self.saveState()) settings.sync() def loadDebugWindowState(self): """ Restore previous debug windows layout """ settings = QtCore.QSettings() self.restoreState(settings.value("debugWindowState").toByteArray()) def loadWindowSettings(self): """ Restore the window size settings from the previous session """ settings = QtCore.QSettings() self.restoreGeometry(settings.value("geometry").toByteArray()) self.restoreState(settings.value("windowState").toByteArray()) self.loadTabs() def loadTabs(self): self.closeAllTabs() ws=self.workspaceTree.settings() opentabs=ws.value('opentabs','').toString() opentabs=opentabs.split(',') for path in opentabs: self.openSourceFile(path) curtab=ws.value('curtab','').toString() if curtab: self.setActiveSourceFile(curtab) def setupMenu(self): """ Creates the application main menu The action handlers are also mapped from the toolbar icons """ bar=self.menuBar() m=bar.addMenu('&File') m.addAction(QtGui.QAction('&Initialize Workspace',self,triggered=self.initWorkspace)) m.addAction(QtGui.QAction('Open &Workspace',self,triggered=self.openWorkspace)) self.recents_menu=m.addMenu('&Recent Workspaces') m.addAction(QtGui.QAction('&Save',self,shortcut='Ctrl+S',triggered=self.saveFile)) m.addAction(QtGui.QAction('Save &As',self,triggered=self.saveAsFile)) m.addAction(QtGui.QAction('&Close File',self,shortcut='Ctrl+F4',triggered=self.closeFile)) m.addAction(QtGui.QAction('E&xit',self,shortcut='Ctrl+Q',triggered=self.exitApp)) m=bar.addMenu('&Edit') m.addAction(QtGui.QAction('&Copy',self,shortcut='Ctrl+C',triggered=self.onCopy)) m.addAction(QtGui.QAction('C&ut',self,shortcut='Ctrl+X',triggered=self.onCut)) m.addAction(QtGui.QAction('&Paste',self,shortcut='Ctrl+V',triggered=self.onPaste)) m.addSeparator() m.addAction(QtGui.QAction('&Find/Replace',self,shortcut='Ctrl+F',triggered=self.onFindReplace)) m.addAction(QtGui.QAction('Find/Replace &Next',self,shortcut='F3',triggered=self.onFindNext)) m=bar.addMenu('&View') panes=m.addMenu('Panes') panes.addAction(QtGui.QAction('&Workspace',self,triggered=self.onViewPaneWorkspace)) panes.addAction(QtGui.QAction('&Output',self,triggered=self.onViewPaneOutput)) m.addAction(QtGui.QAction('&Next Tab',self,shortcut='Ctrl+F6',triggered=self.onViewNextTab)) m=bar.addMenu('&Build') m.addAction(QtGui.QAction('&Build',self,shortcut='F7',triggered=self.build)) m.addAction(QtGui.QAction('&Clean',self,triggered=self.clean)) m.addAction(QtGui.QAction('&Rebuild',self,shortcut='Shift+F7',triggered=self.rebuild)) m.addAction(QtGui.QAction('&Settings',self,shortcut='Ctrl+F7',triggered=self.buildSettings)) m.addAction(QtGui.QAction('&Next Error',self,shortcut='F4',triggered=self.nextError)) m=bar.addMenu('&Debug') m.addAction(QtGui.QAction('&Run',self,shortcut='Ctrl+F5',triggered=self.runProject)) m.addAction(QtGui.QAction('&Start/Continue Debugger',self,shortcut='F5',triggered=self.startDebug)) ma=m.addMenu('Actions') ma.addAction(QtGui.QAction('&Step',self,shortcut='F11',triggered=self.actStep)) ma.addAction(QtGui.QAction('&Next',self,shortcut='F10',triggered=self.actNext)) ma.addAction(QtGui.QAction('Step &Out',self,shortcut='Shift+F11',triggered=self.actOut)) ma.addAction(QtGui.QAction('&Break',self,shortcut='Ctrl+C',triggered=self.actBreak)) ma.addAction(QtGui.QAction('Sto&p',self,shortcut='Shift+F5',triggered=self.actStop)) ma=m.addMenu('&Breakpoints') ma.addAction(QtGui.QAction('&Clear',self,triggered=self.clearBreakpoints)) m=bar.addMenu('&Settings') 
m.addAction(QtGui.QAction('&General',self,triggered=self.settingsGeneral)) m.addAction(QtGui.QAction('&Fonts',self,triggered=self.settingsFonts)) m.addAction(QtGui.QAction('&Editor',self,triggered=self.settingsEditor)) m.addAction(QtGui.QAction('&Templates',self,triggered=self.settingsTemplates)) m.addAction(QtGui.QAction('&Plugins',self,triggered=self.settingsPlugins)) m=bar.addMenu('&Tools') pm=m.addMenu('&Plugins') self.plugins.addToMenu(pm) def onViewPaneWorkspace(self): self.paneWorkspace.show() def onViewPaneOutput(self): self.paneOutput.show() def onViewNextTab(self): count=self.central.count() if count>0: if len(self.tabOrder)!=count: self.tabOrder=range(0,self.central.count()) if self.central.currentIndex() == self.tabOrder[0]: self.tabOrder=self.tabOrder[1:]+self.tabOrder[:1] self.central.setCurrentIndex(self.tabOrder[0]) def setupContextMenuItems(self): self.contextMenuItems={ 'all':[ QtGui.QAction('Toggle Breakpoint',self,triggered=self.contextToggleBreakpoint) ], 'files':[ QtGui.QAction('Open Header',self,triggered=self.contextOpenHeader) ], 'breakpoints':[ QtGui.QAction('Edit Breakpoint',self,triggered=self.contextEditBreakpoint), QtGui.QAction('Dis/Enable Breakpoint',self,triggered=self.contextAbleBreakpoint) ], 'symbols':[ QtGui.QAction('Goto Definition',self,triggered=self.contextGotoDefinition) ] } def insertContextMenuItems(self,editor,menu): first=None acts=menu.actions() if len(acts)>0: first=acts[0] actions=list(self.contextMenuItems.get('all')) path=editor.path line=editor.contextMenuLine word=editor.contextMenuWord self.context=(path,line,word) if len(word)>0: actions.extend(self.contextMenuItems.get('symbols')) if self.breakpoints.hasBreakpoint(path,line): actions.extend(self.contextMenuItems.get('breakpoints')) if self.workspaceTree.exists(editor.contextFilename): actions.extend(self.contextMenuItems.get('files')) menu.insertActions(first,actions) menu.insertSeparator(first) def contextGotoDefinition(self): src=os.path.join(self.workspaceTree.root,'src') intr=os.path.join(self.workspaceTree.root,'.intr') srcpath=self.context[0] objpath='' if srcpath.startswith(src) and is_src_ext(srcpath): rel=srcpath[len(src):] rel=rel[1:-4]+'.o' objpath=os.path.join(intr,rel) (dir,name)=os.path.split(objpath) objpath=os.path.join(dir,'Debug',name) if srcpath.startswith(self.workspaceTree.root) and srcpath.endswith('.h'): dir=self.workspaceTree.mainPath() mkPath=os.path.join(dir,'Makefile') objpath=utils.objForHeader(mkPath,srcpath) if len(objpath)>0: try: s=dwarf.DwarfSymbols(objpath) (path,line)=s.find(self.context[2]) if len(path)>0: self.goToSource(path,line,1) except IOError: utils.message('Project must first be compiled in Debug') def contextToggleBreakpoint(self): e=self.central.currentWidget() self.breakpoints.toggleBreakpoint(e) e.update() def contextEditBreakpoint(self): e=self.central.currentWidget() path=e.path line=e.contextMenuLine bp=self.breakpoints.getBreakpoint(path,line) if bp: d=BreakpointDialog() d.condition.setText(bp.condition()) utils.setCheckbox(d.enabled,bp.isEnabled()) if d.exec_(): bp.setCondition(d.condition.text()) bp.able(utils.getCheckbox(d.enabled)) self.breakpoints.update() e.update() def contextAbleBreakpoint(self): e=self.central.currentWidget() path=e.path line=e.contextMenuLine bp=self.breakpoints.getBreakpoint(path,line) if bp: if bp.isEnabled(): bp.disable() else: bp.enable() self.breakpoints.update() e.update() def contextOpenHeader(self): e=self.central.currentWidget() filename=self.workspaceTree.exists(e.contextFilename) if filename: 
self.workspaceTree.openFile(filename) def markToggleBreakpoint(self,line): e=self.central.currentWidget() #path=e.path self.breakpoints.toggleBreakpoint(e) e.update() def createPluginCuror(self): from pcursor import PluginCursor e=self.central.currentWidget() if e: return PluginCursor(e.textCursor()) return None def setupToolbar(self,rootDir): """ Creates the application main toolbar """ tb=self.addToolBar('Actions') tb.setObjectName("Toolbar") tb.addAction(utils.loadIcon('gear'),'Generate Makefiles').triggered.connect(self.generate) self.configCombo=self.createConfigCombo(tb) tb.addWidget(self.configCombo) tb.addAction(utils.loadIcon('step.png'),'Step').triggered.connect(self.actStep) tb.addAction(utils.loadIcon('next.png'),'Next').triggered.connect(self.actNext) tb.addAction(utils.loadIcon('out.png'),'Out').triggered.connect(self.actOut) tb.addAction(utils.loadIcon('cont.png'),'Continue').triggered.connect(self.actCont) tb.addAction(utils.loadIcon('break.png'),'Break').triggered.connect(self.actBreak) tb.addAction(utils.loadIcon('stop.png'),'Stop').triggered.connect(self.actStop) self.createTemplatesCombo(tb) tb.addWidget(self.tmplCombo) def exitApp(self): self.close() def nextError(self): e=self.outputEdit.getNextError() if e: self.showStatus(e[3]) self.goToSource(e[0],e[1],e[2],'#ff8080') self.outputEdit.highlightLine(e[4]) def onCopy(self): (e,p)=self.currentEditor() if e: e.copy() def onCut(self): (e,p)=self.currentEditor() if e: e.cut() def onPaste(self): (e,p)=self.currentEditor() if e: e.paste() def onFindReplace(self): (e,p)=self.currentEditor() if e: from finddlg import FindDialog d=FindDialog(self) c=e.textCursor() if c.hasSelection: d.setFindText(c.selectedText()) if d.exec_(): self.findDetails=d.details self.onFindNext() def onFindNext(self): (e,p)=self.currentEditor() if e and self.findDetails: flags=QtGui.QTextDocument.FindFlags() if not self.findDetails.get('find_case'): flags = flags | QtGui.QTextDocument.FindCaseSensitively if self.findDetails.get('find_words'): flags = flags | QtGui.QTextDocument.FindWholeWords if self.findDetails.get('find_back'): flags = flags | QtGui.QTextDocument.FindBackward text=self.findDetails.get('find_text') replaceText=self.findDetails.get('find_replace_text') replace=self.findDetails.get('find_replace') all=self.findDetails.get('find_all') if all and replace: while e.find(text,flags): e.textCursor().insertText(replaceText) elif e.find(text,flags): if replace: e.textCursor().insertText(replaceText) def settingsTemplates(self): """ Show the code templates editing dialog """ from settings import TemplatesDialog d=TemplatesDialog() if d.exec_(): d.save() self.updateTemplates() def settingsPlugins(self): """ Show the python plugins settings dialog """ from plugins import PluginsDialog d=PluginsDialog() if d.exec_(): d.save() def settingsGeneral(self): """ Show the general settings """ from settings import GeneralSettingsDialog d=GeneralSettingsDialog() if d.exec_(): d.save() self.updateGeneralSettings() def settingsEditor(self): """ Show the editor settings """ from settings import EditorSettingsDialog d=EditorSettingsDialog() if d.exec_(): d.save() self.updateEditorsSettings() def settingsFonts(self): """ Edit the font settings for the code window and various panes """ from settings import FontSettingsDialog d=FontSettingsDialog() if d.exec_(): self.setAllFonts() def loadFont(self,name,target): """ Load previously saved font settings """ settings=QtCore.QSettings() if settings.contains(name): fb=settings.value(name).toByteArray() 
buf=QtCore.QBuffer(fb) buf.open(QtCore.QIODevice.ReadOnly) font=QtGui.QFont() QtCore.QDataStream(fb) >> font target.setFont(font) else: target.setFont(QtGui.QFont('Monospace',14)) def setAllFonts(self): """ Apply fonts to the various sub-windows """ for e in self.editors: self.loadFont('codefont',self.editors.get(e)) #self.loadFont('watchesfont',self.watchesTree) #self.loadFont('watchesfont',self.stackList) self.loadFont('watchesfont',self.outputEdit) self.loadFont('sourcesfont',self.workspaceTree) def updateGeneralSettings(self): """ Apply general settings """ s=QtCore.QSettings() sortFiles=s.value('sortFiles',True).toBool() self.workspaceTree.setSorting(sortFiles) def updateEditorsSettings(self): """ Apply editor settings to all open tabs """ s=QtCore.QSettings() indent=(s.value('indent',2).toInt())[0] clang=s.value('clangCompletion',True).toBool() for e in self.editors: self.editors.get(e).indentWidth=indent self.editors.get(e).clangCompletion=clang def updateTemplates(self): self.tmplCombo.clear() self.tmplCombo.addItem("= Templates =") d=QtCore.QSettings().value('tmplDir','').toString() if d: templates=os.listdir(d) templates=[os.path.splitext(t)[0] for t in templates if t.endswith('.template')] for t in templates: self.tmplCombo.addItem(t) def showStatus(self,status): self.statusBar().showMessage(status) def findUndefinedReferences(self,output): """ Search the linker output to find undefined reference errors, and collect the missing symbol names """ undefined=set() base='undefined reference to ' if output: for line in output: p=line.find(base) if p>0: name=line[(p+len(base)):] if name.startswith('symbol '): name=name[8:] else: name=name[1:] p=name.find('(') if p>0: name=name[0:p] else: name=name[0:len(name)-1] p=name.find('@') if p>0: name=name[0:p] undefined.add(name) return undefined def toggleAdded(self,item): if item.checkState(): self.added.add(item.text()) else: self.added.remove(item.text()) def attemptUndefResolution(self,undefs): if not self.symbolScan: return from system import getLibrarySymbols, getWorkspaceSymbols suggested={} syms=getLibrarySymbols() wsSyms=getWorkspaceSymbols() for sym in undefs: words=sym.split(':') words=[w for w in words if w] words.append(sym) for word in words: if word in syms: s=syms.get(word) for l in s: if not l in suggested: suggested[l]=1 else: n=suggested.get(l)+1 suggested[l]=n if word in wsSyms: s=wsSyms.get(word) for l in s: if not l in suggested: suggested[l]=1 else: n=suggested.get(l)+1 suggested[l]=n self.added=set() if len(suggested)>0: d=uis.loadDialog('libsuggest') model = QtGui.QStandardItemModel(d.libsList) for s in suggested: item=QtGui.QStandardItem(s) item.setCheckable(True) model.appendRow(item) d.libsList.setModel(model) model.itemChanged.connect(lambda item : self.toggleAdded(item)) if d.exec_(): self.workspaceTree.addLibrariesToProject(self.added) def buildSettings(self,path=''): from buildsettings import BuildSettingsDialog if not path: path=self.workspaceTree.mainPath() if not path: path=self.workspaceTree.root d=BuildSettingsDialog(self,path) d.exec_() self.generateQueue.add(path) def checkBuildOutput(self): if self.buildProcess: self.processBuildOutput(self.buildProcess.text) self.buildProcess=None def pollAsync(self): rcs=utils.pollAsync() if len(rcs)>0: if rcs[0]==0: utils.appendColorLine(self.outputEdit,"Success...",'#008020') else: utils.appendColorLine(self.outputEdit,"= Failed ({}) =".format(rcs[0]),'#ff0000') self.checkBuildOutput() self.asyncPollTimer.stop() self.showStatus("Done") def 
execute(self,path,cmd,*args): if utils.pendingAsync(): self.showStatus('Busy') return None self.outputEdit.clearAll() p=utils.execute(self.outputEdit,path,cmd,*args) if not self.asyncPollTimer.isActive(): self.asyncPollTimer.start(10) return p def buildSpecific(self,path): self.saveAll() self.autoGenerate() if len(path)>0: self.showStatus("Building "+os.path.basename(path)) s=QtCore.QSettings() if s.value('parallel_make',False).toBool(): self.buildProcess=self.execute(path,'/usr/bin/make','-j','3',self.config) else: self.buildProcess=self.execute(path,'/usr/bin/make',self.config) def processBuildOutput(self,output): undefs=self.findUndefinedReferences(output) if len(undefs)>0: self.attemptUndefResolution(undefs) def build(self): self.buildSpecific(self.workspaceTree.mainPath()) def cleanSpecific(self,path): if len(path)>0: self.execute(path,'/usr/bin/make','clean_{}'.format(self.config)) def clean(self): self.cleanSpecific(self.workspaceTree.mainPath()) def rebuildSpecific(self,path): if len(path)>0: cfg=self.config self.showStatus("Rebuilding "+os.path.basename(path)) self.buildProcess=self.execute(path,'/usr/bin/make','clean_'+cfg,cfg) def rebuild(self): self.rebuildSpecific(self.workspaceTree.mainPath()) def autoGenerateRun(self): for path in self.generateQueue: genmake.generateDirectory(self.workspaceTree.root,path) self.generateQueue.clear() self.showStatus('Ready') def autoGenerate(self): if len(self.generateQueue)>0: self.showStatus('Generating Makefiles') self.timerCall=self.autoGenerateRun else: if genmake.genThreadDone(): self.showStatus("Makefile Generate Done") def waitForScanner(self): if self.symbolScan: import system import time while not system.isScannerDone(): time.sleep(1) def timer1000(self): e=self.central.currentWidget() if e: updates=self.breakpoints.updateLineNumbers(e.path) for path in updates: e=self.editors.get(path) if e: e.update() if self.timerCall: f=self.timerCall self.timerCall=None f() self.autoGenerate() #if self.statusBar().currentMessage() == MainWindow.LIBRARY_SCAN: if self.symbolScan: import system if system.isScannerDone(): #if system.scanq and not system.scanq.empty(): if self.statusBar().currentMessage() == MainWindow.LIBRARY_SCAN: self.showStatus('Ready') system.getLibrarySymbols() def timer5000(self): import scm res=scm.scan(self.workspaceTree.root) if res: new_scm_mods=[] for (name,status) in res: path=os.path.join(self.workspaceTree.root,name) if path in self.workspaceTree.fileItems: item=self.workspaceTree.fileItems.get(path) if status=='Modified': item.setForeground(0,QtGui.QBrush(QtGui.QColor(255,0,0))) elif status=='Staged': item.setForeground(0,QtGui.QBrush(QtGui.QColor(0,255,0))) new_scm_mods.append(item) for item in self.scm_mods: if not item in new_scm_mods: item.setForeground(0,QtGui.QBrush(QtGui.QColor(0,0,0))) self.scm_mods=new_scm_mods for path in self.editors: last=self.file_times.get(path) cur=os.path.getmtime(path) if cur!=last: self.file_times[path]=cur res=QtGui.QMessageBox.question(self,'File changed','Reload {}'.format(path),QtGui.QMessageBox.Yes,QtGui.QMessageBox.No) if res==QtGui.QMessageBox.Yes: text=''.join(open(path,'r').readlines()) self.editors.get(path).text=text def generateAllInThread(self): genmake.generateTree(self.workspaceTree.root,False) def generateAll(self): genmake.generateTree(self.workspaceTree.root,True) def generate(self): mb=QtGui.QMessageBox() mb.setText("Generate make files") mb.setInformativeText("Overwrite all make files?") mb.setStandardButtons(QtGui.QMessageBox.Yes|QtGui.QMessageBox.No) 
mb.setDefaultButton(QtGui.QMessageBox.Yes) rc=mb.exec_() if rc==QtGui.QMessageBox.Yes: self.generateAll() utils.message("Done") def createHelloWorldProject(self,dir): try: os.makedirs(dir) except OSError: pass mainpath=os.path.join(dir,'main.cpp') f=open(mainpath,"w") f.write('#include <iostream>\n\n\nint main(int argc, char* argv[])\n') f.write('{\n std::cout << "Hello World" << std::endl;\n return 0;\n}\n') f.close() self.workspaceTree.update() genmake.generateDirectory(self.workspaceTree.root,dir) self.workspaceTree.setMainPath(dir) def initWorkspace(self): d=QtGui.QFileDialog() d.setFileMode(QtGui.QFileDialog.Directory) d.setOption(QtGui.QFileDialog.ShowDirsOnly) if d.exec_(): ws=(d.selectedFiles())[0] os.makedirs(os.path.join(ws,'include')) dir=os.path.join(ws,'src','hello') self.workspaceTree.setWorkspacePath(ws) self.createHelloWorldProject(dir) self.workspaceTree.saveSettings() self.generateAll() def updateRecents(self): ws=self.workspaceTree.root if ws in self.recent_ws: del self.recent_ws[self.recent_ws.index(ws)] self.recent_ws.insert(0,ws) while len(self.recent_ws)>4: del self.recent_ws[-1] s=QtCore.QSettings() s.setValue('recent_ws',':'.join(self.recent_ws)) s.sync() self.recents_menu.clear() handlers=[partial(self.openRecent,w) for w in self.recent_ws] for ws,h in zip(self.recent_ws,handlers): self.recents_menu.addAction(QtGui.QAction(ws,self,triggered=h)) def openRecent(self,ws): self.workspaceTree.saveTabs(self.central) self.closeAllTabs() self.workspaceTree.setWorkspacePath(ws) #self.generateAll() self.loadTabs() self.waitForScanner() import symbolscanner symbolscanner.setWorkspacePath(ws) self.updateRecents() def openWorkspace(self): d=QtGui.QFileDialog() d.setFileMode(QtGui.QFileDialog.Directory) d.setOption(QtGui.QFileDialog.ShowDirsOnly) if d.exec_(): ws=(d.selectedFiles())[0] self.openRecent(ws) def saveTabFile(self,index): n=self.central.tabBar().count() if index>=0 and index<n: path=self.central.tabToolTip(index) editor=self.editors.get(path) if editor: doc=editor.document() if doc.isModified(): f=open(path,'w') if not f: utils.errorMessage('Cannot write file: {}'.format(path)) return f.write(doc.toPlainText()) f.close() doc.setModified(False) self.file_times[path]=os.path.getmtime(path) #dir=os.path.dirname(path) #self.generateQueue.add(dir) if self.symbolScan: from system import getLibrarySymbols getLibrarySymbols() from symbolscanner import rescanOnFileSave rescanOnFileSave(path) def saveFile(self): n=self.central.tabBar().count() if n>0: self.saveTabFile(self.central.currentIndex()) def saveAll(self): n=self.central.tabBar().count() for i in xrange(0,n): self.saveTabFile(i) def saveAsFile(self): pass def closeAllTabs(self): while self.central.count()>0: if not self.closeTab(0): return False return True def tabChanged(self,index): for i in xrange(0,len(self.tabOrder)): if self.tabOrder[i]==index: self.tabOrder=self.tabOrder[i:]+self.tabOrder[:i] break def closeTab(self,index): path=self.central.tabToolTip(index) editor=self.editors.get(path) if editor: doc=editor.document() if doc.isModified(): mb = QtGui.QMessageBox() mb.setText("{} has been modified.".format(os.path.basename(path))) mb.setInformativeText("Do you want to save your changes?") mb.setStandardButtons(QtGui.QMessageBox.Save | QtGui.QMessageBox.Discard | QtGui.QMessageBox.Cancel) mb.setDefaultButton(QtGui.QMessageBox.Save) rc = mb.exec_() if rc == QtGui.QMessageBox.Save: f=open(path,'w') if not f: utils.errorMessage('Cannot write file: {}'.format(path)) return False f.write(doc.toPlainText()) 
f.close() elif rc == QtGui.QMessageBox.Cancel: return False del self.editors[path] del self.file_times[path] self.central.removeTab(index) return True def closeFile(self): n=self.central.tabBar().count() if n>0: index=self.central.currentIndex() return self.closeTab(index) return False def currentEditor(self): if self.central.count()>0: cur=self.central.currentIndex() path=self.central.tabToolTip(cur) if path in self.editors: return (self.editors.get(path),path) return (None,None) def templateSelected(self,index): (editor,path)=self.currentEditor() if index>0 and editor: template=self.tmplCombo.itemText(index) d=QtCore.QSettings().value('tmplDir','').toString() if d: tpath=os.path.join(d,template+".template") try: f=open(tpath,'r') code=f.read() if code: cursor=editor.textCursor() props=Properties() props.assign('PATH',path) base=os.path.basename(path) props.assign('FILENAME',base) p=base.find('.') if (p>0): props.assign('FILEBASE',base[0:p]) props.assign('SELECTION',cursor.selectedText()) cursor.removeSelectedText() import templates text=templates.generateCode(code,props) cursor.insertText(text) except IOError: utils.errorMessage("Cannot read file: {}".format(path)) self.tmplCombo.setCurrentIndex(0) def showWorkspacePane(self): """ Creates a docking pane that shows a list of source files """ self.paneWorkspace=QtGui.QDockWidget("Workspace",self) self.paneWorkspace.setObjectName("Workspace") self.paneWorkspace.setAllowedAreas(QtCore.Qt.LeftDockWidgetArea|QtCore.Qt.RightDockWidgetArea) self.workspaceTree=WorkSpace(self.paneWorkspace,self) self.workspaceTree.depsChanged.connect(lambda path: self.generateQueue.add(path)) self.paneWorkspace.setWidget(self.workspaceTree) self.addDockWidget(QtCore.Qt.LeftDockWidgetArea,self.paneWorkspace) self.updateWorkspace() self.workspaceTree.doubleClicked.connect(self.docDoubleClicked) self.showStatus(MainWindow.LIBRARY_SCAN) if self.symbolScan: from system import startSymbolScan startSymbolScan(self.workspaceTree.root) else: from system import disableSymbolScan disableSymbolScan() self.updateRecents() def updateWorkspace(self): self.workspaceTree.update() def setActiveSourceFile(self,path): if path in self.editors: editor=self.editors.get(path) n=self.central.tabBar().count() for i in xrange(0,n): if self.central.widget(i) == editor: self.central.tabBar().setCurrentIndex(i) return True return False def fixPath(self,path): if path.startswith(self.rootDir): path=os.path.relpath(path,self.rootDir) return path ''' Makes the path given the active source file in the editor. If the file is already open, it is made active. If not, it is opened and made active. 
Function returns true if the file is found and opened ''' def openSourceFile(self,path): path=self.fixPath(path) if self.setActiveSourceFile(path): return True else: try: f=open(path,"r") if not f: return False lines=f.readlines() if lines: firstLine=lines[0] s=QtCore.QSettings() editor=qutepart.Qutepart() editor.setPath(path) editor.detectSyntax(sourceFilePath=path, firstLine=firstLine) editor.lineLengthEdge = 1024 editor.drawIncorrectIndentation = True editor.drawAnyWhitespace = False editor.indentUseTabs = False editor.indentWidth = (s.value('indent',2).toInt())[0] editor.text="".join(lines) editor.setLineWrapMode(QtGui.QPlainTextEdit.NoWrap) editor.setWorkspace(self.workspaceTree) editor.setMainWindow(self) index=self.central.addTab(editor,os.path.basename(path)) self.central.setTabToolTip(index,path) self.editors[path]=editor self.file_times[path]=os.path.getmtime(path) self.loadFont('codefont',editor) self.central.tabBar().setCurrentIndex(index) bps=self.breakpoints.pathBreakpoints(path) editor.bpMarks=bps editor._markArea.blockDoubleClicked.connect(self.markToggleBreakpoint) return True except IOError: return False return False def docDoubleClicked(self,index): item=self.workspaceTree.currentItem() path=item.data(0,FileRole).toString() if len(path)>0: self.openSourceFile(path) if path in self.editors: self.editors.get(path).setFocus(QtCore.Qt.MouseFocusReason) def goToSource(self,path,row,col,color=''): """ Given a file path, and a position within, open a tab or switch to an already open tab, and scroll to that position. Usually useful to find references or compiler error positions """ path=self.fixPath(path) if self.openSourceFile(path): editor=self.editors.get(path) if editor: self.setActiveSourceFile(path) c=editor.textCursor() c.movePosition(QtGui.QTextCursor.Start) c.movePosition(QtGui.QTextCursor.Down,n=row-1) c.movePosition(QtGui.QTextCursor.Right,n=col-1) editor.setTextCursor(c) editor.ensureCursorVisible() if len(color)>0: editor.colorLine(row,color) def showCallStackPane(self): self.paneStack=QtGui.QDockWidget("Call Stack",self) self.paneStack.setObjectName("CallStack") self.paneStack.setAllowedAreas(QtCore.Qt.BottomDockWidgetArea) self.stackList=QtGui.QListWidget(self.paneStack) self.paneStack.setWidget(self.stackList) self.addDockWidget(QtCore.Qt.BottomDockWidgetArea,self.paneStack) self.loadFont('watchesfont',self.stackList) self.stackList.itemDoubleClicked.connect(self.stackItemDoubleClicked) def showLocalsPane(self): self.paneLocals=QtGui.QDockWidget("Locals",self) self.paneLocals.setObjectName("Locals") self.paneLocals.setAllowedAreas(QtCore.Qt.BottomDockWidgetArea) self.localsTree=WatchesTree(self.paneLocals) self.localsTree.setColumnCount(2) self.localsTree.setHeaderLabels(['Name','Value']) self.paneLocals.setWidget(self.localsTree) self.addDockWidget(QtCore.Qt.BottomDockWidgetArea,self.paneLocals) self.loadFont('watchesfont',self.watchesTree) def showWatchesPane(self): self.paneWatches=QtGui.QDockWidget("Watches",self) self.paneWatches.setObjectName("Watches") self.paneWatches.setAllowedAreas(QtCore.Qt.BottomDockWidgetArea) self.watchesTree=WatchesTree(self.paneWatches) self.watchesTree.setColumnCount(2) self.watchesTree.setHeaderLabels(['Name','Value']) self.paneWatches.setWidget(self.watchesTree) self.addDockWidget(QtCore.Qt.BottomDockWidgetArea,self.paneWatches) self.loadFont('watchesfont',self.watchesTree) self.watchesTree.addTopLevelItem(QtGui.QTreeWidgetItem(['* Double-Click for new watch'])) self.watchesTree.resizeColumnToContents(0) 
self.watchesTree.itemDoubleClicked.connect(lambda item,column : self.watchDoubleClicked(item,column)) def showOutputPane(self): self.paneOutput=QtGui.QDockWidget("Output",self) self.paneOutput.setObjectName("Output") self.paneOutput.setAllowedAreas(QtCore.Qt.BottomDockWidgetArea) self.outputEdit=output.OutputWidget(self.paneOutput,self) self.outputEdit.setReadOnly(True) self.paneOutput.setWidget(self.outputEdit) self.addDockWidget(QtCore.Qt.BottomDockWidgetArea,self.paneOutput) def stackItemDoubleClicked(self,item): pat='at (.+):(\d+)' m=re.search(pat,item.text()) if m: g=m.groups() path=g[0] line=int(g[1]) self.goToSource(path,line,1) else: row=self.stackList.row(item) if row<(self.stackList.count()-1): self.stackItemDoubleClicked(self.stackList.item(row+1)) def watchDoubleClicked(self,item,column): """ Edits existing watches, or adds a new watch """ changed=False index=self.watchesTree.indexOfTopLevelItem(item) if item.text(column)=='* Double-Click for new watch': res=QtGui.QInputDialog.getText(self,'New Watch','Expression') expr=res[0] if len(expr)>0 and res[1]: self.watchesTree.insertTopLevelItem(index,QtGui.QTreeWidgetItem([expr])) changed=True self.updateWatches() else: watch=item.text(0) res=QtGui.QInputDialog.getText(self,"Edit Watch",'Expression',text=watch) watch=res[0] if res[1]: changed=True if len(watch)>0: item.setText(0,watch) self.updateWatches() else: self.watchesTree.takeTopLevelItem(index) if changed: self.saveWatches() def createConfigCombo(self,parent): configCombo=QtGui.QComboBox(parent) configCombo.addItem("Debug") configCombo.addItem("Release") configCombo.currentIndexChanged.connect(self.configChanged) return configCombo def createTemplatesCombo(self,parent): self.tmplCombo=QtGui.QComboBox(parent) self.tmplCombo.currentIndexChanged.connect(self.templateSelected) self.updateTemplates() def configChanged(self,index): configs=['Debug','Release'] self.config=configs[index] s=QtCore.QSettings() s.setValue("config",self.config) s.sync() self.workspaceTree.setConfig(self.config) def addOutputText(self,added): """ Append the new text captured Text is appended to the end of existing text and the widget is scrolled to show the end """ text=self.outputEdit.toPlainText() self.outputEdit.setPlainText(text+added) c=self.outputEdit.textCursor() c.movePosition(QtGui.QTextCursor.End) self.outputEdit.setTextCursor(c) self.outputEdit.ensureCursorVisible() def tempScriptPath(self): """ Generate a temporary script name. Used for running programs with an additional wait for key at the end. """ from time import time t=int(time()*10) return '/tmp/coide_{}.sh'.format(t) def removeTempScripts(self): """ Remove all temporary script files. Called before program exit """ files=os.listdir('/tmp') files=[f for f in files if f.startswith('coide_')] for f in files: os.remove('/tmp/{}'.format(f)) def runProject(self): if not utils.checkFor('xterm'): utils.message("xterm not installed") return path=self.tempScriptPath() f=open(path,'w') dir=self.workspaceTree.getDebugDirectory() cmd=self.workspaceTree.getExecutablePath() params=self.workspaceTree.getDebugParams() if len(params)>0: cmd=cmd+" "+params f.write('#!/bin/sh\ncd {}\n{}\nread -r -p "Press any key..." 
key\n'.format(dir,cmd)) f.close() os.chmod(path,stat.S_IRUSR|stat.S_IWUSR|stat.S_IXUSR) utils.run('/tmp','xterm','-fn','10x20','-e',path) def getCurrentFile(self): if self.central.count()==0: return '' return self.central.tabToolTip(self.central.currentIndex()) def getCurrentEditor(self): path=self.getCurrentFile() if len(path)>0: return self.editors.get(path) def updatePosition(self): """ Query current position and update the code view """ changed=False poslist=self.debugger.getCurrentPos() if poslist and len(poslist)>0: for (path,line) in poslist: if self.getCurrentFile()==path: if self.currentLine!=line: changed=True break if self.openSourceFile(path): changed=True break e=self.editors.get(path) if changed and e: e.colorLine(line,'#0080ff') e.cursorPosition=(line-1,1) self.currentLine=line e.ensureCursorVisible() def saveWatches(self): """ Save all watches to settings, for future sessions """ res=[] n=self.watchesTree.topLevelItemCount()-1 for i in xrange(0,n): item=self.watchesTree.topLevelItem(i) if len(res)>0: res.append(';') res.append(item.text(0)) settings=QtCore.QSettings() key='watches:{}'.format(self.debugger.debugged) settings.setValue(key,''.join(res)) def loadWatches(self): """ Load all previous session watches from settings """ while self.watchesTree.topLevelItemCount()>1: self.watchesTree.takeTopLevelItem(0) settings=QtCore.QSettings() key='watches:{}'.format(self.debugger.debugged) val=settings.value(key,'').toString() if len(val)>0: arr=val.split(';') if len(arr)>0: res=[] for watch in arr: res.append(QtGui.QTreeWidgetItem([watch])) self.watchesTree.insertTopLevelItems(0,res) def updateLocals(self): locals=self.debugger.getLocals() self.localsTree.clear() for var in locals.keys(): item=QtGui.QTreeWidgetItem([var]) self.localsTree.addTopLevelItem(item) res=locals.get(var) if res: self.updateWatchItem(item,res) def updateWatches(self): """ Re-evaluate the value of each watch and update view """ n=self.watchesTree.topLevelItemCount()-1 for i in xrange(0,n): item=self.watchesTree.topLevelItem(i) item.takeChildren() expr=item.text(0) res=self.debugger.evaluate(expr) if res: self.updateWatchItem(item,res) def updateWatchItem(self,item,root): item.setText(1,root.value) def addChildren(item,node): for c in node.children: subitem=QtGui.QTreeWidgetItem([c.name]) subitem.setText(1,c.value) item.addChild(subitem) addChildren(subitem,c) addChildren(item,root) def updateCallstack(self): bt=self.debugger.getBackTrace() self.stackList.clear() for line in bt: self.stackList.addItem(line) def startDebug(self): if self.debugger: self.actCont() return self.outputEdit.setPlainText('') cmd=[self.workspaceTree.getExecutablePath()] args=self.workspaceTree.getDebugParams().split() cwd=self.workspaceTree.getDebugDirectory() if len(cwd)<1: cwd=self.workspaceTree.mainPath() for a in args: cmd.append(a) self.debugger=GDBWrapper(self.breakpoints,cmd,cwd) #self.showWatchesPane() #self.showCallStackPane() #self.loadDebugWindowState() self.showDebugPanes() self.loadWatches() self.timer.start(50) qutepart.evaluator=self.debugger.evaluateAsText def stopDebugger(self): if self.debugger: qutepart.evaluator=None for path in self.editors: e=self.editors.get(path) e.colorLine(0,'') self.saveDebugWindowState() self.debugger.quitDebugger() self.debugger=None #self.paneWatches.close() #self.paneWatches=None #self.paneStack.close() #self.paneStack=None self.hideDebugPanes() self.timer.stop() def hideDebugPanes(self): self.paneWatches.hide() self.paneLocals.hide() self.paneStack.hide() def showDebugPanes(self): 
self.paneWatches.show() self.paneLocals.show() self.paneStack.show() def clearBreakpoints(self): self.breakpoints.clear() n=self.central.count() for i in xrange(0,n): self.central.widget(i).bpMarks={} if self.debugger: self.debugger.clearBreakpoints() def actStep(self): if self.debugger: self.debugger.actStep() if not self.debugger.running: self.stopDebugger() def actNext(self): if self.debugger: self.debugger.actNext() if not self.debugger.running: self.stopDebugger() def actOut(self): if self.debugger: self.debugger.actOut() if not self.debugger.running: self.stopDebugger() def actCont(self): if self.debugger: e=self.getCurrentEditor() if e: e.colorLine(0,'') self.currentLine=-1 self.debugger.actCont() def actBreak(self): if self.debugger: self.debugger.actBreak() def actStop(self): if self.debugger: self.debugger.actStop() def update(self): """ Called every 50ms to check if a change in debugger state occurred Basically this is waiting for a change of state, indicated by: * self.debugger.changed If a change is detected, everything is re-evaluated and drawn """ if self.debugger: self.debugger.update() #if len(text)>0: # self.addOutputText(text) if self.debugger.hasOutput(): self.addOutputText(self.debugger.getOutput()) if self.debugger.changed: self.updatePosition() self.updateWatches() self.updateLocals() self.updateCallstack() self.debugger.changed=False if not self.debugger.running: self.stopDebugger() # If the debugger is active running the program, # create an indication using an animation in the top left # corner of the application window if self.debugger and self.debugger.active: if self.runningWidget is None: from running import RunningWidget self.runningWidget=RunningWidget(self) self.runningWidget.show() self.outputEdit.setBlinkingCursor(True) s=self.outputEdit.getInput() if len(s)>0: text=''.join(s) self.debugger.sendInput(text) self.addOutputText(text) else: self.outputEdit.clearInput() self.outputEdit.setBlinkingCursor(False) if not self.runningWidget is None: self.runningWidget.close() self.runningWidget=None
gpl-2.0
-3,514,037,278,721,606,700
36.664583
135
0.576286
false
4.162791
true
false
false
pradyunsg/dotfiles
lib/checker.py
1
5820
import os
import sys
import shutil
import platform

from .logging import Logger, log
from .utils import run_output

import click
import yaml


class SystemChecker(object):
    """A super-fancy helper for checking the system configuration
    """

    def __init__(self, verbose):
        super().__init__()
        self._logger = Logger()
        self.verbose = verbose

    def _log_happy(self, msg):
        self._logger.spaced_status("pass", msg, fit_width=4)

    def _log_angry(self, msg, is_warning):
        if is_warning:
            self._logger.spaced_status("warn", msg, fit_width=4)
        else:
            self._logger.spaced_status("fail", msg, fit_width=4)

    def platform(self):
        return platform.system()

    def equal(self, expected, *, should_warn=False, **kwargs):
        """Check if a given value for something is equal to the expected value.

            checker.equal(value, name=from_system)
        """
        assert len(kwargs) == 1, "expected 1 keyword argument"

        name, value = next(iter(kwargs.items()))
        if value == expected:
            self._log_happy(name + " is correct")
        else:
            self._log_angry(
                f"{name} is not {expected!r}, it is {value!r}",
                is_warning=should_warn,
            )

    # The actual logic is below

    def run(self, fname):
        data = self._load_yaml(fname)

        self._check_username(data["identity"]["username"])
        self._check_ssh(data["identity"]["ssh-key"])
        self._check_gpg(data["identity"]["gpg-key"])

        for category, contents in data["things"].items():
            self._check_category(category, contents, data)

    def _load_yaml(self, fname):
        with open(fname) as f:
            try:
                return yaml.safe_load(f)
            except Exception as e:
                click.secho("ERROR: Could not parse file.", fg="red")
                click.secho(str(e), fg="red")
                sys.exit(1)

    def _check_username(self, expected):
        self.equal(expected, Username=os.environ["USER"])

    def _check_ssh(self, expected):
        # FIXME: Is this fragile?
        output = run_output("ssh-keygen -E md5 -lf {}".format(
            os.path.expanduser("~/.ssh/id_rsa.pub")
        ))
        if output is None:
            ssh_key = "not found"
        else:
            ssh_key = output.split()[1]
            if ssh_key.startswith("MD5:"):
                ssh_key = ssh_key[4:]

        self.equal(expected, **{"SSH key": ssh_key})

    def _check_gpg(self, expected):
        # This checks that the GPG key exists in the dB
        output = run_output("gpg --list-keys {}".format(expected))
        if output is not None:
            self.equal(expected, **{"GPG key": expected})
        else:
            self.equal(expected, **{"GPG key": "not found"})

    def _check_category(self, category, contents, data):
        if "if" in contents:
            if list(contents["if"]) != ["platform"]:
                raise ValueError(
                    "Needed condition of category {} to be 'platform'"
                    .format(category)
                )
            if contents["if"]["platform"] != self.platform():
                log.spaced_status("skip", category)
                return

        log.spaced_status("topic", category, fit_width=5)
        with log:
            self._check_executables(
                category, contents.get("executables", None)
            )
            self._check_run_items(
                category, contents.get("run_check", None), data
            )

    def _check_executables(self, category, executables):
        if not executables:
            return

        # Convert the string to a list.
        executables = list(map(lambda x: x.strip(), executables.split(",")))

        missing = set()
        for fname in executables:
            if shutil.which(fname) is None:
                missing.add(fname)

        verb = lambda x: "executable" if len(x) == 1 else "executables"
        if missing:
            desc = "missing {}: {}".format(
                verb(missing), ", ".join(map(repr, missing))
            )
            log.spaced_status("fail", desc, fit_width=4)
        else:
            log.spaced_status(
                "pass",
                "{} {} available".format(len(executables), verb(executables)),
                fit_width=4,
            )

    def _check_run_items(self, category, run_items, data):
        if not run_items:
            return

        for name, cmd_dict in run_items.items():
            if not isinstance(cmd_dict, dict) or "cmd" not in cmd_dict:
                log.spaced_status(
                    "warn", f"!!! invalid !!! {category} {name}", fit_width=4
                )
                continue

            got = run_output(cmd_dict["cmd"])
            if got is None:
                # Did not exit cleanly
                ok = False
                reason = "command did not succeed"
            elif "equal" in cmd_dict:
                # Match the output against an expected value...
                expected = cmd_dict["equal"]
                # Perform substitution (from values earlier in the dict)
                if expected.startswith("$"):
                    expected = _dotted_access(data, expected[1:])

                ok = expected == got.rstrip()
                reason = f"{expected!r} != {got!r}"

            if ok:
                log.spaced_status("pass", name, fit_width=4)
            else:
                log.spaced_status("fail", name, fit_width=4)
                if self.verbose:
                    with log:
                        log.info(reason)


def _dotted_access(data, spec):
    item = data
    for part in spec.split("."):
        item = item[part]
    return item
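A minimal usage sketch for the checker above, not part of the original file: the expected shape of the YAML configuration is inferred from the keys that run() and _check_category() read, while the import path, file name and all values are placeholder assumptions.

# Hypothetical driver script; field names mirror what SystemChecker.run() reads,
# every value below is a placeholder.
import yaml
from lib.checker import SystemChecker      # assumed import path for lib/checker.py

config = {
    "identity": {
        "username": "alice",                   # compared against $USER
        "ssh-key": "aa:bb:cc:dd:ee:ff:00:11",  # MD5 fingerprint of ~/.ssh/id_rsa.pub
        "gpg-key": "0123456789ABCDEF",         # key id passed to `gpg --list-keys`
    },
    "things": {
        "dev-tools": {
            "executables": "git, tmux, zsh",   # comma-separated string, not a list
            "run_check": {
                "git-user": {
                    "cmd": "git config --get user.name",
                    "equal": "$identity.username",  # '$' triggers dotted substitution
                },
            },
        },
    },
}

with open("system.yml", "w") as f:             # hypothetical file name
    yaml.safe_dump(config, f)

SystemChecker(verbose=True).run("system.yml")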
mit
-4,368,949,340,365,042,700
30.978022
79
0.518041
false
4.157143
false
false
false
peterrenshaw/socsim
setup.py
1
1400
#!/usr/bin/env python
# ~*~ encoding: utf-8 ~*~
"""
This file is part of SOCSIM.

SOCSIM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

SOCSIM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with SOCSIM. If not, see <http://www.gnu.org/licenses/>.
"""

import os

from setuptools import setup
from setuptools import find_packages

from socsim import __version__


def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()


setup(name = "socsim",
      version = __version__,
      description = 'social media simulation tools',
      long_description=read('README'),
      license = 'GNU GPL 3.0',
      author = "Peter Renshaw",
      author_email = "[email protected]",
      url = 'https://github.com/peterrenshaw/socsim',
      packages = find_packages(),
      keywords = ['message','testing','human','response'],
      zip_safe = True)

# vim: ff=unix:ts=4:sw=4:tw=78:noai:expandtab
gpl-3.0
-4,294,559,742,352,133,000
30.111111
72
0.682143
false
3.763441
false
false
false
judaba13/GenrePredictor
hdf5_utils.py
1
28730
""" Thierry Bertin-Mahieux (2010) Columbia University [email protected] This code contains a set of routines to create HDF5 files containing features and metadata of a song. This is part of the Million Song Dataset project from LabROSA (Columbia University) and The Echo Nest. Copyright 2010, Thierry Bertin-Mahieux This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import os import sys import numpy as np # code relies on pytables, see http://www.pytables.org import tables import hdf5_descriptors as DESC from hdf5_getters import * # musicbrainz related stuff try: from MBrainzDB import query as QUERYMB except ImportError: print 'need pg module and MBrainzDB folder of Python source code if you' print 'want to use musicbrainz related functions, e.g. fill_hdf5_from_musicbrainz' # description of the different arrays in the song file ARRAY_DESC_SIMILAR_ARTISTS = 'array of similar artists Echo Nest id' ARRAY_DESC_ARTIST_TERMS = 'array of terms (Echo Nest tags) for an artist' ARRAY_DESC_ARTIST_TERMS_FREQ = 'array of term (Echo Nest tags) frequencies for an artist' ARRAY_DESC_ARTIST_TERMS_WEIGHT = 'array of term (Echo Nest tags) weights for an artist' ARRAY_DESC_SEGMENTS_START = 'array of start times of segments' ARRAY_DESC_SEGMENTS_CONFIDENCE = 'array of confidence of segments' ARRAY_DESC_SEGMENTS_PITCHES = 'array of pitches of segments (chromas)' ARRAY_DESC_SEGMENTS_TIMBRE = 'array of timbre of segments (MFCC-like)' ARRAY_DESC_SEGMENTS_LOUDNESS_MAX = 'array of max loudness of segments' ARRAY_DESC_SEGMENTS_LOUDNESS_MAX_TIME = 'array of max loudness time of segments' ARRAY_DESC_SEGMENTS_LOUDNESS_START = 'array of loudness of segments at start time' ARRAY_DESC_SECTIONS_START = 'array of start times of sections' ARRAY_DESC_SECTIONS_CONFIDENCE = 'array of confidence of sections' ARRAY_DESC_BEATS_START = 'array of start times of beats' ARRAY_DESC_BEATS_CONFIDENCE = 'array of confidence of sections' ARRAY_DESC_BARS_START = 'array of start times of bars' ARRAY_DESC_BARS_CONFIDENCE = 'array of confidence of bars' ARRAY_DESC_TATUMS_START = 'array of start times of tatums' ARRAY_DESC_TATUMS_CONFIDENCE = 'array of confidence of tatums' ARRAY_DESC_ARTIST_MBTAGS = 'array of tags from MusicBrainz for an artist' ARRAY_DESC_ARTIST_MBTAGS_COUNT = 'array of tag counts from MusicBrainz for an artist' def fill_hdf5_from_artist(h5,artist): """ Fill an open hdf5 using all content in a artist object from the Echo Nest python API There could be overlap with fill_from_song and fill_from_track, we assume the data is consistent! 
""" # get the metadata table, fill it metadata = h5.root.metadata.songs metadata.cols.artist_id[0] = artist.id idsplitter = lambda x,y: x.split(':')[2] if x else y metadata.cols.artist_mbid[0] = idsplitter(artist.get_foreign_id(idspace='musicbrainz'),'') metadata.cols.artist_playmeid[0] = int(idsplitter(artist.get_foreign_id(idspace='playme'),-1)) metadata.cols.artist_7digitalid[0] = int(idsplitter(artist.get_foreign_id(idspace='7digital'),-1)) # fill the metadata arrays group = h5.root.metadata metadata.cols.idx_similar_artists[0] = 0 group.similar_artists.append( np.array(map(lambda x : x.id,artist.get_similar(results=100)),dtype='string') ) metadata.cols.idx_artist_terms[0] = 0 group.artist_terms.append( np.array(map(lambda x : x.name,artist.get_terms()),dtype='string') ) group.artist_terms_freq.append( np.array(map(lambda x : x.frequency,artist.get_terms()),dtype='float64') ) group.artist_terms_weight.append( np.array(map(lambda x : x.weight,artist.get_terms()),dtype='float64') ) # done, flush metadata.flush() def fill_hdf5_from_song(h5,song): """ Fill an open hdf5 using all the content in a song object from the Echo Nest python API. Usually, fill_hdf5_from_track() will have been called first. """ # get the metadata table, fill it metadata = h5.root.metadata.songs metadata.cols.artist_familiarity[0] = song.get_artist_familiarity() metadata.cols.artist_hotttnesss[0] = song.get_artist_hotttnesss() metadata.cols.artist_id[0] = song.artist_id metadata.cols.artist_latitude[0] = song.get_artist_location().latitude metadata.cols.artist_location[0] = song.get_artist_location().location.encode('utf-8') if song.get_artist_location().location else '' metadata.cols.artist_longitude[0] = song.get_artist_location().longitude metadata.cols.artist_name[0] = song.artist_name.encode('utf-8') if song.artist_name else '' metadata.cols.song_id[0] = song.id metadata.cols.song_hotttnesss[0] = song.get_song_hotttnesss() metadata.cols.title[0] = song.title.encode('utf-8') if song.title else '' metadata.flush() # get the analysis table analysis = h5.root.analysis.songs analysis.danceability = song.get_audio_summary().danceability analysis.energy = song.get_audio_summary().energy analysis.flush() def fill_hdf5_from_track(h5,track): """ Fill an open hdf5 using all the content in a track object from the Echo Nest python API """ # get the metadata table, fill it metadata = h5.root.metadata.songs #metadata.cols.analyzer_version[0] = track.analyzer_version metadata.cols.artist_name[0] = getattr(track, 'artist', u'').encode('utf-8') metadata.cols.release[0] = getattr(track, 'release', u'').encode('utf-8') metadata.cols.title[0] = getattr(track, 'title', u'').encode('utf-8') idsplitter_7digital = lambda x: int(x.split(':')[2]) if x and x.split(':')[0]=='7digital' else -1 metadata.cols.release_7digitalid[0] = idsplitter_7digital(track.foreign_release_id) metadata.cols.track_7digitalid[0] = idsplitter_7digital(track.foreign_id) metadata.flush() # get the analysis table, fill it analysis = h5.root.analysis.songs analysis.cols.analysis_sample_rate[0] = track.analysis_sample_rate analysis.cols.audio_md5[0] = track.audio_md5 analysis.cols.duration[0] = track.duration analysis.cols.end_of_fade_in[0] = track.end_of_fade_in analysis.cols.key[0] = track.key analysis.cols.key_confidence[0] = track.key_confidence analysis.cols.loudness[0] = track.loudness analysis.cols.mode[0] = track.mode analysis.cols.mode_confidence[0] = track.mode_confidence analysis.cols.start_of_fade_out[0] = track.start_of_fade_out analysis.cols.tempo[0] = 
track.tempo analysis.cols.time_signature[0] = track.time_signature analysis.cols.time_signature_confidence[0] = track.time_signature_confidence analysis.cols.track_id[0] = track.id analysis.flush() group = h5.root.analysis # analysis arrays (segments) analysis.cols.idx_segments_start[0] = 0 group.segments_start.append( np.array(map(lambda x : x['start'],track.segments),dtype='float64') ) analysis.cols.idx_segments_confidence[0] = 0 group.segments_confidence.append( np.array(map(lambda x : x['confidence'],track.segments),dtype='float64') ) analysis.cols.idx_segments_pitches[0] = 0 group.segments_pitches.append( np.array(map(lambda x : x['pitches'],track.segments),dtype='float64') ) analysis.cols.idx_segments_timbre[0] = 0 group.segments_timbre.append( np.array(map(lambda x : x['timbre'],track.segments),dtype='float64') ) analysis.cols.idx_segments_loudness_max[0] = 0 group.segments_loudness_max.append( np.array(map(lambda x : x['loudness_max'],track.segments),dtype='float64') ) analysis.cols.idx_segments_loudness_max_time[0] = 0 group.segments_loudness_max_time.append( np.array(map(lambda x : x['loudness_max_time'],track.segments),dtype='float64') ) analysis.cols.idx_segments_loudness_start[0] = 0 group.segments_loudness_start.append( np.array(map(lambda x : x['loudness_start'],track.segments),dtype='float64') ) # analysis arrays (sections) analysis.cols.idx_sections_start[0] = 0 group.sections_start.append( np.array(map(lambda x : x['start'],track.sections),dtype='float64') ) analysis.cols.idx_sections_confidence[0] = 0 group.sections_confidence.append( np.array(map(lambda x : x['confidence'],track.sections),dtype='float64') ) # analysis arrays (beats analysis.cols.idx_beats_start[0] = 0 group.beats_start.append( np.array(map(lambda x : x['start'],track.beats),dtype='float64') ) analysis.cols.idx_beats_confidence[0] = 0 group.beats_confidence.append( np.array(map(lambda x : x['confidence'],track.beats),dtype='float64') ) # analysis arrays (bars) analysis.cols.idx_bars_start[0] = 0 group.bars_start.append( np.array(map(lambda x : x['start'],track.bars),dtype='float64') ) analysis.cols.idx_bars_confidence[0] = 0 group.bars_confidence.append( np.array(map(lambda x : x['confidence'],track.bars),dtype='float64') ) # analysis arrays (tatums) analysis.cols.idx_tatums_start[0] = 0 group.tatums_start.append( np.array(map(lambda x : x['start'],track.tatums),dtype='float64') ) analysis.cols.idx_tatums_confidence[0] = 0 group.tatums_confidence.append( np.array(map(lambda x : x['confidence'],track.tatums),dtype='float64') ) analysis.flush() # DONE def fill_hdf5_from_musicbrainz(h5,connect): """ Fill an open hdf5 using the musicbrainz server and data. 
We assume this code is run after fill_hdf5_from_artist/song because we need artist_mbid, artist_name, release and title INPUT h5 - open song file (append mode) connect - open pg connection to musicbrainz_db """ # get info from h5 song file ambid = h5.root.metadata.songs.cols.artist_mbid[0] artist_name = h5.root.metadata.songs.cols.artist_name[0] release = h5.root.metadata.songs.cols.release[0] title = h5.root.metadata.songs.cols.title[0] # get the musicbrainz table, fill it musicbrainz = h5.root.musicbrainz.songs musicbrainz.cols.year[0] = QUERYMB.find_year_safemode(connect,ambid,title,release,artist_name) # fill the musicbrainz arrays group = h5.root.musicbrainz musicbrainz.cols.idx_artist_mbtags[0] = 0 tags,tagcount = QUERYMB.get_artist_tags(connect, ambid, maxtags=20) group.artist_mbtags.append( np.array(tags,dtype='string') ) group.artist_mbtags_count.append( np.array(tagcount,dtype='float64') ) # done, flush musicbrainz.flush() def fill_hdf5_aggregate_file(h5,h5_filenames,summaryfile=False): """ Fill an open hdf5 aggregate file using all the content from all the HDF5 files listed as filenames. These HDF5 files are supposed to be filled already. Usefull to create one big HDF5 file from many, thus improving IO speed. For most of the info, we simply use one row per song. For the arrays (e.g. segment_start) we need the indecies (e.g. idx_segment_start) to know which part of the array belongs to one particular song. If summaryfile=True, we skip arrays (indices all 0) """ # counter counter = 0 # iterate over filenames for h5idx,h5filename in enumerate(h5_filenames): # open h5 file h5tocopy = open_h5_file_read(h5filename) # get number of songs in new file nSongs = get_num_songs(h5tocopy) # iterate over songs in one HDF5 (1 if regular file, more if aggregate file) for songidx in xrange(nSongs): # METADATA row = h5.root.metadata.songs.row row["artist_familiarity"] = get_artist_familiarity(h5tocopy,songidx) row["artist_hotttnesss"] = get_artist_hotttnesss(h5tocopy,songidx) row["artist_id"] = get_artist_id(h5tocopy,songidx) row["artist_mbid"] = get_artist_mbid(h5tocopy,songidx) row["artist_playmeid"] = get_artist_playmeid(h5tocopy,songidx) row["artist_7digitalid"] = get_artist_7digitalid(h5tocopy,songidx) row["artist_latitude"] = get_artist_latitude(h5tocopy,songidx) row["artist_location"] = get_artist_location(h5tocopy,songidx) row["artist_longitude"] = get_artist_longitude(h5tocopy,songidx) row["artist_name"] = get_artist_name(h5tocopy,songidx) row["release"] = get_release(h5tocopy,songidx) row["release_7digitalid"] = get_release_7digitalid(h5tocopy,songidx) row["song_id"] = get_song_id(h5tocopy,songidx) row["song_hotttnesss"] = get_song_hotttnesss(h5tocopy,songidx) row["title"] = get_title(h5tocopy,songidx) row["track_7digitalid"] = get_track_7digitalid(h5tocopy,songidx) # INDICES if not summaryfile: if counter == 0 : # we're first row row["idx_similar_artists"] = 0 row["idx_artist_terms"] = 0 else: row["idx_similar_artists"] = h5.root.metadata.similar_artists.shape[0] row["idx_artist_terms"] = h5.root.metadata.artist_terms.shape[0] row.append() h5.root.metadata.songs.flush() # ARRAYS if not summaryfile: h5.root.metadata.similar_artists.append( get_similar_artists(h5tocopy,songidx) ) h5.root.metadata.artist_terms.append( get_artist_terms(h5tocopy,songidx) ) h5.root.metadata.artist_terms_freq.append( get_artist_terms_freq(h5tocopy,songidx) ) h5.root.metadata.artist_terms_weight.append( get_artist_terms_weight(h5tocopy,songidx) ) # ANALYSIS row = h5.root.analysis.songs.row 
row["analysis_sample_rate"] = get_analysis_sample_rate(h5tocopy,songidx) row["audio_md5"] = get_audio_md5(h5tocopy,songidx) row["danceability"] = get_danceability(h5tocopy,songidx) row["duration"] = get_duration(h5tocopy,songidx) row["end_of_fade_in"] = get_end_of_fade_in(h5tocopy,songidx) row["energy"] = get_energy(h5tocopy,songidx) row["key"] = get_key(h5tocopy,songidx) row["key_confidence"] = get_key_confidence(h5tocopy,songidx) row["loudness"] = get_loudness(h5tocopy,songidx) row["mode"] = get_mode(h5tocopy,songidx) row["mode_confidence"] = get_mode_confidence(h5tocopy,songidx) row["start_of_fade_out"] = get_start_of_fade_out(h5tocopy,songidx) row["tempo"] = get_tempo(h5tocopy,songidx) row["time_signature"] = get_time_signature(h5tocopy,songidx) row["time_signature_confidence"] = get_time_signature_confidence(h5tocopy,songidx) row["track_id"] = get_track_id(h5tocopy,songidx) # INDICES if not summaryfile: if counter == 0 : # we're first row row["idx_segments_start"] = 0 row["idx_segments_confidence"] = 0 row["idx_segments_pitches"] = 0 row["idx_segments_timbre"] = 0 row["idx_segments_loudness_max"] = 0 row["idx_segments_loudness_max_time"] = 0 row["idx_segments_loudness_start"] = 0 row["idx_sections_start"] = 0 row["idx_sections_confidence"] = 0 row["idx_beats_start"] = 0 row["idx_beats_confidence"] = 0 row["idx_bars_start"] = 0 row["idx_bars_confidence"] = 0 row["idx_tatums_start"] = 0 row["idx_tatums_confidence"] = 0 else : # check the current shape of the arrays row["idx_segments_start"] = h5.root.analysis.segments_start.shape[0] row["idx_segments_confidence"] = h5.root.analysis.segments_confidence.shape[0] row["idx_segments_pitches"] = h5.root.analysis.segments_pitches.shape[0] row["idx_segments_timbre"] = h5.root.analysis.segments_timbre.shape[0] row["idx_segments_loudness_max"] = h5.root.analysis.segments_loudness_max.shape[0] row["idx_segments_loudness_max_time"] = h5.root.analysis.segments_loudness_max_time.shape[0] row["idx_segments_loudness_start"] = h5.root.analysis.segments_loudness_start.shape[0] row["idx_sections_start"] = h5.root.analysis.sections_start.shape[0] row["idx_sections_confidence"] = h5.root.analysis.sections_confidence.shape[0] row["idx_beats_start"] = h5.root.analysis.beats_start.shape[0] row["idx_beats_confidence"] = h5.root.analysis.beats_confidence.shape[0] row["idx_bars_start"] = h5.root.analysis.bars_start.shape[0] row["idx_bars_confidence"] = h5.root.analysis.bars_confidence.shape[0] row["idx_tatums_start"] = h5.root.analysis.tatums_start.shape[0] row["idx_tatums_confidence"] = h5.root.analysis.tatums_confidence.shape[0] row.append() h5.root.analysis.songs.flush() # ARRAYS if not summaryfile: h5.root.analysis.segments_start.append( get_segments_start(h5tocopy,songidx) ) h5.root.analysis.segments_confidence.append( get_segments_confidence(h5tocopy,songidx) ) h5.root.analysis.segments_pitches.append( get_segments_pitches(h5tocopy,songidx) ) h5.root.analysis.segments_timbre.append( get_segments_timbre(h5tocopy,songidx) ) h5.root.analysis.segments_loudness_max.append( get_segments_loudness_max(h5tocopy,songidx) ) h5.root.analysis.segments_loudness_max_time.append( get_segments_loudness_max_time(h5tocopy,songidx) ) h5.root.analysis.segments_loudness_start.append( get_segments_loudness_start(h5tocopy,songidx) ) h5.root.analysis.sections_start.append( get_sections_start(h5tocopy,songidx) ) h5.root.analysis.sections_confidence.append( get_sections_confidence(h5tocopy,songidx) ) h5.root.analysis.beats_start.append( get_beats_start(h5tocopy,songidx) ) 
h5.root.analysis.beats_confidence.append( get_beats_confidence(h5tocopy,songidx) ) h5.root.analysis.bars_start.append( get_bars_start(h5tocopy,songidx) ) h5.root.analysis.bars_confidence.append( get_bars_confidence(h5tocopy,songidx) ) h5.root.analysis.tatums_start.append( get_tatums_start(h5tocopy,songidx) ) h5.root.analysis.tatums_confidence.append( get_tatums_confidence(h5tocopy,songidx) ) # MUSICBRAINZ row = h5.root.musicbrainz.songs.row row["year"] = get_year(h5tocopy,songidx) # INDICES if not summaryfile: if counter == 0 : # we're first row row["idx_artist_mbtags"] = 0 else: row["idx_artist_mbtags"] = h5.root.musicbrainz.artist_mbtags.shape[0] row.append() h5.root.musicbrainz.songs.flush() # ARRAYS if not summaryfile: h5.root.musicbrainz.artist_mbtags.append( get_artist_mbtags(h5tocopy,songidx) ) h5.root.musicbrainz.artist_mbtags_count.append( get_artist_mbtags_count(h5tocopy,songidx) ) # counter counter += 1 # close h5 file h5tocopy.close() def create_song_file(h5filename,title='H5 Song File',force=False,complevel=1): """ Create a new HDF5 file for a new song. If force=False, refuse to overwrite an existing file Raise a ValueError if it's the case. Other optional param is the H5 file. Setups the groups, each containing a table 'songs' with one row: - metadata - analysis DETAIL - we set the compression level to 1 by default, it uses the ZLIB library to disable compression, set it to 0 """ # check if file exists if not force: if os.path.exists(h5filename): raise ValueError('file exists, can not create HDF5 song file') # create the H5 file h5 = tables.openFile(h5filename, mode='w', title='H5 Song File') # set filter level h5.filters = tables.Filters(complevel=complevel,complib='zlib') # setup the groups and tables # group metadata group = h5.createGroup("/",'metadata','metadata about the song') table = h5.createTable(group,'songs',DESC.SongMetaData,'table of metadata for one song') r = table.row r.append() # filled with default values 0 or '' (depending on type) table.flush() # group analysis group = h5.createGroup("/",'analysis','Echo Nest analysis of the song') table = h5.createTable(group,'songs',DESC.SongAnalysis,'table of Echo Nest analysis for one song') r = table.row r.append() # filled with default values 0 or '' (depending on type) table.flush() # group musicbrainz group = h5.createGroup("/",'musicbrainz','data about the song coming from MusicBrainz') table = h5.createTable(group,'songs',DESC.SongMusicBrainz,'table of data coming from MusicBrainz') r = table.row r.append() # filled with default values 0 or '' (depending on type) table.flush() # create arrays create_all_arrays(h5,expectedrows=3) # close it, done h5.close() def create_aggregate_file(h5filename,title='H5 Aggregate File',force=False,expectedrows=1000,complevel=1, summaryfile=False): """ Create a new HDF5 file for all songs. It will contains everything that are in regular song files. Tables created empty. If force=False, refuse to overwrite an existing file Raise a ValueError if it's the case. If summaryfile=True, creates a sumary file, i.e. no arrays Other optional param is the H5 file. DETAILS - if you create a very large file, try to approximate correctly the number of data points (songs), it speeds things up with arrays (by setting the chunking correctly). 
- we set the compression level to 1 by default, it uses the ZLIB library to disable compression, set it to 0 Setups the groups, each containing a table 'songs' with one row: - metadata - analysis """ # check if file exists if not force: if os.path.exists(h5filename): raise ValueError('file exists, can not create HDF5 song file') # summary file? change title if summaryfile: title = 'H5 Summary File' # create the H5 file h5 = tables.openFile(h5filename, mode='w', title='H5 Song File') # set filter level h5.filters = tables.Filters(complevel=complevel,complib='zlib') # setup the groups and tables # group metadata group = h5.createGroup("/",'metadata','metadata about the song') table = h5.createTable(group,'songs',DESC.SongMetaData,'table of metadata for one song', expectedrows=expectedrows) # group analysis group = h5.createGroup("/",'analysis','Echo Nest analysis of the song') table = h5.createTable(group,'songs',DESC.SongAnalysis,'table of Echo Nest analysis for one song', expectedrows=expectedrows) # group musicbrainz group = h5.createGroup("/",'musicbrainz','data about the song coming from MusicBrainz') table = h5.createTable(group,'songs',DESC.SongMusicBrainz,'table of data coming from MusicBrainz', expectedrows=expectedrows) # create arrays if not summaryfile: create_all_arrays(h5,expectedrows=expectedrows) # close it, done h5.close() def create_all_arrays(h5,expectedrows=1000): """ Utility functions used by both create_song_file and create_aggregate_files, creates all the EArrays (empty). INPUT h5 - hdf5 file, open with write or append permissions metadata and analysis groups already exist! """ # group metadata arrays group = h5.root.metadata h5.createEArray(where=group,name='similar_artists',atom=tables.StringAtom(20,shape=()),shape=(0,),title=ARRAY_DESC_SIMILAR_ARTISTS) h5.createEArray(group,'artist_terms',tables.StringAtom(256,shape=()),(0,),ARRAY_DESC_ARTIST_TERMS, expectedrows=expectedrows*40) h5.createEArray(group,'artist_terms_freq',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_ARTIST_TERMS_FREQ, expectedrows=expectedrows*40) h5.createEArray(group,'artist_terms_weight',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_ARTIST_TERMS_WEIGHT, expectedrows=expectedrows*40) # group analysis arrays group = h5.root.analysis h5.createEArray(where=group,name='segments_start',atom=tables.Float64Atom(shape=()),shape=(0,),title=ARRAY_DESC_SEGMENTS_START) h5.createEArray(group,'segments_confidence',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_SEGMENTS_CONFIDENCE, expectedrows=expectedrows*300) h5.createEArray(group,'segments_pitches',tables.Float64Atom(shape=()),(0,12),ARRAY_DESC_SEGMENTS_PITCHES, expectedrows=expectedrows*300) h5.createEArray(group,'segments_timbre',tables.Float64Atom(shape=()),(0,12),ARRAY_DESC_SEGMENTS_TIMBRE, expectedrows=expectedrows*300) h5.createEArray(group,'segments_loudness_max',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_SEGMENTS_LOUDNESS_MAX, expectedrows=expectedrows*300) h5.createEArray(group,'segments_loudness_max_time',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_SEGMENTS_LOUDNESS_MAX_TIME, expectedrows=expectedrows*300) h5.createEArray(group,'segments_loudness_start',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_SEGMENTS_LOUDNESS_START, expectedrows=expectedrows*300) h5.createEArray(group,'sections_start',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_SECTIONS_START, expectedrows=expectedrows*300) h5.createEArray(group,'sections_confidence',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_SECTIONS_CONFIDENCE, expectedrows=expectedrows*300) 
h5.createEArray(group,'beats_start',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_BEATS_START, expectedrows=expectedrows*300) h5.createEArray(group,'beats_confidence',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_BEATS_CONFIDENCE, expectedrows=expectedrows*300) h5.createEArray(group,'bars_start',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_BARS_START, expectedrows=expectedrows*300) h5.createEArray(group,'bars_confidence',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_BARS_CONFIDENCE, expectedrows=expectedrows*300) h5.createEArray(group,'tatums_start',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_TATUMS_START, expectedrows=expectedrows*300) h5.createEArray(group,'tatums_confidence',tables.Float64Atom(shape=()),(0,),ARRAY_DESC_TATUMS_CONFIDENCE, expectedrows=expectedrows*300) # group musicbrainz arrays group = h5.root.musicbrainz h5.createEArray(where=group,name='artist_mbtags',atom=tables.StringAtom(256,shape=()),shape=(0,),title=ARRAY_DESC_ARTIST_MBTAGS, expectedrows=expectedrows*5) h5.createEArray(group,'artist_mbtags_count',tables.IntAtom(shape=()),(0,),ARRAY_DESC_ARTIST_MBTAGS_COUNT, expectedrows=expectedrows*5) def open_h5_file_read(h5filename): """ Open an existing H5 in read mode. """ return tables.openFile(h5filename, mode='r') def open_h5_file_append(h5filename): """ Open an existing H5 in append mode. """ return tables.openFile(h5filename, mode='a') ################################################ MAIN ##################################### def die_with_usage(): """ HELP MENU """ print 'hdf5_utils.py' print 'by T. Bertin-Mahieux (2010) Columbia University' print '' print 'should be used as a library, contains functions to create' print 'HDF5 files for the Million Song Dataset project' sys.exit(0) if __name__ == '__main__': # help menu die_with_usage()
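A short usage sketch for the helpers defined above, not part of the original file: create_song_file, open_h5_file_append and the fill_hdf5_from_* functions are taken from this module, while the Echo Nest track, song and artist objects and the output file name are assumptions.

# Hypothetical workflow; `track`, `song` and `artist` are assumed to be
# pyechonest API objects obtained elsewhere.
import hdf5_utils as HDF5

h5_path = 'TRABC12345678.h5'                # placeholder per-track file name
HDF5.create_song_file(h5_path, force=True)  # groups/tables created with one default row
h5 = HDF5.open_h5_file_append(h5_path)
HDF5.fill_hdf5_from_track(h5, track)        # analysis + basic metadata, filled first
HDF5.fill_hdf5_from_song(h5, song)          # song/artist metadata
HDF5.fill_hdf5_from_artist(h5, artist)      # similar artists and terms
h5.close()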
apache-2.0
7,055,382,129,776,609,000
52.304267
137
0.664636
false
3.349656
false
false
false
blab/antibody-response-pulse
bcell-array/code/Virus_Bcell_IgM_IgG_Infection_OAS_new.py
1
13195
# coding: utf-8 # # Antibody Response Pulse # https://github.com/blab/antibody-response-pulse # # ### B-cells evolution --- cross-reactive antibody response after influenza virus infection or vaccination # ### Adaptive immune response for repeated infection # In[3]: ''' author: Alvason Zhenhua Li date: 04/09/2015 ''' get_ipython().magic(u'matplotlib inline') import numpy as np import matplotlib.pyplot as plt import os from matplotlib.ticker import FuncFormatter import alva_machinery_event_OAS_new as alva AlvaFontSize = 23 AlvaFigSize = (15, 5) numberingFig = 0 # plotting dir_path = '/Users/al/Desktop/GitHub/antibody-response-pulse/bcell-array/figure' file_name = 'Virus-Bcell-IgM-IgG' figure_name = '-equation' file_suffix = '.png' save_figure = os.path.join(dir_path, file_name + figure_name + file_suffix) numberingFig = numberingFig + 1 plt.figure(numberingFig, figsize=(12, 5)) plt.axis('off') plt.title(r'$ Virus-Bcell-IgM-IgG \ equations \ (antibody-response \ for \ repeated-infection) $' , fontsize = AlvaFontSize) plt.text(0, 7.0/9, r'$ \frac{\partial V_n(t)}{\partial t} = +\mu_{v}V_{n}(t)(1 - \frac{V_n(t)}{V_{max}}) - \phi_{m} M_{n}(t) V_{n}(t) - \phi_{g} G_{n}(t) V_{n}(t) $' , fontsize = 1.2*AlvaFontSize) plt.text(0, 5.0/9, r'$ \frac{\partial B_n(t)}{\partial t} = +\mu_{b}V_{n}(t)(1 - \frac{V_n(t)}{V_{max}}) + (\beta_{m} + \beta_{g}) V_{n}(t) B_{n}(t) - \mu_{b} B_{n}(t) + m_b V_{n}(t)\frac{B_{i-1}(t) - 2B_i(t) + B_{i+1}(t)}{(\Delta i)^2} $' , fontsize = 1.2*AlvaFontSize) plt.text(0, 3.0/9,r'$ \frac{\partial M_n(t)}{\partial t} = +\xi_{m} B_{n}(t) - \phi_{m} M_{n}(t) V_{n}(t) - \mu_{m} M_{n}(t) $' , fontsize = 1.2*AlvaFontSize) plt.text(0, 1.0/9,r'$ \frac{\partial G_n(t)}{\partial t} = +\xi_{g} B_{n}(t) - \phi_{g} G_{n}(t) V_{n}(t) - \mu_{g} G_{n}(t) + m_a V_{n}(t)\frac{G_{i-1}(t) - 2G_i(t) + G_{i+1}(t)}{(\Delta i)^2} $' , fontsize = 1.2*AlvaFontSize) plt.savefig(save_figure, dpi = 100) plt.show() # define the V-M-G partial differential equations def dVdt_array(VBMGxt = [], *args): # naming V = VBMGxt[0] B = VBMGxt[1] M = VBMGxt[2] G = VBMGxt[3] x_totalPoint = VBMGxt.shape[1] # there are n dSdt dV_dt_array = np.zeros(x_totalPoint) # each dSdt with the same equation form dV_dt_array[:] = +inRateV*V[:]*(1 - V[:]/maxV) - killRateVm*M[:]*V[:] - killRateVg*G[:]*V[:] return(dV_dt_array) def dBdt_array(VBMGxt = [], *args): # naming V = VBMGxt[0] B = VBMGxt[1] M = VBMGxt[2] G = VBMGxt[3] x_totalPoint = VBMGxt.shape[1] # there are n dSdt dB_dt_array = np.zeros(x_totalPoint) # each dSdt with the same equation form Bcopy = np.copy(B) centerX = Bcopy[:] leftX = np.roll(Bcopy[:], 1) rightX = np.roll(Bcopy[:], -1) leftX[0] = centerX[0] rightX[-1] = centerX[-1] dB_dt_array[:] = +inRateB*V[:]*(1 - V[:]/maxV) + (actRateBm + alva.event_active + alva.event_OAS_B)*V[:]*B[:] - outRateB*B[:] + mutatRateB*V[:]*(leftX[:] - 2*centerX[:] + rightX[:])/(dx**2) return(dB_dt_array) def dMdt_array(VBMGxt = [], *args): # naming V = VBMGxt[0] B = VBMGxt[1] M = VBMGxt[2] G = VBMGxt[3] x_totalPoint = VBMGxt.shape[1] # there are n dSdt dM_dt_array = np.zeros(x_totalPoint) # each dSdt with the same equation form dM_dt_array[:] = +inRateM*B[:] - consumeRateM*M[:]*V[:] - outRateM*M[:] return(dM_dt_array) def dGdt_array(VBMGxt = [], *args): # naming V = VBMGxt[0] B = VBMGxt[1] M = VBMGxt[2] G = VBMGxt[3] x_totalPoint = VBMGxt.shape[1] # there are n dSdt dG_dt_array = np.zeros(x_totalPoint) # each dSdt with the same equation form Gcopy = np.copy(G) centerX = Gcopy[:] leftX = np.roll(Gcopy[:], 1) rightX = np.roll(Gcopy[:], -1) 
leftX[0] = centerX[0] rightX[-1] = centerX[-1] dG_dt_array[:] = +(inRateG + alva.event_OAS)*B[:] - consumeRateG*G[:]*V[:] - outRateG*G[:] + mutatRateA*(leftX[:] - 2*centerX[:] + rightX[:])/(dx**2) return(dG_dt_array) # In[7]: # setting parameter timeUnit = 'day' if timeUnit == 'hour': hour = float(1) day = float(24) elif timeUnit == 'day': day = float(1) hour = float(1)/24 elif timeUnit == 'year': year = float(1) day = float(1)/365 hour = float(1)/24/365 maxV = float(50) # max virus/micro-liter inRateV = 0.2/hour # in-rate of virus killRateVm = 0.0003/hour # kill-rate of virus by antibody-IgM killRateVg = killRateVm # kill-rate of virus by antibody-IgG inRateB = 0.06/hour # in-rate of B-cell outRateB = inRateB/8 # out-rate of B-cell actRateBm = killRateVm # activation rate of naive B-cell inRateM = 0.16/hour # in-rate of antibody-IgM from naive B-cell outRateM = inRateM/1 # out-rate of antibody-IgM from naive B-cell consumeRateM = killRateVm # consume-rate of antibody-IgM by cleaning virus inRateG = inRateM/10 # in-rate of antibody-IgG from memory B-cell outRateG = outRateM/250 # out-rate of antibody-IgG from memory B-cell consumeRateG = killRateVg # consume-rate of antibody-IgG by cleaning virus mutatRateB = 0.00003/hour # B-cell mutation rate mutatRateA = 0.0001/hour # antibody mutation rate mutatRateB = 0.0000/hour # B-cell mutation rate mutatRateA = 0.000/hour # antibody mutation rate # time boundary and griding condition minT = float(0) maxT = float(6*28*day) totalPoint_T = int(1*10**3 + 1) gT = np.linspace(minT, maxT, totalPoint_T) spacingT = np.linspace(minT, maxT, num = totalPoint_T, retstep = True) gT = spacingT[0] dt = spacingT[1] # space boundary and griding condition minX = float(0) maxX = float(3) totalPoint_X = int(maxX - minX + 1) gX = np.linspace(minX, maxX, totalPoint_X) gridingX = np.linspace(minX, maxX, num = totalPoint_X, retstep = True) gX = gridingX[0] dx = gridingX[1] gV_array = np.zeros([totalPoint_X, totalPoint_T]) gB_array = np.zeros([totalPoint_X, totalPoint_T]) gM_array = np.zeros([totalPoint_X, totalPoint_T]) gG_array = np.zeros([totalPoint_X, totalPoint_T]) # initial output condition #gV_array[1, 0] = float(2) #[pre-parameter, post-parameter, recovered-day, OAS+, OSA-] actRateBg_1st = 0.0002/hour # activation rate of memory B-cell at 1st time (pre-) actRateBg_2nd = actRateBg_1st*10 # activation rate of memory B-cell at 2nd time (post-) origin_virus = int(1) current_virus = int(2) event_parameter = np.array([[actRateBg_1st, actRateBg_2nd, 14*day, +5/hour, -actRateBm - actRateBg_1st + (actRateBm + actRateBg_1st)/3, origin_virus, current_virus]]) # [viral population, starting time, first] # [viral population, starting time] ---first infection_period = 1*28*day viral_population = np.zeros(int(maxX + 1)) viral_population[origin_virus:current_virus + 1] = 3 infection_starting_time = np.arange(int(maxX + 1))*infection_period event_1st = np.zeros([int(maxX + 1), 2]) event_1st[:, 0] = viral_population event_1st[:, 1] = infection_starting_time print ('event_1st = {:}'.format(event_1st)) # [viral population, starting time] ---2nd] viral_population = np.zeros(int(maxX + 1)) viral_population[origin_virus:current_virus + 1] = 0 infection_starting_time = np.arange(int(maxX + 1))*0 event_2nd = np.zeros([int(maxX + 1), 2]) event_2nd[:, 0] = viral_population event_2nd[:, 1] = infection_starting_time print ('event_2nd = {:}'.format(event_2nd)) event_table = np.array([event_parameter, event_1st, event_2nd]) # Runge Kutta numerical solution pde_array = np.array([dVdt_array, 
dBdt_array, dMdt_array, dGdt_array]) initial_Out = np.array([gV_array, gB_array, gM_array, gG_array]) gOut_array = alva.AlvaRungeKutta4XT(pde_array, initial_Out, minX, maxX, totalPoint_X, minT, maxT, totalPoint_T, event_table) # plotting gV = gOut_array[0] gB = gOut_array[1] gM = gOut_array[2] gG = gOut_array[3] numberingFig = numberingFig + 1 for i in range(totalPoint_X): figure_name = '-response-%i'%(i) figure_suffix = '.png' save_figure = os.path.join(dir_path, file_name + figure_name + file_suffix) plt.figure(numberingFig, figsize = AlvaFigSize) plt.plot(gT, gV[i], color = 'red', label = r'$ V_{%i}(t) $'%(i), linewidth = 3.0, alpha = 0.5) plt.plot(gT, gM[i], color = 'blue', label = r'$ IgM_{%i}(t) $'%(i), linewidth = 3.0, alpha = 0.5) plt.plot(gT, gG[i], color = 'green', label = r'$ IgG_{%i}(t) $'%(i), linewidth = 3.0, alpha = 0.5) plt.plot(gT, gM[i] + gG[i], color = 'gray', linewidth = 5.0, alpha = 0.5, linestyle = 'dashed' , label = r'$ IgM_{%i}(t) + IgG_{%i}(t) $'%(i, i)) plt.grid(True, which = 'both') plt.title(r'$ Antibody \ from \ Virus-{%i} $'%(i), fontsize = AlvaFontSize) plt.xlabel(r'$time \ (%s)$'%(timeUnit), fontsize = AlvaFontSize) plt.ylabel(r'$ Neutralization \ \ titer $', fontsize = AlvaFontSize) plt.xlim([minT, maxT]) plt.xticks(fontsize = AlvaFontSize*0.6) plt.yticks(fontsize = AlvaFontSize*0.6) plt.ylim([2**0, 2**14]) plt.yscale('log', basey = 2) plt.legend(loc = (1,0), fontsize = AlvaFontSize) plt.savefig(save_figure, dpi = 100) plt.show() # In[5]: # Experimental lab data from OAS paper gT_lab = np.array([28, 28 + 7, 28 + 14, 28 + 28]) + 28 gPR8_lab = np.array([2**(9 + 1.0/10), 2**(13 - 1.0/5), 2**(13 + 1.0/3), 2**(13 - 1.0/4)]) standard_PR8 = gPR8_lab**(3.0/4) gFM1_lab = np.array([0, 2**(6 - 1.0/5), 2**(7 - 1.0/4), 2**(8 + 1.0/4)]) standard_FM1 = gFM1_lab**(3.0/4) bar_width = 2.0 # Sequential immunization graph numberingFig = numberingFig + 1 plt.figure(numberingFig, figsize = (12, 6)) plt.subplot(111) plt.plot(gT, (gM[origin_virus] + gG[origin_virus]), linewidth = 5.0, alpha = 0.5, color = 'gray' , label = r'$ Origin-virus $') plt.plot(gT, (gM[origin_virus + 1] + gG[origin_virus + 1]), linewidth = 5.0, alpha = 0.5, color = 'red' , label = r'$ Subsequence-virus $') plt.bar(gT_lab - bar_width/2, gPR8_lab, bar_width, alpha = 0.6, color = 'gray', yerr = standard_PR8 , error_kw = dict(elinewidth = 1, ecolor = 'black'), label = r'$ PR8-virus $') plt.bar(gT_lab + bar_width/2, gFM1_lab, bar_width, alpha = 0.6, color = 'red', yerr = standard_FM1 , error_kw = dict(elinewidth = 1, ecolor = 'black'), label = r'$ FM1-virus $') plt.grid(True, which = 'both') plt.title(r'$ Original \ Antigenic \ Sin \ (sequential-infection)$', fontsize = AlvaFontSize) plt.xlabel(r'$time \ (%s)$'%(timeUnit), fontsize = AlvaFontSize) plt.ylabel(r'$ Neutralization \ \ titer $', fontsize = AlvaFontSize) plt.xticks(fontsize = AlvaFontSize*0.6) plt.yticks(fontsize = AlvaFontSize*0.6) plt.xlim([minT, 6*30*day]) plt.ylim([2**5, 2**14]) plt.yscale('log', basey = 2) # gca()---GetCurrentAxis and Format the ticklabel to be 2**x plt.gca().yaxis.set_major_formatter(FuncFormatter(lambda x, pos: int(2**(np.log(x)/np.log(2))))) #plt.gca().xaxis.set_major_locator(plt.MultipleLocator(7)) plt.legend(loc = (1, 0), fontsize = AlvaFontSize) plt.show() # In[6]: # Experimental lab data from OAS paper gT_lab = np.array([28, 28 + 7, 28 + 14, 28 + 28]) + 28 gPR8_lab = np.array([2**(9 + 1.0/10), 2**(13 - 1.0/5), 2**(13 + 1.0/3), 2**(13 - 1.0/4)]) standard_PR8 = gPR8_lab**(3.0/4) gFM1_lab = np.array([0, 2**(6 - 1.0/5), 2**(7 
- 1.0/4), 2**(8 + 1.0/4)]) standard_FM1 = gFM1_lab**(3.0/4) bar_width = 1.0 # Sequential immunization graph figure_name = '-Original-Antigenic-Sin-infection' figure_suffix = '.png' save_figure = os.path.join(dir_path, file_name + figure_name + file_suffix) numberingFig = numberingFig + 1 plt.figure(numberingFig, figsize = (12, 6)) plt.subplot(111) plt.plot(gT, (gM[origin_virus] + gG[origin_virus]), linewidth = 5.0, alpha = 0.5, color = 'gray' , label = r'$ Origin-virus $') plt.plot(gT, (gM[origin_virus + 1] + gG[origin_virus + 1]), linewidth = 5.0, alpha = 0.5, color = 'red' , label = r'$ Subsequence-virus $') plt.bar(gT_lab - bar_width/2, gPR8_lab, bar_width, alpha = 0.6, color = 'gray', yerr = standard_PR8 , error_kw = dict(elinewidth = 1, ecolor = 'black'), label = r'$ PR8-virus $') plt.bar(gT_lab + bar_width/2, gFM1_lab, bar_width, alpha = 0.6, color = 'red', yerr = standard_FM1 , error_kw = dict(elinewidth = 1, ecolor = 'black'), label = r'$ FM1-virus $') plt.grid(True, which = 'both') plt.title(r'$ Original \ Antigenic \ Sin \ (sequential-infection)$', fontsize = AlvaFontSize) plt.xlabel(r'$time \ (%s)$'%(timeUnit), fontsize = AlvaFontSize) plt.ylabel(r'$ Neutralization \ \ titer $', fontsize = AlvaFontSize) plt.xticks(fontsize = AlvaFontSize*0.6) plt.yticks(fontsize = AlvaFontSize*0.6) plt.xlim([minT, 3*30*day]) plt.ylim([2**5, 2**14]) plt.yscale('log', basey = 2) # gca()---GetCurrentAxis and Format the ticklabel to be 2**x plt.gca().yaxis.set_major_formatter(FuncFormatter(lambda x, pos: int(2**(np.log(x)/np.log(2))))) plt.gca().xaxis.set_major_locator(plt.MultipleLocator(7)) plt.legend(loc = (1, 0), fontsize = AlvaFontSize) plt.savefig(save_figure, dpi = 100, bbox_inches='tight') plt.show() # In[ ]:
gpl-2.0
6,642,361,420,170,856,000
38.154303
257
0.619856
false
2.478865
false
false
false
lucifurtun/myquotes
apps/search/bible/management/commands/zefania_xml_generator.py
1
2231
import json
from itertools import groupby

import xmltodict
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = 'Generates zefania xml from different formats'

    def handle(self, *args, **options):
        with open('NTR.json') as f:
            data = json.load(f)

        current_b_number = 0
        for item in data:
            if item['chapter'] == 1 and item['verse'] == 1:
                current_b_number += 1
            item['b_number'] = current_b_number

        grouped_books = groupby(data, lambda item: item['b_number'])

        books_list = []
        for book_grouper, chapters in grouped_books:
            chapters_list = []
            grouped_chapters = groupby(chapters, lambda item: item['chapter'])
            for chapter_grouper, verses in grouped_chapters:
                chapters_list.append({
                    'number': chapter_grouper,
                    'items': list(verses)
                })

            books_list.append({
                'title': chapters_list[0]['items'][0]['long_name'],
                'number': int(book_grouper),
                'items': chapters_list
            })

        with open('NTR.xml', 'w+') as f:
            d = {
                'XMLBIBLE': {
                    'BIBLEBOOK': [
                        {
                            '@bnumber': book['number'],
                            '@bname': book['title'],
                            'CHAPTER': [
                                {
                                    '@cnumber': chapter['number'],
                                    'VERS': [
                                        {
                                            '@vnumber': verse['verse'],
                                            '#text': verse['text']
                                        } for verse in chapter['items']
                                    ]
                                } for chapter in book['items']
                            ]
                        } for book in books_list
                    ]
                }
            }

            f.write(xmltodict.unparse(d, pretty=True))

        self.stdout.write(self.style.SUCCESS('Imported!'))
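# A minimal sketch of the dict-to-XML convention relied on above: xmltodict.unparse
# maps '@'-prefixed keys to element attributes and '#text' to element text, e.g.
#
#     xmltodict.unparse({'VERS': {'@vnumber': 1, '#text': 'some verse text'}})
#     # -> '<?xml version="1.0" encoding="utf-8"?>\n<VERS vnumber="1">some verse text</VERS>'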
bsd-3-clause
-6,679,120,634,669,536,000
32.298507
78
0.404303
false
5.013483
false
false
false
vcatechnology/cmake-boilerplate
cmake/pygh/__init__.py
1
25114
#!/usr/bin/env python # -*- coding: utf-8 -*- import re import os import sys import json import errno import platform import fileinput import subprocess from datetime import datetime, timezone try: import requests except ImportError: raise ImportError( 'Failed to import \'requests\', run \'pip install requests\'') try: import pystache except ImportError: raise ImportError( 'Failed to import \'pystache\', run \'pip install pystache\'') class ReleaseError(Exception): def __init__(self, message): self.message = message def __str__(self): return self.message class ExecuteCommandError(Exception): def __init__(self, message, cmd, code, out, err): self.message = message self.cmd = cmd self.code = code self.out = out self.err = err def __str__(self): return self.message class EmptyLogger(object): '''Provides an implementation of an empty logging function''' def debug(self, *k, **kw): pass def info(self, *k, **kw): pass def warn(self, *k, **kw): pass def error(self, *k, **kw): pass def critical(self, *k, **kw): pass def setLevel(self, *k, **kw): pass class Version(object): '''Represents a version number''' def __init__(self, *k, **kw): ''' A version number can be instantiate with: - a dot-separated string - Version('1.2.3') - an iterable - Version([1, 2, 3]) - seperate arguments - `Version(1, 2, 3)` - another version class - `Version(Version(1, 2, 3))` - a dictionary - `Version({'minor':2,'major':1,'patch':3})` - keywords - `Version(minor = 2,major = 1, patch = 3)` ''' try: version = (k[0].major, k[0].minor, k[0].patch) except (AttributeError, TypeError): try: version = (kw['major'], kw['minor'], kw['patch']) except (KeyError, TypeError): try: version = (k[0]['major'], k[0]['minor'], k[0]['patch']) except (KeyError, TypeError): if isinstance(k[0], str): version = k[0].split('.') else: try: version = (k[0][0], k[0][1], k[0][2]) except (IndexError, TypeError): version = k self.major = int(version[0]) self.minor = int(version[1]) self.patch = int(version[2]) def bump(self, category): ''' Bumps the version number depending on the category ''' setattr(self, category, getattr(self, category) + 1) if category == 'major': self.minor = 0 self.patch = 0 elif category == 'minor': self.patch = 0 def __gt__(self, other): return tuple(self) > tuple(other) def __ge__(self, other): return tuple(self) >= tuple(other) def __lt__(self, other): return tuple(self) < tuple(other) def __le__(self, other): return tuple(self) <= tuple(other) def __eq__(self, other): return tuple(self) == tuple(other) def __ne__(self, other): return tuple(self) != tuple(other) def __getitem__(self, index): ''' Allows iteration of the version number ''' if index == 0: return self.major elif index == 1: return self.minor elif index == 2: return self.patch else: raise IndexError('version index out of range') def __repr__(self): ''' Provides a dot-separated string representation of the version number ''' return '%i.%i.%i' % (self.major, self.minor, self.patch) class GitVersion(Version): '''A git repository version number''' def __init__(self, *k, **kw): ''' A git version number can be instantiate with: - a dot-separated string - Version('1.2.3.ef3aa43d-dirty') - an iterable - Version([1, 2, 3, 'ef3aa43d', True]) - seperate arguments - `Version(1, 2, 3, 'ef3aa43d', True)` - another version class - `Version(Version(1, 2, 3, 'ef3aa43d', True))` - a dictionary - `Version({'minor':2,'major':1,'patch':3, 'commit': 'ef3aa43d', 'dirty', True})` - keywords - `Version(minor = 2,major = 1, patch = 3, commit ='ef3aa43d', dirty =True)` ''' super(GitVersion, 
self).__init__(*k, **kw) try: version = (k[0].commit, k[0].dirty) except (AttributeError, TypeError): try: version = (kw['commit'], kw['dirty']) except (KeyError, TypeError): try: version = (k[0]['commit'], k[0]['dirty']) except (KeyError, TypeError): if isinstance(k[0], str): version = k[0].split('.')[3] else: try: version = (k[0][3], k[0][4]) except (IndexError, TypeError): version = k[3:] self.commit = str(version[0]) try: self.dirty = bool(version[1]) except: try: split = self.commit.split('-') self.dirty = (split[1] == 'dirty') self.commit = split[0] except: self.dirty = False try: int(self.commit, 16) except ValueError: raise ValueError('The git commit string is not hexidecimal: %s' % self.commit) def __repr__(self): ''' Provides a dot-separated string representation of the version number ''' string = '%s.%s' % (super(GitVersion, self).__repr__(), self.commit[:8]) if self.dirty: string += '-dirty' return string def find_exe_in_path(filename, path=None): ''' Finds an executable in the PATH environment variable ''' if platform.system() == 'Windows': filename += '.exe' if path is None: path = os.environ.get('PATH', '') if type(path) is type(''): pathlist = path.split(os.pathsep) return list(filter(os.path.exists, map(lambda dir, filename=filename: os.path.join(dir, filename), pathlist))) def execute_command(cmd, error_message='Failed to run external program', expected=0, cwd=os.getcwd()): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, cwd=cwd) (out, err) = p.communicate() if expected != None and p.returncode != expected: raise ExecuteCommandError(error_message, cmd, p.returncode, out, err) return (p.returncode, out, err) def close_milestone(number, repo, token, logger=EmptyLogger()): logger.debug('Closing milestone #%d for %s' % (number, repo)) number = int(number) r = requests.patch('https://api.github.com/repos/%s/milestones/%d' % (repo, number), params={ 'access_token': token, }, json={ 'state': 'closed', }) if r.status_code != 200: json = r.json() message = json['message'] errors = json.get('errors', []) for e in errors: message += '\n - %s: %s: %s' % (e.get('resource', 'unknown'), e.get('field', 'unknown'), e.get('code', 'unknown')) raise ReleaseError('Failed to close github milestone #%d: %s' % (number, message)) logger.info('Closed milestone #%d' % number) return r.json() def get_milestones(repo, token, logger=EmptyLogger()): logger.debug('Retrieving milestones for %s' % repo) r = requests.get('https://api.github.com/repos/%s/milestones' % repo, params={ 'access_token': token, }) if r.status_code != 200: raise ReleaseError('Failed to retrieve github milestones from %s: %s' % (repo, r.json()['message'])) return r.json() def get_git_tag_version(path, git_executable=find_exe_in_path('git'), logger=EmptyLogger()): if isinstance(git_executable, list): git_executable = git_executable[0] logger.debug('Getting latest git tag version') # Get the head commit cmd = [git_executable, 'rev-parse', 'HEAD'] _, out, _ = execute_command(cmd, 'Failed to get HEAD revision of repository', cwd=path) commit = out.split('\n')[0].strip() if commit == 'HEAD' or not commit: commit = '0000000000000000000000000000000000000000' # Check if dirty dirty = False cmd = [git_executable, 'diff-index', '--name-only', 'HEAD'] if execute_command( cmd, 'Failed to check if the project had local modifications', cwd=path)[1]: dirty = True cmd = [git_executable, 'status', '--porcelain'] if '?? 
' in execute_command( cmd, 'Failed to check if the project had local modifications', cwd=path)[1]: dirty = True # Find the latest tag cmd = [git_executable, 'describe', '--match=v[0-9]*', 'HEAD'] code, out, _ = execute_command(cmd, expected=None, cwd=path) if code: return GitVersion(0, 0, 0, commit, dirty) # Parse the tag re_tag = re.compile('^v([0-9]+)\.([0-9]+)\.([0-9]+)(-[0-9]+-g[a-f0-9]+)?') matches = re_tag.match(out) major = int(matches.group(1)) minor = int(matches.group(2)) revision = int(matches.group(3)) version = GitVersion(major, minor, revision, commit, dirty) logger.info('Latest git tag version %s' % version) return version re_remote_fetch_url = re.compile( r'Fetch URL: (?:(?:(git)(?:@))|(?:(https)(?:://)))([^:/]+)[:/]([^/]+/[^.]+)(?:\.git)?') def get_repo(path=os.getcwd(), git_executable=find_exe_in_path('git')): if isinstance(git_executable, list): git_executable = git_executable[0] cmd = [git_executable, 'remote', 'show', '-n', 'origin'] code, out, err = execute_command( cmd, 'Failed to get repository remote information', cwd=path) match = re_remote_fetch_url.search(out) if not match: raise ExecuteCommandError('Failed to match fetch url', cmd, code, out, err) protocol = match.group(1) or match.group(2) server = match.group(3) if server != 'github.com': raise ExecuteCommandError('Repository is not from github', cmd, code, out, err) repo = match.group(4) return repo def get_git_version(git_executable=find_exe_in_path('git'), logger=EmptyLogger()): if isinstance(git_executable, list): git_executable = git_executable[0] logger.debug('Getting git version') _, out, _ = execute_command([git_executable, '--version']) git_version = Version(out.replace('git version ', '')) logger.debug('Using git %s' % git_version) return git_version changelog_template = \ '## [v{{version.to}}](https://github.com/{{repo}}/tree/v{{version.to}}) ({{date}})\n' \ '{{#version.from}}' \ '[Full Changelog](https://github.com/{{repo}}/compare/v{{version.from}}...v{{version.to}})' \ '{{/version.from}}' \ '{{#milestone}}' \ '{{#version.from}} {{/version.from}}' \ '[Milestone]({{html_url}})' \ '{{/milestone}}\n' \ '\n' \ '{{description}}\n' \ '\n' \ '**Closed issues:**\n' \ '{{#issues}}\n' \ '\n' \ ' - {{title}} [\#{{number}}]({{html_url}})\n' \ '{{/issues}}\n' \ '{{^issues}}\n' \ '\n' \ '_None_\n' \ '{{/issues}}\n' \ '\n' \ '**Merged pull requests:**\n' \ '{{#pullrequests}}\n' \ '\n' \ ' - {{title}} [\#{{number}}]({{pull_request.html_url}})\n' \ '{{/pullrequests}}\n' \ '{{^pullrequests}}\n' \ '\n' \ '_None_\n' \ '{{/pullrequests}}\n' def get_closed_issues(repo, token=os.environ.get('GITHUB_TOKEN', None), since=None, logger=EmptyLogger()): logger.debug('Getting issues for %s' % (repo)) if not token: raise ReleaseError('Must provide a valid GitHub API token') issues = [] params = {'state': 'closed', 'sort': 'asc', 'access_token': token, } if since: since = since.astimezone(timezone.utc) params['since'] = since.isoformat()[:19] + 'Z' r = requests.get('https://api.github.com/repos/%s/issues' % repo, params=params) if r.status_code != 200: raise ReleaseError('Failed to retrieve github issues from %s: %s' % (repo, r.json()['message'])) issues = r.json() logger.debug('Retrieved %i closed issues for %s' % (len(issues), repo)) return issues def create_changelog(current_version, previous_version, repo, milestone=None, token=os.environ.get('GITHUB_TOKEN', None), description=None, since=None, date=datetime.utcnow(), template=changelog_template, logger=EmptyLogger()): logger.debug('Creating changelog for %s from %s' % 
(current_version, repo)) description = description or 'The v%s release of %s' % (current_version, repo.split('/')[1]) issues = get_closed_issues(repo=repo, token=token, since=since, logger=logger) if milestone: milestone[ 'html_url'] = 'https://github.com/%s/issues?q=milestone%%3Av%s+is%%3Aall' % ( repo, current_version) data = { 'version': { 'from': str(previous_version) if previous_version > (0, 0, 0) else None, 'to': str(current_version), }, 'milestone': milestone, 'date': date.isoformat()[:10], 'repo': repo, 'description': description, 'issues': [i for i in issues if not i.get('pull_request', None)], 'pullrequests': [i for i in issues if i.get('pull_request', None)], } renderer = pystache.Renderer() parsed = pystache.parse(template) changelog = renderer.render(parsed, data) logger.info('Rendered changelog') return changelog def write_version(path, version, logger=EmptyLogger()): if not isinstance(version, Version): raise ValueError('must provide a version class') version = Version(version) with open(path, 'w') as f: f.write('%s' % version) logger.info('Wrote %s' % os.path.basename(path)) def write_changelog(path, changelog, logger=EmptyLogger()): try: for line in fileinput.input(path, inplace=True): sys.stdout.write(line) if line.startswith('# Changelog'): print() sys.stdout.write(changelog) logger.info('Updated %s' % os.path.basename(path)) except EnvironmentError as e: if e.errno == errno.ENOENT: with open(path, 'w') as f: f.write('# Changelog\n\n') f.write(changelog) logger.info('Created %s' % os.path.basename(path)) else: raise def get_git_root(path, git_executable=find_exe_in_path('git')): abspath = os.path.abspath(path) if os.path.isfile(abspath): abspath = os.path.dirname(abspath) cmd = [git_executable, 'rev-parse', '--show-toplevel'] _, out, _ = execute_command(cmd, 'Failed to find root of repository', cwd=abspath) return out.strip() def commit_file(path, message, git_executable=find_exe_in_path('git'), logger=EmptyLogger()): if isinstance(git_executable, list): git_executable = git_executable[0] logger.debug('Commiting %s' % path) cwd = get_git_root(path, git_executable=git_executable) path = os.path.relpath(path, cwd) cmd = [git_executable, 'add', path] execute_command(cmd, 'Failed to add file %s' % path, cwd=cwd) cmd = [git_executable, 'commit', '-m', message] execute_command(cmd, 'Failed to commit file %s' % path, cwd=cwd) logger.info('Committed %s' % path) def get_tag_date(tag, path=os.getcwd(), git_executable=find_exe_in_path('git')): if isinstance(git_executable, list): git_executable = git_executable[0] cwd = get_git_root(path, git_executable=git_executable) cmd = [git_executable, 'log', '-1', '--format=%ai', tag] _, out, _ = execute_command(cmd, 'Failed to get tag date: %s' % tag, cwd=cwd) out = out.strip() return datetime.strptime(out, '%Y-%m-%d %H:%M:%S %z') def create_git_version_tag(version, message=None, path=os.getcwd(), git_executable=find_exe_in_path('git'), logger=EmptyLogger()): if isinstance(git_executable, list): git_executable = git_executable[0] if not isinstance(version, Version): raise ValueError('must provide a version class') version = Version(version) logger.debug('Tagging %s' % version) message = message or 'The v%s release of the project' % version cwd = get_git_root(path, git_executable=git_executable) cmd = [git_executable, 'tag', '-a', 'v%s' % version, '-m', message] execute_command(cmd, 'Failed to create version tag %s' % version, cwd=cwd) logger.info('Tagged %s' % version) def create_release(repo, version, description, 
token=os.environ.get('GITHUB_TOKEN', None), files=[], path=os.getcwd(), git_executable=find_exe_in_path('git'), logger=EmptyLogger()): if isinstance(git_executable, list): git_executable = git_executable[0] if not isinstance(version, Version): raise ValueError('must provide a version class') logger.debug('Creating github release %s' % version) r = requests.post('https://api.github.com/repos/%s/releases' % repo, params={ 'access_token': token, }, json={ 'tag_name': 'v%s' % version, 'name': str(version), 'body': description, }) if r.status_code != 201: json = r.json() message = json['message'] errors = json.get('errors', []) for e in errors: message += '\n - %s: %s: %s' % (e.get('resource', 'unknown'), e.get('field', 'unknown'), e.get('code', 'unknown')) raise ReleaseError('Failed to create github release %s: %s' % (repo, message)) logger.info('Created GitHub release') def release(category='patch', path=os.getcwd(), git_executable=find_exe_in_path('git'), token=os.environ.get('GITHUB_TOKEN', None), repo=None, date=datetime.utcnow(), description=None, changelog='CHANGELOG.md', version='VERSION', template=changelog_template, logger=EmptyLogger(), hooks={}): ''' Performs the release of a repository on GitHub. ''' if isinstance(git_executable, list): git_executable = git_executable[0] logger.debug('Starting %r release' % category) git_version = get_git_version(git_executable=git_executable, logger=logger) if git_version < (1, 0, 0): raise ReleaseError('The version of git is too old %s' % git_version) previous_version = get_git_tag_version(path=path, git_executable=git_executable, logger=logger) if previous_version.dirty: raise ReleaseError( 'Cannot release a dirty repository. Make sure all files are committed') current_version = Version(previous_version) previous_version = Version(current_version) current_version.bump(category) logger.debug('Previous version %r' % previous_version) logger.debug('Bumped version %r' % current_version) repo = repo or get_repo(path=path, git_executable=git_executable) description = description or 'The v%s release of %s' % (current_version, repo.split('/')[1]) milestones = get_milestones(repo=repo, token=token, logger=logger) try: milestone = [ m for m in milestones if m['title'] == ('v%s' % current_version) and m['state'] == 'open' ][0] open_issues = milestone['open_issues'] if open_issues: raise ReleaseError('The v%s milestone has %d open issues' % (current_version, open_issues)) except IndexError: milestone = None try: previous_date = get_tag_date('v%s' % previous_version, path=path, git_executable=git_executable) except ExecuteCommandError: previous_date = None changelog_data = create_changelog(description=description, repo=repo, date=date, token=token, current_version=current_version, previous_version=previous_version, template=template, since=previous_date, logger=logger, milestone=milestone) changelog_data = hooks.get('changelog', lambda d: d)(changelog_data) write_changelog(path=os.path.join(path, changelog), changelog=changelog_data, logger=logger) commit_file(changelog, 'Updated changelog for v%s' % current_version, git_executable=git_executable, logger=logger) write_version(path=os.path.join(path, version), version=current_version, logger=logger) commit_file(version, 'Updated version to v%s' % current_version, git_executable=git_executable, logger=logger) create_git_version_tag(current_version, message=description, path=path, git_executable=git_executable, logger=logger) logger.debug('Pushing branch to remote') cwd = get_git_root(path, 
git_executable=git_executable) cmd = [git_executable, 'push'] execute_command(cmd, 'Failed to push to remote', cwd=cwd) logger.info('Pushed branch to remote') logger.debug('Pushing tags to remote') cwd = get_git_root(path, git_executable=git_executable) cmd = [git_executable, 'push', '--tags'] execute_command(cmd, 'Failed to push tags to remote', cwd=cwd) logger.info('Pushed tags to remote') files = [] create_release(path=path, version=current_version, description=changelog_data, git_executable=git_executable, repo=repo, logger=logger, files=files, token=token) if milestone: close_milestone(number=milestone['number'], repo=repo, token=token, logger=logger) logger.info('Released %s' % current_version)
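# A short usage sketch of the version classes defined above, following the constructor
# forms listed in their docstrings (the commit hash here is purely illustrative):
#
#     v = Version('1.2.3')                        # from a dot-separated string
#     v.bump('minor')                             # 1.2.3 -> 1.3.0 (patch resets to 0)
#     str(v)                                      # '1.3.0'
#     str(GitVersion(1, 3, 0, 'ef3aa43d', True))  # '1.3.0.ef3aa43d-dirty'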
bsd-3-clause
-6,535,361,961,749,887,000
34.026499
114
0.510472
false
4.129913
false
false
false
rven/odoo
addons/pad/models/pad.py
1
5592
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

import logging
import random
import re
import string

import requests

from odoo import api, models, _
from odoo.exceptions import UserError

from ..py_etherpad import EtherpadLiteClient

_logger = logging.getLogger(__name__)


class PadCommon(models.AbstractModel):
    _name = 'pad.common'
    _description = 'Pad Common'

    def _valid_field_parameter(self, field, name):
        return name == 'pad_content_field' or super()._valid_field_parameter(field, name)

    @api.model
    def pad_is_configured(self):
        return bool(self.env.company.pad_server)

    @api.model
    def pad_generate_url(self):
        company = self.env.company.sudo()

        pad = {
            "server": company.pad_server,
            "key": company.pad_key,
        }

        # make sure pad server in the form of http://hostname
        if not pad["server"]:
            return pad
        if not pad["server"].startswith('http'):
            pad["server"] = 'http://' + pad["server"]
        pad["server"] = pad["server"].rstrip('/')

        # generate a salt
        s = string.ascii_uppercase + string.digits
        salt = ''.join([s[random.SystemRandom().randint(0, len(s) - 1)] for i in range(10)])

        # path
        # etherpad hardcodes pad id length limit to 50
        path = '-%s-%s' % (self._name, salt)
        path = '%s%s' % (self.env.cr.dbname.replace('_', '-')[0:50 - len(path)], path)

        # construct the url
        url = '%s/p/%s' % (pad["server"], path)

        # if create with content
        if self.env.context.get('field_name') and self.env.context.get('model'):
            myPad = EtherpadLiteClient(pad["key"], pad["server"] + '/api')
            try:
                myPad.createPad(path)
            except IOError:
                raise UserError(_("Pad creation failed, either there is a problem with your pad server URL or with your connection."))

            # get attr on the field model
            model = self.env[self.env.context["model"]]
            field = model._fields[self.env.context['field_name']]
            real_field = field.pad_content_field

            res_id = self.env.context.get("object_id")
            record = model.browse(res_id)
            # get content of the real field
            real_field_value = record[real_field] or self.env.context.get('record', {}).get(real_field, '')
            if real_field_value:
                myPad.setHtmlFallbackText(path, real_field_value)

        return {
            "server": pad["server"],
            "path": path,
            "url": url,
        }

    @api.model
    def pad_get_content(self, url):
        company = self.env.company.sudo()
        myPad = EtherpadLiteClient(company.pad_key, (company.pad_server or '') + '/api')
        content = ''
        if url:
            split_url = url.split('/p/')
            path = len(split_url) == 2 and split_url[1]
            try:
                content = myPad.getHtml(path).get('html', '')
            except IOError:
                _logger.warning('Http Error: the credentials might be absent for url: "%s". Falling back.' % url)
                try:
                    r = requests.get('%s/export/html' % url)
                    r.raise_for_status()
                except Exception:
                    _logger.warning("No pad found with url '%s'.", url)
                else:
                    mo = re.search('<body>(.*)</body>', r.content.decode(), re.DOTALL)
                    if mo:
                        content = mo.group(1)
        return content

    # TODO
    # reverse engineer protocol to be setHtml without using the api key

    def write(self, vals):
        self._set_field_to_pad(vals)
        self._set_pad_to_field(vals)
        return super(PadCommon, self).write(vals)

    @api.model
    def create(self, vals):
        # Case of a regular creation: we receive the pad url, so we need to update the
        # corresponding field
        self._set_pad_to_field(vals)
        pad = super(PadCommon, self).create(vals)

        # Case of a programmatical creation (e.g. copy): we receive the field content, so we need
        # to create the corresponding pad
        if self.env.context.get('pad_no_create', False):
            return pad
        for k, field in self._fields.items():
            if hasattr(field, 'pad_content_field') and k not in vals:
                ctx = {
                    'model': self._name,
                    'field_name': k,
                    'object_id': pad.id,
                }
                pad_info = self.with_context(**ctx).pad_generate_url()
                pad[k] = pad_info.get('url')
        return pad

    def _set_field_to_pad(self, vals):
        # Update the pad if the `pad_content_field` is modified
        for k, field in self._fields.items():
            if hasattr(field, 'pad_content_field') and vals.get(field.pad_content_field) and self[k]:
                company = self.env.user.sudo().company_id
                myPad = EtherpadLiteClient(company.pad_key, (company.pad_server or '') + '/api')
                path = self[k].split('/p/')[1]
                myPad.setHtmlFallbackText(path, vals[field.pad_content_field])

    def _set_pad_to_field(self, vals):
        # Update the `pad_content_field` if the pad is modified
        for k, v in list(vals.items()):
            field = self._fields.get(k)
            if hasattr(field, 'pad_content_field'):
                vals[field.pad_content_field] = self.pad_get_content(v)
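# A minimal sketch (hypothetical model, for illustration only) of how a model opts into
# pads: inherit pad.common and declare a Char field holding the pad URL whose
# pad_content_field names the HTML field that should mirror the pad's content.
#
#     class Task(models.Model):
#         _name = 'my.task'
#         _inherit = ['pad.common']
#
#         description = fields.Html()
#         description_pad = fields.Char(string='Pad URL', pad_content_field='description')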
agpl-3.0
-3,819,001,917,685,144,000
36.530201
134
0.552933
false
3.86722
false
false
false
Azure/azure-sdk-for-python
sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/_generated/v2016_10_01/aio/operations/_key_vault_client_operations.py
1
230715
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class KeyVaultClientOperationsMixin: async def create_key( self, vault_base_url: str, key_name: str, parameters: "_models.KeyCreateParameters", **kwargs: Any ) -> "_models.KeyBundle": """Creates a new key, stores it, then returns key parameters and attributes to the client. The create key operation can be used to create any key type in Azure Key Vault. If the named key already exists, Azure Key Vault creates a new version of the key. It requires the keys/create permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name for the new key. The system will generate the version name for the new key. :type key_name: str :param parameters: The parameters to create a key. 
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: KeyBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.create_key.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'KeyCreateParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('KeyBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_key.metadata = {'url': '/keys/{key-name}/create'} # type: ignore async def import_key( self, vault_base_url: str, key_name: str, parameters: "_models.KeyImportParameters", **kwargs: Any ) -> "_models.KeyBundle": """Imports an externally created key, stores it, and returns key parameters and attributes to the client. The import key operation may be used to import any key type into an Azure Key Vault. If the named key already exists, Azure Key Vault creates a new version of the key. This operation requires the keys/import permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: Name for the imported key. :type key_name: str :param parameters: The parameters to import a key. 
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyImportParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: KeyBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.import_key.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'KeyImportParameters') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('KeyBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized import_key.metadata = {'url': '/keys/{key-name}'} # type: ignore async def delete_key( self, vault_base_url: str, key_name: str, **kwargs: Any ) -> "_models.DeletedKeyBundle": """Deletes a key of any type from storage in Azure Key Vault. The delete key operation cannot be used to remove individual versions of a key. This operation removes the cryptographic material associated with the key, which means the key is not usable for Sign/Verify, Wrap/Unwrap or Encrypt/Decrypt operations. This operation requires the keys/delete permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name of the key to delete. 
:type key_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DeletedKeyBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.DeletedKeyBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedKeyBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.delete_key.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('DeletedKeyBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_key.metadata = {'url': '/keys/{key-name}'} # type: ignore async def update_key( self, vault_base_url: str, key_name: str, key_version: str, parameters: "_models.KeyUpdateParameters", **kwargs: Any ) -> "_models.KeyBundle": """The update key operation changes specified attributes of a stored key and can be applied to any key type and key version stored in Azure Key Vault. In order to perform this operation, the key must already exist in the Key Vault. Note: The cryptographic material of a key itself cannot be changed. This operation requires the keys/update permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name of key to update. :type key_name: str :param key_version: The version of the key to update. :type key_version: str :param parameters: The parameters of the key to update. 
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyUpdateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: KeyBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update_key.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), 'key-version': self._serialize.url("key_version", key_version, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'KeyUpdateParameters') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('KeyBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_key.metadata = {'url': '/keys/{key-name}/{key-version}'} # type: ignore async def get_key( self, vault_base_url: str, key_name: str, key_version: str, **kwargs: Any ) -> "_models.KeyBundle": """Gets the public part of a stored key. The get key operation is applicable to all key types. If the requested key is symmetric, then no key material is released in the response. This operation requires the keys/get permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name of the key to get. :type key_name: str :param key_version: Adding the version parameter retrieves a specific version of a key. 
:type key_version: str :keyword callable cls: A custom type or function that will be passed the direct response :return: KeyBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.get_key.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), 'key-version': self._serialize.url("key_version", key_version, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('KeyBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_key.metadata = {'url': '/keys/{key-name}/{key-version}'} # type: ignore def get_key_versions( self, vault_base_url: str, key_name: str, maxresults: Optional[int] = None, **kwargs: Any ) -> AsyncIterable["_models.KeyListResult"]: """Retrieves a list of individual key versions with the same key name. The full key identifier, attributes, and tags are provided in the response. This operation requires the keys/list permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name of the key. :type key_name: str :param maxresults: Maximum number of results to return in a page. If not specified the service will return up to 25 results. 
:type maxresults: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either KeyListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.KeyListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.get_key_versions.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if maxresults is not None: query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1) query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('KeyListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) return pipeline_response return AsyncItemPaged( get_next, extract_data ) get_key_versions.metadata = {'url': '/keys/{key-name}/versions'} # type: ignore def get_keys( self, vault_base_url: str, maxresults: Optional[int] = None, **kwargs: Any ) -> AsyncIterable["_models.KeyListResult"]: """List keys in the specified vault. Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the public part of a stored key. The LIST operation is applicable to all key types, however only the base key identifier, attributes, and tags are provided in the response. Individual versions of a key are not listed in the response. This operation requires the keys/list permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param maxresults: Maximum number of results to return in a page. If not specified the service will return up to 25 results. 
:type maxresults: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either KeyListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.KeyListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.get_keys.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if maxresults is not None: query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1) query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('KeyListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) return pipeline_response return AsyncItemPaged( get_next, extract_data ) get_keys.metadata = {'url': '/keys'} # type: ignore async def backup_key( self, vault_base_url: str, key_name: str, **kwargs: Any ) -> "_models.BackupKeyResult": """Requests that a backup of the specified key be downloaded to the client. The Key Backup operation exports a key from Azure Key Vault in a protected form. Note that this operation does NOT return key material in a form that can be used outside the Azure Key Vault system, the returned key material is either protected to a Azure Key Vault HSM or to Azure Key Vault itself. The intent of this operation is to allow a client to GENERATE a key in one Azure Key Vault instance, BACKUP the key, and then RESTORE it into another Azure Key Vault instance. The BACKUP operation may be used to export, in protected form, any key type from Azure Key Vault. Individual versions of a key cannot be backed up. 
BACKUP / RESTORE can be performed within geographical boundaries only; meaning that a BACKUP from one geographical area cannot be restored to another geographical area. For example, a backup from the US geographical area cannot be restored in an EU geographical area. This operation requires the key/backup permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name of the key. :type key_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: BackupKeyResult, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.BackupKeyResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.BackupKeyResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.backup_key.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('BackupKeyResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized backup_key.metadata = {'url': '/keys/{key-name}/backup'} # type: ignore async def restore_key( self, vault_base_url: str, parameters: "_models.KeyRestoreParameters", **kwargs: Any ) -> "_models.KeyBundle": """Restores a backed up key to a vault. Imports a previously backed up key into Azure Key Vault, restoring the key, its key identifier, attributes and access control policies. The RESTORE operation may be used to import a previously backed up key. Individual versions of a key cannot be restored. The key is restored in its entirety with the same key name as it had when it was backed up. If the key name is not available in the target Key Vault, the RESTORE operation will be rejected. While the key name is retained during restore, the final key identifier will change if the key is restored to a different vault. Restore will restore all versions and preserve version identifiers. The RESTORE operation is subject to security constraints: The target Key Vault must be owned by the same Microsoft Azure Subscription as the source Key Vault The user must have RESTORE permission in the target Key Vault. This operation requires the keys/restore permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. 
:type vault_base_url: str :param parameters: The parameters to restore the key. :type parameters: ~azure.keyvault.v2016_10_01.models.KeyRestoreParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: KeyBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.restore_key.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'KeyRestoreParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('KeyBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized restore_key.metadata = {'url': '/keys/restore'} # type: ignore async def encrypt( self, vault_base_url: str, key_name: str, key_version: str, parameters: "_models.KeyOperationsParameters", **kwargs: Any ) -> "_models.KeyOperationResult": """Encrypts an arbitrary sequence of bytes using an encryption key that is stored in a key vault. The ENCRYPT operation encrypts an arbitrary sequence of bytes using an encryption key that is stored in Azure Key Vault. Note that the ENCRYPT operation only supports a single block of data, the size of which is dependent on the target key and the encryption algorithm to be used. The ENCRYPT operation is only strictly necessary for symmetric keys stored in Azure Key Vault since protection with an asymmetric key can be performed using public portion of the key. This operation is supported for asymmetric keys as a convenience for callers that have a key-reference but do not have access to the public key material. This operation requires the keys/encrypt permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name of the key. :type key_name: str :param key_version: The version of the key. :type key_version: str :param parameters: The parameters for the encryption operation. 
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyOperationsParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: KeyOperationResult, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyOperationResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.encrypt.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), 'key-version': self._serialize.url("key_version", key_version, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'KeyOperationsParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('KeyOperationResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized encrypt.metadata = {'url': '/keys/{key-name}/{key-version}/encrypt'} # type: ignore async def decrypt( self, vault_base_url: str, key_name: str, key_version: str, parameters: "_models.KeyOperationsParameters", **kwargs: Any ) -> "_models.KeyOperationResult": """Decrypts a single block of encrypted data. The DECRYPT operation decrypts a well-formed block of ciphertext using the target encryption key and specified algorithm. This operation is the reverse of the ENCRYPT operation; only a single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm to be used. The DECRYPT operation applies to asymmetric and symmetric keys stored in Azure Key Vault since it uses the private portion of the key. This operation requires the keys/decrypt permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name of the key. :type key_name: str :param key_version: The version of the key. :type key_version: str :param parameters: The parameters for the decryption operation. 
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyOperationsParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: KeyOperationResult, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyOperationResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.decrypt.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), 'key-version': self._serialize.url("key_version", key_version, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'KeyOperationsParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('KeyOperationResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized decrypt.metadata = {'url': '/keys/{key-name}/{key-version}/decrypt'} # type: ignore async def sign( self, vault_base_url: str, key_name: str, key_version: str, parameters: "_models.KeySignParameters", **kwargs: Any ) -> "_models.KeyOperationResult": """Creates a signature from a digest using the specified key. The SIGN operation is applicable to asymmetric and symmetric keys stored in Azure Key Vault since this operation uses the private portion of the key. This operation requires the keys/sign permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name of the key. :type key_name: str :param key_version: The version of the key. :type key_version: str :param parameters: The parameters for the signing operation. 
:type parameters: ~azure.keyvault.v2016_10_01.models.KeySignParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: KeyOperationResult, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyOperationResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.sign.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), 'key-version': self._serialize.url("key_version", key_version, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'KeySignParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('KeyOperationResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized sign.metadata = {'url': '/keys/{key-name}/{key-version}/sign'} # type: ignore async def verify( self, vault_base_url: str, key_name: str, key_version: str, parameters: "_models.KeyVerifyParameters", **kwargs: Any ) -> "_models.KeyVerifyResult": """Verifies a signature using a specified key. The VERIFY operation is applicable to symmetric keys stored in Azure Key Vault. VERIFY is not strictly necessary for asymmetric keys stored in Azure Key Vault since signature verification can be performed using the public portion of the key but this operation is supported as a convenience for callers that only have a key-reference and not the public portion of the key. This operation requires the keys/verify permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name of the key. :type key_name: str :param key_version: The version of the key. :type key_version: str :param parameters: The parameters for verify operations. 
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyVerifyParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: KeyVerifyResult, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.KeyVerifyResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyVerifyResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.verify.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), 'key-version': self._serialize.url("key_version", key_version, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'KeyVerifyParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('KeyVerifyResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized verify.metadata = {'url': '/keys/{key-name}/{key-version}/verify'} # type: ignore async def wrap_key( self, vault_base_url: str, key_name: str, key_version: str, parameters: "_models.KeyOperationsParameters", **kwargs: Any ) -> "_models.KeyOperationResult": """Wraps a symmetric key using a specified key. The WRAP operation supports encryption of a symmetric key using a key encryption key that has previously been stored in an Azure Key Vault. The WRAP operation is only strictly necessary for symmetric keys stored in Azure Key Vault since protection with an asymmetric key can be performed using the public portion of the key. This operation is supported for asymmetric keys as a convenience for callers that have a key-reference but do not have access to the public key material. This operation requires the keys/wrapKey permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name of the key. :type key_name: str :param key_version: The version of the key. :type key_version: str :param parameters: The parameters for wrap operation. 
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyOperationsParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: KeyOperationResult, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyOperationResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.wrap_key.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), 'key-version': self._serialize.url("key_version", key_version, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'KeyOperationsParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('KeyOperationResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized wrap_key.metadata = {'url': '/keys/{key-name}/{key-version}/wrapkey'} # type: ignore async def unwrap_key( self, vault_base_url: str, key_name: str, key_version: str, parameters: "_models.KeyOperationsParameters", **kwargs: Any ) -> "_models.KeyOperationResult": """Unwraps a symmetric key using the specified key that was initially used for wrapping that key. The UNWRAP operation supports decryption of a symmetric key using the target key encryption key. This operation is the reverse of the WRAP operation. The UNWRAP operation applies to asymmetric and symmetric keys stored in Azure Key Vault since it uses the private portion of the key. This operation requires the keys/unwrapKey permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name of the key. :type key_name: str :param key_version: The version of the key. :type key_version: str :param parameters: The parameters for the key operation. 
:type parameters: ~azure.keyvault.v2016_10_01.models.KeyOperationsParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: KeyOperationResult, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.KeyOperationResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyOperationResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.unwrap_key.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), 'key-version': self._serialize.url("key_version", key_version, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'KeyOperationsParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('KeyOperationResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized unwrap_key.metadata = {'url': '/keys/{key-name}/{key-version}/unwrapkey'} # type: ignore def get_deleted_keys( self, vault_base_url: str, maxresults: Optional[int] = None, **kwargs: Any ) -> AsyncIterable["_models.DeletedKeyListResult"]: """Lists the deleted keys in the specified vault. Retrieves a list of the keys in the Key Vault as JSON Web Key structures that contain the public part of a deleted key. This operation includes deletion-specific information. The Get Deleted Keys operation is applicable for vaults enabled for soft-delete. While the operation can be invoked on any vault, it will return an error if invoked on a non soft-delete enabled vault. This operation requires the keys/list permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param maxresults: Maximum number of results to return in a page. If not specified the service will return up to 25 results. 
:type maxresults: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DeletedKeyListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.DeletedKeyListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedKeyListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.get_deleted_keys.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if maxresults is not None: query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1) query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('DeletedKeyListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) return pipeline_response return AsyncItemPaged( get_next, extract_data ) get_deleted_keys.metadata = {'url': '/deletedkeys'} # type: ignore async def get_deleted_key( self, vault_base_url: str, key_name: str, **kwargs: Any ) -> "_models.DeletedKeyBundle": """Gets the public part of a deleted key. The Get Deleted Key operation is applicable for soft-delete enabled vaults. While the operation can be invoked on any vault, it will return an error if invoked on a non soft-delete enabled vault. This operation requires the keys/get permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name of the key. 
:type key_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DeletedKeyBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.DeletedKeyBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedKeyBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.get_deleted_key.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('DeletedKeyBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_deleted_key.metadata = {'url': '/deletedkeys/{key-name}'} # type: ignore async def purge_deleted_key( self, vault_base_url: str, key_name: str, **kwargs: Any ) -> None: """Permanently deletes the specified key. The Purge Deleted Key operation is applicable for soft-delete enabled vaults. While the operation can be invoked on any vault, it will return an error if invoked on a non soft-delete enabled vault. This operation requires the keys/purge permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param key_name: The name of the key. 
:type key_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        accept = "application/json"

        # Construct URL
        url = self.purge_deleted_key.metadata['url']  # type: ignore
        path_format_arguments = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
            'key-name': self._serialize.url("key_name", key_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            raise HttpResponseError(response=response, model=error)

        if cls:
            return cls(pipeline_response, None, {})

    purge_deleted_key.metadata = {'url': '/deletedkeys/{key-name}'}  # type: ignore

    async def recover_deleted_key(
        self,
        vault_base_url: str,
        key_name: str,
        **kwargs: Any
    ) -> "_models.KeyBundle":
        """Recovers the deleted key to its latest version.

        The Recover Deleted Key operation is applicable for deleted keys in soft-delete enabled
        vaults. It recovers the deleted key back to its latest version under /keys. An attempt to
        recover a non-deleted key will return an error. Consider this the inverse of the delete
        operation on soft-delete enabled vaults. This operation requires the keys/recover permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param key_name: The name of the deleted key.
:type key_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: KeyBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.KeyBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.KeyBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.recover_deleted_key.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'key-name': self._serialize.url("key_name", key_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('KeyBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized recover_deleted_key.metadata = {'url': '/deletedkeys/{key-name}/recover'} # type: ignore async def set_secret( self, vault_base_url: str, secret_name: str, parameters: "_models.SecretSetParameters", **kwargs: Any ) -> "_models.SecretBundle": """Sets a secret in a specified key vault. The SET operation adds a secret to the Azure Key Vault. If the named secret already exists, Azure Key Vault creates a new version of that secret. This operation requires the secrets/set permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param secret_name: The name of the secret. :type secret_name: str :param parameters: The parameters for setting the secret. 
:type parameters: ~azure.keyvault.v2016_10_01.models.SecretSetParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: SecretBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.SecretBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.set_secret.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'secret-name': self._serialize.url("secret_name", secret_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'SecretSetParameters') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SecretBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized set_secret.metadata = {'url': '/secrets/{secret-name}'} # type: ignore async def delete_secret( self, vault_base_url: str, secret_name: str, **kwargs: Any ) -> "_models.DeletedSecretBundle": """Deletes a secret from a specified key vault. The DELETE operation applies to any secret stored in Azure Key Vault. DELETE cannot be applied to an individual version of a secret. This operation requires the secrets/delete permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param secret_name: The name of the secret. 
:type secret_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DeletedSecretBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.DeletedSecretBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedSecretBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.delete_secret.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'secret-name': self._serialize.url("secret_name", secret_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('DeletedSecretBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_secret.metadata = {'url': '/secrets/{secret-name}'} # type: ignore async def update_secret( self, vault_base_url: str, secret_name: str, secret_version: str, parameters: "_models.SecretUpdateParameters", **kwargs: Any ) -> "_models.SecretBundle": """Updates the attributes associated with a specified secret in a given key vault. The UPDATE operation changes specified attributes of an existing stored secret. Attributes that are not specified in the request are left unchanged. The value of a secret itself cannot be changed. This operation requires the secrets/set permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param secret_name: The name of the secret. :type secret_name: str :param secret_version: The version of the secret. :type secret_version: str :param parameters: The parameters for update secret operation. 
:type parameters: ~azure.keyvault.v2016_10_01.models.SecretUpdateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: SecretBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.SecretBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update_secret.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'secret-name': self._serialize.url("secret_name", secret_name, 'str'), 'secret-version': self._serialize.url("secret_version", secret_version, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'SecretUpdateParameters') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SecretBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_secret.metadata = {'url': '/secrets/{secret-name}/{secret-version}'} # type: ignore async def get_secret( self, vault_base_url: str, secret_name: str, secret_version: str, **kwargs: Any ) -> "_models.SecretBundle": """Get a specified secret from a given key vault. The GET operation is applicable to any secret stored in Azure Key Vault. This operation requires the secrets/get permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param secret_name: The name of the secret. :type secret_name: str :param secret_version: The version of the secret. 
:type secret_version: str :keyword callable cls: A custom type or function that will be passed the direct response :return: SecretBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.SecretBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.get_secret.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'secret-name': self._serialize.url("secret_name", secret_name, 'str'), 'secret-version': self._serialize.url("secret_version", secret_version, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SecretBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_secret.metadata = {'url': '/secrets/{secret-name}/{secret-version}'} # type: ignore def get_secrets( self, vault_base_url: str, maxresults: Optional[int] = None, **kwargs: Any ) -> AsyncIterable["_models.SecretListResult"]: """List secrets in a specified key vault. The Get Secrets operation is applicable to the entire vault. However, only the base secret identifier and its attributes are provided in the response. Individual secret versions are not listed in the response. This operation requires the secrets/list permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param maxresults: Maximum number of results to return in a page. If not specified, the service will return up to 25 results. 
:type maxresults: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SecretListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.SecretListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SecretListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.get_secrets.metadata['url']  # type: ignore
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                if maxresults is not None:
                    query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1)
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                path_format_arguments = {
                    'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
                }
                url = self._client.format_url(url, **path_format_arguments)
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('SecretListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    get_secrets.metadata = {'url': '/secrets'}  # type: ignore

    def get_secret_versions(
        self,
        vault_base_url: str,
        secret_name: str,
        maxresults: Optional[int] = None,
        **kwargs: Any
    ) -> AsyncIterable["_models.SecretListResult"]:
        """List all versions of the specified secret.

        The full secret identifier and attributes are provided in the response. No values are
        returned for the secrets. This operation requires the secrets/list permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param secret_name: The name of the secret.
        :type secret_name: str
        :param maxresults: Maximum number of results to return in a page. If not specified, the
         service will return up to 25 results.
:type maxresults: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either SecretListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.SecretListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.SecretListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.get_secret_versions.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'secret-name': self._serialize.url("secret_name", secret_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if maxresults is not None: query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1) query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'secret-name': self._serialize.url("secret_name", secret_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('SecretListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) return pipeline_response return AsyncItemPaged( get_next, extract_data ) get_secret_versions.metadata = {'url': '/secrets/{secret-name}/versions'} # type: ignore def get_deleted_secrets( self, vault_base_url: str, maxresults: Optional[int] = None, **kwargs: Any ) -> AsyncIterable["_models.DeletedSecretListResult"]: """Lists deleted secrets for the specified vault. The Get Deleted Secrets operation returns the secrets that have been deleted for a vault enabled for soft-delete. This operation requires the secrets/list permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param maxresults: Maximum number of results to return in a page. If not specified the service will return up to 25 results. 
:type maxresults: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DeletedSecretListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.DeletedSecretListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedSecretListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.get_deleted_secrets.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if maxresults is not None: query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1) query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('DeletedSecretListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) return pipeline_response return AsyncItemPaged( get_next, extract_data ) get_deleted_secrets.metadata = {'url': '/deletedsecrets'} # type: ignore async def get_deleted_secret( self, vault_base_url: str, secret_name: str, **kwargs: Any ) -> "_models.DeletedSecretBundle": """Gets the specified deleted secret. The Get Deleted Secret operation returns the specified deleted secret along with its attributes. This operation requires the secrets/get permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param secret_name: The name of the secret. 
:type secret_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DeletedSecretBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.DeletedSecretBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedSecretBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.get_deleted_secret.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'secret-name': self._serialize.url("secret_name", secret_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('DeletedSecretBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_deleted_secret.metadata = {'url': '/deletedsecrets/{secret-name}'} # type: ignore async def purge_deleted_secret( self, vault_base_url: str, secret_name: str, **kwargs: Any ) -> None: """Permanently deletes the specified secret. The purge deleted secret operation removes the secret permanently, without the possibility of recovery. This operation can only be enabled on a soft-delete enabled vault. This operation requires the secrets/purge permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param secret_name: The name of the secret. 
:type secret_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.purge_deleted_secret.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'secret-name': self._serialize.url("secret_name", secret_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) if cls: return cls(pipeline_response, None, {}) purge_deleted_secret.metadata = {'url': '/deletedsecrets/{secret-name}'} # type: ignore async def recover_deleted_secret( self, vault_base_url: str, secret_name: str, **kwargs: Any ) -> "_models.SecretBundle": """Recovers the deleted secret to the latest version. Recovers the deleted secret in the specified vault. This operation can only be performed on a soft-delete enabled vault. This operation requires the secrets/recover permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param secret_name: The name of the deleted secret. 
:type secret_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: SecretBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.SecretBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.recover_deleted_secret.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'secret-name': self._serialize.url("secret_name", secret_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SecretBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized recover_deleted_secret.metadata = {'url': '/deletedsecrets/{secret-name}/recover'} # type: ignore async def backup_secret( self, vault_base_url: str, secret_name: str, **kwargs: Any ) -> "_models.BackupSecretResult": """Backs up the specified secret. Requests that a backup of the specified secret be downloaded to the client. All versions of the secret will be downloaded. This operation requires the secrets/backup permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param secret_name: The name of the secret. 
:type secret_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: BackupSecretResult, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.BackupSecretResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.BackupSecretResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.backup_secret.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'secret-name': self._serialize.url("secret_name", secret_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('BackupSecretResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized backup_secret.metadata = {'url': '/secrets/{secret-name}/backup'} # type: ignore async def restore_secret( self, vault_base_url: str, parameters: "_models.SecretRestoreParameters", **kwargs: Any ) -> "_models.SecretBundle": """Restores a backed up secret to a vault. Restores a backed up secret, and all its versions, to a vault. This operation requires the secrets/restore permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param parameters: The parameters to restore the secret. 
:type parameters: ~azure.keyvault.v2016_10_01.models.SecretRestoreParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SecretBundle, or the result of cls(response)
        :rtype: ~azure.keyvault.v2016_10_01.models.SecretBundle
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SecretBundle"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2016-10-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.restore_secret.metadata['url']  # type: ignore
        path_format_arguments = {
            'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'SecretRestoreParameters')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('SecretBundle', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    restore_secret.metadata = {'url': '/secrets/restore'}  # type: ignore

    def get_certificates(
        self,
        vault_base_url: str,
        maxresults: Optional[int] = None,
        **kwargs: Any
    ) -> AsyncIterable["_models.CertificateListResult"]:
        """List certificates in a specified key vault.

        The GetCertificates operation returns the set of certificate resources in the specified key
        vault. This operation requires the certificates/list permission.

        :param vault_base_url: The vault name, for example https://myvault.vault.azure.net.
        :type vault_base_url: str
        :param maxresults: Maximum number of results to return in a page. If not specified the
         service will return up to 25 results.
:type maxresults: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either CertificateListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.CertificateListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.get_certificates.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if maxresults is not None: query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1) query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('CertificateListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) return pipeline_response return AsyncItemPaged( get_next, extract_data ) get_certificates.metadata = {'url': '/certificates'} # type: ignore async def delete_certificate( self, vault_base_url: str, certificate_name: str, **kwargs: Any ) -> "_models.DeletedCertificateBundle": """Deletes a certificate from a specified key vault. Deletes all versions of a certificate object along with its associated policy. Delete certificate cannot be used to remove individual versions of a certificate object. This operation requires the certificates/delete permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate. 
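
        A minimal sketch of a delete call (``client`` is an assumed authenticated
        instance; on soft-delete enabled vaults the returned bundle carries recovery
        details such as ``recovery_id``, which is shown here as an assumption)::

            deleted = await client.delete_certificate(
                "https://myvault.vault.azure.net", "my-certificate"
            )
            print(deleted.recovery_id)
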
:type certificate_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DeletedCertificateBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.DeletedCertificateBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedCertificateBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.delete_certificate.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('DeletedCertificateBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_certificate.metadata = {'url': '/certificates/{certificate-name}'} # type: ignore async def set_certificate_contacts( self, vault_base_url: str, contacts: "_models.Contacts", **kwargs: Any ) -> "_models.Contacts": """Sets the certificate contacts for the specified key vault. Sets the certificate contacts for the specified key vault. This operation requires the certificates/managecontacts permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param contacts: The contacts for the key vault certificate. 
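
        An illustrative sketch (``client`` is an assumed authenticated instance; the
        ``Contact`` model and the ``contact_list``/``email_address`` field names are
        assumptions drawn from the referenced ``Contacts`` model)::

            from azure.keyvault.v2016_10_01.models import Contact, Contacts

            contacts = Contacts(
                contact_list=[Contact(email_address="admin@contoso.com")]
            )
            result = await client.set_certificate_contacts(
                "https://myvault.vault.azure.net", contacts
            )
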
:type contacts: ~azure.keyvault.v2016_10_01.models.Contacts :keyword callable cls: A custom type or function that will be passed the direct response :return: Contacts, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.Contacts :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.Contacts"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.set_certificate_contacts.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(contacts, 'Contacts') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('Contacts', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized set_certificate_contacts.metadata = {'url': '/certificates/contacts'} # type: ignore async def get_certificate_contacts( self, vault_base_url: str, **kwargs: Any ) -> "_models.Contacts": """Lists the certificate contacts for a specified key vault. The GetCertificateContacts operation returns the set of certificate contact resources in the specified key vault. This operation requires the certificates/managecontacts permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. 
:type vault_base_url: str :keyword callable cls: A custom type or function that will be passed the direct response :return: Contacts, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.Contacts :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.Contacts"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.get_certificate_contacts.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('Contacts', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_certificate_contacts.metadata = {'url': '/certificates/contacts'} # type: ignore async def delete_certificate_contacts( self, vault_base_url: str, **kwargs: Any ) -> "_models.Contacts": """Deletes the certificate contacts for a specified key vault. Deletes the certificate contacts for a specified key vault certificate. This operation requires the certificates/managecontacts permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. 
:type vault_base_url: str :keyword callable cls: A custom type or function that will be passed the direct response :return: Contacts, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.Contacts :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.Contacts"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.delete_certificate_contacts.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('Contacts', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_certificate_contacts.metadata = {'url': '/certificates/contacts'} # type: ignore def get_certificate_issuers( self, vault_base_url: str, maxresults: Optional[int] = None, **kwargs: Any ) -> AsyncIterable["_models.CertificateIssuerListResult"]: """List certificate issuers for a specified key vault. The GetCertificateIssuers operation returns the set of certificate issuer resources in the specified key vault. This operation requires the certificates/manageissuers/getissuers permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param maxresults: Maximum number of results to return in a page. If not specified the service will return up to 25 results. 
:type maxresults: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either CertificateIssuerListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.CertificateIssuerListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateIssuerListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.get_certificate_issuers.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if maxresults is not None: query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1) query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('CertificateIssuerListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) return pipeline_response return AsyncItemPaged( get_next, extract_data ) get_certificate_issuers.metadata = {'url': '/certificates/issuers'} # type: ignore async def set_certificate_issuer( self, vault_base_url: str, issuer_name: str, parameter: "_models.CertificateIssuerSetParameters", **kwargs: Any ) -> "_models.IssuerBundle": """Sets the specified certificate issuer. The SetCertificateIssuer operation adds or updates the specified certificate issuer. This operation requires the certificates/setissuers permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param issuer_name: The name of the issuer. :type issuer_name: str :param parameter: Certificate issuer set parameter. 
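
        A minimal sketch (``client`` is an assumed authenticated instance; only the
        required ``provider`` field is shown, and issuer credentials or organization
        details would normally be supplied as well)::

            from azure.keyvault.v2016_10_01.models import CertificateIssuerSetParameters

            issuer = await client.set_certificate_issuer(
                "https://myvault.vault.azure.net", "my-issuer",
                CertificateIssuerSetParameters(provider="Test"),
            )
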
:type parameter: ~azure.keyvault.v2016_10_01.models.CertificateIssuerSetParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: IssuerBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.IssuerBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.IssuerBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.set_certificate_issuer.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameter, 'CertificateIssuerSetParameters') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('IssuerBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized set_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'} # type: ignore async def update_certificate_issuer( self, vault_base_url: str, issuer_name: str, parameter: "_models.CertificateIssuerUpdateParameters", **kwargs: Any ) -> "_models.IssuerBundle": """Updates the specified certificate issuer. The UpdateCertificateIssuer operation performs an update on the specified certificate issuer entity. This operation requires the certificates/setissuers permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param issuer_name: The name of the issuer. :type issuer_name: str :param parameter: Certificate issuer update parameter. 
:type parameter: ~azure.keyvault.v2016_10_01.models.CertificateIssuerUpdateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: IssuerBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.IssuerBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.IssuerBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update_certificate_issuer.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameter, 'CertificateIssuerUpdateParameters') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('IssuerBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'} # type: ignore async def get_certificate_issuer( self, vault_base_url: str, issuer_name: str, **kwargs: Any ) -> "_models.IssuerBundle": """Lists the specified certificate issuer. The GetCertificateIssuer operation returns the specified certificate issuer resources in the specified key vault. This operation requires the certificates/manageissuers/getissuers permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param issuer_name: The name of the issuer. 
:type issuer_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IssuerBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.IssuerBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.IssuerBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.get_certificate_issuer.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('IssuerBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'} # type: ignore async def delete_certificate_issuer( self, vault_base_url: str, issuer_name: str, **kwargs: Any ) -> "_models.IssuerBundle": """Deletes the specified certificate issuer. The DeleteCertificateIssuer operation permanently removes the specified certificate issuer from the vault. This operation requires the certificates/manageissuers/deleteissuers permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param issuer_name: The name of the issuer. 
:type issuer_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: IssuerBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.IssuerBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.IssuerBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.delete_certificate_issuer.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'issuer-name': self._serialize.url("issuer_name", issuer_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('IssuerBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_certificate_issuer.metadata = {'url': '/certificates/issuers/{issuer-name}'} # type: ignore async def create_certificate( self, vault_base_url: str, certificate_name: str, parameters: "_models.CertificateCreateParameters", **kwargs: Any ) -> "_models.CertificateOperation": """Creates a new certificate. If this is the first version, the certificate resource is created. This operation requires the certificates/create permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate. :type certificate_name: str :param parameters: The parameters to create a certificate. 
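
        An illustrative sketch of a self-issued certificate request (``client`` is an
        assumed authenticated instance; the policy sub-models and their field names are
        assumptions based on the referenced models, and creation completes asynchronously,
        so the pending operation is normally polled via ``get_certificate_operation``)::

            from azure.keyvault.v2016_10_01.models import (
                CertificateCreateParameters, CertificatePolicy,
                IssuerParameters, X509CertificateProperties,
            )

            policy = CertificatePolicy(
                issuer_parameters=IssuerParameters(name="Self"),
                x509_certificate_properties=X509CertificateProperties(subject="CN=example.com"),
            )
            op = await client.create_certificate(
                "https://myvault.vault.azure.net", "my-certificate",
                CertificateCreateParameters(certificate_policy=policy),
            )
            # op.status stays "inProgress" until the vault finishes the request
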
:type parameters: ~azure.keyvault.v2016_10_01.models.CertificateCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: CertificateOperation, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.CertificateOperation :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateOperation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.create_certificate.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'CertificateCreateParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('CertificateOperation', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_certificate.metadata = {'url': '/certificates/{certificate-name}/create'} # type: ignore async def import_certificate( self, vault_base_url: str, certificate_name: str, parameters: "_models.CertificateImportParameters", **kwargs: Any ) -> "_models.CertificateBundle": """Imports a certificate into a specified key vault. Imports an existing valid certificate, containing a private key, into Azure Key Vault. The certificate to be imported can be in either PFX or PEM format. If the certificate is in PEM format the PEM file must contain the key as well as x509 certificates. This operation requires the certificates/import permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate. :type certificate_name: str :param parameters: The parameters to import the certificate. 
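
        A minimal sketch (``client`` is an assumed authenticated instance; the PFX path
        and password are placeholders, and the ``base64_encoded_certificate`` field name
        is taken from the referenced model)::

            import base64

            from azure.keyvault.v2016_10_01.models import CertificateImportParameters

            with open("my-cert.pfx", "rb") as pfx:
                encoded = base64.b64encode(pfx.read()).decode()

            bundle = await client.import_certificate(
                "https://myvault.vault.azure.net", "imported-cert",
                CertificateImportParameters(
                    base64_encoded_certificate=encoded, password="pfx-password"
                ),
            )
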
:type parameters: ~azure.keyvault.v2016_10_01.models.CertificateImportParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: CertificateBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.import_certificate.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str', pattern=r'^[0-9a-zA-Z-]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'CertificateImportParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('CertificateBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized import_certificate.metadata = {'url': '/certificates/{certificate-name}/import'} # type: ignore def get_certificate_versions( self, vault_base_url: str, certificate_name: str, maxresults: Optional[int] = None, **kwargs: Any ) -> AsyncIterable["_models.CertificateListResult"]: """List the versions of a certificate. The GetCertificateVersions operation returns the versions of a certificate in the specified key vault. This operation requires the certificates/list permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate. :type certificate_name: str :param maxresults: Maximum number of results to return in a page. If not specified the service will return up to 25 results. 
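
        An illustrative paging sketch (``client`` is an assumed authenticated instance;
        the ``id`` attribute on each version item is shown only as an example)::

            version_ids = [
                item.id
                async for item in client.get_certificate_versions(
                    "https://myvault.vault.azure.net", "my-certificate"
                )
            ]
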
:type maxresults: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either CertificateListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.CertificateListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.get_certificate_versions.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if maxresults is not None: query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1) query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('CertificateListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) return pipeline_response return AsyncItemPaged( get_next, extract_data ) get_certificate_versions.metadata = {'url': '/certificates/{certificate-name}/versions'} # type: ignore async def get_certificate_policy( self, vault_base_url: str, certificate_name: str, **kwargs: Any ) -> "_models.CertificatePolicy": """Lists the policy for a certificate. The GetCertificatePolicy operation returns the specified certificate policy resources in the specified key vault. This operation requires the certificates/get permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate in a given key vault. 
:type certificate_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: CertificatePolicy, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.CertificatePolicy :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificatePolicy"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.get_certificate_policy.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('CertificatePolicy', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_certificate_policy.metadata = {'url': '/certificates/{certificate-name}/policy'} # type: ignore async def update_certificate_policy( self, vault_base_url: str, certificate_name: str, certificate_policy: "_models.CertificatePolicy", **kwargs: Any ) -> "_models.CertificatePolicy": """Updates the policy for a certificate. Set specified members in the certificate policy. Leave others as null. This operation requires the certificates/update permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate in the given vault. :type certificate_name: str :param certificate_policy: The policy for the certificate. 
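
        A minimal sketch (``client`` is an assumed authenticated instance; ``KeyProperties``
        and its field names are assumptions based on the certificate policy model, and
        members left as ``None`` are not changed)::

            from azure.keyvault.v2016_10_01.models import CertificatePolicy, KeyProperties

            policy = CertificatePolicy(
                key_properties=KeyProperties(
                    exportable=True, key_type="RSA", key_size=2048, reuse_key=False
                )
            )
            updated = await client.update_certificate_policy(
                "https://myvault.vault.azure.net", "my-certificate", policy
            )
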
:type certificate_policy: ~azure.keyvault.v2016_10_01.models.CertificatePolicy :keyword callable cls: A custom type or function that will be passed the direct response :return: CertificatePolicy, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.CertificatePolicy :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificatePolicy"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update_certificate_policy.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(certificate_policy, 'CertificatePolicy') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('CertificatePolicy', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_certificate_policy.metadata = {'url': '/certificates/{certificate-name}/policy'} # type: ignore async def update_certificate( self, vault_base_url: str, certificate_name: str, certificate_version: str, parameters: "_models.CertificateUpdateParameters", **kwargs: Any ) -> "_models.CertificateBundle": """Updates the specified attributes associated with the given certificate. The UpdateCertificate operation applies the specified update on the given certificate; the only elements updated are the certificate's attributes. This operation requires the certificates/update permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate in the given key vault. :type certificate_name: str :param certificate_version: The version of the certificate. :type certificate_version: str :param parameters: The parameters for certificate update. 
:type parameters: ~azure.keyvault.v2016_10_01.models.CertificateUpdateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: CertificateBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update_certificate.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), 'certificate-version': self._serialize.url("certificate_version", certificate_version, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'CertificateUpdateParameters') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('CertificateBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_certificate.metadata = {'url': '/certificates/{certificate-name}/{certificate-version}'} # type: ignore async def get_certificate( self, vault_base_url: str, certificate_name: str, certificate_version: str, **kwargs: Any ) -> "_models.CertificateBundle": """Gets information about a certificate. Gets information about a specific certificate. This operation requires the certificates/get permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate in the given vault. :type certificate_name: str :param certificate_version: The version of the certificate. 
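
        A minimal sketch (``client`` is an assumed authenticated instance and
        ``certificate_version`` is a version identifier obtained elsewhere, for example
        from ``get_certificate_versions``)::

            bundle = await client.get_certificate(
                "https://myvault.vault.azure.net", "my-certificate", certificate_version
            )
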
:type certificate_version: str :keyword callable cls: A custom type or function that will be passed the direct response :return: CertificateBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.get_certificate.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), 'certificate-version': self._serialize.url("certificate_version", certificate_version, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('CertificateBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_certificate.metadata = {'url': '/certificates/{certificate-name}/{certificate-version}'} # type: ignore async def update_certificate_operation( self, vault_base_url: str, certificate_name: str, certificate_operation: "_models.CertificateOperationUpdateParameter", **kwargs: Any ) -> "_models.CertificateOperation": """Updates a certificate operation. Updates a certificate creation operation that is already in progress. This operation requires the certificates/update permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate. :type certificate_name: str :param certificate_operation: The certificate operation response. 
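
        An illustrative sketch that cancels an in-progress creation (``client`` is an
        assumed authenticated instance; ``cancellation_requested`` is the field defined
        on the referenced parameter model)::

            from azure.keyvault.v2016_10_01.models import CertificateOperationUpdateParameter

            op = await client.update_certificate_operation(
                "https://myvault.vault.azure.net", "my-certificate",
                CertificateOperationUpdateParameter(cancellation_requested=True),
            )
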
:type certificate_operation: ~azure.keyvault.v2016_10_01.models.CertificateOperationUpdateParameter :keyword callable cls: A custom type or function that will be passed the direct response :return: CertificateOperation, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.CertificateOperation :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateOperation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update_certificate_operation.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(certificate_operation, 'CertificateOperationUpdateParameter') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('CertificateOperation', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_certificate_operation.metadata = {'url': '/certificates/{certificate-name}/pending'} # type: ignore async def get_certificate_operation( self, vault_base_url: str, certificate_name: str, **kwargs: Any ) -> "_models.CertificateOperation": """Gets the creation operation of a certificate. Gets the creation operation associated with a specified certificate. This operation requires the certificates/get permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate. 
:type certificate_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: CertificateOperation, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.CertificateOperation :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateOperation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.get_certificate_operation.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('CertificateOperation', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_certificate_operation.metadata = {'url': '/certificates/{certificate-name}/pending'} # type: ignore async def delete_certificate_operation( self, vault_base_url: str, certificate_name: str, **kwargs: Any ) -> "_models.CertificateOperation": """Deletes the creation operation for a specific certificate. Deletes the creation operation for a specified certificate that is in the process of being created. The certificate is no longer created. This operation requires the certificates/update permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate. 
:type certificate_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: CertificateOperation, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.CertificateOperation :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateOperation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.delete_certificate_operation.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('CertificateOperation', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_certificate_operation.metadata = {'url': '/certificates/{certificate-name}/pending'} # type: ignore async def merge_certificate( self, vault_base_url: str, certificate_name: str, parameters: "_models.CertificateMergeParameters", **kwargs: Any ) -> "_models.CertificateBundle": """Merges a certificate or a certificate chain with a key pair existing on the server. The MergeCertificate operation performs the merging of a certificate or certificate chain with a key pair currently available in the service. This operation requires the certificates/create permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate. :type certificate_name: str :param parameters: The parameters to merge certificate. 
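
        A minimal sketch (``client`` is an assumed authenticated instance;
        ``signed_certificate_bytes`` is a placeholder for the signed certificate returned
        by the external issuer, and ``x509_certificates`` is the field name on the
        referenced model)::

            from azure.keyvault.v2016_10_01.models import CertificateMergeParameters

            merged = await client.merge_certificate(
                "https://myvault.vault.azure.net", "my-certificate",
                CertificateMergeParameters(x509_certificates=[signed_certificate_bytes]),
            )
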
:type parameters: ~azure.keyvault.v2016_10_01.models.CertificateMergeParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: CertificateBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.merge_certificate.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'CertificateMergeParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('CertificateBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized merge_certificate.metadata = {'url': '/certificates/{certificate-name}/pending/merge'} # type: ignore def get_deleted_certificates( self, vault_base_url: str, maxresults: Optional[int] = None, **kwargs: Any ) -> AsyncIterable["_models.DeletedCertificateListResult"]: """Lists the deleted certificates in the specified vault currently available for recovery. The GetDeletedCertificates operation retrieves the certificates in the current vault which are in a deleted state and ready for recovery or purging. This operation includes deletion-specific information. This operation requires the certificates/get/list permission. This operation can only be enabled on soft-delete enabled vaults. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param maxresults: Maximum number of results to return in a page. If not specified the service will return up to 25 results. 
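
        An illustrative paging sketch (``client`` is an assumed authenticated instance;
        the ``recovery_id`` attribute on each deleted item is shown only as an example)::

            async for deleted in client.get_deleted_certificates(
                "https://myvault.vault.azure.net"
            ):
                print(deleted.recovery_id)
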
:type maxresults: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either DeletedCertificateListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.DeletedCertificateListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedCertificateListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.get_deleted_certificates.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if maxresults is not None: query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1) query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('DeletedCertificateListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) return pipeline_response return AsyncItemPaged( get_next, extract_data ) get_deleted_certificates.metadata = {'url': '/deletedcertificates'} # type: ignore async def get_deleted_certificate( self, vault_base_url: str, certificate_name: str, **kwargs: Any ) -> "_models.DeletedCertificateBundle": """Retrieves information about the specified deleted certificate. The GetDeletedCertificate operation retrieves the deleted certificate information plus its attributes, such as retention interval, scheduled permanent deletion and the current deletion recovery level. This operation requires the certificates/get permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate. 
:type certificate_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DeletedCertificateBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.DeletedCertificateBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DeletedCertificateBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.get_deleted_certificate.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('DeletedCertificateBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_deleted_certificate.metadata = {'url': '/deletedcertificates/{certificate-name}'} # type: ignore async def purge_deleted_certificate( self, vault_base_url: str, certificate_name: str, **kwargs: Any ) -> None: """Permanently deletes the specified deleted certificate. The PurgeDeletedCertificate operation performs an irreversible deletion of the specified certificate, without possibility for recovery. The operation is not available if the recovery level does not specify 'Purgeable'. This operation requires the certificate/purge permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the certificate. 
:type certificate_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.purge_deleted_certificate.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) if cls: return cls(pipeline_response, None, {}) purge_deleted_certificate.metadata = {'url': '/deletedcertificates/{certificate-name}'} # type: ignore async def recover_deleted_certificate( self, vault_base_url: str, certificate_name: str, **kwargs: Any ) -> "_models.CertificateBundle": """Recovers the deleted certificate back to its current version under /certificates. The RecoverDeletedCertificate operation performs the reversal of the Delete operation. The operation is applicable in vaults enabled for soft-delete, and must be issued during the retention interval (available in the deleted certificate's attributes). This operation requires the certificates/recover permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param certificate_name: The name of the deleted certificate. 
:type certificate_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: CertificateBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.CertificateBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.CertificateBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.recover_deleted_certificate.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'certificate-name': self._serialize.url("certificate_name", certificate_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('CertificateBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized recover_deleted_certificate.metadata = {'url': '/deletedcertificates/{certificate-name}/recover'} # type: ignore def get_storage_accounts( self, vault_base_url: str, maxresults: Optional[int] = None, **kwargs: Any ) -> AsyncIterable["_models.StorageListResult"]: """List storage accounts managed by the specified key vault. This operation requires the storage/list permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param maxresults: Maximum number of results to return in a page. If not specified the service will return up to 25 results. 
:type maxresults: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either StorageListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.StorageListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.get_storage_accounts.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if maxresults is not None: query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1) query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), } url = self._client.format_url(url, **path_format_arguments) request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('StorageListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) return pipeline_response return AsyncItemPaged( get_next, extract_data ) get_storage_accounts.metadata = {'url': '/storage'} # type: ignore async def delete_storage_account( self, vault_base_url: str, storage_account_name: str, **kwargs: Any ) -> "_models.StorageBundle": """Deletes a storage account. This operation requires the storage/delete permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param storage_account_name: The name of the storage account. 
:type storage_account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: StorageBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.delete_storage_account.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('StorageBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_storage_account.metadata = {'url': '/storage/{storage-account-name}'} # type: ignore async def get_storage_account( self, vault_base_url: str, storage_account_name: str, **kwargs: Any ) -> "_models.StorageBundle": """Gets information about a specified storage account. This operation requires the storage/get permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param storage_account_name: The name of the storage account. 
:type storage_account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: StorageBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.get_storage_account.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('StorageBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_storage_account.metadata = {'url': '/storage/{storage-account-name}'} # type: ignore async def set_storage_account( self, vault_base_url: str, storage_account_name: str, parameters: "_models.StorageAccountCreateParameters", **kwargs: Any ) -> "_models.StorageBundle": """Creates or updates a new storage account. This operation requires the storage/set permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param storage_account_name: The name of the storage account. :type storage_account_name: str :param parameters: The parameters to create a storage account. 
:type parameters: ~azure.keyvault.v2016_10_01.models.StorageAccountCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: StorageBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.set_storage_account.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'StorageAccountCreateParameters') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('StorageBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized set_storage_account.metadata = {'url': '/storage/{storage-account-name}'} # type: ignore async def update_storage_account( self, vault_base_url: str, storage_account_name: str, parameters: "_models.StorageAccountUpdateParameters", **kwargs: Any ) -> "_models.StorageBundle": """Updates the specified attributes associated with the given storage account. This operation requires the storage/set/update permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param storage_account_name: The name of the storage account. :type storage_account_name: str :param parameters: The parameters to update a storage account. 
:type parameters: ~azure.keyvault.v2016_10_01.models.StorageAccountUpdateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: StorageBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update_storage_account.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'StorageAccountUpdateParameters') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('StorageBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_storage_account.metadata = {'url': '/storage/{storage-account-name}'} # type: ignore async def regenerate_storage_account_key( self, vault_base_url: str, storage_account_name: str, parameters: "_models.StorageAccountRegenerteKeyParameters", **kwargs: Any ) -> "_models.StorageBundle": """Regenerates the specified key value for the given storage account. This operation requires the storage/regeneratekey permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param storage_account_name: The name of the storage account. :type storage_account_name: str :param parameters: The parameters to regenerate storage account key. 
:type parameters: ~azure.keyvault.v2016_10_01.models.StorageAccountRegenerteKeyParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: StorageBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.StorageBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.regenerate_storage_account_key.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'StorageAccountRegenerteKeyParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('StorageBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized regenerate_storage_account_key.metadata = {'url': '/storage/{storage-account-name}/regeneratekey'} # type: ignore def get_sas_definitions( self, vault_base_url: str, storage_account_name: str, maxresults: Optional[int] = None, **kwargs: Any ) -> AsyncIterable["_models.SasDefinitionListResult"]: """List storage SAS definitions for the given storage account. This operation requires the storage/listsas permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param storage_account_name: The name of the storage account. :type storage_account_name: str :param maxresults: Maximum number of results to return in a page. If not specified the service will return up to 25 results. 
:type maxresults: int :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either SasDefinitionListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.v2016_10_01.models.SasDefinitionListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.SasDefinitionListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.get_sas_definitions.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if maxresults is not None: query_parameters['maxresults'] = self._serialize.query("maxresults", maxresults, 'int', maximum=25, minimum=1) query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), } url = self._client.format_url(url, **path_format_arguments) request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('SasDefinitionListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error) return pipeline_response return AsyncItemPaged( get_next, extract_data ) get_sas_definitions.metadata = {'url': '/storage/{storage-account-name}/sas'} # type: ignore async def delete_sas_definition( self, vault_base_url: str, storage_account_name: str, sas_definition_name: str, **kwargs: Any ) -> "_models.SasDefinitionBundle": """Deletes a SAS definition from a specified storage account. This operation requires the storage/deletesas permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param storage_account_name: The name of the storage account. :type storage_account_name: str :param sas_definition_name: The name of the SAS definition. 
:type sas_definition_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: SasDefinitionBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.SasDefinitionBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.SasDefinitionBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.delete_sas_definition.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), 'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SasDefinitionBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized delete_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'} # type: ignore async def get_sas_definition( self, vault_base_url: str, storage_account_name: str, sas_definition_name: str, **kwargs: Any ) -> "_models.SasDefinitionBundle": """Gets information about a SAS definition for the specified storage account. This operation requires the storage/getsas permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param storage_account_name: The name of the storage account. :type storage_account_name: str :param sas_definition_name: The name of the SAS definition. 
:type sas_definition_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: SasDefinitionBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.SasDefinitionBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.SasDefinitionBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" accept = "application/json" # Construct URL url = self.get_sas_definition.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), 'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SasDefinitionBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'} # type: ignore async def set_sas_definition( self, vault_base_url: str, storage_account_name: str, sas_definition_name: str, parameters: "_models.SasDefinitionCreateParameters", **kwargs: Any ) -> "_models.SasDefinitionBundle": """Creates or updates a new SAS definition for the specified storage account. This operation requires the storage/setsas permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param storage_account_name: The name of the storage account. :type storage_account_name: str :param sas_definition_name: The name of the SAS definition. :type sas_definition_name: str :param parameters: The parameters to create a SAS definition. 
:type parameters: ~azure.keyvault.v2016_10_01.models.SasDefinitionCreateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: SasDefinitionBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.SasDefinitionBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.SasDefinitionBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.set_sas_definition.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), 'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'SasDefinitionCreateParameters') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SasDefinitionBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized set_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'} # type: ignore async def update_sas_definition( self, vault_base_url: str, storage_account_name: str, sas_definition_name: str, parameters: "_models.SasDefinitionUpdateParameters", **kwargs: Any ) -> "_models.SasDefinitionBundle": """Updates the specified attributes associated with the given SAS definition. This operation requires the storage/setsas permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param storage_account_name: The name of the storage account. :type storage_account_name: str :param sas_definition_name: The name of the SAS definition. :type sas_definition_name: str :param parameters: The parameters to update a SAS definition. 
:type parameters: ~azure.keyvault.v2016_10_01.models.SasDefinitionUpdateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: SasDefinitionBundle, or the result of cls(response) :rtype: ~azure.keyvault.v2016_10_01.models.SasDefinitionBundle :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.SasDefinitionBundle"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2016-10-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update_sas_definition.metadata['url'] # type: ignore path_format_arguments = { 'vaultBaseUrl': self._serialize.url("vault_base_url", vault_base_url, 'str', skip_quote=True), 'storage-account-name': self._serialize.url("storage_account_name", storage_account_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), 'sas-definition-name': self._serialize.url("sas_definition_name", sas_definition_name, 'str', pattern=r'^[0-9a-zA-Z]+$'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'SasDefinitionUpdateParameters') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.KeyVaultError, response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize('SasDefinitionBundle', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update_sas_definition.metadata = {'url': '/storage/{storage-account-name}/sas/{sas-definition-name}'} # type: ignore
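Since every call above follows the same construct-request / run-pipeline / deserialize pattern, a short usage sketch may help. It is a minimal sketch only: it assumes an already-constructed async client object exposing these operations (credential and transport setup not shown), and the vault URL and certificate name are invented; only the method names, the AsyncItemPaged return type, and the bundle/item models come from the code shown.

import asyncio

async def list_and_recover(client, vault_url, cert_name):
    # get_deleted_certificates returns an AsyncItemPaged, so it can be
    # consumed directly with "async for"; each element is one entry of the
    # DeletedCertificateListResult pages.
    async for item in client.get_deleted_certificates(vault_url, maxresults=25):
        print(item.id)
    # recover_deleted_certificate reverses the soft delete and returns the
    # restored CertificateBundle.
    bundle = await client.recover_deleted_certificate(vault_url, cert_name)
    print(bundle.id)

# asyncio.run(list_and_recover(client, "https://myvault.vault.azure.net", "my-cert"))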
mit
5,830,821,531,992,470,000
48.203455
158
0.642438
false
4.210358
false
false
false
jmchilton/galaxy-central
galaxy/tools/parameters.py
1
19896
""" Classes encapsulating tool parameters """ import logging, string, sys from galaxy import config, datatypes, util, form_builder import validation from elementtree.ElementTree import XML, Element log = logging.getLogger(__name__) class ToolParameter( object ): """ Describes a parameter accepted by a tool. This is just a simple stub at the moment but in the future should encapsulate more complex parameters (lists of valid choices, validation logic, ...) """ def __init__( self, tool, param ): self.tool = tool self.name = param.get("name") self.label = util.xml_text(param, "label") self.help = util.xml_text(param, "help") self.html = "no html set" self.validators = [] for elem in param.findall("validator"): self.validators.append( validation.Validator.from_element( elem ) ) def get_label( self ): """Return user friendly name for the parameter""" if self.label: return self.label else: return self.name def get_html( self, trans=None, value=None, other_values={} ): """ Returns the html widget corresponding to the paramter. Optionally attempt to retain the current value specific by 'value' """ return self.html def get_required_enctype( self ): """ If this parameter needs the form to have a specific encoding return it, otherwise return None (indicating compatibility with any encoding) """ return None def filter_value( self, value, trans=None, other_values={} ): """ Parse the value returned by the view into a form usable by the tool OR raise a ValueError. """ return value def to_string( self, value, app ): """Convert a value to a string representation suitable for persisting""" return str( value ) def to_python( self, value, app ): """Convert a value created with to_string back to an object representation""" return value def validate( self, value, history=None ): for validator in self.validators: validator.validate( value, history ) @classmethod def build( cls, tool, param ): """Factory method to create parameter of correct type""" param_type = param.get("type") if not param_type or param_type not in parameter_types: raise ValueError( "Unknown tool parameter type '%s'" % param_type ) else: return parameter_types[param_type]( tool, param ) class TextToolParameter( ToolParameter ): """ Parameter that can take on any text value. >>> p = TextToolParameter( None, XML( '<param name="blah" type="text" size="4" value="default" />' ) ) >>> print p.name blah >>> print p.get_html() <input type="text" name="blah" size="4" value="default"> >>> print p.get_html( value="meh" ) <input type="text" name="blah" size="4" value="meh"> """ def __init__( self, tool, elem ): ToolParameter.__init__( self, tool, elem ) self.name = elem.get( 'name' ) self.size = elem.get( 'size' ) self.value = elem.get( 'value' ) self.area = str_bool( elem.get( 'area', False ) ) def get_html( self, trans=None, value=None, other_values={} ): if self.area: return form_builder.TextArea( self.name, self.size, value or self.value ).get_html() return form_builder.TextField( self.name, self.size, value or self.value ).get_html() class IntegerToolParameter( TextToolParameter ): """ Parameter that takes an integer value. >>> p = IntegerToolParameter( None, XML( '<param name="blah" type="integer" size="4" value="10" />' ) ) >>> print p.name blah >>> print p.get_html() <input type="text" name="blah" size="4" value="10"> >>> type( p.filter_value( "10" ) ) <type 'int'> >>> type( p.filter_value( "bleh" ) ) Traceback (most recent call last): ... 
ValueError: An integer is required """ def filter_value( self, value, trans=None, other_values={} ): try: return int( value ) except: raise ValueError( "An integer is required" ) def to_python( self, value, app ): return int( value ) class FloatToolParameter( TextToolParameter ): """ Parameter that takes a real number value. >>> p = FloatToolParameter( None, XML( '<param name="blah" type="integer" size="4" value="3.141592" />' ) ) >>> print p.name blah >>> print p.get_html() <input type="text" name="blah" size="4" value="3.141592"> >>> type( p.filter_value( "36.1" ) ) <type 'float'> >>> type( p.filter_value( "bleh" ) ) Traceback (most recent call last): ... ValueError: A real number is required """ def filter_value( self, value, trans=None, other_values={} ): try: return float( value ) except: raise ValueError( "A real number is required") def to_python( self, value, app ): return float( value ) class BooleanToolParameter( ToolParameter ): """ Parameter that takes one of two values. >>> p = BooleanToolParameter( None, XML( '<param name="blah" type="boolean" checked="yes" truevalue="bulletproof vests" falsevalue="cellophane chests" />' ) ) >>> print p.name blah >>> print p.get_html() <input type="checkbox" name="blah" value="true" checked><input type="hidden" name="blah" value="true"> >>> print p.filter_value( ["true","true"] ) bulletproof vests >>> print p.filter_value( ["true"] ) cellophane chests """ def __init__( self, tool, elem ): ToolParameter.__init__( self, tool, elem ) self.truevalue = elem.get( 'truevalue', 'true' ) self.falsevalue = elem.get( 'falsevalue', 'false' ) self.name = elem.get( 'name' ) self.checked = elem.get( 'checked' ) def get_html( self, trans=None, value=None, other_values={} ): checked = self.checked if value: checked = form_builder.CheckboxField.is_checked( value ) return form_builder.CheckboxField( self.name, checked ).get_html() def filter_value( self, value, trans=None, other_values={} ): if form_builder.CheckboxField.is_checked( value ): return self.truevalue else: return self.falsevalue def to_python( self, value, app ): return ( value == 'True' ) class FileToolParameter( ToolParameter ): """ Parameter that takes an uploaded file as a value. >>> p = FileToolParameter( None, XML( '<param name="blah" type="file"/>' ) ) >>> print p.name blah >>> print p.get_html() <input type="file" name="blah"> """ def __init__( self, tool, elem ): """ Example: C{<param name="bins" type="file" />} """ ToolParameter.__init__( self, tool, elem ) self.html = form_builder.FileField( elem.get( 'name') ).get_html() def get_required_enctype( self ): """ File upload elements require the multipart/form-data encoding """ return "multipart/form-data" def to_string( self, value, app ): raise Exception( "FileToolParameter cannot be persisted" ) def to_python( self, value, app ): raise Exception( "FileToolParameter cannot be persisted" ) class HiddenToolParameter( ToolParameter ): """ Parameter that takes one of two values. FIXME: This seems hacky, parameters should only describe things the user might change. 
It is used for 'initializing' the UCSC proxy tool >>> p = HiddenToolParameter( None, XML( '<param name="blah" type="hidden" value="wax so rockin"/>' ) ) >>> print p.name blah >>> print p.get_html() <input type="hidden" name="blah" value="wax so rockin"> """ def __init__( self, tool, elem ): ToolParameter.__init__( self, tool, elem ) self.name = elem.get( 'name' ) self.value = elem.get( 'value' ) self.html = form_builder.HiddenField( self.name, self.value ).get_html() ## This is clearly a HACK, parameters should only be used for things the user ## can change, there needs to be a different way to specify this. I'm leaving ## it for now to avoid breaking any tools. class BaseURLToolParameter( ToolParameter ): """ Returns a parameter the contains its value prepended by the current server base url. Used in all redirects. """ def __init__( self, tool, elem ): ToolParameter.__init__( self, tool, elem ) self.name = elem.get( 'name' ) self.value = elem.get( 'value', '' ) def get_html( self, trans=None, value=None, other_values={} ): return form_builder.HiddenField( self.name, trans.request.base + self.value ).get_html() class SelectToolParameter( ToolParameter ): """ Parameter that takes on one (or many) or a specific set of values. TODO: There should be an alternate display that allows single selects to be displayed as radio buttons and multiple selects as a set of checkboxes >>> p = SelectToolParameter( None, XML( ... ''' ... <param name="blah" type="select"> ... <option value="x">I am X</option> ... <option value="y" selected="true">I am Y</option> ... <option value="z">I am Z</option> ... </param> ... ''' ) ) >>> print p.name blah >>> print p.get_html() <select name="blah"> <option value="x">I am X</option> <option value="y" selected>I am Y</option> <option value="z">I am Z</option> </select> >>> print p.get_html( value="z" ) <select name="blah"> <option value="x">I am X</option> <option value="y">I am Y</option> <option value="z" selected>I am Z</option> </select> >>> print p.filter_value( "y" ) y >>> p = SelectToolParameter( None, XML( ... ''' ... <param name="blah" type="select" multiple="true"> ... <option value="x">I am X</option> ... <option value="y" selected="true">I am Y</option> ... <option value="z" selected="true">I am Z</option> ... </param> ... ''' ) ) >>> print p.name blah >>> print p.get_html() <select name="blah" multiple> <option value="x">I am X</option> <option value="y" selected>I am Y</option> <option value="z" selected>I am Z</option> </select> >>> print p.get_html( value=["x","y"]) <select name="blah" multiple> <option value="x" selected>I am X</option> <option value="y" selected>I am Y</option> <option value="z">I am Z</option> </select> >>> print p.filter_value( ["y", "z"] ) y,z >>> p = SelectToolParameter( None, XML( ... ''' ... <param name="blah" type="select" multiple="true" display="checkboxes"> ... <option value="x">I am X</option> ... <option value="y" selected="true">I am Y</option> ... <option value="z" selected="true">I am Z</option> ... </param> ... 
''' ) ) >>> print p.name blah >>> print p.get_html() <div><input type="checkbox" name="blah" value="x">I am X</div> <div><input type="checkbox" name="blah" value="y" checked>I am Y</div> <div><input type="checkbox" name="blah" value="z" checked>I am Z</div> >>> print p.get_html( value=["x","y"]) <div><input type="checkbox" name="blah" value="x" checked>I am X</div> <div><input type="checkbox" name="blah" value="y" checked>I am Y</div> <div><input type="checkbox" name="blah" value="z">I am Z</div> >>> print p.filter_value( ["y", "z"] ) y,z """ def __init__( self, tool, elem): ToolParameter.__init__( self, tool, elem ) self.multiple = str_bool( elem.get( 'multiple', False ) ) self.display = elem.get( 'display', None ) self.separator = elem.get( 'separator', ',' ) self.legal_values = set() self.dynamic_options = elem.get( "dynamic_options", None ) if self.dynamic_options is None: self.options = list() for index, option in enumerate( elem.findall("option") ): value = option.get( "value" ) self.legal_values.add( value ) selected = ( option.get( "selected", None ) == "true" ) self.options.append( ( option.text, value, selected ) ) def get_html( self, trans=None, value=None, other_values={} ): if value is not None: if not isinstance( value, list ): value = [ value ] field = form_builder.SelectField( self.name, self.multiple, self.display ) if self.dynamic_options: options = eval( self.dynamic_options, self.tool.code_namespace, other_values ) else: options = self.options for text, optval, selected in options: if value: selected = ( optval in value ) field.add_option( text, optval, selected ) return field.get_html() def filter_value( self, value, trans=None, other_values={} ): if self.dynamic_options: legal_values = set( v for _, v, _ in eval( self.dynamic_options, self.tool.code_namespace, other_values ) ) else: legal_values = self.legal_values if isinstance( value, list ): assert self.multiple, "Multiple values provided but parameter is not expecting multiple values" rval = [] for v in value: v = util.restore_text( v ) assert v in legal_values rval.append( v ) return self.separator.join( rval ) else: value = util.restore_text( value ) assert value in legal_values return value class DataToolParameter( ToolParameter ): """ Parameter that takes on one (or many) or a specific set of values. 
TODO: There should be an alternate display that allows single selects to be displayed as radio buttons and multiple selects as a set of checkboxes >>> # Mock up a history (not connected to database) >>> from galaxy.model import History, Dataset >>> from cookbook.patterns import Bunch >>> hist = History() >>> hist.add_dataset( Dataset( id=1, extension='text' ) ) >>> hist.add_dataset( Dataset( id=2, extension='bed' ) ) >>> hist.add_dataset( Dataset( id=3, extension='fasta' ) ) >>> hist.add_dataset( Dataset( id=4, extension='png' ) ) >>> hist.add_dataset( Dataset( id=5, extension='interval' ) ) >>> p = DataToolParameter( None, XML( '<param name="blah" type="data" format="interval"/>' ) ) >>> print p.name blah >>> print p.get_html( trans=Bunch( history=hist ) ) <select name="blah"> <option value="2">2: Unnamed dataset</option> <option value="5" selected>5: Unnamed dataset</option> </select> """ def __init__( self, tool, elem ): ToolParameter.__init__( self, tool, elem ) self.format = datatypes.get_datatype_by_extension( elem.get( 'format', 'data' ).lower() ) self.multiple = str_bool( elem.get( 'multiple', False ) ) self.optional = str_bool( elem.get( 'optional', False ) ) def get_html( self, trans=None, value=None, other_values={} ): assert trans is not None, "DataToolParameter requires a trans" history = trans.history assert history is not None, "DataToolParameter requires a history" if value is not None: if type( value ) != list: value = [ value ] field = form_builder.SelectField( self.name, self.multiple ) some_data = False for data in history.datasets: if isinstance( data.datatype, self.format.__class__ ) and not data.parent_id: some_data = True selected = ( value and ( data in value ) ) field.add_option( "%d: %s" % ( data.hid, data.name[:30] ), data.id, selected ) if some_data and value is None: # Ensure that the last item is always selected a, b, c = field.options[-1]; field.options[-1] = a, b, True else: # HACK: we should just disable the form or something field.add_option( "no data has the proper type", '' ) if self.optional == True: field.add_option( "Selection is Optional", 'None', True ) return field.get_html() def filter_value( self, value, trans, other_values={} ): if not value: raise ValueError( "A data of the appropriate type is required" ) if value in [None, "None"]: temp_data = trans.app.model.Dataset() temp_data.state = temp_data.states.FAKE return temp_data if isinstance( value, list ): return [ trans.app.model.Dataset.get( v ) for v in value ] else: return trans.app.model.Dataset.get( value ) def to_string( self, value, app ): return value.id def to_python( self, value, app ): return app.model.Dataset.get( int( value ) ) class RawToolParameter( ToolParameter ): """ Completely nondescript parameter, HTML representation is provided as text contents. >>> p = RawToolParameter( None, XML( ... ''' ... <param name="blah" type="raw"> ... <![CDATA[<span id="$name">Some random stuff</span>]]> ... </param> ... ''' ) ) >>> print p.name blah >>> print p.get_html().strip() <span id="blah">Some random stuff</span> """ def __init__( self, tool, elem ): ToolParameter.__init__( self, tool, elem ) template = string.Template( elem.text ) self.html = template.substitute( self.__dict__ ) # class HistoryIDParameter( ToolParameter ): # """ # Parameter that takes a name value, makes history.id available. # # FIXME: This is a hack (esp. if hidden params are a hack) but in order to # have the history accessable at the job level, it is necessary # I also probably wrote this docstring test thing wrong. 
# 
# >>> from galaxy.model import History, Dataset
# >>> from cookbook.patterns import Bunch
# >>> hist = History( id=1 )
# >>> p = HistoryIDParameter( None, XML( '<param name="blah" type="history"/>' ) )
# >>> print p.name
# blah
# >>> html_string = '<input type="hidden" name="blah" value="%d">' % hist.id
# >>> assert p.get_html( trans=Bunch( history=hist ) ) == html_string
# """
# def __init__( self, tool, elem ):
#     ToolParameter.__init__( self, tool, elem )
#     self.name = elem.get('name')
# def get_html( self, trans, value=None, other_values={} ):
#     assert trans.history is not None, "HistoryIDParameter requires a history"
#     self.html = form_builder.HiddenField( self.name, trans.history.id ).get_html()
#     return self.html

parameter_types = dict( text    = TextToolParameter,
                        integer = IntegerToolParameter,
                        float   = FloatToolParameter,
                        boolean = BooleanToolParameter,
                        select  = SelectToolParameter,
                        hidden  = HiddenToolParameter,
                        baseurl = BaseURLToolParameter,
                        file    = FileToolParameter,
                        data    = DataToolParameter,
                        raw     = RawToolParameter )

def get_suite():
    """Get unittest suite for this module"""
    import doctest, sys
    return doctest.DocTestSuite( sys.modules[__name__] )

def str_bool(in_str):
    """
    Return the boolean value encoded in a string: 'true' (in any case) gives
    True, anything else gives False. This is needed because bool(str) is True
    for every non-empty string, so the default action is to return False.
    """
    if str(in_str).lower() == 'true':
        return True
    return False
mit
-6,214,418,039,163,646,000
39.19596
162
0.587304
false
3.837963
false
false
false
toobaz/pandas
ci/print_skipped.py
1
1409
#!/usr/bin/env python
import os
import sys
import math
import xml.etree.ElementTree as et


def parse_results(filename):
    tree = et.parse(filename)
    root = tree.getroot()
    skipped = []

    current_class = ""
    i = 1
    assert i - 1 == len(skipped)
    for el in root.findall("testcase"):
        cn = el.attrib["classname"]
        for sk in el.findall("skipped"):
            old_class = current_class
            current_class = cn
            name = "{classname}.{name}".format(
                classname=current_class, name=el.attrib["name"]
            )
            msg = sk.attrib["message"]
            out = ""
            if old_class != current_class:
                ndigits = int(math.log(i, 10) + 1)

                # 4 for : + space + # + space
                out += "-" * (len(name + msg) + 4 + ndigits) + "\n"
            out += "#{i} {name}: {msg}".format(i=i, name=name, msg=msg)
            skipped.append(out)
            i += 1
            assert i - 1 == len(skipped)
    assert i - 1 == len(skipped)
    # assert len(skipped) == int(root.attrib['skip'])
    return "\n".join(skipped)


def main():
    test_files = ["test-data-single.xml", "test-data-multiple.xml", "test-data.xml"]

    print("SKIPPED TESTS:")
    for fn in test_files:
        if os.path.isfile(fn):
            print(parse_results(fn))
    return 0


if __name__ == "__main__":
    sys.exit(main())
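A minimal, hypothetical usage sketch for the script above; the XML layout and the test names are assumptions for illustration, not taken from the pandas repository.

# Hypothetical sketch: build a tiny junit-style report and feed it to
# parse_results(). All names below are invented.
import xml.etree.ElementTree as et

root = et.Element("testsuite")
case = et.SubElement(root, "testcase",
                     classname="pandas.tests.test_example", name="test_skip_me")
et.SubElement(case, "skipped", message="requires optional dependency")
et.ElementTree(root).write("test-data-single.xml")

# parse_results() walks every <testcase>/<skipped> pair and returns a
# numbered, separator-delimited summary string.
print(parse_results("test-data-single.xml"))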
bsd-3-clause
-8,257,308,143,200,926,000
26.096154
84
0.515259
false
3.594388
true
false
false
openstack-infra/shade
shade/tests/unit/test_floating_ip_neutron.py
1
41101
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ test_floating_ip_neutron ---------------------------------- Tests Floating IP resource methods for Neutron """ import copy import datetime import munch from shade import exc from shade.tests import fakes from shade.tests.unit import base class TestFloatingIP(base.RequestsMockTestCase): mock_floating_ip_list_rep = { 'floatingips': [ { 'router_id': 'd23abc8d-2991-4a55-ba98-2aaea84cc72f', 'tenant_id': '4969c491a3c74ee4af974e6d800c62de', 'floating_network_id': '376da547-b977-4cfe-9cba-275c80debf57', 'fixed_ip_address': '10.0.0.4', 'floating_ip_address': '172.24.4.229', 'port_id': 'ce705c24-c1ef-408a-bda3-7bbd946164ac', 'id': '2f245a7b-796b-4f26-9cf9-9e82d248fda7', 'status': 'ACTIVE' }, { 'router_id': None, 'tenant_id': '4969c491a3c74ee4af974e6d800c62de', 'floating_network_id': '376da547-b977-4cfe-9cba-275c80debf57', 'fixed_ip_address': None, 'floating_ip_address': '203.0.113.30', 'port_id': None, 'id': '61cea855-49cb-4846-997d-801b70c71bdd', 'status': 'DOWN' } ] } mock_floating_ip_new_rep = { 'floatingip': { 'fixed_ip_address': '10.0.0.4', 'floating_ip_address': '172.24.4.229', 'floating_network_id': 'my-network-id', 'id': '2f245a7b-796b-4f26-9cf9-9e82d248fda8', 'port_id': None, 'router_id': None, 'status': 'ACTIVE', 'tenant_id': '4969c491a3c74ee4af974e6d800c62df' } } mock_floating_ip_port_rep = { 'floatingip': { 'fixed_ip_address': '10.0.0.4', 'floating_ip_address': '172.24.4.229', 'floating_network_id': 'my-network-id', 'id': '2f245a7b-796b-4f26-9cf9-9e82d248fda8', 'port_id': 'ce705c24-c1ef-408a-bda3-7bbd946164ac', 'router_id': None, 'status': 'ACTIVE', 'tenant_id': '4969c491a3c74ee4af974e6d800c62df' } } mock_get_network_rep = { 'status': 'ACTIVE', 'subnets': [ '54d6f61d-db07-451c-9ab3-b9609b6b6f0b' ], 'name': 'my-network', 'provider:physical_network': None, 'admin_state_up': True, 'tenant_id': '4fd44f30292945e481c7b8a0c8908869', 'provider:network_type': 'local', 'router:external': True, 'shared': True, 'id': 'my-network-id', 'provider:segmentation_id': None } mock_search_ports_rep = [ { 'status': 'ACTIVE', 'binding:host_id': 'devstack', 'name': 'first-port', 'created_at': datetime.datetime.now().isoformat(), 'allowed_address_pairs': [], 'admin_state_up': True, 'network_id': '70c1db1f-b701-45bd-96e0-a313ee3430b3', 'tenant_id': '', 'extra_dhcp_opts': [], 'binding:vif_details': { 'port_filter': True, 'ovs_hybrid_plug': True }, 'binding:vif_type': 'ovs', 'device_owner': 'compute:None', 'mac_address': 'fa:16:3e:58:42:ed', 'binding:profile': {}, 'binding:vnic_type': 'normal', 'fixed_ips': [ { 'subnet_id': '008ba151-0b8c-4a67-98b5-0d2b87666062', 'ip_address': u'172.24.4.2' } ], 'id': 'ce705c24-c1ef-408a-bda3-7bbd946164ac', 'security_groups': [], 'device_id': 'server-id' } ] def assertAreInstances(self, elements, elem_type): for e in elements: self.assertIsInstance(e, elem_type) def setUp(self): super(TestFloatingIP, self).setUp() self.fake_server = fakes.make_fake_server( 'server-id', '', 'ACTIVE', 
addresses={u'test_pnztt_net': [{ u'OS-EXT-IPS:type': u'fixed', u'addr': '192.0.2.129', u'version': 4, u'OS-EXT-IPS-MAC:mac_addr': u'fa:16:3e:ae:7d:42'}]}) self.floating_ip = self.cloud._normalize_floating_ips( self.mock_floating_ip_list_rep['floatingips'])[0] def test_float_no_status(self): floating_ips = [ { 'fixed_ip_address': '10.0.0.4', 'floating_ip_address': '172.24.4.229', 'floating_network_id': 'my-network-id', 'id': '2f245a7b-796b-4f26-9cf9-9e82d248fda8', 'port_id': None, 'router_id': None, 'tenant_id': '4969c491a3c74ee4af974e6d800c62df' } ] normalized = self.cloud._normalize_floating_ips(floating_ips) self.assertEqual('UNKNOWN', normalized[0]['status']) def test_list_floating_ips(self): self.register_uris([ dict(method='GET', uri='https://network.example.com/v2.0/floatingips.json', json=self.mock_floating_ip_list_rep)]) floating_ips = self.cloud.list_floating_ips() self.assertIsInstance(floating_ips, list) self.assertAreInstances(floating_ips, dict) self.assertEqual(2, len(floating_ips)) self.assert_calls() def test_list_floating_ips_with_filters(self): self.register_uris([ dict(method='GET', uri=('https://network.example.com/v2.0/floatingips.json?' 'Foo=42'), json={'floatingips': []})]) self.cloud.list_floating_ips(filters={'Foo': 42}) self.assert_calls() def test_search_floating_ips(self): self.register_uris([ dict(method='GET', uri=('https://network.example.com/v2.0/floatingips.json'), json=self.mock_floating_ip_list_rep)]) floating_ips = self.cloud.search_floating_ips( filters={'attached': False}) self.assertIsInstance(floating_ips, list) self.assertAreInstances(floating_ips, dict) self.assertEqual(1, len(floating_ips)) self.assert_calls() def test_get_floating_ip(self): self.register_uris([ dict(method='GET', uri='https://network.example.com/v2.0/floatingips.json', json=self.mock_floating_ip_list_rep)]) floating_ip = self.cloud.get_floating_ip( id='2f245a7b-796b-4f26-9cf9-9e82d248fda7') self.assertIsInstance(floating_ip, dict) self.assertEqual('172.24.4.229', floating_ip['floating_ip_address']) self.assertEqual( self.mock_floating_ip_list_rep['floatingips'][0]['tenant_id'], floating_ip['project_id'] ) self.assertEqual( self.mock_floating_ip_list_rep['floatingips'][0]['tenant_id'], floating_ip['tenant_id'] ) self.assertIn('location', floating_ip) self.assert_calls() def test_get_floating_ip_not_found(self): self.register_uris([ dict(method='GET', uri='https://network.example.com/v2.0/floatingips.json', json=self.mock_floating_ip_list_rep)]) floating_ip = self.cloud.get_floating_ip(id='non-existent') self.assertIsNone(floating_ip) self.assert_calls() def test_get_floating_ip_by_id(self): fid = self.mock_floating_ip_new_rep['floatingip']['id'] self.register_uris([ dict(method='GET', uri='https://network.example.com/v2.0/floatingips/' '{id}'.format(id=fid), json=self.mock_floating_ip_new_rep)]) floating_ip = self.cloud.get_floating_ip_by_id(id=fid) self.assertIsInstance(floating_ip, dict) self.assertEqual('172.24.4.229', floating_ip['floating_ip_address']) self.assertEqual( self.mock_floating_ip_new_rep['floatingip']['tenant_id'], floating_ip['project_id'] ) self.assertEqual( self.mock_floating_ip_new_rep['floatingip']['tenant_id'], floating_ip['tenant_id'] ) self.assertIn('location', floating_ip) self.assert_calls() def test_create_floating_ip(self): self.register_uris([ dict(method='GET', uri='https://network.example.com/v2.0/networks.json', json={'networks': [self.mock_get_network_rep]}), dict(method='POST', uri='https://network.example.com/v2.0/floatingips.json', 
json=self.mock_floating_ip_new_rep, validate=dict( json={'floatingip': { 'floating_network_id': 'my-network-id'}})) ]) ip = self.cloud.create_floating_ip(network='my-network') self.assertEqual( self.mock_floating_ip_new_rep['floatingip']['floating_ip_address'], ip['floating_ip_address']) self.assert_calls() def test_create_floating_ip_port_bad_response(self): self.register_uris([ dict(method='GET', uri='https://network.example.com/v2.0/networks.json', json={'networks': [self.mock_get_network_rep]}), dict(method='POST', uri='https://network.example.com/v2.0/floatingips.json', json=self.mock_floating_ip_new_rep, validate=dict( json={'floatingip': { 'floating_network_id': 'my-network-id', 'port_id': u'ce705c24-c1ef-408a-bda3-7bbd946164ab'}})) ]) # Fails because we requested a port and the returned FIP has no port self.assertRaises( exc.OpenStackCloudException, self.cloud.create_floating_ip, network='my-network', port='ce705c24-c1ef-408a-bda3-7bbd946164ab') self.assert_calls() def test_create_floating_ip_port(self): self.register_uris([ dict(method='GET', uri='https://network.example.com/v2.0/networks.json', json={'networks': [self.mock_get_network_rep]}), dict(method='POST', uri='https://network.example.com/v2.0/floatingips.json', json=self.mock_floating_ip_port_rep, validate=dict( json={'floatingip': { 'floating_network_id': 'my-network-id', 'port_id': u'ce705c24-c1ef-408a-bda3-7bbd946164ac'}})) ]) ip = self.cloud.create_floating_ip( network='my-network', port='ce705c24-c1ef-408a-bda3-7bbd946164ac') self.assertEqual( self.mock_floating_ip_new_rep['floatingip']['floating_ip_address'], ip['floating_ip_address']) self.assert_calls() def test_neutron_available_floating_ips(self): """ Test without specifying a network name. """ fips_mock_uri = 'https://network.example.com/v2.0/floatingips.json' self.register_uris([ dict(method='GET', uri='https://network.example.com/v2.0/networks.json', json={'networks': [self.mock_get_network_rep]}), dict(method='GET', uri='https://network.example.com/v2.0/subnets.json', json={'subnets': []}), dict(method='GET', uri=fips_mock_uri, json={'floatingips': []}), dict(method='POST', uri=fips_mock_uri, json=self.mock_floating_ip_new_rep, validate=dict(json={ 'floatingip': { 'floating_network_id': self.mock_get_network_rep['id'] }})) ]) # Test if first network is selected if no network is given self.cloud._neutron_available_floating_ips() self.assert_calls() def test_neutron_available_floating_ips_network(self): """ Test with specifying a network name. """ fips_mock_uri = 'https://network.example.com/v2.0/floatingips.json' self.register_uris([ dict(method='GET', uri='https://network.example.com/v2.0/networks.json', json={'networks': [self.mock_get_network_rep]}), dict(method='GET', uri='https://network.example.com/v2.0/subnets.json', json={'subnets': []}), dict(method='GET', uri=fips_mock_uri, json={'floatingips': []}), dict(method='POST', uri=fips_mock_uri, json=self.mock_floating_ip_new_rep, validate=dict(json={ 'floatingip': { 'floating_network_id': self.mock_get_network_rep['id'] }})) ]) # Test if first network is selected if no network is given self.cloud._neutron_available_floating_ips( network=self.mock_get_network_rep['name'] ) self.assert_calls() def test_neutron_available_floating_ips_invalid_network(self): """ Test with an invalid network name. 
""" self.register_uris([ dict(method='GET', uri='https://network.example.com/v2.0/networks.json', json={'networks': [self.mock_get_network_rep]}), dict(method='GET', uri='https://network.example.com/v2.0/subnets.json', json={'subnets': []}) ]) self.assertRaises( exc.OpenStackCloudException, self.cloud._neutron_available_floating_ips, network='INVALID') self.assert_calls() def test_auto_ip_pool_no_reuse(self): # payloads taken from citycloud self.register_uris([ dict(method='GET', uri='https://network.example.com/v2.0/networks.json', json={"networks": [{ "status": "ACTIVE", "subnets": [ "df3e17fa-a4b2-47ae-9015-bc93eb076ba2", "6b0c3dc9-b0b8-4d87-976a-7f2ebf13e7ec", "fc541f48-fc7f-48c0-a063-18de6ee7bdd7"], "availability_zone_hints": [], "availability_zones": ["nova"], "name": "ext-net", "admin_state_up": True, "tenant_id": "a564613210ee43708b8a7fc6274ebd63", "tags": [], "ipv6_address_scope": "9f03124f-89af-483a-b6fd-10f08079db4d", # noqa "mtu": 0, "is_default": False, "router:external": True, "ipv4_address_scope": None, "shared": False, "id": "0232c17f-2096-49bc-b205-d3dcd9a30ebf", "description": None }, { "status": "ACTIVE", "subnets": ["f0ad1df5-53ee-473f-b86b-3604ea5591e9"], "availability_zone_hints": [], "availability_zones": ["nova"], "name": "private", "admin_state_up": True, "tenant_id": "65222a4d09ea4c68934fa1028c77f394", "created_at": "2016-10-22T13:46:26", "tags": [], "updated_at": "2016-10-22T13:46:26", "ipv6_address_scope": None, "router:external": False, "ipv4_address_scope": None, "shared": False, "mtu": 1450, "id": "2c9adcb5-c123-4c5a-a2ba-1ad4c4e1481f", "description": "" }]}), dict(method='GET', uri='https://network.example.com/v2.0/ports.json' '?device_id=f80e3ad0-e13e-41d4-8e9c-be79bccdb8f7', json={"ports": [{ "status": "ACTIVE", "created_at": "2017-02-06T20:59:45", "description": "", "allowed_address_pairs": [], "admin_state_up": True, "network_id": "2c9adcb5-c123-4c5a-a2ba-1ad4c4e1481f", "dns_name": None, "extra_dhcp_opts": [], "mac_address": "fa:16:3e:e8:7f:03", "updated_at": "2017-02-06T20:59:49", "name": "", "device_owner": "compute:None", "tenant_id": "65222a4d09ea4c68934fa1028c77f394", "binding:vnic_type": "normal", "fixed_ips": [{ "subnet_id": "f0ad1df5-53ee-473f-b86b-3604ea5591e9", "ip_address": "10.4.0.16"}], "id": "a767944e-057a-47d1-a669-824a21b8fb7b", "security_groups": [ "9fb5ba44-5c46-4357-8e60-8b55526cab54"], "device_id": "f80e3ad0-e13e-41d4-8e9c-be79bccdb8f7", }]}), dict(method='POST', uri='https://network.example.com/v2.0/floatingips.json', json={"floatingip": { "router_id": "9de9c787-8f89-4a53-8468-a5533d6d7fd1", "status": "DOWN", "description": "", "dns_domain": "", "floating_network_id": "0232c17f-2096-49bc-b205-d3dcd9a30ebf", # noqa "fixed_ip_address": "10.4.0.16", "floating_ip_address": "89.40.216.153", "port_id": "a767944e-057a-47d1-a669-824a21b8fb7b", "id": "e69179dc-a904-4c9a-a4c9-891e2ecb984c", "dns_name": "", "tenant_id": "65222a4d09ea4c68934fa1028c77f394" }}, validate=dict(json={"floatingip": { "floating_network_id": "0232c17f-2096-49bc-b205-d3dcd9a30ebf", # noqa "fixed_ip_address": "10.4.0.16", "port_id": "a767944e-057a-47d1-a669-824a21b8fb7b", }})), dict(method='GET', uri='{endpoint}/servers/detail'.format( endpoint=fakes.COMPUTE_ENDPOINT), json={"servers": [{ "status": "ACTIVE", "updated": "2017-02-06T20:59:49Z", "addresses": { "private": [{ "OS-EXT-IPS-MAC:mac_addr": "fa:16:3e:e8:7f:03", "version": 4, "addr": "10.4.0.16", "OS-EXT-IPS:type": "fixed" }, { "OS-EXT-IPS-MAC:mac_addr": "fa:16:3e:e8:7f:03", "version": 4, "addr": "89.40.216.153", 
"OS-EXT-IPS:type": "floating" }]}, "key_name": None, "image": {"id": "95e4c449-8abf-486e-97d9-dc3f82417d2d"}, "OS-EXT-STS:task_state": None, "OS-EXT-STS:vm_state": "active", "OS-SRV-USG:launched_at": "2017-02-06T20:59:48.000000", "flavor": {"id": "2186bd79-a05e-4953-9dde-ddefb63c88d4"}, "id": "f80e3ad0-e13e-41d4-8e9c-be79bccdb8f7", "security_groups": [{"name": "default"}], "OS-SRV-USG:terminated_at": None, "OS-EXT-AZ:availability_zone": "nova", "user_id": "c17534835f8f42bf98fc367e0bf35e09", "name": "testmt", "created": "2017-02-06T20:59:44Z", "tenant_id": "65222a4d09ea4c68934fa1028c77f394", "OS-DCF:diskConfig": "MANUAL", "os-extended-volumes:volumes_attached": [], "accessIPv4": "", "accessIPv6": "", "progress": 0, "OS-EXT-STS:power_state": 1, "config_drive": "", "metadata": {} }]}), dict(method='GET', uri='https://network.example.com/v2.0/networks.json', json={"networks": [{ "status": "ACTIVE", "subnets": [ "df3e17fa-a4b2-47ae-9015-bc93eb076ba2", "6b0c3dc9-b0b8-4d87-976a-7f2ebf13e7ec", "fc541f48-fc7f-48c0-a063-18de6ee7bdd7"], "availability_zone_hints": [], "availability_zones": ["nova"], "name": "ext-net", "admin_state_up": True, "tenant_id": "a564613210ee43708b8a7fc6274ebd63", "tags": [], "ipv6_address_scope": "9f03124f-89af-483a-b6fd-10f08079db4d", # noqa "mtu": 0, "is_default": False, "router:external": True, "ipv4_address_scope": None, "shared": False, "id": "0232c17f-2096-49bc-b205-d3dcd9a30ebf", "description": None }, { "status": "ACTIVE", "subnets": ["f0ad1df5-53ee-473f-b86b-3604ea5591e9"], "availability_zone_hints": [], "availability_zones": ["nova"], "name": "private", "admin_state_up": True, "tenant_id": "65222a4d09ea4c68934fa1028c77f394", "created_at": "2016-10-22T13:46:26", "tags": [], "updated_at": "2016-10-22T13:46:26", "ipv6_address_scope": None, "router:external": False, "ipv4_address_scope": None, "shared": False, "mtu": 1450, "id": "2c9adcb5-c123-4c5a-a2ba-1ad4c4e1481f", "description": "" }]}), dict(method='GET', uri='https://network.example.com/v2.0/subnets.json', json={"subnets": [{ "description": "", "enable_dhcp": True, "network_id": "2c9adcb5-c123-4c5a-a2ba-1ad4c4e1481f", "tenant_id": "65222a4d09ea4c68934fa1028c77f394", "created_at": "2016-10-22T13:46:26", "dns_nameservers": [ "89.36.90.101", "89.36.90.102"], "updated_at": "2016-10-22T13:46:26", "gateway_ip": "10.4.0.1", "ipv6_ra_mode": None, "allocation_pools": [{ "start": "10.4.0.2", "end": "10.4.0.200"}], "host_routes": [], "ip_version": 4, "ipv6_address_mode": None, "cidr": "10.4.0.0/24", "id": "f0ad1df5-53ee-473f-b86b-3604ea5591e9", "subnetpool_id": None, "name": "private-subnet-ipv4", }]})]) self.cloud.add_ips_to_server( munch.Munch( id='f80e3ad0-e13e-41d4-8e9c-be79bccdb8f7', addresses={ "private": [{ "OS-EXT-IPS-MAC:mac_addr": "fa:16:3e:e8:7f:03", "version": 4, "addr": "10.4.0.16", "OS-EXT-IPS:type": "fixed" }]}), ip_pool='ext-net', reuse=False) self.assert_calls() def test_available_floating_ip_new(self): self.register_uris([ dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'networks.json']), json={'networks': [self.mock_get_network_rep]}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'subnets.json']), json={'subnets': []}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': []}), dict(method='POST', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), validate=dict( json={'floatingip': { 'floating_network_id': 'my-network-id'}}), 
json=self.mock_floating_ip_new_rep) ]) ip = self.cloud.available_floating_ip(network='my-network') self.assertEqual( self.mock_floating_ip_new_rep['floatingip']['floating_ip_address'], ip['floating_ip_address']) self.assert_calls() def test_delete_floating_ip_existing(self): fip_id = '2f245a7b-796b-4f26-9cf9-9e82d248fda7' fake_fip = { 'id': fip_id, 'floating_ip_address': '172.99.106.167', 'status': 'ACTIVE', } self.register_uris([ dict(method='DELETE', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]), json={}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': [fake_fip]}), dict(method='DELETE', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]), json={}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': [fake_fip]}), dict(method='DELETE', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]), json={}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': []}), ]) self.assertTrue( self.cloud.delete_floating_ip(floating_ip_id=fip_id, retry=2)) self.assert_calls() def test_delete_floating_ip_existing_down(self): fip_id = '2f245a7b-796b-4f26-9cf9-9e82d248fda7' fake_fip = { 'id': fip_id, 'floating_ip_address': '172.99.106.167', 'status': 'ACTIVE', } down_fip = { 'id': fip_id, 'floating_ip_address': '172.99.106.167', 'status': 'DOWN', } self.register_uris([ dict(method='DELETE', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]), json={}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': [fake_fip]}), dict(method='DELETE', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]), json={}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': [down_fip]}), ]) self.assertTrue( self.cloud.delete_floating_ip(floating_ip_id=fip_id, retry=2)) self.assert_calls() def test_delete_floating_ip_existing_no_delete(self): fip_id = '2f245a7b-796b-4f26-9cf9-9e82d248fda7' fake_fip = { 'id': fip_id, 'floating_ip_address': '172.99.106.167', 'status': 'ACTIVE', } self.register_uris([ dict(method='DELETE', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]), json={}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': [fake_fip]}), dict(method='DELETE', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]), json={}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': [fake_fip]}), dict(method='DELETE', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips/{0}.json'.format(fip_id)]), json={}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': [fake_fip]}), ]) self.assertRaises( exc.OpenStackCloudException, self.cloud.delete_floating_ip, floating_ip_id=fip_id, retry=2) self.assert_calls() def test_delete_floating_ip_not_found(self): self.register_uris([ dict(method='DELETE', 
uri=('https://network.example.com/v2.0/floatingips/' 'a-wild-id-appears.json'), status_code=404)]) ret = self.cloud.delete_floating_ip( floating_ip_id='a-wild-id-appears') self.assertFalse(ret) self.assert_calls() def test_attach_ip_to_server(self): fip = self.mock_floating_ip_list_rep['floatingips'][0] device_id = self.fake_server['id'] self.register_uris([ dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'ports.json'], qs_elements=["device_id={0}".format(device_id)]), json={'ports': self.mock_search_ports_rep}), dict(method='PUT', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips/{0}.json'.format( fip['id'])]), json={'floatingip': fip}, validate=dict( json={'floatingip': { 'port_id': self.mock_search_ports_rep[0]['id'], 'fixed_ip_address': self.mock_search_ports_rep[0][ 'fixed_ips'][0]['ip_address']}})), ]) self.cloud._attach_ip_to_server( server=self.fake_server, floating_ip=self.floating_ip) self.assert_calls() def test_add_ip_refresh_timeout(self): device_id = self.fake_server['id'] self.register_uris([ dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'networks.json']), json={'networks': [self.mock_get_network_rep]}), dict(method='GET', uri='https://network.example.com/v2.0/subnets.json', json={'subnets': []}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'ports.json'], qs_elements=["device_id={0}".format(device_id)]), json={'ports': self.mock_search_ports_rep}), dict(method='POST', uri='https://network.example.com/v2.0/floatingips.json', json={'floatingip': self.floating_ip}, validate=dict( json={'floatingip': { 'floating_network_id': 'my-network-id', 'fixed_ip_address': self.mock_search_ports_rep[0][ 'fixed_ips'][0]['ip_address'], 'port_id': self.mock_search_ports_rep[0]['id']}})), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': [self.floating_ip]}), dict(method='DELETE', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips/{0}.json'.format( self.floating_ip['id'])]), json={}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': []}), ]) self.assertRaises( exc.OpenStackCloudTimeout, self.cloud._add_auto_ip, server=self.fake_server, wait=True, timeout=0.01, reuse=False) self.assert_calls() def test_detach_ip_from_server(self): fip = self.mock_floating_ip_new_rep['floatingip'] attached_fip = copy.copy(fip) attached_fip['port_id'] = 'server-port-id' self.register_uris([ dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': [attached_fip]}), dict(method='PUT', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips/{0}.json'.format( fip['id'])]), json={'floatingip': fip}, validate=dict( json={'floatingip': {'port_id': None}})) ]) self.cloud.detach_ip_from_server( server_id='server-id', floating_ip_id=fip['id']) self.assert_calls() def test_add_ip_from_pool(self): network = self.mock_get_network_rep fip = self.mock_floating_ip_new_rep['floatingip'] fixed_ip = self.mock_search_ports_rep[0]['fixed_ips'][0]['ip_address'] port_id = self.mock_search_ports_rep[0]['id'] self.register_uris([ dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'networks.json']), json={'networks': [network]}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'subnets.json']), json={'subnets': []}), 
dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': [fip]}), dict(method='POST', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingip': fip}, validate=dict( json={'floatingip': { 'floating_network_id': network['id']}})), dict(method="GET", uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'ports.json'], qs_elements=[ "device_id={0}".format(self.fake_server['id'])]), json={'ports': self.mock_search_ports_rep}), dict(method='PUT', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips/{0}.json'.format( fip['id'])]), json={'floatingip': fip}, validate=dict( json={'floatingip': { 'fixed_ip_address': fixed_ip, 'port_id': port_id}})), ]) server = self.cloud._add_ip_from_pool( server=self.fake_server, network=network['id'], fixed_address=fixed_ip) self.assertEqual(server, self.fake_server) self.assert_calls() def test_cleanup_floating_ips(self): floating_ips = [{ "id": "this-is-a-floating-ip-id", "fixed_ip_address": None, "internal_network": None, "floating_ip_address": "203.0.113.29", "network": "this-is-a-net-or-pool-id", "port_id": None, "status": "ACTIVE" }, { "id": "this-is-an-attached-floating-ip-id", "fixed_ip_address": None, "internal_network": None, "floating_ip_address": "203.0.113.29", "network": "this-is-a-net-or-pool-id", "attached": True, "port_id": "this-is-id-of-port-with-fip", "status": "ACTIVE" }] self.register_uris([ dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': floating_ips}), dict(method='DELETE', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips/{0}.json'.format( floating_ips[0]['id'])]), json={}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingips': [floating_ips[1]]}), ]) self.cloud.delete_unattached_floating_ips() self.assert_calls() def test_create_floating_ip_no_port(self): server_port = { "id": "port-id", "device_id": "some-server", 'created_at': datetime.datetime.now().isoformat(), 'fixed_ips': [ { 'subnet_id': 'subnet-id', 'ip_address': '172.24.4.2' } ], } floating_ip = { "id": "floating-ip-id", "port_id": None } self.register_uris([ dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'networks.json']), json={'networks': [self.mock_get_network_rep]}), dict(method='GET', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'subnets.json']), json={'subnets': []}), dict(method="GET", uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'ports.json'], qs_elements=['device_id=some-server']), json={'ports': [server_port]}), dict(method='POST', uri=self.get_mock_url( 'network', 'public', append=['v2.0', 'floatingips.json']), json={'floatingip': floating_ip}) ]) self.assertRaises( exc.OpenStackCloudException, self.cloud._neutron_create_floating_ip, server=dict(id='some-server')) self.assert_calls()
apache-2.0
-3,525,956,898,842,083,300
40.101
91
0.467069
false
3.734418
true
false
false
bblais/plasticity
setup.py
1
2798
# this is from https://github.com/cython/cython/wiki/PackageHierarchy

import sys, os, stat, subprocess
from distutils.core import setup
from Cython.Distutils import build_ext
from distutils.extension import Extension

# we'd better have Cython installed, or it's a no-go
try:
    from Cython.Distutils import build_ext
except:
    print("You don't seem to have Cython installed. Please get a")
    print("copy from www.cython.org and install it")
    sys.exit(1)

import numpy


def get_version(package):
    d = {}
    version_line = ''
    with open('%s/version.py' % package) as fid:
        for line in fid:
            if line.startswith('version='):
                version_line = line
    print(version_line)
    exec(version_line, d)
    return d['version']


# scan the directory for extension files, converting
# them to extension names in dotted notation
def scandir(dir, files=[]):
    for file in os.listdir(dir):
        path = os.path.join(dir, file)
        if os.path.isfile(path) and path.endswith(".pyx"):
            files.append(path.replace(os.path.sep, ".")[:-4])
        elif os.path.isdir(path):
            scandir(path, files)
    return files


def cleanc(dir):
    for file in os.listdir(dir):
        path = os.path.join(dir, file)
        if os.path.isfile(path) and path.endswith(".pyx"):
            base, ext = os.path.splitext(path)
            cpath = base + '.c'
            if os.path.isfile(cpath):
                os.remove(cpath)
                print("~~", cpath)
        elif os.path.isdir(path):
            cleanc(path)


# generate an Extension object from its dotted name
def makeExtension(extName):
    extPath = extName.replace(".", os.path.sep) + ".pyx"
    folder = extName.split(".")[0]
    return Extension(
        extName,
        [extPath, 'plasticity/randomkit.c'],
        include_dirs = [numpy.get_include(), ".", "%s/" % folder],   # adding the '.' to include_dirs is CRUCIAL!!
        extra_compile_args = ["-O3", "-Wall"],
        extra_link_args = ['-g'],
        )


# get the list of extensions
extNames = scandir("plasticity")
print(extNames)

cleanc("plasticity")

# and build up the set of Extension objects
print(extNames)
extensions = [makeExtension(name) for name in extNames]

# finally, we can pass all this to distutils
setup(
    name="plasticity",
    version=get_version('plasticity'),
    description="Synaptic Plasticity in Rate-Based Neurons",
    author="Brian Blais",
    packages=['plasticity', 'plasticity.dialogs', 'plasticity.dialogs.waxy'],
    scripts=['plasticity/Plasticity.pyw'],
    package_data={'plasticity': ['images/*.*', 'dialogs/images/*.*',
                                 'dialogs/images/learning_rules/*.*', 'hdf5/*.*']},
    ext_modules=extensions,
    cmdclass = {'build_ext': build_ext},
)
mit
-212,955,220,237,154,800
29.086022
114
0.62366
false
3.578005
false
false
false
dslackw/sbo-templates
sbo_templates/__metadata__.py
1
1203
#!/usr/bin/python3
# -*- coding: utf-8 -*-

# __metadata__.py file is part of sbo-templates.

# Copyright 2015-2021 Dimitris Zlatanidis <[email protected]>
# All rights reserved.

# SBo tool for managing templates.
# https://gitlab.com/dslackw/sbo-templates

# sbo-templates is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.


__prog__ = "sbo-templates"
__author__ = "dslackw"
__copyright__ = "2015-2021"
__version_info__ = (1, 3, 2)
__version__ = "{0}.{1}.{2}".format(*__version_info__)
__license__ = "GNU General Public License v3 (GPLv3)"
__email__ = "[email protected]"
__website__ = "https://gitlab.com/dslackw/sbo-templates"
gpl-3.0
-942,446,830,145,835,600
36.59375
71
0.717373
false
3.379213
false
false
false
mozilla/pto
pto/apps/autocomplete/views.py
1
1912
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

import logging

from django import http

from pto.apps.dates.decorators import json_view
from pto.apps.users.models import UserProfile, User
from pto.apps.users.utils import ldap_lookup


@json_view
def cities(request):
    if not request.user.is_authenticated():
        return http.HttpResponseForbidden('Must be logged in')
    data = []
    term = request.GET.get('term')
    qs = UserProfile.objects.exclude(city='')
    if term:
        qs = qs.filter(city__istartswith=term)
    for each in (qs
                 .values('city')
                 .distinct()
                 .order_by('city')):
        city = each['city']
        data.append(city)
    return data


@json_view
def users(request, known_only=False):
    if not request.user.is_authenticated():
        return http.HttpResponseForbidden('Must be logged in')
    query = request.GET.get('term').strip()
    if len(query) < 2:
        return []

    results = []
    # I chose a limit of 30 because there are about 20+ 'peter'
    # something in mozilla
    for each in ldap_lookup.search_users(query, 30, autocomplete=True):
        if not each.get('givenName'):
            logging.warn("Skipping LDAP entry %s" % each)
            continue
        if known_only:
            if not User.objects.filter(email__iexact=each['mail']).exists():
                continue
        full_name_and_email = '%s %s <%s>' % (each['givenName'],
                                              each['sn'],
                                              each['mail'])
        result = {'id': each['uid'],
                  'label': full_name_and_email,
                  'value': full_name_and_email}
        results.append(result)
    return results
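A hypothetical sketch of the payloads the two @json_view endpoints above return; the URL paths, names, and addresses are invented for illustration and the real values depend on the LDAP directory and saved profiles.

# GET <cities-endpoint>?term=Lo  ->  a plain list of matching city names
["London", "Los Angeles"]

# GET <users-endpoint>?term=pat  ->  jQuery-UI style autocomplete items
[
 {"id": "pexample",
  "label": "Pat Example <[email protected]>",
  "value": "Pat Example <[email protected]>"},
]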
mpl-2.0
9,029,721,242,809,781,000
34.407407
76
0.578452
false
4.016807
false
false
false
ajylee/gpaw-rtxs
gpaw/test/diamond_gllb.py
1
2143
from ase.structure import bulk
from sys import argv
from ase.dft.kpoints import ibz_points, get_bandpath
from gpaw import *
from ase import *
from gpaw.test import gen
from gpaw import setup_paths
import os

"""This calculation has the following structure.

1) Calculate the ground state of Diamond.
2) Calculate the band structure of diamond in order to obtain accurate KS
   band gap for Diamond.
3) Calculate ground state again, and calculate the potential discontinuity
   using accurate band gap.
4) Calculate band structure again, and apply the discontinuity to CBM.

Compare to reference.
"""

xc = 'GLLBSC'
gen('C', xcname=xc)
setup_paths.insert(0, '.')

# Calculate ground state
atoms = bulk('C', 'diamond', a=3.567)
calc = GPAW(h=0.15, kpts=(4, 4, 4), xc=xc, nbands=6, eigensolver='cg')
atoms.set_calculator(calc)
atoms.get_potential_energy()
calc.write('Cgs.gpw')

# Calculate accurate KS-band gap from band structure
points = ibz_points['fcc']

# CMB is in G-X
G = points['Gamma']
X = points['X']
#W = points['W']
#K = points['K']
#L = points['L']
#[W, L, G, X, W, K]
kpts, x, X = get_bandpath([G, X], atoms.cell, npoints=12)

calc = GPAW('Cgs.gpw', kpts=kpts, fixdensity=True, usesymm=None,
            convergence=dict(bands=6))
calc.get_atoms().get_potential_energy()

# Get the accurate KS-band gap
homolumo = calc.occupations.get_homo_lumo(calc.wfs)
homo, lumo = homolumo
print "band gap ", (lumo - homo) * 27.2

# Redo the ground state calculation
calc = GPAW(h=0.15, kpts=(4, 4, 4), xc=xc, nbands=6, eigensolver='cg')
atoms.set_calculator(calc)
atoms.get_potential_energy()

# And calculate the discontinuity potential with accurate band gap
response = calc.hamiltonian.xc.xcs['RESPONSE']
response.calculate_delta_xc(homolumo=homolumo)
calc.write('CGLLBSC.gpw')

# Redo the band structure calculation
atoms, calc = restart('CGLLBSC.gpw', kpts=kpts, fixdensity=True, usesymm=None,
                      convergence=dict(bands=6))
atoms.get_potential_energy()
response = calc.hamiltonian.xc.xcs['RESPONSE']
KS, dxc = response.calculate_delta_xc_perturbation()

assert abs(KS + dxc - 5.41) < 0.10
#M. Kuisma et. al, Phys. Rev. B 82, 115106, QP gap for C, 5.41eV, expt. 5.48eV
gpl-3.0
7,119,025,400,674,546,000
30.985075
105
0.728885
false
2.747436
false
false
false
discoapi/discotech
discotech/discoAPI/keywordManager.py
1
3203
__package__ = 'discotech.discoAPI'

import json

from discotech import discotechError


class KeywordManager(object):
    """
    Simple object to store and queue keywords to search for in social media providers
    """

    def __init__(self, keywords=[], convertToSearchPhrases=False):
        """
        @type keywords: list
        @param keywords: the keywords you want to search for

        @type convertToSearchPhrases: bool
        @param convertToSearchPhrases: whether keywords should be converted to matching search phrases, for example 'spider man' => ['spider','man','spiderman','spider_man']
        """
        if keywords:
            self.keywords = self._keyworsToSearchPhrases(keywords) if convertToSearchPhrases else list(keywords)
            self._keywordCount = len(self.keywords)
            self._headLocation = 0
        else:
            self.keywords = keywords

    def dequque(self):
        """
        dequeue a keyword from the queue, the keyword is then moved to the end of the queue

        @return: the next keyword in queue
        """
        if not self.keywords:
            raise discotechError("you don't have any keywords")

        retValue = self.keywords[self._headLocation]
        # move head to the next entry
        self._headLocation = (self._headLocation + 1) % self._keywordCount
        return retValue

    def _updateFromList(self, keywords):
        self.keywords = list(keywords)
        self._keywordCount = len(self.keywords)
        self._headLocation = 0

    def _updateFromDict(self, config):
        if 'keywords' in config:
            convertToSearchPhrases = False
            if 'search_phrase' in config and config['search_phrase'] is True:
                convertToSearchPhrases = True
            self.keywords = self._keyworsToSearchPhrases(config['keywords']) if convertToSearchPhrases else list(config['keywords'])
            self._keywordCount = len(self.keywords)
            self._headLocation = 0
        else:
            raise discotechError("no keywords were given")

    def _keyworToSearchPhrases(self, keyword):
        words = keyword.split(' ')
        # edge case
        if len(words) == 1:
            return words
        cleanWords = []
        # cleanup stage
        for word in words:
            word = word.strip()
            if word != '':
                cleanWords.append(word)
        # combinator stage
        combinators = ['', '_']
        combinedWords = []
        for combinator in combinators:
            combinedWords.append(combinator.join(cleanWords))
        return cleanWords + combinedWords

    def _keyworsToSearchPhrases(self, keywords):
        retList = []
        for keyword in keywords:
            retList += self._keyworToSearchPhrases(keyword)
        return retList

    def loadConfig(self, config):
        """
        load keywords from a configuration

        @type config: list | str
        @param config: a list of keywords or a path or address of a JSON configuration file
        """
        # if it's a list
        if type(config) is list:
            self._updateFromList(config)
        # if it's a dict
        if type(config) is dict:
            self._updateFromDict(config)
        # if it's a string
        if type(config) is str:
            # could be an address
            if config.startswith('http://') or config.startswith('https://'):
                # getUrlContents is expected to be provided by the discotech package
                configFile = getUrlContents(config)
                confList = json.loads(configFile['response_text'])
                # recursively call yourself
                return self.loadConfig(confList)
            # could be a file name
            confFile = open(config, 'r')
            confList = json.loads(confFile.read())
            # recursively call yourself
            return self.loadConfig(confList)
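A short, hypothetical usage sketch for KeywordManager; the keyword list below is invented and the import path is assumed from the record's module layout.

# Hypothetical sketch (not part of the original module): illustrates the
# search-phrase expansion and the circular queue behaviour.
from discotech.discoAPI.keywordManager import KeywordManager

manager = KeywordManager(['spider man'], convertToSearchPhrases=True)
print(manager.keywords)   # ['spider', 'man', 'spiderman', 'spider_man']
print(manager.dequque())  # 'spider'  (head then moves to the next keyword)
print(manager.dequque())  # 'man'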
gpl-2.0
-1,382,077,788,170,863,000
27.345133
163
0.70153
false
3.496725
true
false
false
ioggstream/python-course
ansible-101/notebooks/exercise-05/inventory-docker-solution.py
1
1376
#!/usr/bin/env python
# List our containers. Note: this only works with docker-compose containers.
from __future__ import print_function
from collections import defaultdict
import json

#
# Manage different docker libraries
#
try:
    from docker import Client
except ImportError:
    from docker import APIClient as Client

import logging
log = logging.getLogger()
logging.basicConfig(level=logging.DEBUG)


def print_hosts():
    c = Client(base_url="http://172.17.0.1:2375")
    container_fmt = lambda x: (
        x['Names'][0][1:],
        x['NetworkSettings']['Networks']['bridge']['IPAddress'],
    )
    inventory = dict()
    for x in c.containers():
        log.debug("Processing entry %r", '\t\t'.join(container_fmt(x)))
        try:
            group_name = x['Labels']['com.docker.compose.service']
            ip_address = x['NetworkSettings']['Networks']['bridge']['IPAddress']
            if group_name not in inventory:
                inventory[group_name] = defaultdict(list)
            inventory[group_name]['hosts'].append(ip_address)
        except KeyError:
            log.warning("Host not run via docker-compose: skipping")

    inventory['web']['host_vars'] = {'ansible_ssh_common_args': ' -o StrictHostKeyChecking=no '}
    ret = json.dumps(inventory, indent=True)
    return ret


if __name__ == '__main__':
    print(print_hosts())
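A hypothetical example of the JSON inventory the script above prints, assuming two containers belonging to a docker-compose service named "web"; the IP addresses are invented and the actual groups depend on the running services.

# Example output sketch of print_hosts() (indent=True gives 1-space indentation):
# {
#  "web": {
#   "hosts": [
#    "172.17.0.3",
#    "172.17.0.4"
#   ],
#   "host_vars": {
#    "ansible_ssh_common_args": " -o StrictHostKeyChecking=no "
#   }
#  }
# }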
agpl-3.0
-8,088,468,184,198,138,000
28.913043
96
0.634448
false
3.965418
false
false
false
googleapis/googleapis-gen
google/ads/googleads/v8/googleads-py/google/ads/googleads/v8/common/types/ad_type_infos.py
1
46175
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import proto # type: ignore from google.ads.googleads.v8.common.types import ad_asset from google.ads.googleads.v8.enums.types import call_conversion_reporting_state from google.ads.googleads.v8.enums.types import display_ad_format_setting from google.ads.googleads.v8.enums.types import display_upload_product_type as gage_display_upload_product_type from google.ads.googleads.v8.enums.types import legacy_app_install_ad_app_store from google.ads.googleads.v8.enums.types import mime_type as gage_mime_type __protobuf__ = proto.module( package='google.ads.googleads.v8.common', marshal='google.ads.googleads.v8', manifest={ 'TextAdInfo', 'ExpandedTextAdInfo', 'ExpandedDynamicSearchAdInfo', 'HotelAdInfo', 'ShoppingSmartAdInfo', 'ShoppingProductAdInfo', 'ShoppingComparisonListingAdInfo', 'GmailAdInfo', 'GmailTeaser', 'DisplayCallToAction', 'ProductImage', 'ProductVideo', 'ImageAdInfo', 'VideoBumperInStreamAdInfo', 'VideoNonSkippableInStreamAdInfo', 'VideoTrueViewInStreamAdInfo', 'VideoOutstreamAdInfo', 'VideoTrueViewDiscoveryAdInfo', 'VideoAdInfo', 'VideoResponsiveAdInfo', 'ResponsiveSearchAdInfo', 'LegacyResponsiveDisplayAdInfo', 'AppAdInfo', 'AppEngagementAdInfo', 'LegacyAppInstallAdInfo', 'ResponsiveDisplayAdInfo', 'LocalAdInfo', 'DisplayUploadAdInfo', 'ResponsiveDisplayAdControlSpec', 'SmartCampaignAdInfo', 'CallAdInfo', }, ) class TextAdInfo(proto.Message): r"""A text ad. Attributes: headline (str): The headline of the ad. description1 (str): The first line of the ad's description. description2 (str): The second line of the ad's description. """ headline = proto.Field( proto.STRING, number=4, optional=True, ) description1 = proto.Field( proto.STRING, number=5, optional=True, ) description2 = proto.Field( proto.STRING, number=6, optional=True, ) class ExpandedTextAdInfo(proto.Message): r"""An expanded text ad. Attributes: headline_part1 (str): The first part of the ad's headline. headline_part2 (str): The second part of the ad's headline. headline_part3 (str): The third part of the ad's headline. description (str): The description of the ad. description2 (str): The second description of the ad. path1 (str): The text that can appear alongside the ad's displayed URL. path2 (str): Additional text that can appear alongside the ad's displayed URL. """ headline_part1 = proto.Field( proto.STRING, number=8, optional=True, ) headline_part2 = proto.Field( proto.STRING, number=9, optional=True, ) headline_part3 = proto.Field( proto.STRING, number=10, optional=True, ) description = proto.Field( proto.STRING, number=11, optional=True, ) description2 = proto.Field( proto.STRING, number=12, optional=True, ) path1 = proto.Field( proto.STRING, number=13, optional=True, ) path2 = proto.Field( proto.STRING, number=14, optional=True, ) class ExpandedDynamicSearchAdInfo(proto.Message): r"""An expanded dynamic search ad. Attributes: description (str): The description of the ad. description2 (str): The second description of the ad. 
""" description = proto.Field( proto.STRING, number=3, optional=True, ) description2 = proto.Field( proto.STRING, number=4, optional=True, ) class HotelAdInfo(proto.Message): r"""A hotel ad. """ class ShoppingSmartAdInfo(proto.Message): r"""A Smart Shopping ad. """ class ShoppingProductAdInfo(proto.Message): r"""A standard Shopping ad. """ class ShoppingComparisonListingAdInfo(proto.Message): r"""A Shopping Comparison Listing ad. Attributes: headline (str): Headline of the ad. This field is required. Allowed length is between 25 and 45 characters. """ headline = proto.Field( proto.STRING, number=2, optional=True, ) class GmailAdInfo(proto.Message): r"""A Gmail ad. Attributes: teaser (google.ads.googleads.v8.common.types.GmailTeaser): The Gmail teaser. header_image (str): The MediaFile resource name of the header image. Valid image types are GIF, JPEG and PNG. The minimum size is 300x100 pixels and the aspect ratio must be between 3:1 and 5:1 (+-1%). marketing_image (str): The MediaFile resource name of the marketing image. Valid image types are GIF, JPEG and PNG. The image must either be landscape with a minimum size of 600x314 pixels and aspect ratio of 600:314 (+-1%) or square with a minimum size of 300x300 pixels and aspect ratio of 1:1 (+-1%) marketing_image_headline (str): Headline of the marketing image. marketing_image_description (str): Description of the marketing image. marketing_image_display_call_to_action (google.ads.googleads.v8.common.types.DisplayCallToAction): Display-call-to-action of the marketing image. product_images (Sequence[google.ads.googleads.v8.common.types.ProductImage]): Product images. Up to 15 images are supported. product_videos (Sequence[google.ads.googleads.v8.common.types.ProductVideo]): Product videos. Up to 7 videos are supported. At least one product video or a marketing image must be specified. """ teaser = proto.Field( proto.MESSAGE, number=1, message='GmailTeaser', ) header_image = proto.Field( proto.STRING, number=10, optional=True, ) marketing_image = proto.Field( proto.STRING, number=11, optional=True, ) marketing_image_headline = proto.Field( proto.STRING, number=12, optional=True, ) marketing_image_description = proto.Field( proto.STRING, number=13, optional=True, ) marketing_image_display_call_to_action = proto.Field( proto.MESSAGE, number=6, message='DisplayCallToAction', ) product_images = proto.RepeatedField( proto.MESSAGE, number=7, message='ProductImage', ) product_videos = proto.RepeatedField( proto.MESSAGE, number=8, message='ProductVideo', ) class GmailTeaser(proto.Message): r"""Gmail teaser data. The teaser is a small header that acts as an invitation to view the rest of the ad (the body). Attributes: headline (str): Headline of the teaser. description (str): Description of the teaser. business_name (str): Business name of the advertiser. logo_image (str): The MediaFile resource name of the logo image. Valid image types are GIF, JPEG and PNG. The minimum size is 144x144 pixels and the aspect ratio must be 1:1 (+-1%). """ headline = proto.Field( proto.STRING, number=5, optional=True, ) description = proto.Field( proto.STRING, number=6, optional=True, ) business_name = proto.Field( proto.STRING, number=7, optional=True, ) logo_image = proto.Field( proto.STRING, number=8, optional=True, ) class DisplayCallToAction(proto.Message): r"""Data for display call to action. The call to action is a piece of the ad that prompts the user to do something. Like clicking a link or making a phone call. 
Attributes: text (str): Text for the display-call-to-action. text_color (str): Text color for the display-call-to-action in hexadecimal, e.g. #ffffff for white. url_collection_id (str): Identifies the url collection in the ad.url_collections field. If not set the url defaults to final_url. """ text = proto.Field( proto.STRING, number=5, optional=True, ) text_color = proto.Field( proto.STRING, number=6, optional=True, ) url_collection_id = proto.Field( proto.STRING, number=7, optional=True, ) class ProductImage(proto.Message): r"""Product image specific data. Attributes: product_image (str): The MediaFile resource name of the product image. Valid image types are GIF, JPEG and PNG. The minimum size is 300x300 pixels and the aspect ratio must be 1:1 (+-1%). description (str): Description of the product. display_call_to_action (google.ads.googleads.v8.common.types.DisplayCallToAction): Display-call-to-action of the product image. """ product_image = proto.Field( proto.STRING, number=4, optional=True, ) description = proto.Field( proto.STRING, number=5, optional=True, ) display_call_to_action = proto.Field( proto.MESSAGE, number=3, message='DisplayCallToAction', ) class ProductVideo(proto.Message): r"""Product video specific data. Attributes: product_video (str): The MediaFile resource name of a video which must be hosted on YouTube. """ product_video = proto.Field( proto.STRING, number=2, optional=True, ) class ImageAdInfo(proto.Message): r"""An image ad. Attributes: pixel_width (int): Width in pixels of the full size image. pixel_height (int): Height in pixels of the full size image. image_url (str): URL of the full size image. preview_pixel_width (int): Width in pixels of the preview size image. preview_pixel_height (int): Height in pixels of the preview size image. preview_image_url (str): URL of the preview size image. mime_type (google.ads.googleads.v8.enums.types.MimeTypeEnum.MimeType): The mime type of the image. name (str): The name of the image. If the image was created from a MediaFile, this is the MediaFile's name. If the image was created from bytes, this is empty. media_file (str): The MediaFile resource to use for the image. data (bytes): Raw image data as bytes. ad_id_to_copy_image_from (int): An ad ID to copy the image from. """ pixel_width = proto.Field( proto.INT64, number=15, optional=True, ) pixel_height = proto.Field( proto.INT64, number=16, optional=True, ) image_url = proto.Field( proto.STRING, number=17, optional=True, ) preview_pixel_width = proto.Field( proto.INT64, number=18, optional=True, ) preview_pixel_height = proto.Field( proto.INT64, number=19, optional=True, ) preview_image_url = proto.Field( proto.STRING, number=20, optional=True, ) mime_type = proto.Field( proto.ENUM, number=10, enum=gage_mime_type.MimeTypeEnum.MimeType, ) name = proto.Field( proto.STRING, number=21, optional=True, ) media_file = proto.Field( proto.STRING, number=12, oneof='image', ) data = proto.Field( proto.BYTES, number=13, oneof='image', ) ad_id_to_copy_image_from = proto.Field( proto.INT64, number=14, oneof='image', ) class VideoBumperInStreamAdInfo(proto.Message): r"""Representation of video bumper in-stream ad format (very short in-stream non-skippable video ad). Attributes: companion_banner (str): The MediaFile resource name of the companion banner used with the ad. 
""" companion_banner = proto.Field( proto.STRING, number=2, optional=True, ) class VideoNonSkippableInStreamAdInfo(proto.Message): r"""Representation of video non-skippable in-stream ad format (15 second in-stream non-skippable video ad). Attributes: companion_banner (str): The MediaFile resource name of the companion banner used with the ad. """ companion_banner = proto.Field( proto.STRING, number=2, optional=True, ) class VideoTrueViewInStreamAdInfo(proto.Message): r"""Representation of video TrueView in-stream ad format (ad shown during video playback, often at beginning, which displays a skip button a few seconds into the video). Attributes: action_button_label (str): Label on the CTA (call-to-action) button taking the user to the video ad's final URL. Required for TrueView for action campaigns, optional otherwise. action_headline (str): Additional text displayed with the CTA (call- o-action) button to give context and encourage clicking on the button. companion_banner (str): The MediaFile resource name of the companion banner used with the ad. """ action_button_label = proto.Field( proto.STRING, number=4, optional=True, ) action_headline = proto.Field( proto.STRING, number=5, optional=True, ) companion_banner = proto.Field( proto.STRING, number=6, optional=True, ) class VideoOutstreamAdInfo(proto.Message): r"""Representation of video out-stream ad format (ad shown alongside a feed with automatic playback, without sound). Attributes: headline (str): The headline of the ad. description (str): The description line. """ headline = proto.Field( proto.STRING, number=3, optional=True, ) description = proto.Field( proto.STRING, number=4, optional=True, ) class VideoTrueViewDiscoveryAdInfo(proto.Message): r"""Representation of video TrueView discovery ad format. Attributes: headline (str): The headline of the ad. description1 (str): First text line for a TrueView video discovery ad. description2 (str): Second text line for a TrueView video discovery ad. """ headline = proto.Field( proto.STRING, number=4, optional=True, ) description1 = proto.Field( proto.STRING, number=5, optional=True, ) description2 = proto.Field( proto.STRING, number=6, optional=True, ) class VideoAdInfo(proto.Message): r"""A video ad. Attributes: media_file (str): The MediaFile resource to use for the video. in_stream (google.ads.googleads.v8.common.types.VideoTrueViewInStreamAdInfo): Video TrueView in-stream ad format. bumper (google.ads.googleads.v8.common.types.VideoBumperInStreamAdInfo): Video bumper in-stream ad format. out_stream (google.ads.googleads.v8.common.types.VideoOutstreamAdInfo): Video out-stream ad format. non_skippable (google.ads.googleads.v8.common.types.VideoNonSkippableInStreamAdInfo): Video non-skippable in-stream ad format. discovery (google.ads.googleads.v8.common.types.VideoTrueViewDiscoveryAdInfo): Video TrueView discovery ad format. """ media_file = proto.Field( proto.STRING, number=7, optional=True, ) in_stream = proto.Field( proto.MESSAGE, number=2, oneof='format', message='VideoTrueViewInStreamAdInfo', ) bumper = proto.Field( proto.MESSAGE, number=3, oneof='format', message='VideoBumperInStreamAdInfo', ) out_stream = proto.Field( proto.MESSAGE, number=4, oneof='format', message='VideoOutstreamAdInfo', ) non_skippable = proto.Field( proto.MESSAGE, number=5, oneof='format', message='VideoNonSkippableInStreamAdInfo', ) discovery = proto.Field( proto.MESSAGE, number=6, oneof='format', message='VideoTrueViewDiscoveryAdInfo', ) class VideoResponsiveAdInfo(proto.Message): r"""A video responsive ad. 
Attributes: headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets used for the short headline, e.g. the "Call To Action" banner. Currently, only a single value for the short headline is supported. long_headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets used for the long headline. Currently, only a single value for the long headline is supported. descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets used for the description. Currently, only a single value for the description is supported. call_to_actions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets used for the button, e.g. the "Call To Action" button. Currently, only a single value for the button is supported. videos (Sequence[google.ads.googleads.v8.common.types.AdVideoAsset]): List of YouTube video assets used for the ad. Currently, only a single value for the YouTube video asset is supported. companion_banners (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]): List of image assets used for the companion banner. Currently, only a single value for the companion banner asset is supported. """ headlines = proto.RepeatedField( proto.MESSAGE, number=1, message=ad_asset.AdTextAsset, ) long_headlines = proto.RepeatedField( proto.MESSAGE, number=2, message=ad_asset.AdTextAsset, ) descriptions = proto.RepeatedField( proto.MESSAGE, number=3, message=ad_asset.AdTextAsset, ) call_to_actions = proto.RepeatedField( proto.MESSAGE, number=4, message=ad_asset.AdTextAsset, ) videos = proto.RepeatedField( proto.MESSAGE, number=5, message=ad_asset.AdVideoAsset, ) companion_banners = proto.RepeatedField( proto.MESSAGE, number=6, message=ad_asset.AdImageAsset, ) class ResponsiveSearchAdInfo(proto.Message): r"""A responsive search ad. Responsive search ads let you create an ad that adapts to show more text, and more relevant messages, to your customers. Enter multiple headlines and descriptions when creating a responsive search ad, and over time, Google Ads will automatically test different combinations and learn which combinations perform best. By adapting your ad's content to more closely match potential customers' search terms, responsive search ads may improve your campaign's performance. More information at https://support.google.com/google- ads/answer/7684791 Attributes: headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets for headlines. When the ad serves the headlines will be selected from this list. descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets for descriptions. When the ad serves the descriptions will be selected from this list. path1 (str): First part of text that may appear appended to the url displayed in the ad. path2 (str): Second part of text that may appear appended to the url displayed in the ad. This field can only be set when path1 is also set. """ headlines = proto.RepeatedField( proto.MESSAGE, number=1, message=ad_asset.AdTextAsset, ) descriptions = proto.RepeatedField( proto.MESSAGE, number=2, message=ad_asset.AdTextAsset, ) path1 = proto.Field( proto.STRING, number=5, optional=True, ) path2 = proto.Field( proto.STRING, number=6, optional=True, ) class LegacyResponsiveDisplayAdInfo(proto.Message): r"""A legacy responsive display ad. Ads of this type are labeled 'Responsive ads' in the Google Ads UI. Attributes: short_headline (str): The short version of the ad's headline. 
long_headline (str): The long version of the ad's headline. description (str): The description of the ad. business_name (str): The business name in the ad. allow_flexible_color (bool): Advertiser's consent to allow flexible color. When true, the ad may be served with different color if necessary. When false, the ad will be served with the specified colors or a neutral color. The default value is true. Must be true if main_color and accent_color are not set. accent_color (str): The accent color of the ad in hexadecimal, e.g. #ffffff for white. If one of main_color and accent_color is set, the other is required as well. main_color (str): The main color of the ad in hexadecimal, e.g. #ffffff for white. If one of main_color and accent_color is set, the other is required as well. call_to_action_text (str): The call-to-action text for the ad. logo_image (str): The MediaFile resource name of the logo image used in the ad. square_logo_image (str): The MediaFile resource name of the square logo image used in the ad. marketing_image (str): The MediaFile resource name of the marketing image used in the ad. square_marketing_image (str): The MediaFile resource name of the square marketing image used in the ad. format_setting (google.ads.googleads.v8.enums.types.DisplayAdFormatSettingEnum.DisplayAdFormatSetting): Specifies which format the ad will be served in. Default is ALL_FORMATS. price_prefix (str): Prefix before price. E.g. 'as low as'. promo_text (str): Promotion text used for dynamic formats of responsive ads. For example 'Free two-day shipping'. """ short_headline = proto.Field( proto.STRING, number=16, optional=True, ) long_headline = proto.Field( proto.STRING, number=17, optional=True, ) description = proto.Field( proto.STRING, number=18, optional=True, ) business_name = proto.Field( proto.STRING, number=19, optional=True, ) allow_flexible_color = proto.Field( proto.BOOL, number=20, optional=True, ) accent_color = proto.Field( proto.STRING, number=21, optional=True, ) main_color = proto.Field( proto.STRING, number=22, optional=True, ) call_to_action_text = proto.Field( proto.STRING, number=23, optional=True, ) logo_image = proto.Field( proto.STRING, number=24, optional=True, ) square_logo_image = proto.Field( proto.STRING, number=25, optional=True, ) marketing_image = proto.Field( proto.STRING, number=26, optional=True, ) square_marketing_image = proto.Field( proto.STRING, number=27, optional=True, ) format_setting = proto.Field( proto.ENUM, number=13, enum=display_ad_format_setting.DisplayAdFormatSettingEnum.DisplayAdFormatSetting, ) price_prefix = proto.Field( proto.STRING, number=28, optional=True, ) promo_text = proto.Field( proto.STRING, number=29, optional=True, ) class AppAdInfo(proto.Message): r"""An app ad. Attributes: mandatory_ad_text (google.ads.googleads.v8.common.types.AdTextAsset): Mandatory ad text. headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets for headlines. When the ad serves the headlines will be selected from this list. descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets for descriptions. When the ad serves the descriptions will be selected from this list. images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]): List of image assets that may be displayed with the ad. youtube_videos (Sequence[google.ads.googleads.v8.common.types.AdVideoAsset]): List of YouTube video assets that may be displayed with the ad. 
html5_media_bundles (Sequence[google.ads.googleads.v8.common.types.AdMediaBundleAsset]): List of media bundle assets that may be used with the ad. """ mandatory_ad_text = proto.Field( proto.MESSAGE, number=1, message=ad_asset.AdTextAsset, ) headlines = proto.RepeatedField( proto.MESSAGE, number=2, message=ad_asset.AdTextAsset, ) descriptions = proto.RepeatedField( proto.MESSAGE, number=3, message=ad_asset.AdTextAsset, ) images = proto.RepeatedField( proto.MESSAGE, number=4, message=ad_asset.AdImageAsset, ) youtube_videos = proto.RepeatedField( proto.MESSAGE, number=5, message=ad_asset.AdVideoAsset, ) html5_media_bundles = proto.RepeatedField( proto.MESSAGE, number=6, message=ad_asset.AdMediaBundleAsset, ) class AppEngagementAdInfo(proto.Message): r"""App engagement ads allow you to write text encouraging a specific action in the app, like checking in, making a purchase, or booking a flight. They allow you to send users to a specific part of your app where they can find what they're looking for easier and faster. Attributes: headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets for headlines. When the ad serves the headlines will be selected from this list. descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets for descriptions. When the ad serves the descriptions will be selected from this list. images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]): List of image assets that may be displayed with the ad. videos (Sequence[google.ads.googleads.v8.common.types.AdVideoAsset]): List of video assets that may be displayed with the ad. """ headlines = proto.RepeatedField( proto.MESSAGE, number=1, message=ad_asset.AdTextAsset, ) descriptions = proto.RepeatedField( proto.MESSAGE, number=2, message=ad_asset.AdTextAsset, ) images = proto.RepeatedField( proto.MESSAGE, number=3, message=ad_asset.AdImageAsset, ) videos = proto.RepeatedField( proto.MESSAGE, number=4, message=ad_asset.AdVideoAsset, ) class LegacyAppInstallAdInfo(proto.Message): r"""A legacy app install ad that only can be used by a few select customers. Attributes: app_id (str): The id of the mobile app. app_store (google.ads.googleads.v8.enums.types.LegacyAppInstallAdAppStoreEnum.LegacyAppInstallAdAppStore): The app store the mobile app is available in. headline (str): The headline of the ad. description1 (str): The first description line of the ad. description2 (str): The second description line of the ad. """ app_id = proto.Field( proto.STRING, number=6, optional=True, ) app_store = proto.Field( proto.ENUM, number=2, enum=legacy_app_install_ad_app_store.LegacyAppInstallAdAppStoreEnum.LegacyAppInstallAdAppStore, ) headline = proto.Field( proto.STRING, number=7, optional=True, ) description1 = proto.Field( proto.STRING, number=8, optional=True, ) description2 = proto.Field( proto.STRING, number=9, optional=True, ) class ResponsiveDisplayAdInfo(proto.Message): r"""A responsive display ad. Attributes: marketing_images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]): Marketing images to be used in the ad. Valid image types are GIF, JPEG, and PNG. The minimum size is 600x314 and the aspect ratio must be 1.91:1 (+-1%). At least one marketing_image is required. Combined with square_marketing_images the maximum is 15. square_marketing_images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]): Square marketing images to be used in the ad. Valid image types are GIF, JPEG, and PNG. 
The minimum size is 300x300 and the aspect ratio must be 1:1 (+-1%). At least one square marketing_image is required. Combined with marketing_images the maximum is 15. logo_images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]): Logo images to be used in the ad. Valid image types are GIF, JPEG, and PNG. The minimum size is 512x128 and the aspect ratio must be 4:1 (+-1%). Combined with square_logo_images the maximum is 5. square_logo_images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]): Square logo images to be used in the ad. Valid image types are GIF, JPEG, and PNG. The minimum size is 128x128 and the aspect ratio must be 1:1 (+-1%). Combined with square_logo_images the maximum is 5. headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): Short format headlines for the ad. The maximum length is 30 characters. At least 1 and max 5 headlines can be specified. long_headline (google.ads.googleads.v8.common.types.AdTextAsset): A required long format headline. The maximum length is 90 characters. descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): Descriptive texts for the ad. The maximum length is 90 characters. At least 1 and max 5 headlines can be specified. youtube_videos (Sequence[google.ads.googleads.v8.common.types.AdVideoAsset]): Optional YouTube videos for the ad. A maximum of 5 videos can be specified. business_name (str): The advertiser/brand name. Maximum display width is 25. main_color (str): The main color of the ad in hexadecimal, e.g. #ffffff for white. If one of main_color and accent_color is set, the other is required as well. accent_color (str): The accent color of the ad in hexadecimal, e.g. #ffffff for white. If one of main_color and accent_color is set, the other is required as well. allow_flexible_color (bool): Advertiser's consent to allow flexible color. When true, the ad may be served with different color if necessary. When false, the ad will be served with the specified colors or a neutral color. The default value is true. Must be true if main_color and accent_color are not set. call_to_action_text (str): The call-to-action text for the ad. Maximum display width is 30. price_prefix (str): Prefix before price. E.g. 'as low as'. promo_text (str): Promotion text used for dynamic formats of responsive ads. For example 'Free two-day shipping'. format_setting (google.ads.googleads.v8.enums.types.DisplayAdFormatSettingEnum.DisplayAdFormatSetting): Specifies which format the ad will be served in. Default is ALL_FORMATS. control_spec (google.ads.googleads.v8.common.types.ResponsiveDisplayAdControlSpec): Specification for various creative controls. 
""" marketing_images = proto.RepeatedField( proto.MESSAGE, number=1, message=ad_asset.AdImageAsset, ) square_marketing_images = proto.RepeatedField( proto.MESSAGE, number=2, message=ad_asset.AdImageAsset, ) logo_images = proto.RepeatedField( proto.MESSAGE, number=3, message=ad_asset.AdImageAsset, ) square_logo_images = proto.RepeatedField( proto.MESSAGE, number=4, message=ad_asset.AdImageAsset, ) headlines = proto.RepeatedField( proto.MESSAGE, number=5, message=ad_asset.AdTextAsset, ) long_headline = proto.Field( proto.MESSAGE, number=6, message=ad_asset.AdTextAsset, ) descriptions = proto.RepeatedField( proto.MESSAGE, number=7, message=ad_asset.AdTextAsset, ) youtube_videos = proto.RepeatedField( proto.MESSAGE, number=8, message=ad_asset.AdVideoAsset, ) business_name = proto.Field( proto.STRING, number=17, optional=True, ) main_color = proto.Field( proto.STRING, number=18, optional=True, ) accent_color = proto.Field( proto.STRING, number=19, optional=True, ) allow_flexible_color = proto.Field( proto.BOOL, number=20, optional=True, ) call_to_action_text = proto.Field( proto.STRING, number=21, optional=True, ) price_prefix = proto.Field( proto.STRING, number=22, optional=True, ) promo_text = proto.Field( proto.STRING, number=23, optional=True, ) format_setting = proto.Field( proto.ENUM, number=16, enum=display_ad_format_setting.DisplayAdFormatSettingEnum.DisplayAdFormatSetting, ) control_spec = proto.Field( proto.MESSAGE, number=24, message='ResponsiveDisplayAdControlSpec', ) class LocalAdInfo(proto.Message): r"""A local ad. Attributes: headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets for headlines. When the ad serves the headlines will be selected from this list. At least 1 and at most 5 headlines must be specified. descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets for descriptions. When the ad serves the descriptions will be selected from this list. At least 1 and at most 5 descriptions must be specified. call_to_actions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets for call-to-actions. When the ad serves the call-to-actions will be selected from this list. Call-to-actions are optional and at most 5 can be specified. marketing_images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]): List of marketing image assets that may be displayed with the ad. The images must be 314x600 pixels or 320x320 pixels. At least 1 and at most 20 image assets must be specified. logo_images (Sequence[google.ads.googleads.v8.common.types.AdImageAsset]): List of logo image assets that may be displayed with the ad. The images must be 128x128 pixels and not larger than 120KB. At least 1 and at most 5 image assets must be specified. videos (Sequence[google.ads.googleads.v8.common.types.AdVideoAsset]): List of YouTube video assets that may be displayed with the ad. Videos are optional and at most 20 can be specified. path1 (str): First part of optional text that may appear appended to the url displayed in the ad. path2 (str): Second part of optional text that may appear appended to the url displayed in the ad. This field can only be set when path1 is also set. 
""" headlines = proto.RepeatedField( proto.MESSAGE, number=1, message=ad_asset.AdTextAsset, ) descriptions = proto.RepeatedField( proto.MESSAGE, number=2, message=ad_asset.AdTextAsset, ) call_to_actions = proto.RepeatedField( proto.MESSAGE, number=3, message=ad_asset.AdTextAsset, ) marketing_images = proto.RepeatedField( proto.MESSAGE, number=4, message=ad_asset.AdImageAsset, ) logo_images = proto.RepeatedField( proto.MESSAGE, number=5, message=ad_asset.AdImageAsset, ) videos = proto.RepeatedField( proto.MESSAGE, number=6, message=ad_asset.AdVideoAsset, ) path1 = proto.Field( proto.STRING, number=9, optional=True, ) path2 = proto.Field( proto.STRING, number=10, optional=True, ) class DisplayUploadAdInfo(proto.Message): r"""A generic type of display ad. The exact ad format is controlled by the display_upload_product_type field, which determines what kinds of data need to be included with the ad. Attributes: display_upload_product_type (google.ads.googleads.v8.enums.types.DisplayUploadProductTypeEnum.DisplayUploadProductType): The product type of this ad. See comments on the enum for details. media_bundle (google.ads.googleads.v8.common.types.AdMediaBundleAsset): A media bundle asset to be used in the ad. For information about the media bundle for HTML5_UPLOAD_AD see https://support.google.com/google-ads/answer/1722096 Media bundles that are part of dynamic product types use a special format that needs to be created through the Google Web Designer. See https://support.google.com/webdesigner/answer/7543898 for more information. """ display_upload_product_type = proto.Field( proto.ENUM, number=1, enum=gage_display_upload_product_type.DisplayUploadProductTypeEnum.DisplayUploadProductType, ) media_bundle = proto.Field( proto.MESSAGE, number=2, oneof='media_asset', message=ad_asset.AdMediaBundleAsset, ) class ResponsiveDisplayAdControlSpec(proto.Message): r"""Specification for various creative controls for a responsive display ad. Attributes: enable_asset_enhancements (bool): Whether the advertiser has opted into the asset enhancements feature. enable_autogen_video (bool): Whether the advertiser has opted into auto- en video feature. """ enable_asset_enhancements = proto.Field( proto.BOOL, number=1, ) enable_autogen_video = proto.Field( proto.BOOL, number=2, ) class SmartCampaignAdInfo(proto.Message): r"""A Smart campaign ad. Attributes: headlines (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets for headlines. When the ad serves the headlines will be selected from this list. 3 headlines must be specified. descriptions (Sequence[google.ads.googleads.v8.common.types.AdTextAsset]): List of text assets for descriptions. When the ad serves the descriptions will be selected from this list. 2 descriptions must be specified. """ headlines = proto.RepeatedField( proto.MESSAGE, number=1, message=ad_asset.AdTextAsset, ) descriptions = proto.RepeatedField( proto.MESSAGE, number=2, message=ad_asset.AdTextAsset, ) class CallAdInfo(proto.Message): r"""A call ad. Attributes: country_code (str): The country code in the ad. phone_number (str): The phone number in the ad. business_name (str): The business name in the ad. headline1 (str): First headline in the ad. headline2 (str): Second headline in the ad. description1 (str): The first line of the ad's description. description2 (str): The second line of the ad's description. call_tracked (bool): Whether to enable call tracking for the creative. Enabling call tracking also enables call conversions. 
disable_call_conversion (bool): Whether to disable call conversion for the creative. If set to ``true``, disables call conversions even when ``call_tracked`` is ``true``. If ``call_tracked`` is ``false``, this field is ignored. phone_number_verification_url (str): The URL to be used for phone number verification. conversion_action (str): The conversion action to attribute a call conversion to. If not set a default conversion action is used. This field only has effect if call_tracked is set to true. Otherwise this field is ignored. conversion_reporting_state (google.ads.googleads.v8.enums.types.CallConversionReportingStateEnum.CallConversionReportingState): The call conversion behavior of this call ad. It can use its own call conversion setting, inherit the account level setting, or be disabled. path1 (str): First part of text that may appear appended to the url displayed to in the ad. Optional. path2 (str): Second part of text that may appear appended to the url displayed to in the ad. This field can only be set when path1 is set. Optional. """ country_code = proto.Field( proto.STRING, number=1, ) phone_number = proto.Field( proto.STRING, number=2, ) business_name = proto.Field( proto.STRING, number=3, ) headline1 = proto.Field( proto.STRING, number=11, ) headline2 = proto.Field( proto.STRING, number=12, ) description1 = proto.Field( proto.STRING, number=4, ) description2 = proto.Field( proto.STRING, number=5, ) call_tracked = proto.Field( proto.BOOL, number=6, ) disable_call_conversion = proto.Field( proto.BOOL, number=7, ) phone_number_verification_url = proto.Field( proto.STRING, number=8, ) conversion_action = proto.Field( proto.STRING, number=9, ) conversion_reporting_state = proto.Field( proto.ENUM, number=10, enum=call_conversion_reporting_state.CallConversionReportingStateEnum.CallConversionReportingState, ) path1 = proto.Field( proto.STRING, number=13, ) path2 = proto.Field( proto.STRING, number=14, ) __all__ = tuple(sorted(__protobuf__.manifest))
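These ad formats are plain proto-plus message classes, so building an ad payload is a matter of instantiating the class for the chosen format; for VideoAdInfo, populating one of the format fields selects the corresponding member of the 'format' oneof. A minimal sketch under stated assumptions -- the import path and the media_file resource name are illustrative, not taken from this file:

# Sketch only: assumes the classes above are re-exported by the v8 common
# types package; the media_file resource name is a made-up placeholder.
from google.ads.googleads.v8.common.types import VideoAdInfo, VideoTrueViewInStreamAdInfo

video_ad = VideoAdInfo(
    media_file="customers/123/mediaFiles/456",  # hypothetical resource name
    in_stream=VideoTrueViewInStreamAdInfo(
        action_button_label="Learn More",
        action_headline="See it in action",
    ),
)

# Only one member of the 'format' oneof is populated at a time; the underlying
# protobuf message reports which one was set.
assert VideoAdInfo.pb(video_ad).WhichOneof("format") == "in_stream"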
apache-2.0
1,371,305,617,412,775,000
30.178258
135
0.60667
false
4.161035
false
false
false
mapzen/tilequeue
tilequeue/wof.py
1
46004
from __future__ import absolute_import from collections import namedtuple from contextlib import closing from cStringIO import StringIO from datetime import datetime from edtf import parse_edtf from operator import attrgetter from psycopg2.extras import register_hstore from shapely import geos from tilequeue.tile import coord_marshall_int from tilequeue.tile import coord_unmarshall_int from tilequeue.tile import mercator_point_to_coord from tilequeue.tile import reproject_lnglat_to_mercator import csv import json import os.path import psycopg2 import Queue import requests import shapely.geometry import shapely.ops import shapely.wkb import threading DATABASE_SRID = 3857 def generate_csv_lines(requests_result): for line in requests_result.iter_lines(): if line: yield line neighbourhood_placetypes_to_int = dict( neighbourhood=1, microhood=2, macrohood=3, borough=4, ) neighbourhood_int_to_placetypes = { 1: 'neighbourhood', 2: 'microhood', 3: 'macrohood', 4: 'borough', } NeighbourhoodMeta = namedtuple( 'NeighbourhoodMeta', 'wof_id placetype name hash label_position') Neighbourhood = namedtuple( 'Neighbourhood', 'wof_id placetype name hash label_position geometry n_photos area ' 'min_zoom max_zoom is_landuse_aoi inception cessation l10n_names') def parse_neighbourhood_meta_csv(csv_line_generator, placetype): reader = csv.reader(csv_line_generator) it = iter(reader) header = it.next() lbl_lat_idx = header.index('lbl_latitude') lbl_lng_idx = header.index('lbl_longitude') name_idx = header.index('name') wof_id_idx = header.index('id') hash_idx = header.index('file_hash') superseded_by_idx = header.index('superseded_by') min_row_length = (max( lbl_lat_idx, lbl_lng_idx, name_idx, wof_id_idx, hash_idx, superseded_by_idx) + 1) for row in it: if len(row) < min_row_length: continue superseded_by = row[superseded_by_idx] if superseded_by: continue wof_id_str = row[wof_id_idx] if not wof_id_str: continue try: wof_id = int(wof_id_str) except ValueError: continue name = row[name_idx] if not name: continue lat_str = row[lbl_lat_idx] lng_str = row[lbl_lng_idx] try: lat = float(lat_str) lng = float(lng_str) except ValueError: continue file_hash = row[hash_idx] label_x, label_y = reproject_lnglat_to_mercator(lng, lat) label_position = shapely.geometry.Point(label_x, label_y) neighbourhood_meta = NeighbourhoodMeta( wof_id, placetype, name, file_hash, label_position) yield neighbourhood_meta def _make_requests_session_with_retries(max_retries): from requests.adapters import HTTPAdapter from requests.packages.urllib3.util import Retry s = requests.Session() a = HTTPAdapter( max_retries=Retry( total=max_retries, status_forcelist=[ # this is a list of statuses to consider to be # an error and retry. 429, # Too many requests (i.e: back off) 500, # Generic internal server error 502, # Bad Gateway - i.e: upstream failure 503, # Unavailable, temporarily 504, # Gateway timeout 522 # Origin connection timed out ], backoff_factor=1.0 # back off for 0s, 1s, 3s, 7s, etc... after # each successive failure. (factor*(2^N-1)) )) # use retry for both HTTP and HTTPS connections. 
s.mount('http://', a) s.mount('https://', a) return s def fetch_wof_url_meta_neighbourhoods(url, placetype, max_retries): s = _make_requests_session_with_retries(max_retries) r = s.get(url, stream=True) assert r.status_code == 200, 'Failure requesting: %s' % url csv_line_generator = generate_csv_lines(r) return parse_neighbourhood_meta_csv(csv_line_generator, placetype) class NeighbourhoodFailure(object): def __init__(self, wof_id, reason, message, halt=False, skipped=False, funky=False, superseded=False): # halt is a signal that threads should stop fetching. This # would happen during a network IO error or when we get an # unexpected http response when fetching raw json files. In # some scenarios this could be recoverable, but because that # isn't always the case we assume that we should stop further # requests for more raw json files, and just process what we # have so far. # skipped means that we won't log this failure, ie there was # an earlier "halt" error and processing of further records # has stopped. # funky is a signal downstream that this is a "soft" or # expected failure, in the sense that it only means that we # should skip the record, but we didn't actually detect any # errors with the processing # superseded is set when the json has a value for # wof:superseded. This would indicate a data inconsistency # because the meta csv file didn't have it set if we're trying # to fetch the raw json in the first place. But this is meant # to catch this scenario. self.wof_id = wof_id self.reason = reason self.message = message self.halt = halt self.skipped = skipped self.funky = funky self.superseded = superseded # given a string, parse it as EDTF while allowing a single 'u' or None to mean # completely unknown, and return the EDTF object. def _normalize_edtf(s): if s and s != 'u': try: return parse_edtf(s) except Exception: pass # when all else fails, return the "most unknown" EDTF. 
return parse_edtf('uuuu') def create_neighbourhood_from_json(json_data, neighbourhood_meta): def failure(reason): return NeighbourhoodFailure( neighbourhood_meta.wof_id, reason, json.dumps(json_data)) if not isinstance(json_data, dict): return failure('Unexpected json') props = json_data.get('properties') if props is None or not isinstance(props, dict): return failure('Missing properties') superseded_by = props.get('wof:superseded_by') # these often show up as empty lists, so we do a truthy test # instead of expicitly checking for None if superseded_by: return NeighbourhoodFailure( neighbourhood_meta.wof_id, 'superseded_by: %s' % superseded_by, json.dumps(json_data), superseded=True) geometry = json_data.get('geometry') if geometry is None: return failure('Missing geometry') try: shape_lnglat = shapely.geometry.shape(geometry) except Exception: return failure('Unexpected geometry') shape_mercator = shapely.ops.transform( reproject_lnglat_to_mercator, shape_lnglat) # ignore any features that are marked as funky is_funky = props.get('mz:is_funky') if is_funky is not None: try: is_funky = int(is_funky) except ValueError: return failure('Unexpected mz:is_funky value %s' % is_funky) if is_funky != 0: return NeighbourhoodFailure( neighbourhood_meta.wof_id, 'mz:is_funky value is not 0: %s' % is_funky, json.dumps(json_data), funky=True) wof_id = props.get('wof:id') if wof_id is None: return failure('Missing wof:id') try: wof_id = int(wof_id) except ValueError: return failure('wof_id is not an int: %s' % wof_id) name = props.get('wof:name') if name is None: return failure('Missing name') n_photos = props.get('misc:photo_sum') if n_photos is not None: try: n_photos = int(n_photos) except ValueError: return failure('misc:photo_sum is not an int: %s' % n_photos) label_lat = props.get('lbl:latitude') label_lng = props.get('lbl:longitude') if label_lat is None or label_lng is None: # first, try to fall back to geom:* when lbl:* is missing. we'd prefer # to have lbl:*, but it's better to have _something_ than nothing. 
label_lat = props.get('geom:latitude') label_lng = props.get('geom:longitude') if label_lat is None or label_lng is None: return failure('Missing lbl:latitude or lbl:longitude and ' + 'geom:latitude or geom:longitude') try: label_lat = float(label_lat) label_lng = float(label_lng) except ValueError: return failure('lbl:latitude or lbl:longitude not float') label_merc_x, label_merc_y = reproject_lnglat_to_mercator( label_lng, label_lat) label_position = shapely.geometry.Point(label_merc_x, label_merc_y) placetype = props.get('wof:placetype') if placetype is None: return failure('Missing wof:placetype') default_min_zoom = 15 default_max_zoom = 16 min_zoom = props.get('mz:min_zoom') if min_zoom is None: min_zoom = default_min_zoom else: try: min_zoom = float(min_zoom) except ValueError: return failure('mz:min_zoom not float: %s' % min_zoom) max_zoom = props.get('mz:max_zoom') if max_zoom is None: max_zoom = default_max_zoom else: try: max_zoom = float(max_zoom) except ValueError: return failure('mz:max_zoom not float: %s' % max_zoom) is_landuse_aoi = props.get('mz:is_landuse_aoi') if is_landuse_aoi is not None: try: is_landuse_aoi = int(is_landuse_aoi) except ValueError: return failure('is_landuse_aoi not int: %s' % is_landuse_aoi) is_landuse_aoi = is_landuse_aoi != 0 if shape_mercator.type in ('Polygon', 'MultiPolygon'): area = int(shape_mercator.area) else: area = None # for the purposes of display, we only care about the times when something # should first start to be shown, and the time when it should stop # showing. edtf_inception = _normalize_edtf(props.get('edtf:inception')) edtf_cessation = _normalize_edtf(props.get('edtf:cessation')) edtf_deprecated = _normalize_edtf(props.get('edtf:deprecated')) # check that the dates are valid first to return back a better error inception_earliest = edtf_inception.lower_fuzzy() cessation_latest = edtf_cessation.upper_fuzzy() deprecated_latest = edtf_deprecated.upper_fuzzy() if inception_earliest is None: return failure('invalid edtf:inception: %s' % props.get('edtf:inception')) if cessation_latest is None: return failure('invalid edtf:cessation: %s' % props.get('edtf:cessation')) if deprecated_latest is None: return failure('invalid edtf:deprecated: %s' % props.get('edtf:deprecated')) # the 'edtf:inception' property gives us approximately the former and we # take the earliest date it could mean. the 'edtf:cessation' and # 'edtf:deprecated' would both stop the item showing, so we take the # earliest of each's latest possible date. 
inception = inception_earliest cessation = min(cessation_latest, deprecated_latest) # grab any names in other languages lang_suffix_size = len('_preferred') l10n_names = {} for k, v in props.iteritems(): if not v: continue if not k.startswith('name:') or not k.endswith('_preferred'): continue if isinstance(v, list): v = v[0] lang = k[:-lang_suffix_size] l10n_names[lang] = v if not l10n_names: l10n_names = None neighbourhood = Neighbourhood( wof_id, placetype, name, neighbourhood_meta.hash, label_position, shape_mercator, n_photos, area, min_zoom, max_zoom, is_landuse_aoi, inception, cessation, l10n_names) return neighbourhood def fetch_url_raw_neighbourhood(url, neighbourhood_meta, max_retries): try: s = _make_requests_session_with_retries(max_retries) r = s.get(url) except Exception, e: # if there is an IO error when fetching the url itself, we'll # want to halt too return NeighbourhoodFailure( neighbourhood_meta.wof_id, 'IO Error fetching %s' % url, str(e), halt=True) if r.status_code != 200: # once we don't get a 200, signal that we should stop all # remaining processing return NeighbourhoodFailure( neighbourhood_meta.wof_id, 'Invalid response %d for %s' % (r.status_code, url), r.text, halt=True) try: doc = r.json() except Exception, e: return NeighbourhoodFailure( neighbourhood_meta.wof_id, 'Response is not json for %s' % url, r.text) try: neighbourhood = create_neighbourhood_from_json(doc, neighbourhood_meta) except Exception, e: return NeighbourhoodFailure( neighbourhood_meta.wof_id, 'Unexpected exception parsing json', json.dumps(doc)) return neighbourhood def fetch_fs_raw_neighbourhood(path, neighbourhood_meta): with open(path) as fp: json_data = json.load(fp) neighbourhood = create_neighbourhood_from_json(json_data, neighbourhood_meta) return neighbourhood def generate_wof_url(url_prefix, wof_id): wof_id_str = str(wof_id) grouped = [] grouping = [] for c in wof_id_str: grouping.append(c) if len(grouping) == 3: grouped.append(grouping) grouping = [] if grouping: grouped.append(grouping) grouped_part = '/'.join([''.join(part) for part in grouped]) wof_url = '%s/%s/%s.geojson' % (url_prefix, grouped_part, wof_id_str) return wof_url def make_fetch_raw_url_fn(data_url_prefix, max_retries): def fn(neighbourhood_meta): wof_url = generate_wof_url( data_url_prefix, neighbourhood_meta.wof_id) neighbourhood = fetch_url_raw_neighbourhood(wof_url, neighbourhood_meta, max_retries) return neighbourhood return fn def make_fetch_raw_filesystem_fn(data_path): def fn(neighbourhood_meta): # this will work for OS's with / separators wof_path = generate_wof_url( data_path, neighbourhood_meta.wof_id) neighbourhood = fetch_fs_raw_neighbourhood(wof_path, neighbourhood_meta) return neighbourhood return fn def threaded_fetch(neighbourhood_metas, n_threads, fetch_raw_fn): queue_size = n_threads * 10 neighbourhood_input_queue = Queue.Queue(queue_size) neighbourhood_output_queue = Queue.Queue(len(neighbourhood_metas)) stop = threading.Event() def _fetch_raw_neighbourhood(): while True: neighbourhood_meta = neighbourhood_input_queue.get() if neighbourhood_meta is None: break if stop.is_set(): # assume all remaining neighbourhoods are failures # these will get skipped neighbourhood_output_queue.put(NeighbourhoodFailure( neighbourhood_meta.wof_id, 'Skipping remaining neighbourhoods', 'Skipping remaining neighbourhoods', skipped=True)) continue neighbourhood = fetch_raw_fn(neighbourhood_meta) if isinstance(neighbourhood, NeighbourhoodFailure): failure = neighbourhood # if this is the type of error that should stop 
all # processing, notify all other threads if failure.halt: stop.set() neighbourhood_output_queue.put(neighbourhood) fetch_threads = [] for i in xrange(n_threads): fetch_thread = threading.Thread(target=_fetch_raw_neighbourhood) fetch_thread.start() fetch_threads.append(fetch_thread) for neighbourhood_meta in neighbourhood_metas: neighbourhood_input_queue.put(neighbourhood_meta) for fetch_thread in fetch_threads: neighbourhood_input_queue.put(None) neighbourhoods = [] failures = [] for i in xrange(len(neighbourhood_metas)): neighbourhood = neighbourhood_output_queue.get() if isinstance(neighbourhood, NeighbourhoodFailure): failures.append(neighbourhood) else: neighbourhoods.append(neighbourhood) for fetch_thread in fetch_threads: fetch_thread.join() return neighbourhoods, failures class WofUrlNeighbourhoodFetcher(object): def __init__(self, neighbourhood_url, microhood_url, macrohood_url, borough_url, data_url_prefix, n_threads, max_retries): self.neighbourhood_url = neighbourhood_url self.microhood_url = microhood_url self.macrohood_url = macrohood_url self.borough_url = borough_url self.data_url_prefix = data_url_prefix self.n_threads = n_threads self.max_retries = max_retries def fetch_meta_neighbourhoods(self): return fetch_wof_url_meta_neighbourhoods( self.neighbourhood_url, 'neighbourhood', self.max_retries) def fetch_meta_microhoods(self): return fetch_wof_url_meta_neighbourhoods( self.microhood_url, 'microhood', self.max_retries) def fetch_meta_macrohoods(self): return fetch_wof_url_meta_neighbourhoods( self.macrohood_url, 'macrohood', self.max_retries) def fetch_meta_boroughs(self): return fetch_wof_url_meta_neighbourhoods( self.borough_url, 'borough', self.max_retries) def fetch_raw_neighbourhoods(self, neighbourhood_metas): url_fetch_fn = make_fetch_raw_url_fn(self.data_url_prefix, self.max_retries) neighbourhoods, failures = threaded_fetch( neighbourhood_metas, self.n_threads, url_fetch_fn) return neighbourhoods, failures class WofFilesystemNeighbourhoodFetcher(object): def __init__(self, wof_data_path, n_threads): self.wof_data_path = wof_data_path self.n_threads = n_threads def _fetch_meta_neighbourhoods(self, placetype): meta_fs_path = os.path.join( self.wof_data_path, 'meta', 'wof-%s-latest.csv' % placetype) with open(meta_fs_path) as fp: meta_neighbourhoods = list( parse_neighbourhood_meta_csv(fp, placetype)) return meta_neighbourhoods def fetch_meta_neighbourhoods(self): return self._fetch_meta_neighbourhoods('neighbourhood') def fetch_meta_microhoods(self): return self._fetch_meta_neighbourhoods('microhood') def fetch_meta_macrohoods(self): return self._fetch_meta_neighbourhoods('macrohood') def fetch_meta_boroughs(self): return self._fetch_meta_neighbourhoods('borough') def fetch_raw_neighbourhoods(self, neighbourhood_metas): data_prefix = os.path.join( self.wof_data_path, 'data') fs_fetch_fn = make_fetch_raw_filesystem_fn(data_prefix) neighbourhoods, failures = threaded_fetch( neighbourhood_metas, self.n_threads, fs_fetch_fn) return neighbourhoods, failures def create_neighbourhood_file_object(neighbourhoods, curdate=None): if curdate is None: curdate = datetime.now().date() # tell shapely to include the srid when generating WKBs geos.WKBWriter.defaults['include_srid'] = True buf = StringIO() def escape_string(s): return s.encode('utf-8').replace('\t', ' ').replace('\n', ' ') def escape_hstore_string(s): s = escape_string(s) if ' ' in s: s = s.replace('"', '\\\\"') s = '"%s"' % s return s def write_nullable_int(buf, x): if x is None: buf.write('\\N\t') else: 
buf.write('%d\t' % x) for n in neighbourhoods: buf.write('%d\t' % n.wof_id) buf.write('%d\t' % neighbourhood_placetypes_to_int[n.placetype]) buf.write('%s\t' % escape_string(n.name)) buf.write('%s\t' % escape_string(n.hash)) write_nullable_int(buf, n.n_photos) write_nullable_int(buf, n.area) buf.write('%d\t' % n.min_zoom) buf.write('%d\t' % n.max_zoom) if n.is_landuse_aoi is None: buf.write('\\N\t') else: buf.write('%s\t' % ('true' if n.is_landuse_aoi else 'false')) geos.lgeos.GEOSSetSRID(n.label_position._geom, DATABASE_SRID) buf.write(n.label_position.wkb_hex) buf.write('\t') geos.lgeos.GEOSSetSRID(n.geometry._geom, DATABASE_SRID) buf.write(n.geometry.wkb_hex) buf.write('\t') buf.write('%s\t' % n.inception.isoformat()) buf.write('%s\t' % n.cessation.isoformat()) is_visible = n.inception < curdate and n.cessation >= curdate is_visible_str = 't' if is_visible else 'f' buf.write('%s\t' % is_visible_str) if n.l10n_names: hstore_items = [] for k, v in n.l10n_names.items(): k = escape_hstore_string(k) v = escape_hstore_string(v) hstore_items.append("%s=>%s" % (k, v)) hstore_items_str = ','.join(hstore_items) buf.write('%s' % hstore_items_str) else: buf.write('\\N') buf.write('\n') buf.seek(0) return buf class WofModel(object): def __init__(self, postgresql_conn_info): self.postgresql_conn_info = postgresql_conn_info self.table = 'wof_neighbourhood' def _create_conn(self): conn = psycopg2.connect(**self.postgresql_conn_info) register_hstore(conn) conn.set_session(autocommit=False) return conn def find_previous_neighbourhood_meta(self): with closing(self._create_conn()) as conn: with conn.cursor() as cursor: cursor.execute( 'SELECT wof_id, placetype, name, hash, ' 'ST_AsBinary(label_position) ' 'FROM %s ORDER BY wof_id ASC' % self.table) ns = [] for row in cursor: wof_id, placetype_int, name, hash, label_bytes = row wof_id = int(wof_id) label_bytes = bytes(label_bytes) label_position = shapely.wkb.loads(label_bytes) placetype = neighbourhood_int_to_placetypes[placetype_int] n = NeighbourhoodMeta( wof_id, placetype, name, hash, label_position) ns.append(n) return ns def sync_neighbourhoods( self, neighbourhoods_to_add, neighbourhoods_to_update, ids_to_remove): geos.WKBWriter.defaults['include_srid'] = True def gen_data(n): geos.lgeos.GEOSSetSRID(n.label_position._geom, DATABASE_SRID) geos.lgeos.GEOSSetSRID(n.geometry._geom, DATABASE_SRID) return dict( table=self.table, placetype=neighbourhood_placetypes_to_int[n.placetype], name=n.name, hash=n.hash, n_photos=n.n_photos, area=n.area, min_zoom=n.min_zoom, max_zoom=n.max_zoom, is_landuse_aoi=n.is_landuse_aoi, inception=n.inception, cessation=n.cessation, label_position=n.label_position.wkb_hex, geometry=n.geometry.wkb_hex, wof_id=n.wof_id, l10n_name=n.l10n_names, ) if ids_to_remove: ids_to_remove_str = ', '.join(map(str, ids_to_remove)) if neighbourhoods_to_update: update_data = map(gen_data, neighbourhoods_to_update) if neighbourhoods_to_add: insert_data = map(gen_data, neighbourhoods_to_add) # this closes the connection with closing(self._create_conn()) as conn: # this commits the transaction with conn as conn: # this frees any resources associated with the cursor with conn.cursor() as cursor: if ids_to_remove: cursor.execute( 'DELETE FROM %s WHERE wof_id IN (%s)' % (self.table, ids_to_remove_str)) if neighbourhoods_to_update: cursor.executemany( 'UPDATE ' + self.table + ' SET ' 'placetype=%(placetype)s, ' 'name=%(name)s, ' 'hash=%(hash)s, ' 'n_photos=%(n_photos)s, ' 'area=%(area)s, ' 'min_zoom=%(min_zoom)s, ' 'max_zoom=%(max_zoom)s, ' 
'is_landuse_aoi=%(is_landuse_aoi)s, ' 'inception=%(inception)s, ' 'cessation=%(cessation)s, ' 'label_position=%(label_position)s, ' 'l10n_name=%(l10n_name)s, ' 'geometry=%(geometry)s ' 'WHERE wof_id=%(wof_id)s', update_data) if neighbourhoods_to_add: cursor.executemany( 'INSERT INTO ' + self.table + ' ' '(wof_id, placetype, name, hash, n_photos, area, ' 'min_zoom, max_zoom, is_landuse_aoi, ' 'inception, cessation, ' 'label_position, geometry, l10n_name) ' 'VALUES (%(wof_id)s, %(placetype)s, %(name)s, ' '%(hash)s, %(n_photos)s, %(area)s, %(min_zoom)s, ' '%(max_zoom)s, %(is_landuse_aoi)s, ' '%(inception)s, %(cessation)s, ' '%(label_position)s, %(geometry)s, %(l10n_name)s)', insert_data) def insert_neighbourhoods(self, neighbourhoods): # create this whole input file like object outside of the transaction nf = create_neighbourhood_file_object(neighbourhoods) # close the connection with closing(self._create_conn()) as conn: # commit the transaction with conn as conn: with conn.cursor() as cursor: cursor.copy_from(nf, self.table) # update the whole table so that the `is_visible` flag is accurate for the # `current_date`. this returns a list of coords at `zoom` which have # changed visibility from true to false or vice-versa. def update_visible_timestamp(self, zoom, current_date): coords = set() def coord_int(row): x, y = row return coord_int_at_mercator_point(zoom, x, y) # close the connection with closing(self._create_conn()) as conn: # commit the transaction with conn as conn: with conn.cursor() as cursor: # select the x, y position of the label for each WOF # neighbourhood that changed visibility when the date # was updated to `current_date`. cursor.execute( 'SELECT st_x(n.label_position) as x, ' ' st_y(n.label_position) as y ' 'FROM (' ' SELECT wof_update_visible_ids(%s::date) AS id ' ') u ' 'JOIN wof_neighbourhood n ' 'ON n.wof_id = u.id', (current_date.isoformat(),)) for result in cursor: coords.add(coord_int(result)) return coords def diff_neighbourhoods(xs, ys): # NOTE this requires that both xs and ys be sequences of # neighbourhoods, sorted by wof_id in ascending order # returns a sequence of tuples: # (None, x) -> neighbourhoods that have been added # (x, None) -> neighbourhoods that have been removed # (x, y) -> neighbourhoods that have been updated diffs = [] n_xs = len(xs) n_ys = len(ys) idx_xs = 0 idx_ys = 0 # iterate through both lists while we still have values for both while idx_xs < n_xs and idx_ys < n_ys: x = xs[idx_xs] y = ys[idx_ys] if x.wof_id < y.wof_id: diffs.append((x, None)) idx_xs += 1 continue if y.wof_id < x.wof_id: diffs.append((None, y)) idx_ys += 1 continue if x.hash != y.hash: # if there are any differences the hash will be different diffs.append((x, y)) idx_xs += 1 idx_ys += 1 # catch any differences while idx_xs < n_xs: x = xs[idx_xs] diffs.append((x, None)) idx_xs += 1 while idx_ys < n_ys: y = ys[idx_ys] diffs.append((None, y)) idx_ys += 1 return diffs def coord_int_at_mercator_point(z, x, y): coord = mercator_point_to_coord(z, x, y) coord_int = coord_marshall_int(coord) return coord_int def generate_tile_expiry_list(zoom, diffs): coord_ints = set() def add_neighbourhood_diff(n): if n is not None: x = n.label_position.x y = n.label_position.y coord_int = coord_int_at_mercator_point(zoom, x, y) coord_ints.add(coord_int) for n1, n2 in diffs: # for our purposes, we will expire any kind of modification, # whether the neighbourhoods were added, removed, or updated add_neighbourhood_diff(n1) add_neighbourhood_diff(n2) return coord_ints def log_failure(logger, 
failure): if not (failure.skipped or failure.funky or failure.superseded): failure_message_one_line = failure.message.replace('\n', ' | ') logger.error('Neighbourhood failure for %d: %r - %r' % ( failure.wof_id, failure.reason, failure_message_one_line)) class WofProcessor(object): def __init__(self, fetcher, model, redis_cache_index, intersector, rawr_enqueuer, logger, current_date): self.fetcher = fetcher self.model = model self.redis_cache_index = redis_cache_index self.intersector = intersector self.rawr_enqueuer = rawr_enqueuer self.logger = logger self.zoom_expiry = 16 self.zoom_until = 11 self.current_date = current_date def __call__(self): # perform IO to get old/new neighbourhoods and tiles of # interest in parallel # queues to pass the results through the threads prev_neighbourhoods_queue = Queue.Queue(1) meta_neighbourhoods_queue = Queue.Queue(1) meta_microhoods_queue = Queue.Queue(1) meta_macrohoods_queue = Queue.Queue(1) meta_boroughs_queue = Queue.Queue(1) toi_queue = Queue.Queue(1) # functions for the threads def find_prev_neighbourhoods(): prev_neighbourhoods = ( self.model.find_previous_neighbourhood_meta()) prev_neighbourhoods_queue.put(prev_neighbourhoods) def make_fetch_meta_csv_fn(fn, queue): neighbourhood_metas = list(fn()) queue.put(neighbourhood_metas) def fetch_toi(): toi = self.redis_cache_index.fetch_tiles_of_interest() toi_queue.put(toi) self.logger.info('Fetching tiles of interest in background ...') self.logger.info('Fetching old and new neighbourhoods ...') # start the threads in parallel prev_neighbourhoods_thread = threading.Thread( target=find_prev_neighbourhoods) prev_neighbourhoods_thread.start() meta_neighbourhoods_thread = threading.Thread( target=make_fetch_meta_csv_fn( self.fetcher.fetch_meta_neighbourhoods, meta_neighbourhoods_queue)) meta_neighbourhoods_thread.start() meta_microhoods_thread = threading.Thread( target=make_fetch_meta_csv_fn( self.fetcher.fetch_meta_microhoods, meta_microhoods_queue)) meta_microhoods_thread.start() meta_macrohoods_thread = threading.Thread( target=make_fetch_meta_csv_fn( self.fetcher.fetch_meta_macrohoods, meta_macrohoods_queue)) meta_macrohoods_thread.start() meta_boroughs_thread = threading.Thread( target=make_fetch_meta_csv_fn( self.fetcher.fetch_meta_boroughs, meta_boroughs_queue)) meta_boroughs_thread.start() toi_thread = threading.Thread(target=fetch_toi) toi_thread.start() # ensure we're done with finding the next and previous # neighbourhoods by this point prev_neighbourhoods_thread.join() meta_neighbourhoods_thread.join() meta_microhoods_thread.join() meta_macrohoods_thread.join() meta_boroughs_thread.join() self.logger.info('Fetching old and new neighbourhoods ... 
done') prev_neighbourhoods = prev_neighbourhoods_queue.get() meta_neighbourhoods = meta_neighbourhoods_queue.get() meta_microhoods = meta_microhoods_queue.get() meta_macrohoods = meta_macrohoods_queue.get() meta_boroughs = meta_boroughs_queue.get() # each of these has the appropriate placetype set now meta_neighbourhoods = ( meta_neighbourhoods + meta_microhoods + meta_macrohoods + meta_boroughs) self.logger.info('Diffing neighbourhoods ...') by_neighborhood_id = attrgetter('wof_id') # the model is expected to return records in ascending order by id # it doesn't seem like the neighbourhoods in the wof csv # are in ascending order, so we sort explicitly here meta_neighbourhoods.sort(key=by_neighborhood_id) # the diff algorithm depends on the neighbourhood lists # being in sorted order by id diffs = diff_neighbourhoods(prev_neighbourhoods, meta_neighbourhoods) self.logger.info('Diffing neighbourhoods ... done') # we need to fetch neighbourhoods that have either been # updated or are new wof_neighbourhoods_to_fetch = [] # based on the diff, we'll need to keep track of how we'll # need to update ids_to_add = set() ids_to_update = set() ids_to_remove = set() for dx, dy in diffs: if dy is not None: if dx is None: ids_to_add.add(dy.wof_id) else: ids_to_update.add(dy.wof_id) wof_neighbourhoods_to_fetch.append(dy) else: ids_to_remove.add(dx.wof_id) if wof_neighbourhoods_to_fetch: self.logger.info('Fetching %d raw neighbourhoods ...' % len(wof_neighbourhoods_to_fetch)) raw_neighbourhoods, failures = ( self.fetcher.fetch_raw_neighbourhoods( wof_neighbourhoods_to_fetch)) self.logger.info('Fetching %d raw neighbourhoods ... done' % len(wof_neighbourhoods_to_fetch)) else: self.logger.info('No raw neighbourhoods found to fetch') raw_neighbourhoods = () failures = [] # we should just remove any neighbourhoods from add/update lists # also keep track of these ids to remove from the diffs too failed_wof_ids = set() superseded_by_wof_ids = set() funky_wof_ids = set() for failure in failures: failure_wof_id = failure.wof_id log_failure(self.logger, failure) if failure.funky: # this scenario is triggered for new neighbourhoods, # or if a neighbourhood became funky # we handle both of these scenarios in tests later on, # but for now we just track the id of the funky # neighbourhoods funky_wof_ids.add(failure_wof_id) if failure.superseded: self.logger.warn( 'superseded_by inconsistency for %s' % failure_wof_id) # this means that we had a value for superseded_by in # the raw json, but not in the meta file # this should get treated as a removal superseded_by_wof_ids.add(failure_wof_id) failed_wof_ids.add(failure_wof_id) ids_to_add.discard(failure_wof_id) ids_to_update.discard(failure_wof_id) # we'll only log the number of funky records that we found if funky_wof_ids: self.logger.warn('Number of funky neighbourhoods: %d' % len(funky_wof_ids)) # now we'll want to ensure that the failed ids are not present # in any additions or updates new_diffs = [] for n1, n2 in diffs: if n2 is None or n2.wof_id not in failed_wof_ids: new_diffs.append((n1, n2)) diffs = new_diffs # and we'll want to also treat any superseded_by # inconsistencies as removals # but we need the original neighbourhood meta object to # generate the diff, for its label position to expire the # appropriate tile if superseded_by_wof_ids: for n in prev_neighbourhoods: if n.wof_id in superseded_by_wof_ids: ids_to_remove.add(n.wof_id) diffs.append((n, None)) # if the neighbourhood became funky and we had it in our # existing set, we'll want to remove it if 
funky_wof_ids: for n in prev_neighbourhoods: if n.wof_id in funky_wof_ids: ids_to_remove.add(n.wof_id) diffs.append((n, None)) sync_neighbourhoods_thread = None if diffs: self.logger.info("Sync'ing neighbourhoods ...") # raw_neighbourhoods contains both the neighbourhoods to # add and update # we split it up here neighbourhoods_to_update = [] neighbourhoods_to_add = [] for neighbourhood in raw_neighbourhoods: if neighbourhood.wof_id in ids_to_add: neighbourhoods_to_add.append(neighbourhood) elif neighbourhood.wof_id in ids_to_update: neighbourhoods_to_update.append(neighbourhood) else: assert 0, '%d should have been found to add or update' % ( neighbourhood.wof_id) if neighbourhoods_to_add: self.logger.info('Inserting neighbourhoods: %d' % len(neighbourhoods_to_add)) if neighbourhoods_to_update: self.logger.info('Updating neighbourhoods: %d' % len(neighbourhoods_to_update)) if ids_to_remove: self.logger.info('Removing neighbourhoods: %d' % len(ids_to_remove)) def _sync_neighbourhoods(): self.model.sync_neighbourhoods( neighbourhoods_to_add, neighbourhoods_to_update, ids_to_remove) sync_neighbourhoods_thread = threading.Thread( target=_sync_neighbourhoods) sync_neighbourhoods_thread.start() else: self.logger.info('No diffs found, no sync necessary') if diffs: self.logger.info('Generating tile expiry list ...') expired_coord_ints = generate_tile_expiry_list( self.zoom_expiry, diffs) self.logger.info( 'Generating tile expiry list ... done - ' 'Found %d expired tiles' % len(expired_coord_ints)) else: self.logger.info('No diffs found, not generating expired coords') expired_coord_ints = set() # ensure we're done fetching the tiles of interest by this point toi_thread.join() toi = toi_queue.get() self.logger.info('Have tiles of interest') # we need to finish sync'ing neighbourhoods before we flip the # visibility flag and enqueue coordinates if sync_neighbourhoods_thread is not None: sync_neighbourhoods_thread.join() self.logger.info("Sync'ing neighbourhoods ... done") # update the current timestamp, returning the list of coords that # have changed visibility. visibility_updates = \ self.model.update_visible_timestamp( self.zoom_expiry, self.current_date) self.logger.info('Have %d tile expiries from visibility changes.' % len(visibility_updates)) expired_coord_ints.update(visibility_updates) if diffs: # intersect the tiles of interest with the expired coords from # the neighbourhood diff self.logger.info('Intersecting %d tiles of interest with %d ' 'expired tiles' % ( len(toi), len(expired_coord_ints))) toi_expired_coord_ints, _ = self.intersector( expired_coord_ints, toi, self.zoom_until) coords = map(coord_unmarshall_int, toi_expired_coord_ints) self.logger.info('Intersection complete, will expire %d tiles' % len(coords)) else: self.logger.info('No diffs found, no need to intersect') coords = () if coords: self.logger.info('Asking enqueuer to enqueue %d coords ...' % len(coords)) self.rawr_enqueuer(coords) self.logger.info('Asking enqueuer to enqueue %d coords ... done' % len(coords)) else: self.logger.info('No expired tiles to enqueue') class WofInitialLoader(object): def __init__(self, fetcher, model, logger): self.fetcher = fetcher self.model = model self.logger = logger def __call__(self): self.logger.info('Fetching meta neighbourhoods csv ...') neighbourhood_metas = list(self.fetcher.fetch_meta_neighbourhoods()) self.logger.info('Fetching meta neighbourhoods csv ... 
done') self.logger.info('Fetching meta microhoods csv ...') microhood_metas = list(self.fetcher.fetch_meta_microhoods()) self.logger.info('Fetching meta microhoods csv ... done') self.logger.info('Fetching meta macrohoods csv ...') macrohood_metas = list(self.fetcher.fetch_meta_macrohoods()) self.logger.info('Fetching meta macrohoods csv ... done') self.logger.info('Fetching meta boroughs csv ...') borough_metas = list(self.fetcher.fetch_meta_boroughs()) self.logger.info('Fetching meta boroughs csv ... done') neighbourhood_metas = ( neighbourhood_metas + microhood_metas + macrohood_metas + borough_metas) self.logger.info('Fetching raw neighbourhoods ...') neighbourhoods, failures = self.fetcher.fetch_raw_neighbourhoods( neighbourhood_metas) for failure in failures: log_failure(self.logger, failure) self.logger.info('Fetching raw neighbourhoods ... done') self.logger.info('Inserting %d neighbourhoods ...' % len(neighbourhoods)) self.model.insert_neighbourhoods(neighbourhoods) self.logger.info('Inserting %d neighbourhoods ... done' % len(neighbourhoods)) def make_wof_url_neighbourhood_fetcher( neighbourhood_url, microhood_url, macrohood_url, borough_url, data_prefix_url, n_threads, max_retries): fetcher = WofUrlNeighbourhoodFetcher( neighbourhood_url, microhood_url, macrohood_url, borough_url, data_prefix_url, n_threads, max_retries) return fetcher def make_wof_filesystem_neighbourhood_fetcher(wof_data_path, n_threads): fetcher = WofFilesystemNeighbourhoodFetcher( wof_data_path, n_threads) return fetcher def make_wof_model(postgresql_conn_info): wof_model = WofModel(postgresql_conn_info) return wof_model def make_wof_processor( fetcher, model, redis_cache_index, rawr_enqueuer, logger, current_date): from tilequeue.command import explode_and_intersect wof_processor = WofProcessor( fetcher, model, redis_cache_index, explode_and_intersect, rawr_enqueuer, logger, current_date) return wof_processor def make_wof_initial_loader(fetcher, model, logger): wof_loader = WofInitialLoader(fetcher, model, logger) return wof_loader
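The factory helpers at the bottom of this module are the intended entry points. A rough sketch of wiring a filesystem-backed initial load, following the module's Python 2 style; the data path and connection settings are placeholder assumptions:

# Sketch only: wof_data_path must contain the meta/ CSVs and data/ geojson
# tree that WofFilesystemNeighbourhoodFetcher expects; all values are made up.
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('wof-load')

fetcher = make_wof_filesystem_neighbourhood_fetcher(
    '/var/lib/whosonfirst', n_threads=4)
model = make_wof_model(dict(
    host='localhost', dbname='gis', user='tiles', password='secret'))
loader = make_wof_initial_loader(fetcher, model, logger)
loader()  # fetch meta CSVs, then raw records, then bulk insert via copy_from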
mit
9,120,416,503,911,111,000
35.280757
79
0.582449
false
3.652271
false
false
false
Tala/bybop
src/interactive.py
1
1944
#!/usr/bin/env python import sys try: import readline except ImportError: import pyreadline as readline import os import code import rlcompleter lib_path = os.path.abspath(os.path.join('..', 'src')) sys.path.append(lib_path) lib_path = os.path.abspath(os.path.join('..', '..', 'ARSDKBuildUtils', 'Utils', 'Python')) sys.path.append(lib_path) from Bybop_Discovery import * import Bybop_Device print('Searching for devices') from zeroconf import ZeroconfServiceTypes print('\n'.join(ZeroconfServiceTypes.find())) print('done.') discovery = Discovery([DeviceID.BEBOP_DRONE, DeviceID.JUMPING_SUMO, DeviceID.AIRBORNE_NIGHT, DeviceID.JUMPING_NIGHT]) discovery.wait_for_change() devices = discovery.get_devices() #discovery.stop() if not devices: print('Oops ...') sys.exit(1) device = devices.itervalues().next() print('Will connect to ' + get_name(device)) d2c_port = 43210 controller_type = "PC" controller_name = "bybop shell" drone = Bybop_Device.create_and_connect(device, d2c_port, controller_type, controller_name) if drone is None: print('Unable to connect to a product') sys.exit(1) drone.dump_state() vars = globals().copy() vars.update(locals()) readline.set_completer(rlcompleter.Completer(vars).complete) readline.parse_and_bind("tab: complete") shell = code.InteractiveConsole(vars) # drone.jump(0) # jump forward # drone.jump(1) # jump up # drone.move_forward(20) # move forwards # drone.move_forward(-20) # move backwards # drone.move(0,50) # turn right? # drone.move(0,-50) # turn left? # drone.spin() # spin around # drone.simpleAnimation(0) # drone.simpleAnimation(9) # Currently known values: # - 0 : stop # - 1 : spin # - 2 : tap # - 3 : slowshake # - 4 : metronome # - 5 : ondulation # - 6 : spinjump # - 7 : spintoposture # - 8 : spiral # - 9 : slalom # """ shell.interact() drone.stop()
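The commented lines above double as a cheat sheet for the interactive shell; the same calls can also be scripted rather than typed. A rough non-interactive variant, assuming the drone methods behave the way those comments suggest:

# Sketch only: replaces shell.interact() with a fixed sequence of the calls
# documented in the comments above; argument values and pauses are illustrative.
import time

drone.move_forward(20)      # move forwards
time.sleep(2)
drone.move(0, 50)           # turn (to the right, per the comment above)
time.sleep(2)
drone.simpleAnimation(1)    # 1 : spin, per the known-values list above
time.sleep(5)
drone.stop()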
bsd-3-clause
-245,576,819,196,394,050
21.870588
117
0.667181
false
2.901493
false
false
false
JDongian/LangGrind
src/parse_raw.py
1
1957
"""Parse human data into JSON""" import string def parse_file(filename="../data/RAW.txt"): """Parse human readable file into JSON.""" entries = [] with open(filename) as f_in: next_line = f_in.readline() data = {} state = "section" while next_line: if state == "section": line = next_line.split(" ") if line[0] == "Chapter": data = {'section': {'chapter': int(line[1]), 'part': line[4].strip()}} state = "term" elif state == "term": if not next_line.strip(): state = "section" next_line = f_in.readline() continue entry = data.copy() term, definition = next_line.split(";") #print("'{}'".format(next_line)) entry['term'] = term.strip() entry['definitions'] = [_.strip() for\ _ in definition.split(",")] entry['class'] = [] # Determine the lexical class of the word. if "(be)" in "".join(entry['definitions']): entry['class'].append("adjective") for _ in entry['definitions']: initial = _.split(" ")[0] end = _[-1] if initial in ["a", "an"]: entry['class'].append("noun") if initial in ["to"]: entry['class'].append("verb") if end in ".!?": entry['class'].append("phrase") # Proper nouns elif initial[0] in string.ascii_uppercase: entry['class'].append("noun") entries.append(entry) next_line = f_in.readline() return entries
gpl-3.0
6,378,814,179,138,106,000
38.14
67
0.406234
false
4.832099
false
false
false
prateeksan/python-design-patterns
structural/adapter.py
1
3485
""" The Adapter Pattern Notes: If the interface of an object does not match the interface required by the client code, this pattern recommends using an 'adapter' that can create a proxy interface. It is particularly useful in homogenizing interfaces of non-homogenous objects. The following example represents a use case for adapting various resource types to be readable as text resources. We assume that the client programmer works with resource objects that wrap binary, web-based or textual data. Each of the aforementioned has its own type and interface but we need to read them all as text type objects. Since every resource type can be represented as text (albeit the method calls to do so vary), we use the TextResourceAdapter to homogenize the interface and output the textual representation using a common read() method (set to behave like the read() method for TextResource). """ class TextResource: """We assume that our server can only read text. Therefore this resource is the only resource the server knows how to interpret. """ def read(self): return "Sample plain text." class BinaryResource: """An instance of this class wraps binary data. While it has many output formats, the server can only read the plain-text output. """ def read_plain_text(self): return "Sample plain text from binary." def read_raw(self): pass def read_interactive(self): pass class WebResource: """An instance of this class wraps web data. While it has many output formats, the server can only read the json output. """ def read_json(self): return "Sample plain text as json." def read_html(self): pass class IncompatibleResourceError(Exception): pass class TextResourceAdapter: """Acts as an adapter that uses the read() method to return a textual representation of the client_resource. """ convertibles = ("TextResource", "BinaryResource", "WebResource") def __init__(self, client_resource): self._verify_compatibility(client_resource) self._client_resource = client_resource def read(self): """Note that for a resource to use the adapter, it needs to be configured beforehand in this method. Your implementation may be modified to change this (depending on your use case). """ if self._client_resource.__class__ is BinaryResource: return self._client_resource.read_plain_text() elif self._client_resource.__class__ is WebResource: return self._client_resource.read_json() return self._client_resource.read() def _verify_compatibility(self, resource): """Since we need to pre-configure the adapter to handle various resource types, we raise an error if the client_resource is not pre-configured. """ if resource.__class__.__name__ not in self.__class__.convertibles: raise IncompatibleResourceError("{} cannot be adapted.".format( resource.__class__.__name__)) if __name__ == "__main__": client_resources = [BinaryResource(), WebResource(), TextResource()] for resource in client_resources: print("Adapting {} as a text resource...".format( resource.__class__.__name__)) adapted_resource = TextResourceAdapter(resource) # Note how the read interface has been homogenized. print(adapted_resource.read() + "\n")
mit
5,776,622,469,529,610,000
32.84466
80
0.68637
false
4.456522
false
false
false
3dfxsoftware/cbss-addons
account_aged_partner_balance_report/report/account_aged_partner_balance_report.py
1
10928
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Addons modules by CLEARCORP S.A.
#    Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import pooler
from report import report_sxw
from tools.translate import _
from openerp.osv import fields, osv
from openerp.addons.account_report_lib.account_report_base import accountReportbase


class Parser(accountReportbase):

    def __init__(self, cr, uid, name, context):
        super(Parser, self).__init__(cr, uid, name, context=context)
        self.localcontext.update({
            'cr': cr,
            'uid': uid,
            'storage': {},
            'set_data_template': self.set_data_template,
            'set_data_move_lines': self.set_data_move_lines,
            'get_data': self.get_data,
            'get_data_by_partner': self.get_data_by_partner,
            'get_period_length': self.get_period_length,
            'get_direction_selection': self.get_direction_selection,
            'display_account_type': self.display_account_type,
            'display_direction_selection': self.display_direction_selection,
            'display_period_length': self.display_period_length,
            'process_lines_period': self.process_lines_period,
        })

    #====Extract data from wizard==============================================
    def get_period_length(self, data):
        return self._get_form_param('period_length', data)

    def get_direction_selection(self, data):
        return self._get_form_param('direction_selection', data)

    def get_account_type(self, data):
        return self._get_form_param('account_type', data)

    """ Return a dictionary, with this structure:
        result[account_type][move_list_lines] (a dictionary) """
    def get_data_by_partner(self, partner_id):
        return self.localcontext['storage']['result'][partner_id]
    #==========================================================================

    #====Display data==========================================================
    def display_account_type(self, data=None, account_type=None):
        #if it's necessary to display in report's header
        if data:
            account_type = self.get_account_type(data)

        ##Options for report information (keys are different)
        if account_type == 'receivable':
            return _('Receivable Accounts')
        elif account_type == 'payable':
            return _('Payable Accounts')

        ###Options for header
        if account_type == 'customer':
            return _('Receivable accounts')
        elif account_type == 'supplier':
            return _('Payable accounts')
        elif account_type == 'customer_supplier':
            return _('Payable and Receivable accounts')

        return ''

    def display_direction_selection(self, data):
        direction_selection = self.get_direction_selection(data)

        if direction_selection == 'past':
            return _('Past')
        elif direction_selection == 'future':
            return _('Future')

        return ''

    def display_period_length(self, data):
        return self.get_period_length(data)

    #===== Set data =========================================================
    #set data to use in odt template.
    def set_data_template(self, cr, uid, data):
        result, partner_ids_order = self.get_data(cr, uid, data)
        dict_update = {'result': result,
                       'partner_ids_order': partner_ids_order, }
        self.localcontext['storage'].update(dict_update)
        return False

    def set_data_move_lines(self, data, move_lines):
        #move_lines is a dictionary
        move_lines, partner_total = self.process_lines_period(data, move_lines)
        dict_update = {'move_lines': move_lines,
                       'partner_total': partner_total}
        self.localcontext['storage'].update(dict_update)
        return False
    #==========================================================================

    def get_move_lines(self, data):
        account_account_obj = self.pool.get('account.account')
        account_move_line_obj = self.pool.get('account.move.line')
        account_type_domain = []

        #Get parameters
        date_from = str(self.get_date_from(data))
        direction_selection = str(self.get_direction_selection(data))
        account_type = self.get_account_type(data)

        if account_type == 'customer':
            account_type_domain.append('receivable')
        if account_type == 'supplier':
            account_type_domain.append('payable')
        if account_type == 'customer_supplier':
            account_type_domain.append('receivable')
            account_type_domain.append('payable')

        #Build domains
        account_account_ids = account_account_obj.search(
            self.cr, self.uid, [('type', 'in', account_type_domain),
                                ('active', '=', True)])
        account_move_line_domain = [('state', '=', 'valid'),
                                    ('reconcile_id', '=', False),
                                    ('account_id', 'in', account_account_ids)]

        #=====Build a account move lines domain
        #Date
        tuple_date = ()
        if direction_selection == 'past':
            tuple_date = ('date', '<=', date_from)
            account_move_line_domain.append(tuple_date)
        else:
            tuple_date = ('date', '>=', date_from)
            account_move_line_domain.append(tuple_date)

        #Get move_lines based on previous domain
        account_move_line_ids = account_move_line_obj.search(
            self.cr, self.uid, account_move_line_domain,
            order='date_maturity desc')
        account_move_lines = account_move_line_obj.browse(
            self.cr, self.uid, account_move_line_ids)

        return account_move_lines

    def get_data(self, cr, uid, data):
        partner_ids = []
        res = {}

        """ 1. Extract move lines """
        move_lines = self.get_move_lines(data)

        """ 2. Classified move_lines by partner and account_type """
        for line in move_lines:
            if line.partner_id:
                partner_id = line.partner_id.id
            else:
                partner_id = 0  #key for lines that don't have partner_id

            #== Create a list, them order it by name ============
            if partner_id not in partner_ids:
                partner_ids.append(partner_id)
            #====================================================

            if partner_id not in res.keys():
                res[partner_id] = {}
            if line.account_id.type not in res[partner_id].keys():
                res[line.partner_id.id][line.account_id.type] = []

            res[partner_id][line.account_id.type].append(line)

        #Sort by partner's name (alphabetically)
        partner_ids_order = self.pool.get('res.partner').search(
            cr, uid, [('id', 'in', partner_ids)], order='name ASC')
        partner_list = self.pool.get('res.partner').browse(
            self.cr, self.uid, partner_ids_order)

        return res, partner_list

    #Process each column for line.
    def process_lines_period(self, data, move_lines):
        res = {}
        partner_total = 0.0
        result_list = [7]

        #Get parameters
        date_from = str(self.get_date_from(data))
        direction_selection = str(self.get_direction_selection(data))

        for line in move_lines:
            result_list = map(float, result_list)
            #initialize list
            result_list = [0.0 for i in range(7)]

            if not line.date_maturity or direction_selection == 'past' and line.date_maturity > date_from \
                    or direction_selection == 'future' and line.date_maturity < date_from:
                if line.debit:
                    value = line.debit
                else:
                    value = line.credit
                result_list[0] = value

            if line.date_maturity >= data['form']['4']['start'] and line.date_maturity <= data['form']['4']['stop']:
                if line.debit:
                    value = line.debit
                else:
                    value = line.credit
                result_list[1] = value

            if line.date_maturity >= data['form']['3']['start'] and line.date_maturity <= data['form']['3']['stop']:
                if line.debit:
                    value = line.debit
                else:
                    value = line.credit
                result_list[2] = value

            if line.date_maturity >= data['form']['2']['start'] and line.date_maturity <= data['form']['2']['stop']:
                if line.debit:
                    value = line.debit
                else:
                    value = line.credit
                result_list[3] = value

            if line.date_maturity >= data['form']['1']['start'] and line.date_maturity <= data['form']['1']['stop']:
                if line.debit:
                    value = line.debit
                else:
                    value = line.credit
                result_list[4] = value

            if line.date_maturity and data['form']['0']['stop'] and line.date_maturity <= data['form']['0']['stop'] or line.date_maturity and data['form']['0']['start'] and line.date_maturity >= data['form']['0']['start']:
                if line.debit:
                    value = line.debit
                else:
                    value = line.credit
                result_list[5] = value

            #Total by partner
            partner_total += line.debit if line.debit else line.credit * -1
            result_list[6] = partner_total

            res[line.id] = result_list

        return res, partner_total
gpl-2.0
284,019,789,491,869,440
41.858824
221
0.522053
false
4.333069
false
false
false
nosuchtim/VizBench
src/jsonrpc/jsonrpc.py
1
1044
# Utility to send JSON RPC messages
# Avoid the requests module to reduce installation hassles

import urllib
import urllib2
import json
import sys

verbose = False

def dorpc(port,meth,params):
    url = 'http://127.0.0.1:%d/api' % (port)
    id = '12345'
    data = '{ "jsonrpc": "2.0", "method": "'+meth+'", "params": '+params+', "id":"'+id+'" }\n'
    req = urllib2.Request(url,data)
    r = urllib2.urlopen(req)
    response = r.read()
    if verbose:
        print "HTTP status code = ",r.getcode()
        print "HTTP url = ",r.geturl()
        print "HTTP info = ",r.info()
        print "response is ",response
    j = json.loads(response)
    if "error" in j:
        print "ERROR: "+str(j["error"]["message"])
    elif "result" in j:
        print "RESULT: "+str(j["result"])
    else:
        print "No error or result in JSON response!? r="+r

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print "Usage: jsonrpc {port} {meth} [ {params} ]"
    else:
        port = int(sys.argv[1])
        meth = sys.argv[2]
        if len(sys.argv) < 4:
            params = "{}"
        else:
            params = sys.argv[3]
        dorpc(port,meth,params)
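
# Hypothetical invocations; the port, method name and params below are
# placeholders and are not guaranteed to exist on a given Vizlet:
#
#   python jsonrpc.py 4444 echo '{"value": 1}'
#
# or, from Python:
#
#   dorpc(4444, "echo", '{"value": 1}')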
mit
6,698,785,829,892,262,000
22.2
91
0.6159
false
2.690722
false
false
false
h4wldev/Frest
app/routes/api/v1/users/user.py
1
6676
#
# -*- coding: utf-8 -*-
import re
import datetime

from flask import request
from flask_api import status
from flask_restful import Resource
from sqlalchemy.exc import IntegrityError
from werkzeug.security import generate_password_hash

from app import db, token_auth
from app.models.user_model import UserModel, get_user
from app.models.user_token_model import token_is_auth, token_load_with_auth, token_expire_all, token_delete_all
from app.modules import frest
from app.modules.frest.api.error import get_exists_error
from app.modules.frest.validate import user as userValidate
from app.modules.frest.serialize.user import serialize_user

_URL = '/users/<prefix>'


class User(Resource):
    """
    @api {get} /users/:prefix Get particular user's info
    @apiName User Info
    @apiGroup Users

    @apiHeader {String} Authorization Access token.
    @apiHeaderExample {json} Header-Example:
        {
            "Authorization": "304924"
        }

    @apiParam {String} prefix user's prefix

    @apiSuccess (200) {String} data Users data.
    @apiError (401) UnAuthorized You don't have permission.
    @apiError (400) ValueError Prefix can only be me or number
    """

    @frest.API
    @token_auth.login_required
    def get(self, prefix):
        try:
            if prefix == 'me':
                user_id = token_load_with_auth(request.headers['Authorization'])['user_id']
            else:
                user_id = int(prefix)

            if token_is_auth(request.headers['Authorization'], user_id):
                user = get_user(user_id)
                return serialize_user(user), status.HTTP_200_OK
            else:
                return "You don't have permission.", status.HTTP_401_UNAUTHORIZED
        except ValueError:
            return "Prefix can only be me or a number.", status.HTTP_400_BAD_REQUEST

    """
    @api {put} /users/:prefix Update user info
    @apiName Update user info
    @apiGroup Users
    @apiPermission Admin

    @apiHeader {String} Authorization Access token.
    @apiHeaderExample {json} Header-Example:
        {
            "Authorization": "304924"
        }

    @apiParam {String} prefix user's prefix

    @apiSuccess (200) None
    @apiError (400) BadRequest Invalid input - Prefix can only be me or a number.
    @apiError (401) UnAuthorized You don't have permission - Should be admin.
    @apiError (404) NotFound User not found.
    """

    @frest.API
    @token_auth.login_required
    def put(self, prefix):
        try:
            if prefix == 'me':
                user_id = token_load_with_auth(request.headers['Authorization'])['user_id']
            else:
                user_id = int(prefix)

            user_query = UserModel.query \
                .filter(UserModel.id == user_id)

            if token_is_auth(request.headers['Authorization'], user_id):
                user_permission = token_load_with_auth(request.headers['Authorization'])['permission']

                if user_permission != 'ADMIN' and request.form.get('permission') is not None:
                    return "You don't have permission.", status.HTTP_401_UNAUTHORIZED

                form = userValidate.modificationForm(request.form)

                if form.validate():
                    if user_query.count():
                        user = user_query.first()

                        try:
                            for key, value in request.form.items():
                                if value is not None and value != '':
                                    if key == 'password':
                                        value = generate_password_hash(value)
                                        token_expire_all(user.id)

                                    setattr(user, key, value)

                            user.updated_at = datetime.datetime.now()
                            db.session.commit()
                        except IntegrityError as e:
                            field, value = get_exists_error(e)

                            _return = {
                                'message': "'" + value + "' is already exists.",
                                'field': {
                                    'label': getattr(form, field).label.text,
                                    'name': field
                                }
                            }

                            return _return, status.HTTP_400_BAD_REQUEST

                        return None, status.HTTP_200_OK
                    else:
                        return "The user does not exist.", status.HTTP_404_NOT_FOUND

                for field, errors in form.errors.items():
                    for error in errors:
                        _return = {
                            'message': error,
                            'field': getattr(form, field).label.text
                        }

                        return _return, status.HTTP_400_BAD_REQUEST
            else:
                return "You don't have permission.", status.HTTP_401_UNAUTHORIZED
        except ValueError:
            return "Prefix can only be me or a number.", status.HTTP_400_BAD_REQUEST

    """
    @api {delete} /users/:prefix Delete user
    @apiName User Delete
    @apiGroup Users

    @apiHeader {String} Authorization Access token.
    @apiHeaderExample {json} Header-Example:
        {
            "Authorization": "304924"
        }

    @apiParam {String} prefix user's prefix

    @apiSuccess (200) None
    @apiError (404) NotFound User not found.
    @apiError (401) UnAuthorized You don't have permission.
    @apiError (400) ValueError Prefix can only be me or number
    """

    @frest.API
    @token_auth.login_required
    def delete(self, prefix):
        try:
            if prefix == 'me':
                user_id = token_load_with_auth(request.headers['Authorization'])['user_id']
            else:
                user_id = int(prefix)

            user_query = UserModel.query \
                .filter(UserModel.id == user_id)

            if token_is_auth(request.headers['Authorization'], user_id):
                if user_query.count():
                    token_delete_all(user_id)

                    user = user_query.first()
                    db.session.delete(user)
                    db.session.commit()

                    return None, status.HTTP_200_OK
                else:
                    return "The user does not exist.", status.HTTP_404_NOT_FOUND
            else:
                return "You don't have permission.", status.HTTP_401_UNAUTHORIZED
        except ValueError:
            return "Prefix can only be me or a number.", status.HTTP_400_BAD_REQUEST
mit
2,234,537,345,375,100,000
33.770833
111
0.539994
false
4.532247
false
false
false
Nikola-K/django_reddit
users/models.py
1
1565
from hashlib import md5

import mistune
from django.contrib.auth.models import User
from django.db import models


class RedditUser(models.Model):
    user = models.OneToOneField(User)
    first_name = models.CharField(max_length=35, null=True, default=None,
                                  blank=True)
    last_name = models.CharField(max_length=35, null=True, default=None,
                                 blank=True)
    email = models.EmailField(null=True, blank=True, default=None)
    about_text = models.TextField(blank=True, null=True, max_length=500,
                                  default=None)
    about_html = models.TextField(blank=True, null=True, default=None)
    gravatar_hash = models.CharField(max_length=32, null=True, blank=True,
                                     default=None)
    display_picture = models.NullBooleanField(default=False)
    homepage = models.URLField(null=True, blank=True, default=None)
    twitter = models.CharField(null=True, blank=True, max_length=15,
                               default=None)
    github = models.CharField(null=True, blank=True, max_length=39,
                              default=None)

    comment_karma = models.IntegerField(default=0)
    link_karma = models.IntegerField(default=0)

    def update_profile_data(self):
        self.about_html = mistune.markdown(self.about_text)
        if self.display_picture:
            self.gravatar_hash = md5(self.email.lower().encode('utf-8')).hexdigest()

    def __unicode__(self):
        return "<RedditUser:{}>".format(self.user.username)
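
# A minimal usage sketch (assumes a configured Django project; the username,
# email and markdown text below are placeholder values):
#
#   author = User.objects.create_user('jane', 'jane@example.com', 'secret')
#   profile = RedditUser(user=author, email='jane@example.com',
#                        about_text='Hello *world*', display_picture=True)
#   profile.update_profile_data()  # renders about_html, sets gravatar_hash
#   profile.save()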
apache-2.0
-6,009,276,806,158,894,000
42.472222
84
0.628754
false
3.95202
false
false
false