repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
omemo/python-omemo | src/omemo/liteprekeystore.py | 1 | 2577 | # -*- coding: utf-8 -*-
#
# Copyright 2015 Tarek Galal <[email protected]>
#
# This file is part of Gajim-OMEMO plugin.
#
# The Gajim-OMEMO plugin is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# Gajim-OMEMO is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# the Gajim-OMEMO plugin. If not, see <http://www.gnu.org/licenses/>.
#
from axolotl.state.prekeyrecord import PreKeyRecord
from axolotl.state.prekeystore import PreKeyStore
class LitePreKeyStore(PreKeyStore):
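    """SQLite-backed implementation of axolotl's PreKeyStore, persisting
    pre-key records in a local ``prekeys`` table."""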
def __init__(self, dbConn):
"""
:type dbConn: Connection
"""
self.dbConn = dbConn
dbConn.execute("CREATE TABLE IF NOT EXISTS prekeys(" +
"_id INTEGER PRIMARY KEY AUTOINCREMENT," +
"prekey_id INTEGER UNIQUE, sent_to_server BOOLEAN, " +
" record BLOB);")
def loadPreKey(self, preKeyId):
q = "SELECT record FROM prekeys WHERE prekey_id = ?"
cursor = self.dbConn.cursor()
cursor.execute(q, (preKeyId, ))
result = cursor.fetchone()
if not result:
raise Exception("No such prekeyRecord!")
return PreKeyRecord(serialized=result[0])
def loadPendingPreKeys(self):
q = "SELECT record FROM prekeys"
cursor = self.dbConn.cursor()
cursor.execute(q)
result = cursor.fetchall()
return [PreKeyRecord(serialized=r[0]) for r in result]
def storePreKey(self, preKeyId, preKeyRecord):
# self.removePreKey(preKeyId)
q = "INSERT INTO prekeys (prekey_id, record) VALUES(?,?)"
cursor = self.dbConn.cursor()
cursor.execute(q, (preKeyId, preKeyRecord.serialize()))
self.dbConn.commit()
def containsPreKey(self, preKeyId):
q = "SELECT record FROM prekeys WHERE prekey_id = ?"
cursor = self.dbConn.cursor()
cursor.execute(q, (preKeyId, ))
return cursor.fetchone() is not None
def removePreKey(self, preKeyId):
q = "DELETE FROM prekeys WHERE prekey_id = ?"
cursor = self.dbConn.cursor()
cursor.execute(q, (preKeyId, ))
self.dbConn.commit()
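# --- usage sketch (not part of the upstream module) ---------------------------
# Minimal wiring against an in-memory SQLite database. This assumes
# python-axolotl's KeyHelper.generatePreKeys(start, count) and
# PreKeyRecord.getId(); verify those names against the installed axolotl
# version before relying on them.
if __name__ == '__main__':
    import sqlite3
    from axolotl.util.keyhelper import KeyHelper
    store = LitePreKeyStore(sqlite3.connect(':memory:'))
    for record in KeyHelper.generatePreKeys(1, 5):
        store.storePreKey(record.getId(), record)
    print([r.getId() for r in store.loadPendingPreKeys()])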
| gpl-3.0 | 4,332,987,200,107,825,700 | 34.791667 | 80 | 0.648428 | false | 3.67094 | false | false | false |
nitmir/django-cas-server | cas_server/admin.py | 1 | 6813 | # This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License version 3 for
# more details.
#
# You should have received a copy of the GNU General Public License version 3
# along with this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# (c) 2015-2016 Valentin Samir
"""module for the admin interface of the app"""
from .default_settings import settings
from django.contrib import admin
from .models import ServiceTicket, ProxyTicket, ProxyGrantingTicket, User, ServicePattern
from .models import Username, ReplaceAttributName, ReplaceAttributValue, FilterAttributValue
from .models import FederatedIendityProvider, FederatedUser, UserAttributes
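# Note: "FederatedIendityProvider" (sic) mirrors the misspelled class name in
# cas_server.models; renaming it only here would break the import.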
from .forms import TicketForm
class BaseInlines(admin.TabularInline):
"""
Bases: :class:`django.contrib.admin.TabularInline`
Base class for inlines in the admin interface.
"""
#: This controls the number of extra forms the formset will display in addition to
#: the initial forms.
extra = 0
class UserAdminInlines(BaseInlines):
"""
Bases: :class:`BaseInlines`
Base class for inlines in :class:`UserAdmin` interface
"""
#: The form :class:`TicketForm<cas_server.forms.TicketForm>` used to display tickets.
form = TicketForm
    #: Fields to display on an object that are read only (not editable).
readonly_fields = (
'validate', 'service', 'service_pattern',
'creation', 'renew', 'single_log_out', 'value'
)
    #: Fields to display on an object.
fields = (
'validate', 'service', 'service_pattern',
'creation', 'renew', 'single_log_out'
)
class ServiceTicketInline(UserAdminInlines):
"""
Bases: :class:`UserAdminInlines`
:class:`ServiceTicket<cas_server.models.ServiceTicket>` in admin interface
"""
#: The model which the inline is using.
model = ServiceTicket
class ProxyTicketInline(UserAdminInlines):
"""
Bases: :class:`UserAdminInlines`
:class:`ProxyTicket<cas_server.models.ProxyTicket>` in admin interface
"""
#: The model which the inline is using.
model = ProxyTicket
class ProxyGrantingInline(UserAdminInlines):
"""
Bases: :class:`UserAdminInlines`
:class:`ProxyGrantingTicket<cas_server.models.ProxyGrantingTicket>` in admin interface
"""
#: The model which the inline is using.
model = ProxyGrantingTicket
class UserAdmin(admin.ModelAdmin):
"""
Bases: :class:`django.contrib.admin.ModelAdmin`
:class:`User<cas_server.models.User>` in admin interface
"""
#: See :class:`ServiceTicketInline`, :class:`ProxyTicketInline`, :class:`ProxyGrantingInline`
#: objects below the :class:`UserAdmin` fields.
inlines = (ServiceTicketInline, ProxyTicketInline, ProxyGrantingInline)
    #: Fields to display on an object that are read only (not editable).
readonly_fields = ('username', 'date', "session_key")
    #: Fields to display on an object.
fields = ('username', 'date', "session_key")
    #: Fields to display on the list of :class:`UserAdmin` objects.
list_display = ('username', 'date', "session_key")
class UsernamesInline(BaseInlines):
"""
Bases: :class:`BaseInlines`
:class:`Username<cas_server.models.Username>` in admin interface
"""
#: The model which the inline is using.
model = Username
class ReplaceAttributNameInline(BaseInlines):
"""
Bases: :class:`BaseInlines`
:class:`ReplaceAttributName<cas_server.models.ReplaceAttributName>` in admin interface
"""
#: The model which the inline is using.
model = ReplaceAttributName
class ReplaceAttributValueInline(BaseInlines):
"""
Bases: :class:`BaseInlines`
:class:`ReplaceAttributValue<cas_server.models.ReplaceAttributValue>` in admin interface
"""
#: The model which the inline is using.
model = ReplaceAttributValue
class FilterAttributValueInline(BaseInlines):
"""
Bases: :class:`BaseInlines`
:class:`FilterAttributValue<cas_server.models.FilterAttributValue>` in admin interface
"""
#: The model which the inline is using.
model = FilterAttributValue
class ServicePatternAdmin(admin.ModelAdmin):
"""
Bases: :class:`django.contrib.admin.ModelAdmin`
:class:`ServicePattern<cas_server.models.ServicePattern>` in admin interface
"""
#: See :class:`UsernamesInline`, :class:`ReplaceAttributNameInline`,
#: :class:`ReplaceAttributValueInline`, :class:`FilterAttributValueInline` objects below
#: the :class:`ServicePatternAdmin` fields.
inlines = (
UsernamesInline,
ReplaceAttributNameInline,
ReplaceAttributValueInline,
FilterAttributValueInline
)
    #: Fields to display on the list of :class:`ServicePatternAdmin` objects.
list_display = ('pos', 'name', 'pattern', 'proxy',
'single_log_out', 'proxy_callback', 'restrict_users')
class FederatedIendityProviderAdmin(admin.ModelAdmin):
"""
Bases: :class:`django.contrib.admin.ModelAdmin`
:class:`FederatedIendityProvider<cas_server.models.FederatedIendityProvider>` in admin
interface
"""
    #: Fields to display on an object.
fields = ('pos', 'suffix', 'server_url', 'cas_protocol_version', 'verbose_name', 'display')
    #: Fields to display on the list of :class:`FederatedIendityProviderAdmin` objects.
list_display = ('verbose_name', 'suffix', 'display')
class FederatedUserAdmin(admin.ModelAdmin):
"""
Bases: :class:`django.contrib.admin.ModelAdmin`
:class:`FederatedUser<cas_server.models.FederatedUser>` in admin
interface
"""
    #: Fields to display on an object.
fields = ('username', 'provider', 'last_update')
    #: Fields to display on the list of :class:`FederatedUserAdmin` objects.
list_display = ('username', 'provider', 'last_update')
class UserAttributesAdmin(admin.ModelAdmin):
"""
Bases: :class:`django.contrib.admin.ModelAdmin`
:class:`UserAttributes<cas_server.models.UserAttributes>` in admin
interface
"""
    #: Fields to display on an object.
fields = ('username', '_attributs')
admin.site.register(ServicePattern, ServicePatternAdmin)
admin.site.register(FederatedIendityProvider, FederatedIendityProviderAdmin)
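# Ticket and user state is mainly useful for debugging, so those models are
# only exposed in the admin when DEBUG is enabled below.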
if settings.DEBUG: # pragma: no branch (we always test with DEBUG True)
admin.site.register(User, UserAdmin)
admin.site.register(FederatedUser, FederatedUserAdmin)
admin.site.register(UserAttributes, UserAttributesAdmin)
| gpl-3.0 | 5,034,193,972,385,483,000 | 32.895522 | 97 | 0.693821 | false | 3.967967 | false | false | false |
classner/fertilized-devtools | binding_generator/ordered_set.py | 1 | 1936 | # See http://code.activestate.com/recipes/576694/.
try:
    from collections.abc import MutableSet  # Python 3.3+
except ImportError:  # Python 2 fallback
    from collections import MutableSet
class OrderedSet(MutableSet):
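    """Set that remembers insertion order.

    A dict maps each key to its [key, prev, next] cell in a circular doubly
    linked list anchored at a sentinel, giving O(1) add, discard and
    membership tests while iteration follows insertion order.
    """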
def __init__(self, iterable=None):
self.end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.map = {} # key --> [key, prev, next]
if iterable is not None:
self |= iterable
def __len__(self):
return len(self.map)
def __contains__(self, key):
return key in self.map
def update(self, setvalues):
for key in setvalues:
self.add(key)
def add(self, key):
if key not in self.map:
end = self.end
curr = end[1]
curr[2] = end[1] = self.map[key] = [key, curr, end]
def discard(self, key):
if key in self.map:
key, prev, next = self.map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def pop(self, last=True):
if not self:
raise KeyError('set is empty')
key = self.end[1][0] if last else self.end[2][0]
self.discard(key)
return key
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self))
def __eq__(self, other):
if isinstance(other, OrderedSet):
return len(self) == len(other) and list(self) == list(other)
return set(self) == set(other)
if __name__ == '__main__':
s = OrderedSet('abracadaba')
t = OrderedSet('simsalabim')
print(s | t)
print(s & t)
print(s - t)
| bsd-2-clause | -3,656,923,377,244,034,600 | 25.520548 | 78 | 0.497934 | false | 3.639098 | false | false | false |
googleapis/googleapis-gen | google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/services/types/feed_item_target_service.py | 1 | 5926 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v7.enums.types import response_content_type as gage_response_content_type
from google.ads.googleads.v7.resources.types import feed_item_target as gagr_feed_item_target
from google.rpc import status_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v7.services',
marshal='google.ads.googleads.v7',
manifest={
'GetFeedItemTargetRequest',
'MutateFeedItemTargetsRequest',
'FeedItemTargetOperation',
'MutateFeedItemTargetsResponse',
'MutateFeedItemTargetResult',
},
)
class GetFeedItemTargetRequest(proto.Message):
r"""Request message for
[FeedItemTargetService.GetFeedItemTarget][google.ads.googleads.v7.services.FeedItemTargetService.GetFeedItemTarget].
Attributes:
resource_name (str):
            Required. The resource name of the feed item
            target to fetch.
"""
resource_name = proto.Field(
proto.STRING,
number=1,
)
class MutateFeedItemTargetsRequest(proto.Message):
r"""Request message for
[FeedItemTargetService.MutateFeedItemTargets][google.ads.googleads.v7.services.FeedItemTargetService.MutateFeedItemTargets].
Attributes:
customer_id (str):
Required. The ID of the customer whose feed
item targets are being modified.
operations (Sequence[google.ads.googleads.v7.services.types.FeedItemTargetOperation]):
Required. The list of operations to perform
on individual feed item targets.
partial_failure (bool):
If true, successful operations will be
carried out and invalid operations will return
errors. If false, all operations will be carried
out in one transaction if and only if they are
all valid. Default is false.
response_content_type (google.ads.googleads.v7.enums.types.ResponseContentTypeEnum.ResponseContentType):
The response content type setting. Determines
whether the mutable resource or just the
resource name should be returned post mutation.
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
"""
customer_id = proto.Field(
proto.STRING,
number=1,
)
operations = proto.RepeatedField(
proto.MESSAGE,
number=2,
message='FeedItemTargetOperation',
)
partial_failure = proto.Field(
proto.BOOL,
number=4,
)
response_content_type = proto.Field(
proto.ENUM,
number=5,
enum=gage_response_content_type.ResponseContentTypeEnum.ResponseContentType,
)
validate_only = proto.Field(
proto.BOOL,
number=3,
)
class FeedItemTargetOperation(proto.Message):
r"""A single operation (create, remove) on an feed item target.
Attributes:
create (google.ads.googleads.v7.resources.types.FeedItemTarget):
Create operation: No resource name is
expected for the new feed item target.
remove (str):
Remove operation: A resource name for the removed feed item
target is expected, in this format:
``customers/{customer_id}/feedItemTargets/{feed_id}~{feed_item_id}~{feed_item_target_type}~{feed_item_target_id}``
"""
create = proto.Field(
proto.MESSAGE,
number=1,
oneof='operation',
message=gagr_feed_item_target.FeedItemTarget,
)
remove = proto.Field(
proto.STRING,
number=2,
oneof='operation',
)
class MutateFeedItemTargetsResponse(proto.Message):
r"""Response message for an feed item target mutate.
Attributes:
partial_failure_error (google.rpc.status_pb2.Status):
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (e.g. auth errors), we return an RPC
level error.
results (Sequence[google.ads.googleads.v7.services.types.MutateFeedItemTargetResult]):
All results for the mutate.
"""
partial_failure_error = proto.Field(
proto.MESSAGE,
number=3,
message=status_pb2.Status,
)
results = proto.RepeatedField(
proto.MESSAGE,
number=2,
message='MutateFeedItemTargetResult',
)
class MutateFeedItemTargetResult(proto.Message):
r"""The result for the feed item target mutate.
Attributes:
resource_name (str):
Returned for successful operations.
feed_item_target (google.ads.googleads.v7.resources.types.FeedItemTarget):
The mutated feed item target with only mutable fields after
mutate. The field will only be returned when
response_content_type is set to "MUTABLE_RESOURCE".
"""
resource_name = proto.Field(
proto.STRING,
number=1,
)
feed_item_target = proto.Field(
proto.MESSAGE,
number=2,
message=gagr_feed_item_target.FeedItemTarget,
)
__all__ = tuple(sorted(__protobuf__.manifest))
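# --- usage sketch (not part of the generated module) --------------------------
# Building a removal request from the message classes above. The resource name
# is a made-up example; an actual mutate call would go through the google-ads
# client library's FeedItemTargetService transport, which is omitted here.
if __name__ == '__main__':
    operation = FeedItemTargetOperation(
        remove='customers/1234567890/feedItemTargets/1~2~3~4')
    request = MutateFeedItemTargetsRequest(
        customer_id='1234567890',
        operations=[operation],
        validate_only=True,
    )
    print(request)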
| apache-2.0 | 2,457,467,938,042,463,700 | 32.862857 | 128 | 0.662842 | false | 4.241947 | false | false | false |
ifaoe/daisi-tk | daisi_images.py | 1 | 5255 | #!/usr/bin/python3
import logging
import psycopg2
from argparse import ArgumentParser
from gdal_tif2geo import process
import multiprocessing
import subprocess
from joblib import Parallel, delayed
from math import ceil
import tempfile
import os
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# worker executed in parallel, one call per image row; ``verbose`` is passed
# explicitly so the workers do not depend on the parent process' global ``args``
def parallel_process(row, linco_path, linco_args, threads, overwrite, temppath, compress, opencl, verbose):
# split row from database query into single variables
[epsg, iiq_file, geo_file, ne_x, ne_y, nw_x, nw_y, sw_x, sw_y, se_x, se_y] = row
if not overwrite:
if os.path.isfile(geo_file) and os.path.exists(geo_file):
print('{file} already exists.'.format(file=geo_file))
return
print("Processing {0} -> {1}".format(iiq_file, geo_file))
# convert iiq -> tiff
# create temporary file
temp_file = tempfile.NamedTemporaryFile()
# run linco
    # build the argv list explicitly: the previous tuple-with-shell=True form
    # handed only 'nice' to the shell, so the linco conversion never ran
    linco_command = ['nice', '-n', '19', linco_path, iiq_file, temp_file.name,
                     '-cputhreads={threads}'.format(threads=threads)] + linco_args.split()
    logger.debug(' '.join(linco_command))
    linco_log = subprocess.run(linco_command, check=True, stdout=subprocess.PIPE).stdout.decode('utf8')
logger.debug(linco_log)
# create geotiff
    process(temp_file.name, geo_file, [ne_x, ne_y], [nw_x, nw_y], [se_x, se_y], [sw_x, sw_y], threads,
            0.02, compress, 95, 'lanczos', epsg, [256, 256], verbose, opencl, overwrite, temppath)
if __name__ == '__main__':
parser = ArgumentParser(description='Georeference DAISI images from tif.')
parser.add_argument('-v', '--verbose', action='store_true', help='Verbosity.')
parser.add_argument('-s', '--session', type=str, default='.*', help='Session pattern (default: .*).')
parser.add_argument('-t', '--transect', type=str, default='.*', help='Transect pattern (default: .*).')
parser.add_argument('-c', '--camera', type=str, default='.*', help='Camera pattern (default: .*).')
parser.add_argument('-i', '--image', type=str, default='.*', help='Image pattern (default: .*).')
parser.add_argument('-H', '--host', type=str, default='127.0.0.1', help='Database host (default: 127.0.0.1).')
parser.add_argument('-d', '--database', type=str, default='daisi', help='Database name (default: DAISI).')
parser.add_argument('-u', '--user', type=str, default='daisi', help='Database user (default: DAISI).')
parser.add_argument('-P', '--password', type=str, default='18ifaoe184', help='Database password.')
parser.add_argument('-p', '--port', type=str, default='5432', help='Database port (default: 5432).')
parser.add_argument('-l', '--location', type=str, default='rostock', help='Image data location (default: rostock)')
parser.add_argument('-o', '--overwrite', action='store_true', help='Overwrite image if it already exists.')
parser.add_argument('--linco-path', type=str, default='/usr/local/bin/linco', help='Location of linco executable.')
parser.add_argument('--linco-args', type=str, default='-bits=16 -shadowRecovery=75 -highlightRecovery=75',
help='Set linco arguments (default: -bits=16 -shadowRecovery=75 -highlightRecovery=75).')
parser.add_argument('--linco-help', action='store_true', help='Get linco help (overwrites all other arguments).')
parser.add_argument('--temp-path', type=str, help='Path for temporary files')
parser.add_argument('--compress', action='store_true', help='Enable JPEG compression (default: off).')
parser.add_argument('--opencl', action='store_true', help='Enable OpenCL (default: off, requires working OpenCL setup.).')
args = parser.parse_args()
if args.linco_help:
subprocess.run([args.linco_path, '--help'])
exit(1)
if args.verbose:
logger.setLevel(logging.DEBUG)
# connecting to database
connection = psycopg2.connect(database=args.database, host=args.host, port=args.port, user=args.user, password=args.password)
cursor = connection.cursor()
cursor.execute("SELECT epsg, iiq_path, geo_path, ne_x, ne_y, nw_x, nw_y, sw_x, sw_y, se_x, se_y FROM daisi_dev.gdal_images "
"WHERE location=%s AND session~%s AND transect~%s AND camera~%s AND image~%s",
(args.location, args.session, args.transect, args.camera, args.image))
rows = cursor.fetchall()
row_count = len(rows)
if row_count == 0:
logger.critical('No images match the query {0}'.format(cursor.query))
exit(1)
logger.debug('{0} images match the query {1}'.format(row_count, cursor.query))
connection.commit()
cpu_count = multiprocessing.cpu_count()
thread_count = min(cpu_count, ceil(cpu_count/row_count))
process_count = min(cpu_count, ceil(cpu_count/thread_count))
logger.debug('Found {0} CPUs. Using {1} processes with {2} thread(s) each.'.format(cpu_count, process_count, thread_count))
    Parallel(n_jobs=process_count)(delayed(parallel_process)
                                   (
                                       row, args.linco_path, args.linco_args, thread_count, args.overwrite,
                                       args.temp_path, args.compress, args.opencl, args.verbose
                                   ) for row in rows)
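    # example invocation (hypothetical patterns and paths):
    #   python3 daisi_images.py -s '2016.*' -t 'T0[1-3]' -l rostock --compress --temp-path /tmp/daisi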
| gpl-2.0 | 3,208,551,945,965,820,400 | 52.622449 | 150 | 0.654234 | false | 3.470938 | false | false | false |
fnoorian/Free-buck-boost | drivers/json_server.py | 1 | 1329 | from werkzeug.wrappers import Request, Response
from werkzeug.serving import run_simple
# this uses the json-rpc package (not jsonrpc!)
from jsonrpc import JSONRPCResponseManager, dispatcher
from drivers.boost_driver import FCCBoostDriver
from drivers.buck_driver import FCCBuckDriver, FCCMPPTDriver
from drivers.mighty_driver import MightyWattDriver
@dispatcher.add_method
def get_version():
version = ["fcc_json_server", 1]
return version
@Request.application
def application(request):
dispatcher["mightywatt_readstatus"] = mightywatt.read_status
dispatcher["mightywatt_setpower"] = mightywatt.set_power
dispatcher["charger_readstatus"] = charger.read_status
dispatcher["discharger_readstatus"] = discharger.read_status
dispatcher["mppt_readstatus"] = mppt.read_status
response = JSONRPCResponseManager.handle(
request.data, dispatcher)
return Response(response.json, mimetype='application/json')
if __name__ == '__main__':
mightywatt = MightyWattDriver(u'8533434373835120D1C2')
charger = FCCBoostDriver(u'75439333635351719221')
discharger = FCCBuckDriver(u'75439333635351712071')
mppt = FCCMPPTDriver(u'75439333635351918140')
#run_simple('localhost', 4000, application)
run_simple('0.0.0.0', 4002, application)
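    # usage sketch (assumes the `requests` package; server must be running):
    # import requests
    # payload = {"method": "get_version", "params": [], "jsonrpc": "2.0", "id": 0}
    # print(requests.post("http://localhost:4002/", json=payload).json())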
| bsd-2-clause | -2,386,581,520,251,007,000 | 32.973684 | 64 | 0.734387 | false | 3.339196 | false | false | false |
0sw4l/villas-de-san-pablo | apps/utils/views.py | 1 | 1899 | from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import CreateView
from django.views.generic import ListView
from django.views.generic import TemplateView
from django.views.generic import UpdateView, DetailView
from apps.utils.shortcuts import get_object_or_none
class BaseListView(LoginRequiredMixin, ListView):
pass
class BaseCreateView(LoginRequiredMixin, CreateView):
template_name = 'apps/base/base_form.html'
def get_context_data(self, **kwargs):
context = super(BaseCreateView, self).get_context_data(**kwargs)
context['action'] = 'Crear'
return context
class BaseListViewDinamicHeader(LoginRequiredMixin, ListView):
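    """ListView that renders a table whose header comes from the HEADER tuple;
    'Acciones' (actions) is appended for the per-row action column."""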
context_object_name = "list"
query_fields = ()
HEADER = None
def __init__(self):
super(BaseListViewDinamicHeader, self).__init__()
self.HEADER += ('Acciones',)
def get_queryset(self):
return self.model.objects.all()
def get_context_data(self, **kwargs):
context = super(BaseListViewDinamicHeader, self).get_context_data(**kwargs)
context['header_table'] = self.get_header_table()
return context
def get_header_table(self):
return self.HEADER
class DirectDeleteMixin(object):
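    """Mixin for delete views: treats GET like POST so following a plain link
    removes the object immediately, without a confirmation step."""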
def get(self, request, *args, **kwargs):
return self.post(request, *args, **kwargs)
class BaseUpdateView(LoginRequiredMixin, UpdateView):
template_name = 'apps/base/base_form.html'
def get_context_data(self, **kwargs):
context = super(BaseUpdateView, self).get_context_data(**kwargs)
context['action'] = 'Modificar'
return context
def get_object(self, queryset=None):
obj = self.model.objects.get(id=self.kwargs['pk'])
return obj
class BaseTemplateView(LoginRequiredMixin, TemplateView):
pass
class BaseDetailView(LoginRequiredMixin, DetailView):
pass
| mit | -484,693,667,937,133,630 | 27.772727 | 83 | 0.699315 | false | 3.813253 | false | false | false |
tekton/DocuCanvas | accounts/migrations/0007_auto__add_recordpermission__add_unique_recordpermission_contentType_us.py | 1 | 8125 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'RecordPermission'
db.create_table(u'accounts_recordpermission', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('contentType', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('recordID', self.gf('django.db.models.fields.IntegerField')()),
('canView', self.gf('django.db.models.fields.BooleanField')(default=False)),
('canUpdate', self.gf('django.db.models.fields.BooleanField')(default=False)),
('canDelete', self.gf('django.db.models.fields.BooleanField')(default=False)),
('viewableFields', self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True)),
('updatableFields', self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True)),
))
db.send_create_signal(u'accounts', ['RecordPermission'])
# Adding unique constraint on 'RecordPermission', fields ['contentType', 'user', 'recordID']
db.create_unique(u'accounts_recordpermission', ['contentType_id', 'user_id', 'recordID'])
def backwards(self, orm):
# Removing unique constraint on 'RecordPermission', fields ['contentType', 'user', 'recordID']
db.delete_unique(u'accounts_recordpermission', ['contentType_id', 'user_id', 'recordID'])
# Deleting model 'RecordPermission'
db.delete_table(u'accounts_recordpermission')
models = {
u'accounts.account': {
'Meta': {'object_name': 'Account'},
'avatar': ('django.db.models.fields.CharField', [], {'default': "'/static/img/pony.png'", 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'facebook': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'git_account': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'github_account': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'google_plus': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'accounts.googleaccount': {
'Meta': {'object_name': 'GoogleAccount'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Account']", 'null': 'True'}),
'account_label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'credentials': ('oauth2client.django_orm.CredentialsField', [], {'null': 'True'}),
'google_account_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'accounts.recordpermission': {
'Meta': {'unique_together': "(('contentType', 'user', 'recordID'),)", 'object_name': 'RecordPermission'},
'canDelete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'canUpdate': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'canView': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'contentType': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recordID': ('django.db.models.fields.IntegerField', [], {}),
'updatableFields': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'viewableFields': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
    complete_apps = ['accounts']
| mit | -734,166,846,986,416,400 | 72.872727 | 187 | 0.571323 | false | 3.693182 | false | false | false |
Jumpscale/jumpscale_portal8 | apps/portalbase/AYS81/.macros/wiki/aysservice/3_aysservice.py | 1 | 2580 | from collections import OrderedDict
def main(j, args, params, tags, tasklet):
try:
role = args.getTag('aysrole')
name = args.getTag('aysname')
ayspath = args.getTag('ayspath') or ''
repo = j.atyourservice.repoGet(ayspath)
service = repo.serviceGet(role, name, die=False)
if service:
prods = {}
for prod_role, producers in service.producers.items():
prods.setdefault(prod_role, [])
for producer in producers:
prods[prod_role].append('[{name}|/ays81/Service?aysrole={role}&aysname={name}&ayspath={path}]'.format(
role=prod_role, path=ayspath, name=producer.model.dbobj.name))
parent = {}
if service.parent is not None:
parent['role'] = service.parent.model.role
parent['link'] = '[{name}|/ays81/Service?aysrole={role}&aysname={name}&ayspath={path}]'.format(
role=service.parent.model.role, path=ayspath, name=service.parent.model.dbobj.name)
link_to_template = ('[%s|ays81/ActorTemplate?ayspath=%s&aysname=%s]' % (role,
ayspath, role))
# we prepend service path with '$codedir' to make it work in the explorer.
# because of this line :
# https://github.com/Jumpscale/jumpscale_portal8/blob/master/apps/portalbase/macros/page/explorer/1_main.py#L25
hidden = ['key.priv', 'password', 'passwd', 'pwd', 'oauth.jwt_key', 'keyPriv']
data = j.data.serializer.json.loads(service.model.dataJSON)
data_revised = dict()
for k, v in data.items():
if k.strip() in hidden:
continue
else:
data_revised[k] = v.replace('\\n', '') if isinstance(v, str) else v
args.doc.applyTemplate({
'service': service,
'type': link_to_template,
'data': data_revised,
'name': name,
'role': role,
'producers': OrderedDict(sorted(prods.items())),
'parent': parent,
'actions': service.model.actions,
'reponame': service.aysrepo.name,
})
else:
args.doc.applyTemplate({'error': 'service not found'})
except Exception as e:
args.doc.applyTemplate({'error': e.__str__()})
params.result = (args.doc, args.doc)
return params
| apache-2.0 | 1,651,762,895,446,285,600 | 41.295082 | 123 | 0.526357 | false | 3.944954 | false | false | false |
DailyActie/Surrogate-Model | 01-codes/scikit-learn-master/sklearn/externals/funcsigs.py | 1 | 29802 | # Copyright 2001-2013 Python Software Foundation; All Rights Reserved
"""Function signature objects for callables
Back port of Python 3.3's function signature tools from the inspect module,
modified to be compatible with Python 2.6, 2.7 and 3.2+.
"""
from __future__ import absolute_import, division, print_function
import functools
import itertools
import re
import types
try:
from collections import OrderedDict
except ImportError:
from .odict import OrderedDict
__version__ = "0.4"
__all__ = ['BoundArguments', 'Parameter', 'Signature', 'signature']
_WrapperDescriptor = type(type.__call__)
_MethodWrapper = type(all.__call__)
_NonUserDefinedCallables = (_WrapperDescriptor,
_MethodWrapper,
types.BuiltinFunctionType)
def formatannotation(annotation, base_module=None):
if isinstance(annotation, type):
if annotation.__module__ in ('builtins', '__builtin__', base_module):
return annotation.__name__
return annotation.__module__ + '.' + annotation.__name__
return repr(annotation)
def _get_user_defined_method(cls, method_name, *nested):
try:
if cls is type:
return
meth = getattr(cls, method_name)
for name in nested:
meth = getattr(meth, name, meth)
except AttributeError:
return
else:
if not isinstance(meth, _NonUserDefinedCallables):
# Once '__signature__' will be added to 'C'-level
# callables, this check won't be necessary
return meth
def signature(obj):
'''Get a signature object for the passed callable.'''
if not callable(obj):
raise TypeError('{0!r} is not a callable object'.format(obj))
if isinstance(obj, types.MethodType):
sig = signature(obj.__func__)
if obj.__self__ is None:
# Unbound method: the first parameter becomes positional-only
if sig.parameters:
first = sig.parameters.values()[0].replace(
kind=_POSITIONAL_ONLY)
return sig.replace(
parameters=(first,) + tuple(sig.parameters.values())[1:])
else:
return sig
else:
# In this case we skip the first parameter of the underlying
# function (usually `self` or `cls`).
return sig.replace(parameters=tuple(sig.parameters.values())[1:])
try:
sig = obj.__signature__
except AttributeError:
pass
else:
if sig is not None:
return sig
try:
# Was this function wrapped by a decorator?
wrapped = obj.__wrapped__
except AttributeError:
pass
else:
return signature(wrapped)
if isinstance(obj, types.FunctionType):
return Signature.from_function(obj)
if isinstance(obj, functools.partial):
sig = signature(obj.func)
new_params = OrderedDict(sig.parameters.items())
partial_args = obj.args or ()
partial_keywords = obj.keywords or {}
try:
ba = sig.bind_partial(*partial_args, **partial_keywords)
except TypeError as ex:
msg = 'partial object {0!r} has incorrect arguments'.format(obj)
raise ValueError(msg)
for arg_name, arg_value in ba.arguments.items():
param = new_params[arg_name]
if arg_name in partial_keywords:
# We set a new default value, because the following code
# is correct:
#
# >>> def foo(a): print(a)
# >>> print(partial(partial(foo, a=10), a=20)())
# 20
# >>> print(partial(partial(foo, a=10), a=20)(a=30))
# 30
#
# So, with 'partial' objects, passing a keyword argument is
# like setting a new default value for the corresponding
# parameter
#
# We also mark this parameter with '_partial_kwarg'
# flag. Later, in '_bind', the 'default' value of this
# parameter will be added to 'kwargs', to simulate
# the 'functools.partial' real call.
new_params[arg_name] = param.replace(default=arg_value,
_partial_kwarg=True)
elif (param.kind not in (_VAR_KEYWORD, _VAR_POSITIONAL) and
not param._partial_kwarg):
new_params.pop(arg_name)
return sig.replace(parameters=new_params.values())
sig = None
if isinstance(obj, type):
# obj is a class or a metaclass
# First, let's see if it has an overloaded __call__ defined
# in its metaclass
call = _get_user_defined_method(type(obj), '__call__')
if call is not None:
sig = signature(call)
else:
# Now we check if the 'obj' class has a '__new__' method
new = _get_user_defined_method(obj, '__new__')
if new is not None:
sig = signature(new)
else:
# Finally, we should have at least __init__ implemented
init = _get_user_defined_method(obj, '__init__')
if init is not None:
sig = signature(init)
elif not isinstance(obj, _NonUserDefinedCallables):
# An object with __call__
# We also check that the 'obj' is not an instance of
# _WrapperDescriptor or _MethodWrapper to avoid
# infinite recursion (and even potential segfault)
call = _get_user_defined_method(type(obj), '__call__', 'im_func')
if call is not None:
sig = signature(call)
if sig is not None:
# For classes and objects we skip the first parameter of their
# __call__, __new__, or __init__ methods
return sig.replace(parameters=tuple(sig.parameters.values())[1:])
if isinstance(obj, types.BuiltinFunctionType):
# Raise a nicer error message for builtins
msg = 'no signature found for builtin function {0!r}'.format(obj)
raise ValueError(msg)
raise ValueError('callable {0!r} is not supported by signature'.format(obj))
class _void(object):
'''A private marker - used in Parameter & Signature'''
class _empty(object):
pass
class _ParameterKind(int):
def __new__(self, *args, **kwargs):
obj = int.__new__(self, *args)
obj._name = kwargs['name']
return obj
def __str__(self):
return self._name
def __repr__(self):
return '<_ParameterKind: {0!r}>'.format(self._name)
_POSITIONAL_ONLY = _ParameterKind(0, name='POSITIONAL_ONLY')
_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name='POSITIONAL_OR_KEYWORD')
_VAR_POSITIONAL = _ParameterKind(2, name='VAR_POSITIONAL')
_KEYWORD_ONLY = _ParameterKind(3, name='KEYWORD_ONLY')
_VAR_KEYWORD = _ParameterKind(4, name='VAR_KEYWORD')
class Parameter(object):
'''Represents a parameter in a function signature.
Has the following public attributes:
* name : str
The name of the parameter as a string.
* default : object
The default value for the parameter if specified. If the
parameter has no default value, this attribute is not set.
* annotation
The annotation for the parameter if specified. If the
parameter has no annotation, this attribute is not set.
* kind : str
Describes how argument values are bound to the parameter.
Possible values: `Parameter.POSITIONAL_ONLY`,
`Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`,
`Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`.
'''
__slots__ = ('_name', '_kind', '_default', '_annotation', '_partial_kwarg')
POSITIONAL_ONLY = _POSITIONAL_ONLY
POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD
VAR_POSITIONAL = _VAR_POSITIONAL
KEYWORD_ONLY = _KEYWORD_ONLY
VAR_KEYWORD = _VAR_KEYWORD
empty = _empty
def __init__(self, name, kind, default=_empty, annotation=_empty,
_partial_kwarg=False):
if kind not in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD,
_VAR_POSITIONAL, _KEYWORD_ONLY, _VAR_KEYWORD):
raise ValueError("invalid value for 'Parameter.kind' attribute")
self._kind = kind
if default is not _empty:
if kind in (_VAR_POSITIONAL, _VAR_KEYWORD):
msg = '{0} parameters cannot have default values'.format(kind)
raise ValueError(msg)
self._default = default
self._annotation = annotation
if name is None:
if kind != _POSITIONAL_ONLY:
raise ValueError("None is not a valid name for a "
"non-positional-only parameter")
self._name = name
else:
name = str(name)
if kind != _POSITIONAL_ONLY and not re.match(r'[a-z_]\w*$', name, re.I):
msg = '{0!r} is not a valid parameter name'.format(name)
raise ValueError(msg)
self._name = name
self._partial_kwarg = _partial_kwarg
@property
def name(self):
return self._name
@property
def default(self):
return self._default
@property
def annotation(self):
return self._annotation
@property
def kind(self):
return self._kind
def replace(self, name=_void, kind=_void, annotation=_void,
default=_void, _partial_kwarg=_void):
'''Creates a customized copy of the Parameter.'''
if name is _void:
name = self._name
if kind is _void:
kind = self._kind
if annotation is _void:
annotation = self._annotation
if default is _void:
default = self._default
if _partial_kwarg is _void:
_partial_kwarg = self._partial_kwarg
return type(self)(name, kind, default=default, annotation=annotation,
_partial_kwarg=_partial_kwarg)
def __str__(self):
kind = self.kind
formatted = self._name
if kind == _POSITIONAL_ONLY:
if formatted is None:
formatted = ''
formatted = '<{0}>'.format(formatted)
# Add annotation and default value
if self._annotation is not _empty:
formatted = '{0}:{1}'.format(formatted,
formatannotation(self._annotation))
if self._default is not _empty:
formatted = '{0}={1}'.format(formatted, repr(self._default))
if kind == _VAR_POSITIONAL:
formatted = '*' + formatted
elif kind == _VAR_KEYWORD:
formatted = '**' + formatted
return formatted
def __repr__(self):
return '<{0} at {1:#x} {2!r}>'.format(self.__class__.__name__,
id(self), self.name)
def __hash__(self):
msg = "unhashable type: '{0}'".format(self.__class__.__name__)
raise TypeError(msg)
def __eq__(self, other):
return (issubclass(other.__class__, Parameter) and
self._name == other._name and
self._kind == other._kind and
self._default == other._default and
self._annotation == other._annotation)
def __ne__(self, other):
return not self.__eq__(other)
class BoundArguments(object):
'''Result of `Signature.bind` call. Holds the mapping of arguments
to the function's parameters.
Has the following public attributes:
* arguments : OrderedDict
An ordered mutable mapping of parameters' names to arguments' values.
Does not contain arguments' default values.
* signature : Signature
The Signature object that created this instance.
* args : tuple
Tuple of positional arguments values.
* kwargs : dict
Dict of keyword arguments values.
'''
def __init__(self, signature, arguments):
self.arguments = arguments
self._signature = signature
@property
def signature(self):
return self._signature
@property
def args(self):
args = []
for param_name, param in self._signature.parameters.items():
if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
param._partial_kwarg):
# Keyword arguments mapped by 'functools.partial'
# (Parameter._partial_kwarg is True) are mapped
# in 'BoundArguments.kwargs', along with VAR_KEYWORD &
# KEYWORD_ONLY
break
try:
arg = self.arguments[param_name]
except KeyError:
# We're done here. Other arguments
# will be mapped in 'BoundArguments.kwargs'
break
else:
if param.kind == _VAR_POSITIONAL:
# *args
args.extend(arg)
else:
# plain argument
args.append(arg)
return tuple(args)
@property
def kwargs(self):
kwargs = {}
kwargs_started = False
for param_name, param in self._signature.parameters.items():
if not kwargs_started:
if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
param._partial_kwarg):
kwargs_started = True
else:
if param_name not in self.arguments:
kwargs_started = True
continue
if not kwargs_started:
continue
try:
arg = self.arguments[param_name]
except KeyError:
pass
else:
if param.kind == _VAR_KEYWORD:
# **kwargs
kwargs.update(arg)
else:
# plain keyword argument
kwargs[param_name] = arg
return kwargs
def __hash__(self):
msg = "unhashable type: '{0}'".format(self.__class__.__name__)
raise TypeError(msg)
def __eq__(self, other):
return (issubclass(other.__class__, BoundArguments) and
self.signature == other.signature and
self.arguments == other.arguments)
def __ne__(self, other):
return not self.__eq__(other)
class Signature(object):
'''A Signature object represents the overall signature of a function.
It stores a Parameter object for each parameter accepted by the
function, as well as information specific to the function itself.
A Signature object has the following public attributes and methods:
* parameters : OrderedDict
An ordered mapping of parameters' names to the corresponding
Parameter objects (keyword-only arguments are in the same order
as listed in `code.co_varnames`).
* return_annotation : object
The annotation for the return type of the function if specified.
If the function has no annotation for its return type, this
attribute is not set.
* bind(*args, **kwargs) -> BoundArguments
Creates a mapping from positional and keyword arguments to
parameters.
* bind_partial(*args, **kwargs) -> BoundArguments
Creates a partial mapping from positional and keyword arguments
to parameters (simulating 'functools.partial' behavior.)
'''
__slots__ = ('_return_annotation', '_parameters')
_parameter_cls = Parameter
_bound_arguments_cls = BoundArguments
empty = _empty
def __init__(self, parameters=None, return_annotation=_empty,
__validate_parameters__=True):
'''Constructs Signature from the given list of Parameter
objects and 'return_annotation'. All arguments are optional.
'''
if parameters is None:
params = OrderedDict()
else:
if __validate_parameters__:
params = OrderedDict()
top_kind = _POSITIONAL_ONLY
for idx, param in enumerate(parameters):
kind = param.kind
if kind < top_kind:
msg = 'wrong parameter order: {0} before {1}'
msg = msg.format(top_kind, param.kind)
raise ValueError(msg)
else:
top_kind = kind
name = param.name
if name is None:
name = str(idx)
param = param.replace(name=name)
if name in params:
msg = 'duplicate parameter name: {0!r}'.format(name)
raise ValueError(msg)
params[name] = param
else:
params = OrderedDict(((param.name, param)
for param in parameters))
self._parameters = params
self._return_annotation = return_annotation
@classmethod
def from_function(cls, func):
'''Constructs Signature for the given python function'''
if not isinstance(func, types.FunctionType):
raise TypeError('{0!r} is not a Python function'.format(func))
Parameter = cls._parameter_cls
# Parameter information.
func_code = func.__code__
pos_count = func_code.co_argcount
arg_names = func_code.co_varnames
positional = tuple(arg_names[:pos_count])
keyword_only_count = getattr(func_code, 'co_kwonlyargcount', 0)
keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)]
annotations = getattr(func, '__annotations__', {})
defaults = func.__defaults__
kwdefaults = getattr(func, '__kwdefaults__', None)
if defaults:
pos_default_count = len(defaults)
else:
pos_default_count = 0
parameters = []
# Non-keyword-only parameters w/o defaults.
non_default_count = pos_count - pos_default_count
for name in positional[:non_default_count]:
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_POSITIONAL_OR_KEYWORD))
# ... w/ defaults.
for offset, name in enumerate(positional[non_default_count:]):
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_POSITIONAL_OR_KEYWORD,
default=defaults[offset]))
# *args
if func_code.co_flags & 0x04:
name = arg_names[pos_count + keyword_only_count]
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_VAR_POSITIONAL))
# Keyword-only parameters.
for name in keyword_only:
default = _empty
if kwdefaults is not None:
default = kwdefaults.get(name, _empty)
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_KEYWORD_ONLY,
default=default))
# **kwargs
if func_code.co_flags & 0x08:
index = pos_count + keyword_only_count
if func_code.co_flags & 0x04:
index += 1
name = arg_names[index]
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_VAR_KEYWORD))
return cls(parameters,
return_annotation=annotations.get('return', _empty),
__validate_parameters__=False)
@property
def parameters(self):
try:
return types.MappingProxyType(self._parameters)
except AttributeError:
return OrderedDict(self._parameters.items())
@property
def return_annotation(self):
return self._return_annotation
def replace(self, parameters=_void, return_annotation=_void):
'''Creates a customized copy of the Signature.
Pass 'parameters' and/or 'return_annotation' arguments
to override them in the new copy.
'''
if parameters is _void:
parameters = self.parameters.values()
if return_annotation is _void:
return_annotation = self._return_annotation
return type(self)(parameters,
return_annotation=return_annotation)
def __hash__(self):
msg = "unhashable type: '{0}'".format(self.__class__.__name__)
raise TypeError(msg)
def __eq__(self, other):
if (not issubclass(type(other), Signature) or
self.return_annotation != other.return_annotation or
len(self.parameters) != len(other.parameters)):
return False
other_positions = dict((param, idx)
for idx, param in enumerate(other.parameters.keys()))
for idx, (param_name, param) in enumerate(self.parameters.items()):
if param.kind == _KEYWORD_ONLY:
try:
other_param = other.parameters[param_name]
except KeyError:
return False
else:
if param != other_param:
return False
else:
try:
other_idx = other_positions[param_name]
except KeyError:
return False
else:
if (idx != other_idx or
param != other.parameters[param_name]):
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def _bind(self, args, kwargs, partial=False):
'''Private method. Don't use directly.'''
arguments = OrderedDict()
parameters = iter(self.parameters.values())
parameters_ex = ()
arg_vals = iter(args)
if partial:
# Support for binding arguments to 'functools.partial' objects.
# See 'functools.partial' case in 'signature()' implementation
# for details.
for param_name, param in self.parameters.items():
if (param._partial_kwarg and param_name not in kwargs):
# Simulating 'functools.partial' behavior
kwargs[param_name] = param.default
while True:
# Let's iterate through the positional arguments and corresponding
# parameters
try:
arg_val = next(arg_vals)
except StopIteration:
# No more positional arguments
try:
param = next(parameters)
except StopIteration:
# No more parameters. That's it. Just need to check that
# we have no `kwargs` after this while loop
break
else:
if param.kind == _VAR_POSITIONAL:
# That's OK, just empty *args. Let's start parsing
# kwargs
break
elif param.name in kwargs:
if param.kind == _POSITIONAL_ONLY:
msg = '{arg!r} parameter is positional only, ' \
'but was passed as a keyword'
msg = msg.format(arg=param.name)
raise TypeError(msg)
parameters_ex = (param,)
break
elif (param.kind == _VAR_KEYWORD or
param.default is not _empty):
# That's fine too - we have a default value for this
# parameter. So, lets start parsing `kwargs`, starting
# with the current parameter
parameters_ex = (param,)
break
else:
if partial:
parameters_ex = (param,)
break
else:
msg = '{arg!r} parameter lacking default value'
msg = msg.format(arg=param.name)
raise TypeError(msg)
else:
# We have a positional argument to process
try:
param = next(parameters)
except StopIteration:
raise TypeError('too many positional arguments')
else:
if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
# Looks like we have no parameter for this positional
# argument
raise TypeError('too many positional arguments')
if param.kind == _VAR_POSITIONAL:
# We have an '*args'-like argument, let's fill it with
# all positional arguments we have left and move on to
# the next phase
values = [arg_val]
values.extend(arg_vals)
arguments[param.name] = tuple(values)
break
if param.name in kwargs:
raise TypeError('multiple values for argument '
'{arg!r}'.format(arg=param.name))
arguments[param.name] = arg_val
# Now, we iterate through the remaining parameters to process
# keyword arguments
kwargs_param = None
for param in itertools.chain(parameters_ex, parameters):
if param.kind == _POSITIONAL_ONLY:
# This should never happen in case of a properly built
# Signature object (but let's have this check here
# to ensure correct behaviour just in case)
raise TypeError('{arg!r} parameter is positional only, '
'but was passed as a keyword'. \
format(arg=param.name))
if param.kind == _VAR_KEYWORD:
# Memorize that we have a '**kwargs'-like parameter
kwargs_param = param
continue
param_name = param.name
try:
arg_val = kwargs.pop(param_name)
except KeyError:
# We have no value for this parameter. It's fine though,
# if it has a default value, or it is an '*args'-like
# parameter, left alone by the processing of positional
# arguments.
if (not partial and param.kind != _VAR_POSITIONAL and
param.default is _empty):
raise TypeError('{arg!r} parameter lacking default value'. \
format(arg=param_name))
else:
arguments[param_name] = arg_val
if kwargs:
if kwargs_param is not None:
# Process our '**kwargs'-like parameter
arguments[kwargs_param.name] = kwargs
else:
raise TypeError('too many keyword arguments')
return self._bound_arguments_cls(self, arguments)
def bind(self, *args, **kwargs):
'''Get a BoundArguments object, that maps the passed `args`
and `kwargs` to the function's signature. Raises `TypeError`
if the passed arguments can not be bound.
'''
return self._bind(args, kwargs)
def bind_partial(self, *args, **kwargs):
'''Get a BoundArguments object, that partially maps the
passed `args` and `kwargs` to the function's signature.
Raises `TypeError` if the passed arguments can not be bound.
'''
return self._bind(args, kwargs, partial=True)
def __str__(self):
result = []
render_kw_only_separator = True
for idx, param in enumerate(self.parameters.values()):
formatted = str(param)
kind = param.kind
if kind == _VAR_POSITIONAL:
# OK, we have an '*args'-like parameter, so we won't need
# a '*' to separate keyword-only arguments
render_kw_only_separator = False
elif kind == _KEYWORD_ONLY and render_kw_only_separator:
# We have a keyword-only parameter to render and we haven't
# rendered an '*args'-like parameter before, so add a '*'
# separator to the parameters list ("foo(arg1, *, arg2)" case)
result.append('*')
# This condition should be only triggered once, so
# reset the flag
render_kw_only_separator = False
result.append(formatted)
rendered = '({0})'.format(', '.join(result))
if self.return_annotation is not _empty:
anno = formatannotation(self.return_annotation)
rendered += ' -> {0}'.format(anno)
return rendered
| mit | -1,135,423,231,880,870,100 | 35.432763 | 84 | 0.537648 | false | 4.875982 | false | false | false |
cockroachdb/examples-orms | python/django/cockroach_example/settings.py | 1 | 3362 | """
Django settings for cockroach_example project.
Generated by 'django-admin startproject' using Django 2.2.6.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
from urllib.parse import urlparse
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '0pld^66i)iv4df8km5vc%1^sskuqjf16jk&z=c^rk--oh6i0i^'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'cockroach_example',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'cockroach_example.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'cockroach_example.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
port = 26257
addr = os.getenv('ADDR')
if addr is not None:
url = urlparse(addr)
port = url.port
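# Example (an assumption about how ADDR is provided by the test harness):
# ADDR="postgresql://root@localhost:26257" parses to port 26257; when ADDR is
# unset, the default port of 26257 above is kept.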
DATABASES = {
'default': {
        'ENGINE': 'django_cockroachdb',
        'NAME': 'company_django',
        'USER': 'root',
        'PASSWORD': '',
        'HOST': 'localhost',
        'PORT': port,
}
}
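# The 'django_cockroachdb' ENGINE comes from the third-party
# django-cockroachdb backend package, which must be installed alongside
# Django.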
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
| apache-2.0 | -6,756,807,042,726,074,000 | 24.278195 | 91 | 0.678168 | false | 3.487552 | false | false | false |
jbzdak/data-base-checker | bazydanych2/settingsdev.py | 1 | 1181 |
from bazydanych2.settingsshared import *
DEBUG=True
TEMPLATE_DEBUG=True
STATIC_ROOT = '/tmp/staticfiles'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
        'console': {
            'level': 'DEBUG',
            'filters': ['require_debug_true'],
            'class': 'logging.StreamHandler'
        }
    },
    'root': {
        'handlers': ['console']
    },
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
EMAIL_FILE_PATH = '/tmp/app-messages'
INSTALLED_APPS += ('celery_test_app', )
ALLOW_OFFILNE_GRADING = False
SCHEMA_CHECKER_HOST = '192.168.56.30' | gpl-3.0 | -979,873,808,556,966,700 | 21.730769 | 66 | 0.524979 | false | 3.535928 | false | true | false |
ted-gould/nova | nova/tests/unit/compute/test_compute_mgr.py | 1 | 197801 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for ComputeManager()."""
import contextlib
import time
import uuid
from cinderclient import exceptions as cinder_exception
from eventlet import event as eventlet_event
import mock
from mox3 import mox
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_utils import importutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
import nova
from nova.compute import build_results
from nova.compute import manager
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova.conductor import api as conductor_api
from nova import context
from nova import db
from nova import exception
from nova.network import api as network_api
from nova.network import model as network_model
from nova import objects
from nova.objects import block_device as block_device_obj
from nova import test
from nova.tests.unit.compute import fake_resource_tracker
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_instance
from nova.tests.unit import fake_network_cache_model
from nova.tests.unit import fake_server_actions
from nova.tests.unit.objects import test_instance_fault
from nova.tests.unit.objects import test_instance_info_cache
from nova import utils
from nova.virt import driver as virt_driver
from nova.virt import event as virtevent
from nova.virt import fake as fake_driver
from nova.virt import hardware
CONF = cfg.CONF
CONF.import_opt('compute_manager', 'nova.service')
class ComputeManagerUnitTestCase(test.NoDBTestCase):
def setUp(self):
super(ComputeManagerUnitTestCase, self).setUp()
self.flags(use_local=True, group='conductor')
self.compute = importutils.import_object(CONF.compute_manager)
self.context = context.RequestContext('fake', 'fake')
fake_server_actions.stub_out_action_events(self.stubs)
@mock.patch.object(manager.ComputeManager, '_get_power_state')
@mock.patch.object(manager.ComputeManager, '_sync_instance_power_state')
@mock.patch.object(objects.Instance, 'get_by_uuid')
def _test_handle_lifecycle_event(self, mock_get, mock_sync,
mock_get_power_state, transition,
event_pwr_state, current_pwr_state):
event = mock.Mock()
event.get_instance_uuid.return_value = mock.sentinel.uuid
event.get_transition.return_value = transition
mock_get_power_state.return_value = current_pwr_state
self.compute.handle_lifecycle_event(event)
mock_get.assert_called_with(mock.ANY, mock.sentinel.uuid,
expected_attrs=[])
if event_pwr_state == current_pwr_state:
mock_sync.assert_called_with(mock.ANY, mock_get.return_value,
event_pwr_state)
else:
self.assertFalse(mock_sync.called)
def test_handle_lifecycle_event(self):
event_map = {virtevent.EVENT_LIFECYCLE_STOPPED: power_state.SHUTDOWN,
virtevent.EVENT_LIFECYCLE_STARTED: power_state.RUNNING,
virtevent.EVENT_LIFECYCLE_PAUSED: power_state.PAUSED,
virtevent.EVENT_LIFECYCLE_RESUMED: power_state.RUNNING,
virtevent.EVENT_LIFECYCLE_SUSPENDED:
power_state.SUSPENDED,
}
for transition, pwr_state in six.iteritems(event_map):
self._test_handle_lifecycle_event(transition=transition,
event_pwr_state=pwr_state,
current_pwr_state=pwr_state)
def test_handle_lifecycle_event_state_mismatch(self):
self._test_handle_lifecycle_event(
transition=virtevent.EVENT_LIFECYCLE_STOPPED,
event_pwr_state=power_state.SHUTDOWN,
current_pwr_state=power_state.RUNNING)
def test_delete_instance_info_cache_delete_ordering(self):
call_tracker = mock.Mock()
call_tracker.clear_events_for_instance.return_value = None
mgr_class = self.compute.__class__
orig_delete = mgr_class._delete_instance
specd_compute = mock.create_autospec(mgr_class)
# spec out everything except for the method we really want
# to test, then use call_tracker to verify call sequence
specd_compute._delete_instance = orig_delete
mock_inst = mock.Mock()
mock_inst.uuid = 'inst-1'
mock_inst.save = mock.Mock()
mock_inst.destroy = mock.Mock()
mock_inst.system_metadata = mock.Mock()
def _mark_notify(*args, **kwargs):
call_tracker._notify_about_instance_usage(*args, **kwargs)
def _mark_shutdown(*args, **kwargs):
call_tracker._shutdown_instance(*args, **kwargs)
specd_compute.instance_events = call_tracker
specd_compute._notify_about_instance_usage = _mark_notify
specd_compute._shutdown_instance = _mark_shutdown
mock_inst.info_cache = call_tracker
specd_compute._delete_instance(specd_compute,
self.context,
mock_inst,
mock.Mock(),
mock.Mock())
methods_called = [n for n, a, k in call_tracker.mock_calls]
self.assertEqual(['clear_events_for_instance',
'_notify_about_instance_usage',
'_shutdown_instance', 'delete'],
methods_called)
@mock.patch.object(manager.ComputeManager, '_get_resource_tracker')
@mock.patch.object(fake_driver.FakeDriver, 'get_available_nodes')
@mock.patch.object(manager.ComputeManager, '_get_compute_nodes_in_db')
def test_update_available_resource(self, get_db_nodes, get_avail_nodes,
get_rt):
info = {'cn_id': 1}
def _make_compute_node(hyp_hostname):
cn = mock.Mock(spec_set=['hypervisor_hostname', 'id',
'destroy'])
cn.id = info['cn_id']
info['cn_id'] += 1
cn.hypervisor_hostname = hyp_hostname
return cn
def _make_rt(node):
n = mock.Mock(spec_set=['update_available_resource',
'nodename'])
n.nodename = node
return n
ctxt = mock.Mock()
db_nodes = [_make_compute_node('node1'),
_make_compute_node('node2'),
_make_compute_node('node3'),
_make_compute_node('node4')]
avail_nodes = set(['node2', 'node3', 'node4', 'node5'])
avail_nodes_l = list(avail_nodes)
rts = [_make_rt(node) for node in avail_nodes_l]
# Make the 2nd and 3rd ones raise
exc = exception.ComputeHostNotFound(host='fake')
rts[1].update_available_resource.side_effect = exc
exc = test.TestingException()
rts[2].update_available_resource.side_effect = exc
rts_iter = iter(rts)
def _get_rt_side_effect(*args, **kwargs):
return next(rts_iter)
expected_rt_dict = {avail_nodes_l[0]: rts[0],
avail_nodes_l[2]: rts[2],
avail_nodes_l[3]: rts[3]}
get_db_nodes.return_value = db_nodes
get_avail_nodes.return_value = avail_nodes
get_rt.side_effect = _get_rt_side_effect
self.compute.update_available_resource(ctxt)
get_db_nodes.assert_called_once_with(ctxt, use_slave=True)
self.assertEqual([mock.call(node) for node in avail_nodes],
get_rt.call_args_list)
for rt in rts:
rt.update_available_resource.assert_called_once_with(ctxt)
self.assertEqual(expected_rt_dict,
self.compute._resource_tracker_dict)
# First node in set should have been removed from DB
for db_node in db_nodes:
if db_node.hypervisor_hostname == 'node1':
db_node.destroy.assert_called_once_with()
else:
self.assertFalse(db_node.destroy.called)
def test_delete_instance_without_info_cache(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake',
vm_state=vm_states.ERROR,
host=self.compute.host,
expected_attrs=['system_metadata'])
quotas = mock.create_autospec(objects.Quotas, spec_set=True)
with contextlib.nested(
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute, '_shutdown_instance'),
mock.patch.object(instance, 'obj_load_attr'),
mock.patch.object(instance, 'save'),
mock.patch.object(instance, 'destroy')
) as (
            compute_notify_about_instance_usage, compute_shutdown_instance,
instance_obj_load_attr, instance_save, instance_destroy
):
instance.info_cache = None
self.compute._delete_instance(self.context, instance, [], quotas)
@mock.patch.object(network_api.API, 'allocate_for_instance')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(time, 'sleep')
def test_allocate_network_succeeds_after_retries(
self, mock_sleep, mock_save, mock_allocate_for_instance):
self.flags(network_allocate_retries=8)
instance = fake_instance.fake_instance_obj(
self.context, expected_attrs=['system_metadata'])
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
final_result = 'meow'
dhcp_options = None
mock_allocate_for_instance.side_effect = [
test.TestingException()] * 7 + [final_result]
        # Seven failed attempts trigger seven sleeps, doubling each time and
        # capped at 30 seconds.
        expected_sleep_times = [1, 2, 4, 8, 16, 30, 30]
res = self.compute._allocate_network_async(self.context, instance,
req_networks,
macs,
sec_groups,
is_vpn,
dhcp_options)
        # Mock.has_calls() is not an assertion method (it silently records a
        # call and verifies nothing); assert_has_calls() actually checks the
        # sleep sequence.
        mock_sleep.assert_has_calls(
            [mock.call(t) for t in expected_sleep_times])
self.assertEqual(final_result, res)
        # Ensure save is not called while allocating networks; the instance
        # is saved after the allocation.
self.assertFalse(mock_save.called)
self.assertEqual('True', instance.system_metadata['network_allocated'])
def test_allocate_network_fails(self):
self.flags(network_allocate_retries=0)
nwapi = self.compute.network_api
self.mox.StubOutWithMock(nwapi, 'allocate_for_instance')
instance = {}
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
dhcp_options = None
nwapi.allocate_for_instance(
self.context, instance, vpn=is_vpn,
requested_networks=req_networks, macs=macs,
security_groups=sec_groups,
dhcp_options=dhcp_options).AndRaise(test.TestingException())
self.mox.ReplayAll()
self.assertRaises(test.TestingException,
self.compute._allocate_network_async,
self.context, instance, req_networks, macs,
sec_groups, is_vpn, dhcp_options)
def test_allocate_network_neg_conf_value_treated_as_zero(self):
self.flags(network_allocate_retries=-1)
nwapi = self.compute.network_api
self.mox.StubOutWithMock(nwapi, 'allocate_for_instance')
instance = {}
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
dhcp_options = None
# Only attempted once.
nwapi.allocate_for_instance(
self.context, instance, vpn=is_vpn,
requested_networks=req_networks, macs=macs,
security_groups=sec_groups,
dhcp_options=dhcp_options).AndRaise(test.TestingException())
self.mox.ReplayAll()
self.assertRaises(test.TestingException,
self.compute._allocate_network_async,
self.context, instance, req_networks, macs,
sec_groups, is_vpn, dhcp_options)
@mock.patch.object(network_api.API, 'allocate_for_instance')
@mock.patch.object(manager.ComputeManager, '_instance_update')
@mock.patch.object(time, 'sleep')
def test_allocate_network_with_conf_value_is_one(
self, sleep, _instance_update, allocate_for_instance):
self.flags(network_allocate_retries=1)
instance = fake_instance.fake_instance_obj(
self.context, expected_attrs=['system_metadata'])
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
dhcp_options = None
final_result = 'zhangtralon'
allocate_for_instance.side_effect = [test.TestingException(),
final_result]
res = self.compute._allocate_network_async(self.context, instance,
req_networks,
macs,
sec_groups,
is_vpn,
dhcp_options)
self.assertEqual(final_result, res)
self.assertEqual(1, sleep.call_count)
@mock.patch('nova.utils.spawn_n')
@mock.patch('nova.compute.manager.ComputeManager.'
'_do_build_and_run_instance')
def _test_max_concurrent_builds(self, mock_dbari, mock_spawn):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
with mock.patch.object(self.compute,
'_build_semaphore') as mock_sem:
instance = objects.Instance(uuid=str(uuid.uuid4()))
for i in (1, 2, 3):
self.compute.build_and_run_instance(self.context, instance,
mock.sentinel.image,
mock.sentinel.request_spec,
{})
self.assertEqual(3, mock_sem.__enter__.call_count)
def test_max_concurrent_builds_limited(self):
self.flags(max_concurrent_builds=2)
self._test_max_concurrent_builds()
def test_max_concurrent_builds_unlimited(self):
self.flags(max_concurrent_builds=0)
self._test_max_concurrent_builds()
def test_max_concurrent_builds_semaphore_limited(self):
self.flags(max_concurrent_builds=123)
self.assertEqual(123,
manager.ComputeManager()._build_semaphore.balance)
def test_max_concurrent_builds_semaphore_unlimited(self):
self.flags(max_concurrent_builds=0)
compute = manager.ComputeManager()
self.assertEqual(0, compute._build_semaphore.balance)
self.assertIsInstance(compute._build_semaphore,
compute_utils.UnlimitedSemaphore)
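    # Sketch of the behaviour under test (assumed manager internals, not
    # verbatim nova code): build_and_run_instance spawns a worker that runs
    # roughly
    #     with self._build_semaphore:
    #         self._do_build_and_run_instance(...)
    # so a bounded semaphore caps concurrent builds, while
    # max_concurrent_builds=0 substitutes compute_utils.UnlimitedSemaphore,
    # which never blocks and whose balance stays 0.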
def test_nil_out_inst_obj_host_and_node_sets_nil(self):
instance = fake_instance.fake_instance_obj(self.context,
uuid='foo-uuid',
host='foo-host',
node='foo-node')
self.assertIsNotNone(instance.host)
self.assertIsNotNone(instance.node)
self.compute._nil_out_instance_obj_host_and_node(instance)
self.assertIsNone(instance.host)
self.assertIsNone(instance.node)
def test_init_host(self):
our_host = self.compute.host
inst = fake_instance.fake_db_instance(
vm_state=vm_states.ACTIVE,
info_cache=dict(test_instance_info_cache.fake_info_cache,
network_info=None),
security_groups=None)
startup_instances = [inst, inst, inst]
def _do_mock_calls(defer_iptables_apply):
self.compute.driver.init_host(host=our_host)
context.get_admin_context().AndReturn(self.context)
db.instance_get_all_by_host(
self.context, our_host, columns_to_join=['info_cache'],
use_slave=False
).AndReturn(startup_instances)
if defer_iptables_apply:
self.compute.driver.filter_defer_apply_on()
self.compute._destroy_evacuated_instances(self.context)
self.compute._init_instance(self.context,
mox.IsA(objects.Instance))
self.compute._init_instance(self.context,
mox.IsA(objects.Instance))
self.compute._init_instance(self.context,
mox.IsA(objects.Instance))
if defer_iptables_apply:
self.compute.driver.filter_defer_apply_off()
self.mox.StubOutWithMock(self.compute.driver, 'init_host')
self.mox.StubOutWithMock(self.compute.driver,
'filter_defer_apply_on')
self.mox.StubOutWithMock(self.compute.driver,
'filter_defer_apply_off')
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
self.mox.StubOutWithMock(context, 'get_admin_context')
self.mox.StubOutWithMock(self.compute,
'_destroy_evacuated_instances')
self.mox.StubOutWithMock(self.compute,
'_init_instance')
# Test with defer_iptables_apply
self.flags(defer_iptables_apply=True)
_do_mock_calls(True)
self.mox.ReplayAll()
self.compute.init_host()
self.mox.VerifyAll()
# Test without defer_iptables_apply
self.mox.ResetAll()
self.flags(defer_iptables_apply=False)
_do_mock_calls(False)
self.mox.ReplayAll()
self.compute.init_host()
# tearDown() uses context.get_admin_context(), so we have
# to do the verification here and unstub it.
self.mox.VerifyAll()
self.mox.UnsetStubs()
@mock.patch('nova.objects.InstanceList')
@mock.patch('nova.objects.MigrationList.get_by_filters')
def test_cleanup_host(self, mock_miglist_get, mock_instance_list):
        # Just test that invoking cleanup_host calls through to the
        # underlying driver's equivalent method.
mock_miglist_get.return_value = []
mock_instance_list.get_by_host.return_value = []
with mock.patch.object(self.compute, 'driver') as mock_driver:
self.compute.init_host()
mock_driver.init_host.assert_called_once_with(host='fake-mini')
self.compute.cleanup_host()
# register_event_listener is called on startup (init_host) and
# in cleanup_host
mock_driver.register_event_listener.assert_has_calls([
mock.call(self.compute.handle_events), mock.call(None)])
mock_driver.cleanup_host.assert_called_once_with(host='fake-mini')
def test_init_virt_events_disabled(self):
self.flags(handle_virt_lifecycle_events=False, group='workarounds')
with mock.patch.object(self.compute.driver,
'register_event_listener') as mock_register:
self.compute.init_virt_events()
self.assertFalse(mock_register.called)
@mock.patch('nova.objects.MigrationList.get_by_filters')
@mock.patch('nova.objects.Migration.save')
def test_init_host_with_evacuated_instance(self, mock_save, mock_mig_get):
our_host = self.compute.host
not_our_host = 'not-' + our_host
deleted_instance = fake_instance.fake_instance_obj(
self.context, host=not_our_host, uuid='fake-uuid')
migration = objects.Migration(instance_uuid=deleted_instance.uuid)
mock_mig_get.return_value = [migration]
self.mox.StubOutWithMock(self.compute.driver, 'init_host')
self.mox.StubOutWithMock(self.compute.driver, 'destroy')
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
self.mox.StubOutWithMock(context, 'get_admin_context')
self.mox.StubOutWithMock(self.compute, 'init_virt_events')
self.mox.StubOutWithMock(self.compute, '_get_instances_on_driver')
self.mox.StubOutWithMock(self.compute, '_init_instance')
self.mox.StubOutWithMock(self.compute.network_api,
'get_instance_nw_info')
self.compute.driver.init_host(host=our_host)
context.get_admin_context().AndReturn(self.context)
db.instance_get_all_by_host(self.context, our_host,
columns_to_join=['info_cache'],
use_slave=False
).AndReturn([])
self.compute.init_virt_events()
# simulate failed instance
self.compute._get_instances_on_driver(
self.context, {'deleted': False}).AndReturn([deleted_instance])
self.compute.network_api.get_instance_nw_info(
self.context, deleted_instance).AndRaise(
exception.InstanceNotFound(instance_id=deleted_instance['uuid']))
# ensure driver.destroy is called so that driver may
# clean up any dangling files
self.compute.driver.destroy(self.context, deleted_instance,
mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
self.mox.ReplayAll()
self.compute.init_host()
# tearDown() uses context.get_admin_context(), so we have
# to do the verification here and unstub it.
self.mox.VerifyAll()
self.mox.UnsetStubs()
def test_init_instance_with_binding_failed_vif_type(self):
# this instance will plug a 'binding_failed' vif
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake-uuid',
info_cache=None,
power_state=power_state.RUNNING,
vm_state=vm_states.ACTIVE,
task_state=None,
host=self.compute.host,
expected_attrs=['info_cache'])
with contextlib.nested(
mock.patch.object(context, 'get_admin_context',
return_value=self.context),
mock.patch.object(compute_utils, 'get_nw_info_for_instance',
return_value=network_model.NetworkInfo()),
mock.patch.object(self.compute.driver, 'plug_vifs',
side_effect=exception.VirtualInterfacePlugException(
"Unexpected vif_type=binding_failed")),
mock.patch.object(self.compute, '_set_instance_obj_error_state')
) as (get_admin_context, get_nw_info, plug_vifs, set_error_state):
self.compute._init_instance(self.context, instance)
set_error_state.assert_called_once_with(self.context, instance)
def test__get_power_state_InstanceNotFound(self):
instance = fake_instance.fake_instance_obj(
self.context,
power_state=power_state.RUNNING)
with mock.patch.object(self.compute.driver,
'get_info',
side_effect=exception.InstanceNotFound(instance_id=1)):
self.assertEqual(self.compute._get_power_state(self.context,
instance),
power_state.NOSTATE)
def test__get_power_state_NotFound(self):
instance = fake_instance.fake_instance_obj(
self.context,
power_state=power_state.RUNNING)
with mock.patch.object(self.compute.driver,
'get_info',
side_effect=exception.NotFound()):
self.assertRaises(exception.NotFound,
self.compute._get_power_state,
self.context, instance)
def test_init_instance_failed_resume_sets_error(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake-uuid',
info_cache=None,
power_state=power_state.RUNNING,
vm_state=vm_states.ACTIVE,
task_state=None,
host=self.compute.host,
expected_attrs=['info_cache'])
self.flags(resume_guests_state_on_host_boot=True)
self.mox.StubOutWithMock(self.compute, '_get_power_state')
self.mox.StubOutWithMock(self.compute.driver, 'plug_vifs')
self.mox.StubOutWithMock(self.compute.driver,
'resume_state_on_host_boot')
self.mox.StubOutWithMock(self.compute,
'_get_instance_block_device_info')
self.mox.StubOutWithMock(self.compute,
'_set_instance_obj_error_state')
self.compute._get_power_state(mox.IgnoreArg(),
instance).AndReturn(power_state.SHUTDOWN)
self.compute._get_power_state(mox.IgnoreArg(),
instance).AndReturn(power_state.SHUTDOWN)
self.compute._get_power_state(mox.IgnoreArg(),
instance).AndReturn(power_state.SHUTDOWN)
self.compute.driver.plug_vifs(instance, mox.IgnoreArg())
self.compute._get_instance_block_device_info(mox.IgnoreArg(),
instance).AndReturn('fake-bdm')
self.compute.driver.resume_state_on_host_boot(mox.IgnoreArg(),
instance, mox.IgnoreArg(),
'fake-bdm').AndRaise(test.TestingException)
self.compute._set_instance_obj_error_state(mox.IgnoreArg(), instance)
self.mox.ReplayAll()
self.compute._init_instance('fake-context', instance)
def test_init_instance_stuck_in_deleting(self):
instance = fake_instance.fake_instance_obj(
self.context,
project_id='fake',
uuid='fake-uuid',
vcpus=1,
memory_mb=64,
power_state=power_state.RUNNING,
vm_state=vm_states.ACTIVE,
host=self.compute.host,
task_state=task_states.DELETING)
self.mox.StubOutWithMock(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
self.mox.StubOutWithMock(self.compute, '_delete_instance')
self.mox.StubOutWithMock(instance, 'obj_load_attr')
self.mox.StubOutWithMock(self.compute, '_create_reservations')
bdms = []
quotas = objects.quotas.Quotas(self.context)
instance.obj_load_attr('metadata')
instance.obj_load_attr('system_metadata')
objects.BlockDeviceMappingList.get_by_instance_uuid(
self.context, instance.uuid).AndReturn(bdms)
self.compute._create_reservations(self.context, instance,
instance.project_id,
instance.user_id).AndReturn(quotas)
self.compute._delete_instance(self.context, instance, bdms,
mox.IgnoreArg())
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
@mock.patch.object(objects.Instance, 'get_by_uuid')
@mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid')
def test_init_instance_stuck_in_deleting_raises_exception(
self, mock_get_by_instance_uuid, mock_get_by_uuid):
instance = fake_instance.fake_instance_obj(
self.context,
project_id='fake',
uuid='fake-uuid',
vcpus=1,
memory_mb=64,
metadata={},
system_metadata={},
host=self.compute.host,
vm_state=vm_states.ACTIVE,
task_state=task_states.DELETING,
expected_attrs=['metadata', 'system_metadata'])
bdms = []
reservations = ['fake-resv']
def _create_patch(name, attr):
patcher = mock.patch.object(name, attr)
mocked_obj = patcher.start()
self.addCleanup(patcher.stop)
return mocked_obj
mock_delete_instance = _create_patch(self.compute, '_delete_instance')
mock_set_instance_error_state = _create_patch(
self.compute, '_set_instance_obj_error_state')
mock_create_reservations = _create_patch(self.compute,
'_create_reservations')
mock_create_reservations.return_value = reservations
mock_get_by_instance_uuid.return_value = bdms
mock_get_by_uuid.return_value = instance
mock_delete_instance.side_effect = test.TestingException('test')
self.compute._init_instance(self.context, instance)
mock_set_instance_error_state.assert_called_once_with(
self.context, instance)
def _test_init_instance_reverts_crashed_migrations(self,
old_vm_state=None):
power_on = True if (not old_vm_state or
old_vm_state == vm_states.ACTIVE) else False
sys_meta = {
'old_vm_state': old_vm_state
}
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=vm_states.ERROR,
task_state=task_states.RESIZE_MIGRATING,
power_state=power_state.SHUTDOWN,
system_metadata=sys_meta,
host=self.compute.host,
expected_attrs=['system_metadata'])
self.mox.StubOutWithMock(compute_utils, 'get_nw_info_for_instance')
self.mox.StubOutWithMock(self.compute.driver, 'plug_vifs')
self.mox.StubOutWithMock(self.compute.driver,
'finish_revert_migration')
self.mox.StubOutWithMock(self.compute,
'_get_instance_block_device_info')
self.mox.StubOutWithMock(self.compute.driver, 'get_info')
self.mox.StubOutWithMock(instance, 'save')
self.mox.StubOutWithMock(self.compute, '_retry_reboot')
self.compute._retry_reboot(self.context, instance).AndReturn(
(False, None))
compute_utils.get_nw_info_for_instance(instance).AndReturn(
network_model.NetworkInfo())
self.compute.driver.plug_vifs(instance, [])
self.compute._get_instance_block_device_info(
self.context, instance).AndReturn([])
self.compute.driver.finish_revert_migration(self.context, instance,
[], [], power_on)
instance.save()
self.compute.driver.get_info(instance).AndReturn(
hardware.InstanceInfo(state=power_state.SHUTDOWN))
self.compute.driver.get_info(instance).AndReturn(
hardware.InstanceInfo(state=power_state.SHUTDOWN))
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
self.assertIsNone(instance.task_state)
def test_init_instance_reverts_crashed_migration_from_active(self):
self._test_init_instance_reverts_crashed_migrations(
old_vm_state=vm_states.ACTIVE)
def test_init_instance_reverts_crashed_migration_from_stopped(self):
self._test_init_instance_reverts_crashed_migrations(
old_vm_state=vm_states.STOPPED)
def test_init_instance_reverts_crashed_migration_no_old_state(self):
self._test_init_instance_reverts_crashed_migrations(old_vm_state=None)
def test_init_instance_resets_crashed_live_migration(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=vm_states.ACTIVE,
host=self.compute.host,
task_state=task_states.MIGRATING)
with contextlib.nested(
mock.patch.object(instance, 'save'),
mock.patch('nova.compute.utils.get_nw_info_for_instance',
return_value=network_model.NetworkInfo())
) as (save, get_nw_info):
self.compute._init_instance(self.context, instance)
save.assert_called_once_with(expected_task_state=['migrating'])
get_nw_info.assert_called_once_with(instance)
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
def _test_init_instance_sets_building_error(self, vm_state,
task_state=None):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=vm_state,
host=self.compute.host,
task_state=task_state)
with mock.patch.object(instance, 'save') as save:
self.compute._init_instance(self.context, instance)
save.assert_called_once_with()
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ERROR, instance.vm_state)
def test_init_instance_sets_building_error(self):
self._test_init_instance_sets_building_error(vm_states.BUILDING)
def test_init_instance_sets_rebuilding_errors(self):
tasks = [task_states.REBUILDING,
task_states.REBUILD_BLOCK_DEVICE_MAPPING,
task_states.REBUILD_SPAWNING]
vms = [vm_states.ACTIVE, vm_states.STOPPED]
for vm_state in vms:
for task_state in tasks:
self._test_init_instance_sets_building_error(
vm_state, task_state)
def _test_init_instance_sets_building_tasks_error(self, instance):
instance.host = self.compute.host
with mock.patch.object(instance, 'save') as save:
self.compute._init_instance(self.context, instance)
save.assert_called_once_with()
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ERROR, instance.vm_state)
def test_init_instance_sets_building_tasks_error_scheduling(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=None,
task_state=task_states.SCHEDULING)
self._test_init_instance_sets_building_tasks_error(instance)
def test_init_instance_sets_building_tasks_error_block_device(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = None
instance.task_state = task_states.BLOCK_DEVICE_MAPPING
self._test_init_instance_sets_building_tasks_error(instance)
def test_init_instance_sets_building_tasks_error_networking(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = None
instance.task_state = task_states.NETWORKING
self._test_init_instance_sets_building_tasks_error(instance)
def test_init_instance_sets_building_tasks_error_spawning(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = None
instance.task_state = task_states.SPAWNING
self._test_init_instance_sets_building_tasks_error(instance)
def _test_init_instance_cleans_image_states(self, instance):
with mock.patch.object(instance, 'save') as save:
self.compute._get_power_state = mock.Mock()
self.compute.driver.post_interrupted_snapshot_cleanup = mock.Mock()
instance.info_cache = None
instance.power_state = power_state.RUNNING
instance.host = self.compute.host
self.compute._init_instance(self.context, instance)
save.assert_called_once_with()
self.compute.driver.post_interrupted_snapshot_cleanup.\
assert_called_once_with(self.context, instance)
self.assertIsNone(instance.task_state)
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.RUNNING)
@mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid')
def _test_init_instance_cleans_task_states(self, powerstate, state,
mock_get_uuid, mock_get_power_state):
instance = objects.Instance(self.context)
instance.uuid = 'fake-uuid'
instance.info_cache = None
instance.power_state = power_state.RUNNING
instance.vm_state = vm_states.ACTIVE
instance.task_state = state
instance.host = self.compute.host
mock_get_power_state.return_value = powerstate
self.compute._init_instance(self.context, instance)
return instance
def test_init_instance_cleans_image_state_pending_upload(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_PENDING_UPLOAD
self._test_init_instance_cleans_image_states(instance)
def test_init_instance_cleans_image_state_uploading(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_UPLOADING
self._test_init_instance_cleans_image_states(instance)
def test_init_instance_cleans_image_state_snapshot(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_SNAPSHOT
self._test_init_instance_cleans_image_states(instance)
def test_init_instance_cleans_image_state_snapshot_pending(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_SNAPSHOT_PENDING
self._test_init_instance_cleans_image_states(instance)
@mock.patch.object(objects.Instance, 'save')
def test_init_instance_cleans_running_pausing(self, mock_save):
instance = self._test_init_instance_cleans_task_states(
power_state.RUNNING, task_states.PAUSING)
mock_save.assert_called_once_with()
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
@mock.patch.object(objects.Instance, 'save')
def test_init_instance_cleans_running_unpausing(self, mock_save):
instance = self._test_init_instance_cleans_task_states(
power_state.RUNNING, task_states.UNPAUSING)
mock_save.assert_called_once_with()
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
@mock.patch('nova.compute.manager.ComputeManager.unpause_instance')
def test_init_instance_cleans_paused_unpausing(self, mock_unpause):
def fake_unpause(context, instance):
instance.task_state = None
mock_unpause.side_effect = fake_unpause
instance = self._test_init_instance_cleans_task_states(
power_state.PAUSED, task_states.UNPAUSING)
mock_unpause.assert_called_once_with(self.context, instance)
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
def test_init_instance_errors_when_not_migrating(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ERROR
instance.task_state = task_states.IMAGE_UPLOADING
instance.host = self.compute.host
self.mox.StubOutWithMock(compute_utils, 'get_nw_info_for_instance')
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
self.mox.VerifyAll()
def test_init_instance_deletes_error_deleting_instance(self):
instance = fake_instance.fake_instance_obj(
self.context,
project_id='fake',
uuid='fake-uuid',
vcpus=1,
memory_mb=64,
vm_state=vm_states.ERROR,
host=self.compute.host,
task_state=task_states.DELETING)
self.mox.StubOutWithMock(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
self.mox.StubOutWithMock(self.compute, '_delete_instance')
self.mox.StubOutWithMock(instance, 'obj_load_attr')
self.mox.StubOutWithMock(objects.quotas, 'ids_from_instance')
self.mox.StubOutWithMock(self.compute, '_create_reservations')
bdms = []
quotas = objects.quotas.Quotas(self.context)
instance.obj_load_attr('metadata')
instance.obj_load_attr('system_metadata')
objects.BlockDeviceMappingList.get_by_instance_uuid(
self.context, instance.uuid).AndReturn(bdms)
objects.quotas.ids_from_instance(self.context, instance).AndReturn(
(instance.project_id, instance.user_id))
self.compute._create_reservations(self.context, instance,
instance.project_id,
instance.user_id).AndReturn(quotas)
self.compute._delete_instance(self.context, instance, bdms,
mox.IgnoreArg())
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
self.mox.VerifyAll()
def test_init_instance_resize_prep(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake',
vm_state=vm_states.ACTIVE,
host=self.compute.host,
task_state=task_states.RESIZE_PREP,
power_state=power_state.RUNNING)
with contextlib.nested(
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(compute_utils, 'get_nw_info_for_instance'),
mock.patch.object(instance, 'save', autospec=True)
) as (mock_get_power_state, mock_nw_info, mock_instance_save):
self.compute._init_instance(self.context, instance)
mock_instance_save.assert_called_once_with()
self.assertIsNone(instance.task_state)
@mock.patch('nova.context.RequestContext.elevated')
@mock.patch('nova.compute.utils.get_nw_info_for_instance')
@mock.patch(
'nova.compute.manager.ComputeManager._get_instance_block_device_info')
@mock.patch('nova.virt.driver.ComputeDriver.destroy')
@mock.patch('nova.virt.driver.ComputeDriver.get_volume_connector')
def _test_shutdown_instance_exception(self, exc, mock_connector,
mock_destroy, mock_blk_device_info, mock_nw_info, mock_elevated):
mock_connector.side_effect = exc
mock_elevated.return_value = self.context
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake',
vm_state=vm_states.ERROR,
task_state=task_states.DELETING)
bdms = [mock.Mock(id=1, is_volume=True)]
self.compute._shutdown_instance(self.context, instance, bdms,
notify=False, try_deallocate_networks=False)
def test_shutdown_instance_endpoint_not_found(self):
exc = cinder_exception.EndpointNotFound
self._test_shutdown_instance_exception(exc)
def test_shutdown_instance_client_exception(self):
exc = cinder_exception.ClientException
self._test_shutdown_instance_exception(exc)
def test_shutdown_instance_volume_not_found(self):
exc = exception.VolumeNotFound
self._test_shutdown_instance_exception(exc)
def test_shutdown_instance_disk_not_found(self):
exc = exception.DiskNotFound
self._test_shutdown_instance_exception(exc)
def _test_init_instance_retries_reboot(self, instance, reboot_type,
return_power_state):
instance.host = self.compute.host
with contextlib.nested(
mock.patch.object(self.compute, '_get_power_state',
return_value=return_power_state),
mock.patch.object(self.compute, 'reboot_instance'),
mock.patch.object(compute_utils, 'get_nw_info_for_instance')
) as (
_get_power_state,
reboot_instance,
get_nw_info_for_instance
):
self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance, block_device_info=None,
reboot_type=reboot_type)
reboot_instance.assert_has_calls([call])
def test_init_instance_retries_reboot_pending(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING
for state in vm_states.ALLOW_SOFT_REBOOT:
instance.vm_state = state
self._test_init_instance_retries_reboot(instance, 'SOFT',
power_state.RUNNING)
def test_init_instance_retries_reboot_pending_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING_HARD
for state in vm_states.ALLOW_HARD_REBOOT:
            # NOTE(dave-mcnally) while a reboot of a vm in error state is
            # possible, we don't attempt to recover an error during init
if state == vm_states.ERROR:
continue
instance.vm_state = state
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.RUNNING)
def test_init_instance_retries_reboot_pending_soft_became_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING
for state in vm_states.ALLOW_HARD_REBOOT:
            # NOTE(dave-mcnally) while a reboot of a vm in error state is
            # possible, we don't attempt to recover an error during init
if state == vm_states.ERROR:
continue
instance.vm_state = state
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.SHUTDOWN)
self.assertEqual(task_states.REBOOT_PENDING_HARD,
instance.task_state)
def test_init_instance_retries_reboot_started(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.NOSTATE)
def test_init_instance_retries_reboot_started_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED_HARD
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.NOSTATE)
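    # Retry matrix exercised above (derived from the assertions):
    # REBOOT_PENDING with a running guest retries a SOFT reboot;
    # REBOOT_PENDING_HARD retries a HARD reboot; a pending soft reboot found
    # powered off is escalated to REBOOT_PENDING_HARD; REBOOT_STARTED and
    # REBOOT_STARTED_HARD with no power state always retry a HARD reboot.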
def _test_init_instance_cleans_reboot_state(self, instance):
instance.host = self.compute.host
with contextlib.nested(
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(instance, 'save', autospec=True),
mock.patch.object(compute_utils, 'get_nw_info_for_instance')
) as (
_get_power_state,
instance_save,
get_nw_info_for_instance
):
self.compute._init_instance(self.context, instance)
instance_save.assert_called_once_with()
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
def test_init_instance_cleans_image_state_reboot_started(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED
instance.power_state = power_state.RUNNING
self._test_init_instance_cleans_reboot_state(instance)
def test_init_instance_cleans_image_state_reboot_started_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED_HARD
instance.power_state = power_state.RUNNING
self._test_init_instance_cleans_reboot_state(instance)
def test_init_instance_retries_power_off(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_OFF
instance.host = self.compute.host
with mock.patch.object(self.compute, 'stop_instance'):
self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance, True)
self.compute.stop_instance.assert_has_calls([call])
def test_init_instance_retries_power_on(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_ON
instance.host = self.compute.host
with mock.patch.object(self.compute, 'start_instance'):
self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance)
self.compute.start_instance.assert_has_calls([call])
def test_init_instance_retries_power_on_silent_exception(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_ON
instance.host = self.compute.host
        # The raised exception must be swallowed by _init_instance, so
        # side_effect (not return_value) is needed to make the mock raise.
        with mock.patch.object(self.compute, 'start_instance',
                               side_effect=Exception):
init_return = self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance)
self.compute.start_instance.assert_has_calls([call])
self.assertIsNone(init_return)
def test_init_instance_retries_power_off_silent_exception(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_OFF
instance.host = self.compute.host
        # As above, side_effect makes the mock raise so the silent-exception
        # path in _init_instance is actually exercised.
        with mock.patch.object(self.compute, 'stop_instance',
                               side_effect=Exception):
init_return = self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance, True)
self.compute.stop_instance.assert_has_calls([call])
self.assertIsNone(init_return)
def test_get_instances_on_driver(self):
driver_instances = []
for x in range(10):
driver_instances.append(fake_instance.fake_db_instance())
self.mox.StubOutWithMock(self.compute.driver,
'list_instance_uuids')
self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
self.compute.driver.list_instance_uuids().AndReturn(
[inst['uuid'] for inst in driver_instances])
db.instance_get_all_by_filters(
self.context,
{'uuid': [inst['uuid'] for
inst in driver_instances]},
'created_at', 'desc', columns_to_join=None,
limit=None, marker=None,
use_slave=True).AndReturn(
driver_instances)
self.mox.ReplayAll()
result = self.compute._get_instances_on_driver(self.context)
self.assertEqual([x['uuid'] for x in driver_instances],
[x['uuid'] for x in result])
@mock.patch('nova.virt.driver.ComputeDriver.list_instance_uuids')
@mock.patch('nova.db.api.instance_get_all_by_filters')
def test_get_instances_on_driver_empty(self, mock_list, mock_db):
mock_list.return_value = []
result = self.compute._get_instances_on_driver(self.context)
# instance_get_all_by_filters should not be called
self.assertEqual(0, mock_db.call_count)
self.assertEqual([],
[x['uuid'] for x in result])
def test_get_instances_on_driver_fallback(self):
# Test getting instances when driver doesn't support
# 'list_instance_uuids'
self.compute.host = 'host'
filters = {'host': self.compute.host}
self.flags(instance_name_template='inst-%i')
all_instances = []
driver_instances = []
for x in range(10):
instance = fake_instance.fake_db_instance(name='inst-%i' % x,
id=x)
if x % 2:
driver_instances.append(instance)
all_instances.append(instance)
self.mox.StubOutWithMock(self.compute.driver,
'list_instance_uuids')
self.mox.StubOutWithMock(self.compute.driver,
'list_instances')
self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
self.compute.driver.list_instance_uuids().AndRaise(
NotImplementedError())
self.compute.driver.list_instances().AndReturn(
[inst['name'] for inst in driver_instances])
db.instance_get_all_by_filters(
self.context, filters,
'created_at', 'desc', columns_to_join=None,
limit=None, marker=None,
use_slave=True).AndReturn(all_instances)
self.mox.ReplayAll()
result = self.compute._get_instances_on_driver(self.context, filters)
self.assertEqual([x['uuid'] for x in driver_instances],
[x['uuid'] for x in result])
def test_instance_usage_audit(self):
instances = [objects.Instance(uuid='foo')]
@classmethod
def fake_task_log(*a, **k):
pass
@classmethod
def fake_get(*a, **k):
return instances
self.flags(instance_usage_audit=True)
self.stubs.Set(objects.TaskLog, 'get', fake_task_log)
self.stubs.Set(objects.InstanceList,
'get_active_by_window_joined', fake_get)
self.stubs.Set(objects.TaskLog, 'begin_task', fake_task_log)
self.stubs.Set(objects.TaskLog, 'end_task', fake_task_log)
self.mox.StubOutWithMock(compute_utils, 'notify_usage_exists')
compute_utils.notify_usage_exists(self.compute.notifier,
self.context, instances[0], ignore_missing_network_data=False)
self.mox.ReplayAll()
self.compute._instance_usage_audit(self.context)
@mock.patch.object(objects.InstanceList, 'get_by_host')
def test_sync_power_states(self, mock_get):
instance = mock.Mock()
mock_get.return_value = [instance]
with mock.patch.object(self.compute._sync_power_pool,
'spawn_n') as mock_spawn:
self.compute._sync_power_states(mock.sentinel.context)
mock_get.assert_called_with(mock.sentinel.context,
self.compute.host, expected_attrs=[],
use_slave=True)
mock_spawn.assert_called_once_with(mock.ANY, instance)
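    # The sync loop hands each instance to the power-state sync worker via
    # the greenthread pool (the exact callable behind mock.ANY above is an
    # assumption), so one slow hypervisor query cannot stall the periodic
    # task.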
def _get_sync_instance(self, power_state, vm_state, task_state=None,
shutdown_terminate=False):
instance = objects.Instance()
instance.uuid = 'fake-uuid'
instance.power_state = power_state
instance.vm_state = vm_state
instance.host = self.compute.host
instance.task_state = task_state
instance.shutdown_terminate = shutdown_terminate
self.mox.StubOutWithMock(instance, 'refresh')
self.mox.StubOutWithMock(instance, 'save')
return instance
def test_sync_instance_power_state_match(self):
instance = self._get_sync_instance(power_state.RUNNING,
vm_states.ACTIVE)
instance.refresh(use_slave=False)
self.mox.ReplayAll()
self.compute._sync_instance_power_state(self.context, instance,
power_state.RUNNING)
def test_sync_instance_power_state_running_stopped(self):
instance = self._get_sync_instance(power_state.RUNNING,
vm_states.ACTIVE)
instance.refresh(use_slave=False)
instance.save()
self.mox.ReplayAll()
self.compute._sync_instance_power_state(self.context, instance,
power_state.SHUTDOWN)
self.assertEqual(instance.power_state, power_state.SHUTDOWN)
def _test_sync_to_stop(self, power_state, vm_state, driver_power_state,
stop=True, force=False, shutdown_terminate=False):
instance = self._get_sync_instance(
power_state, vm_state, shutdown_terminate=shutdown_terminate)
instance.refresh(use_slave=False)
instance.save()
self.mox.StubOutWithMock(self.compute.compute_api, 'stop')
self.mox.StubOutWithMock(self.compute.compute_api, 'delete')
self.mox.StubOutWithMock(self.compute.compute_api, 'force_stop')
if shutdown_terminate:
self.compute.compute_api.delete(self.context, instance)
elif stop:
if force:
self.compute.compute_api.force_stop(self.context, instance)
else:
self.compute.compute_api.stop(self.context, instance)
self.mox.ReplayAll()
self.compute._sync_instance_power_state(self.context, instance,
driver_power_state)
self.mox.VerifyAll()
self.mox.UnsetStubs()
def test_sync_instance_power_state_to_stop(self):
for ps in (power_state.SHUTDOWN, power_state.CRASHED,
power_state.SUSPENDED):
self._test_sync_to_stop(power_state.RUNNING, vm_states.ACTIVE, ps)
for ps in (power_state.SHUTDOWN, power_state.CRASHED):
self._test_sync_to_stop(power_state.PAUSED, vm_states.PAUSED, ps,
force=True)
self._test_sync_to_stop(power_state.SHUTDOWN, vm_states.STOPPED,
power_state.RUNNING, force=True)
def test_sync_instance_power_state_to_terminate(self):
self._test_sync_to_stop(power_state.RUNNING, vm_states.ACTIVE,
power_state.SHUTDOWN,
force=False, shutdown_terminate=True)
def test_sync_instance_power_state_to_no_stop(self):
for ps in (power_state.PAUSED, power_state.NOSTATE):
self._test_sync_to_stop(power_state.RUNNING, vm_states.ACTIVE, ps,
stop=False)
for vs in (vm_states.SOFT_DELETED, vm_states.DELETED):
for ps in (power_state.NOSTATE, power_state.SHUTDOWN):
self._test_sync_to_stop(power_state.RUNNING, vs, ps,
stop=False)
@mock.patch('nova.compute.manager.ComputeManager.'
'_sync_instance_power_state')
def test_query_driver_power_state_and_sync_pending_task(
self, mock_sync_power_state):
with mock.patch.object(self.compute.driver,
'get_info') as mock_get_info:
db_instance = objects.Instance(uuid='fake-uuid',
task_state=task_states.POWERING_OFF)
self.compute._query_driver_power_state_and_sync(self.context,
db_instance)
self.assertFalse(mock_get_info.called)
self.assertFalse(mock_sync_power_state.called)
@mock.patch('nova.compute.manager.ComputeManager.'
'_sync_instance_power_state')
def test_query_driver_power_state_and_sync_not_found_driver(
self, mock_sync_power_state):
error = exception.InstanceNotFound(instance_id=1)
with mock.patch.object(self.compute.driver,
'get_info', side_effect=error) as mock_get_info:
db_instance = objects.Instance(uuid='fake-uuid', task_state=None)
self.compute._query_driver_power_state_and_sync(self.context,
db_instance)
mock_get_info.assert_called_once_with(db_instance)
mock_sync_power_state.assert_called_once_with(self.context,
db_instance,
power_state.NOSTATE,
use_slave=True)
def test_run_pending_deletes(self):
self.flags(instance_delete_interval=10)
class FakeInstance(object):
def __init__(self, uuid, name, smd):
self.uuid = uuid
self.name = name
self.system_metadata = smd
self.cleaned = False
def __getitem__(self, name):
return getattr(self, name)
def save(self):
pass
a = FakeInstance('123', 'apple', {'clean_attempts': '100'})
b = FakeInstance('456', 'orange', {'clean_attempts': '3'})
c = FakeInstance('789', 'banana', {})
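        # Expected outcomes (mirroring the assertions below): 'apple' has
        # already burned 100 clean attempts and is skipped; 'orange' is
        # cleaned successfully and its counter bumps to 4; 'banana' fails its
        # first cleanup and its counter starts at 1.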
self.mox.StubOutWithMock(objects.InstanceList,
'get_by_filters')
objects.InstanceList.get_by_filters(
{'read_deleted': 'yes'},
{'deleted': True, 'soft_deleted': False, 'host': 'fake-mini',
'cleaned': False},
expected_attrs=['info_cache', 'security_groups',
'system_metadata'],
use_slave=True).AndReturn([a, b, c])
self.mox.StubOutWithMock(self.compute.driver, 'delete_instance_files')
self.compute.driver.delete_instance_files(
mox.IgnoreArg()).AndReturn(True)
self.compute.driver.delete_instance_files(
mox.IgnoreArg()).AndReturn(False)
self.mox.ReplayAll()
self.compute._run_pending_deletes({})
self.assertFalse(a.cleaned)
self.assertEqual('100', a.system_metadata['clean_attempts'])
self.assertTrue(b.cleaned)
self.assertEqual('4', b.system_metadata['clean_attempts'])
self.assertFalse(c.cleaned)
self.assertEqual('1', c.system_metadata['clean_attempts'])
@mock.patch.object(objects.Migration, 'obj_as_admin')
@mock.patch.object(objects.Migration, 'save')
@mock.patch.object(objects.MigrationList, 'get_by_filters')
@mock.patch.object(objects.InstanceList, 'get_by_filters')
def _test_cleanup_incomplete_migrations(self, inst_host,
mock_inst_get_by_filters,
mock_migration_get_by_filters,
mock_save, mock_obj_as_admin):
def fake_inst(context, uuid, host):
inst = objects.Instance(context)
inst.uuid = uuid
inst.host = host
return inst
def fake_migration(uuid, status, inst_uuid, src_host, dest_host):
migration = objects.Migration()
migration.uuid = uuid
migration.status = status
migration.instance_uuid = inst_uuid
migration.source_compute = src_host
migration.dest_compute = dest_host
return migration
fake_instances = [fake_inst(self.context, '111', inst_host),
fake_inst(self.context, '222', inst_host)]
fake_migrations = [fake_migration('123', 'error', '111',
'fake-host', 'fake-mini'),
fake_migration('456', 'error', '222',
'fake-host', 'fake-mini')]
mock_migration_get_by_filters.return_value = fake_migrations
mock_inst_get_by_filters.return_value = fake_instances
with mock.patch.object(self.compute.driver, 'delete_instance_files'):
self.compute._cleanup_incomplete_migrations(self.context)
        # Ensure that the migration status is set to 'failed' after the
        # instance files are deleted for instances whose instance.host
        # differs from the compute host running the periodic task.
for inst in fake_instances:
if inst.host != CONF.host:
for mig in fake_migrations:
if inst.uuid == mig.instance_uuid:
self.assertEqual('failed', mig.status)
def test_cleanup_incomplete_migrations_dest_node(self):
"""Test to ensure instance files are deleted from destination node.
If instance gets deleted during resizing/revert-resizing operation,
in that case instance files gets deleted from instance.host (source
host here), but there is possibility that instance files could be
present on destination node.
This test ensures that `_cleanup_incomplete_migration` periodic
task deletes orphaned instance files from destination compute node.
"""
self.flags(host='fake-mini')
self._test_cleanup_incomplete_migrations('fake-host')
def test_cleanup_incomplete_migrations_source_node(self):
"""Test to ensure instance files are deleted from source node.
If instance gets deleted during resizing/revert-resizing operation,
in that case instance files gets deleted from instance.host (dest
host here), but there is possibility that instance files could be
present on source node.
This test ensures that `_cleanup_incomplete_migration` periodic
task deletes orphaned instance files from source compute node.
"""
self.flags(host='fake-host')
self._test_cleanup_incomplete_migrations('fake-mini')
def test_attach_interface_failure(self):
# Test that the fault methods are invoked when an attach fails
db_instance = fake_instance.fake_db_instance()
f_instance = objects.Instance._from_db_object(self.context,
objects.Instance(),
db_instance)
e = exception.InterfaceAttachFailed(instance_uuid=f_instance.uuid)
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
@mock.patch.object(self.compute.network_api,
'allocate_port_for_instance',
side_effect=e)
@mock.patch.object(self.compute, '_instance_update',
side_effect=lambda *a, **k: {})
def do_test(update, meth, add_fault):
self.assertRaises(exception.InterfaceAttachFailed,
self.compute.attach_interface,
self.context, f_instance, 'net_id', 'port_id',
None)
add_fault.assert_has_calls([
mock.call(self.context, f_instance, e,
mock.ANY)])
do_test()
def test_detach_interface_failure(self):
# Test that the fault methods are invoked when a detach fails
# Build test data that will cause a PortNotFound exception
f_instance = mock.MagicMock()
f_instance.info_cache = mock.MagicMock()
f_instance.info_cache.network_info = []
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
@mock.patch.object(self.compute, '_set_instance_obj_error_state')
def do_test(meth, add_fault):
self.assertRaises(exception.PortNotFound,
self.compute.detach_interface,
self.context, f_instance, 'port_id')
add_fault.assert_has_calls(
[mock.call(self.context, f_instance, mock.ANY, mock.ANY)])
do_test()
def test_swap_volume_volume_api_usage(self):
# This test ensures that volume_id arguments are passed to volume_api
# and that volume states are OK
volumes = {}
old_volume_id = uuidutils.generate_uuid()
volumes[old_volume_id] = {'id': old_volume_id,
'display_name': 'old_volume',
'status': 'detaching',
'size': 1}
new_volume_id = uuidutils.generate_uuid()
volumes[new_volume_id] = {'id': new_volume_id,
'display_name': 'new_volume',
'status': 'available',
'size': 2}
def fake_vol_api_roll_detaching(context, volume_id):
self.assertTrue(uuidutils.is_uuid_like(volume_id))
if volumes[volume_id]['status'] == 'detaching':
volumes[volume_id]['status'] = 'in-use'
fake_bdm = fake_block_device.FakeDbBlockDeviceDict(
{'device_name': '/dev/vdb', 'source_type': 'volume',
'destination_type': 'volume', 'instance_uuid': 'fake',
'connection_info': '{"foo": "bar"}'})
def fake_vol_api_func(context, volume, *args):
self.assertTrue(uuidutils.is_uuid_like(volume))
return {}
def fake_vol_get(context, volume_id):
self.assertTrue(uuidutils.is_uuid_like(volume_id))
return volumes[volume_id]
def fake_vol_unreserve(context, volume_id):
self.assertTrue(uuidutils.is_uuid_like(volume_id))
if volumes[volume_id]['status'] == 'attaching':
volumes[volume_id]['status'] = 'available'
def fake_vol_migrate_volume_completion(context, old_volume_id,
new_volume_id, error=False):
self.assertTrue(uuidutils.is_uuid_like(old_volume_id))
self.assertTrue(uuidutils.is_uuid_like(new_volume_id))
volumes[old_volume_id]['status'] = 'in-use'
return {'save_volume_id': new_volume_id}
def fake_func_exc(*args, **kwargs):
raise AttributeError # Random exception
def fake_swap_volume(old_connection_info, new_connection_info,
instance, mountpoint, resize_to):
self.assertEqual(resize_to, 2)
def fake_block_device_mapping_update(ctxt, id, updates, legacy):
self.assertEqual(2, updates['volume_size'])
return fake_bdm
self.stubs.Set(self.compute.volume_api, 'roll_detaching',
fake_vol_api_roll_detaching)
self.stubs.Set(self.compute.volume_api, 'get', fake_vol_get)
self.stubs.Set(self.compute.volume_api, 'initialize_connection',
fake_vol_api_func)
self.stubs.Set(self.compute.volume_api, 'unreserve_volume',
fake_vol_unreserve)
self.stubs.Set(self.compute.volume_api, 'terminate_connection',
fake_vol_api_func)
self.stubs.Set(db, 'block_device_mapping_get_by_volume_id',
lambda x, y, z: fake_bdm)
self.stubs.Set(self.compute.driver, 'get_volume_connector',
lambda x: {})
self.stubs.Set(self.compute.driver, 'swap_volume',
fake_swap_volume)
self.stubs.Set(self.compute.volume_api, 'migrate_volume_completion',
fake_vol_migrate_volume_completion)
self.stubs.Set(db, 'block_device_mapping_update',
fake_block_device_mapping_update)
self.stubs.Set(db,
'instance_fault_create',
lambda x, y:
test_instance_fault.fake_faults['fake-uuid'][0])
self.stubs.Set(self.compute, '_instance_update',
lambda c, u, **k: {})
# Good path
self.compute.swap_volume(self.context, old_volume_id, new_volume_id,
fake_instance.fake_instance_obj(
self.context, **{'uuid': 'fake'}))
self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
# Error paths
volumes[old_volume_id]['status'] = 'detaching'
volumes[new_volume_id]['status'] = 'attaching'
self.stubs.Set(self.compute.driver, 'swap_volume', fake_func_exc)
self.assertRaises(AttributeError, self.compute.swap_volume,
self.context, old_volume_id, new_volume_id,
fake_instance.fake_instance_obj(
self.context, **{'uuid': 'fake'}))
self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
self.assertEqual(volumes[new_volume_id]['status'], 'available')
volumes[old_volume_id]['status'] = 'detaching'
volumes[new_volume_id]['status'] = 'attaching'
self.stubs.Set(self.compute.volume_api, 'initialize_connection',
fake_func_exc)
self.assertRaises(AttributeError, self.compute.swap_volume,
self.context, old_volume_id, new_volume_id,
fake_instance.fake_instance_obj(
self.context, **{'uuid': 'fake'}))
self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
self.assertEqual(volumes[new_volume_id]['status'], 'available')
@mock.patch.object(compute_utils, 'EventReporter')
def test_check_can_live_migrate_source(self, event_mock):
is_volume_backed = 'volume_backed'
dest_check_data = dict(foo='bar')
db_instance = fake_instance.fake_db_instance()
instance = objects.Instance._from_db_object(
self.context, objects.Instance(), db_instance)
expected_dest_check_data = dict(dest_check_data,
is_volume_backed=is_volume_backed)
self.mox.StubOutWithMock(self.compute.compute_api,
'is_volume_backed_instance')
self.mox.StubOutWithMock(self.compute,
'_get_instance_block_device_info')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_source')
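        # Record the expected calls with mox; ReplayAll below switches the
        # stubs into replay mode, so any deviation from this sequence fails
        # the test.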
self.compute.compute_api.is_volume_backed_instance(
self.context, instance).AndReturn(is_volume_backed)
self.compute._get_instance_block_device_info(
self.context, instance, refresh_conn_info=True
).AndReturn({'block_device_mapping': 'fake'})
self.compute.driver.check_can_live_migrate_source(
self.context, instance, expected_dest_check_data,
{'block_device_mapping': 'fake'})
self.mox.ReplayAll()
self.compute.check_can_live_migrate_source(
self.context, instance=instance,
dest_check_data=dest_check_data)
event_mock.assert_called_once_with(
self.context, 'compute_check_can_live_migrate_source',
instance.uuid)
@mock.patch.object(compute_utils, 'EventReporter')
def _test_check_can_live_migrate_destination(self, event_mock,
do_raise=False,
has_mig_data=False):
db_instance = fake_instance.fake_db_instance(host='fake-host')
instance = objects.Instance._from_db_object(
self.context, objects.Instance(), db_instance)
instance.host = 'fake-host'
block_migration = 'block_migration'
disk_over_commit = 'disk_over_commit'
src_info = 'src_info'
dest_info = 'dest_info'
dest_check_data = dict(foo='bar')
mig_data = dict(cow='moo')
expected_result = dict(mig_data)
if has_mig_data:
dest_check_data['migrate_data'] = dict(cat='meow')
expected_result.update(cat='meow')
self.mox.StubOutWithMock(self.compute, '_get_compute_info')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_destination')
self.mox.StubOutWithMock(self.compute.compute_rpcapi,
'check_can_live_migrate_source')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_destination_cleanup')
self.compute._get_compute_info(self.context,
'fake-host').AndReturn(src_info)
self.compute._get_compute_info(self.context,
CONF.host).AndReturn(dest_info)
self.compute.driver.check_can_live_migrate_destination(
self.context, instance, src_info, dest_info,
block_migration, disk_over_commit).AndReturn(dest_check_data)
mock_meth = self.compute.compute_rpcapi.check_can_live_migrate_source(
self.context, instance, dest_check_data)
if do_raise:
mock_meth.AndRaise(test.TestingException())
self.mox.StubOutWithMock(db, 'instance_fault_create')
db.instance_fault_create(
self.context, mox.IgnoreArg()).AndReturn(
test_instance_fault.fake_faults['fake-uuid'][0])
else:
mock_meth.AndReturn(mig_data)
self.compute.driver.check_can_live_migrate_destination_cleanup(
self.context, dest_check_data)
self.mox.ReplayAll()
result = self.compute.check_can_live_migrate_destination(
self.context, instance=instance,
block_migration=block_migration,
disk_over_commit=disk_over_commit)
self.assertEqual(expected_result, result)
event_mock.assert_called_once_with(
self.context, 'compute_check_can_live_migrate_destination',
instance.uuid)
def test_check_can_live_migrate_destination_success(self):
self._test_check_can_live_migrate_destination()
def test_check_can_live_migrate_destination_success_w_mig_data(self):
self._test_check_can_live_migrate_destination(has_mig_data=True)
def test_check_can_live_migrate_destination_fail(self):
self.assertRaises(
test.TestingException,
self._test_check_can_live_migrate_destination,
do_raise=True)
@mock.patch('nova.compute.manager.InstanceEvents._lock_name')
def test_prepare_for_instance_event(self, lock_name_mock):
inst_obj = objects.Instance(uuid='foo')
result = self.compute.instance_events.prepare_for_instance_event(
inst_obj, 'test-event')
self.assertIn('foo', self.compute.instance_events._events)
self.assertIn('test-event',
self.compute.instance_events._events['foo'])
self.assertEqual(
result,
self.compute.instance_events._events['foo']['test-event'])
self.assertTrue(hasattr(result, 'send'))
lock_name_mock.assert_called_once_with(inst_obj)
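    # instance_events._events maps an instance uuid to a dict of
    # {event name: eventlet Event}, as the fixtures below illustrate.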
@mock.patch('nova.compute.manager.InstanceEvents._lock_name')
def test_pop_instance_event(self, lock_name_mock):
event = eventlet_event.Event()
self.compute.instance_events._events = {
'foo': {
'network-vif-plugged': event,
}
}
inst_obj = objects.Instance(uuid='foo')
event_obj = objects.InstanceExternalEvent(name='network-vif-plugged',
tag=None)
result = self.compute.instance_events.pop_instance_event(inst_obj,
event_obj)
self.assertEqual(result, event)
lock_name_mock.assert_called_once_with(inst_obj)
@mock.patch('nova.compute.manager.InstanceEvents._lock_name')
def test_clear_events_for_instance(self, lock_name_mock):
event = eventlet_event.Event()
self.compute.instance_events._events = {
'foo': {
'test-event': event,
}
}
inst_obj = objects.Instance(uuid='foo')
result = self.compute.instance_events.clear_events_for_instance(
inst_obj)
self.assertEqual(result, {'test-event': event})
lock_name_mock.assert_called_once_with(inst_obj)
def test_instance_events_lock_name(self):
inst_obj = objects.Instance(uuid='foo')
result = self.compute.instance_events._lock_name(inst_obj)
self.assertEqual(result, 'foo-events')
def test_prepare_for_instance_event_again(self):
inst_obj = objects.Instance(uuid='foo')
self.compute.instance_events.prepare_for_instance_event(
inst_obj, 'test-event')
        # A second attempt must not create a new event; make sure we
        # get the existing one back
result = self.compute.instance_events.prepare_for_instance_event(
inst_obj, 'test-event')
self.assertIn('foo', self.compute.instance_events._events)
self.assertIn('test-event',
self.compute.instance_events._events['foo'])
self.assertEqual(
result,
self.compute.instance_events._events['foo']['test-event'])
self.assertTrue(hasattr(result, 'send'))
def test_process_instance_event(self):
event = eventlet_event.Event()
self.compute.instance_events._events = {
'foo': {
'network-vif-plugged': event,
}
}
inst_obj = objects.Instance(uuid='foo')
event_obj = objects.InstanceExternalEvent(name='network-vif-plugged',
tag=None)
self.compute._process_instance_event(inst_obj, event_obj)
self.assertTrue(event.ready())
self.assertEqual(event_obj, event.wait())
self.assertEqual({}, self.compute.instance_events._events)
def test_process_instance_vif_deleted_event(self):
vif1 = fake_network_cache_model.new_vif()
vif1['id'] = '1'
vif2 = fake_network_cache_model.new_vif()
vif2['id'] = '2'
nw_info = network_model.NetworkInfo([vif1, vif2])
info_cache = objects.InstanceInfoCache(network_info=nw_info,
instance_uuid='uuid')
inst_obj = objects.Instance(id=3, uuid='uuid', info_cache=info_cache)
@mock.patch.object(manager.base_net_api,
'update_instance_cache_with_nw_info')
@mock.patch.object(self.compute.driver, 'detach_interface')
def do_test(detach_interface, update_instance_cache_with_nw_info):
self.compute._process_instance_vif_deleted_event(self.context,
inst_obj,
vif2['id'])
update_instance_cache_with_nw_info.assert_called_once_with(
self.compute.network_api,
self.context,
inst_obj,
nw_info=[vif1])
detach_interface.assert_called_once_with(inst_obj, vif2)
do_test()
def test_external_instance_event(self):
instances = [
objects.Instance(id=1, uuid='uuid1'),
objects.Instance(id=2, uuid='uuid2'),
objects.Instance(id=3, uuid='uuid3')]
events = [
objects.InstanceExternalEvent(name='network-changed',
tag='tag1',
instance_uuid='uuid1'),
objects.InstanceExternalEvent(name='network-vif-plugged',
instance_uuid='uuid2',
tag='tag2'),
objects.InstanceExternalEvent(name='network-vif-deleted',
instance_uuid='uuid3',
tag='tag3')]
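        # Each event type is expected to be routed to a different handler:
        # network-changed refreshes the network info cache,
        # network-vif-plugged wakes a waiting event, and network-vif-deleted
        # detaches the interface.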
@mock.patch.object(self.compute, '_process_instance_vif_deleted_event')
@mock.patch.object(self.compute.network_api, 'get_instance_nw_info')
@mock.patch.object(self.compute, '_process_instance_event')
def do_test(_process_instance_event, get_instance_nw_info,
_process_instance_vif_deleted_event):
self.compute.external_instance_event(self.context,
instances, events)
get_instance_nw_info.assert_called_once_with(self.context,
instances[0])
_process_instance_event.assert_called_once_with(instances[1],
events[1])
_process_instance_vif_deleted_event.assert_called_once_with(
self.context, instances[2], events[2].tag)
do_test()
def test_external_instance_event_with_exception(self):
vif1 = fake_network_cache_model.new_vif()
vif1['id'] = '1'
vif2 = fake_network_cache_model.new_vif()
vif2['id'] = '2'
nw_info = network_model.NetworkInfo([vif1, vif2])
info_cache = objects.InstanceInfoCache(network_info=nw_info,
instance_uuid='uuid2')
instances = [
objects.Instance(id=1, uuid='uuid1'),
objects.Instance(id=2, uuid='uuid2', info_cache=info_cache),
objects.Instance(id=3, uuid='uuid3')]
events = [
objects.InstanceExternalEvent(name='network-changed',
tag='tag1',
instance_uuid='uuid1'),
objects.InstanceExternalEvent(name='network-vif-deleted',
instance_uuid='uuid2',
tag='2'),
objects.InstanceExternalEvent(name='network-vif-plugged',
instance_uuid='uuid3',
tag='tag3')]
        # Make sure all three events are handled despite the exceptions
        # raised while processing events 1 and 2
@mock.patch.object(manager.base_net_api,
'update_instance_cache_with_nw_info')
@mock.patch.object(self.compute.driver, 'detach_interface',
side_effect=exception.NovaException)
@mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
side_effect=exception.InstanceInfoCacheNotFound(
instance_uuid='uuid1'))
@mock.patch.object(self.compute, '_process_instance_event')
def do_test(_process_instance_event, get_instance_nw_info,
detach_interface, update_instance_cache_with_nw_info):
self.compute.external_instance_event(self.context,
instances, events)
get_instance_nw_info.assert_called_once_with(self.context,
instances[0])
update_instance_cache_with_nw_info.assert_called_once_with(
self.compute.network_api,
self.context,
instances[1],
nw_info=[vif1])
detach_interface.assert_called_once_with(instances[1], vif2)
_process_instance_event.assert_called_once_with(instances[2],
events[2])
do_test()
def test_cancel_all_events(self):
inst = objects.Instance(uuid='uuid')
fake_eventlet_event = mock.MagicMock()
self.compute.instance_events._events = {
inst.uuid: {
'network-vif-plugged-bar': fake_eventlet_event,
}
}
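        # The event key encodes '<name>-<tag>'; cancel_all_events is
        # expected to split it back into name and tag when sending the
        # 'failed' event, as asserted below.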
self.compute.instance_events.cancel_all_events()
self.assertTrue(fake_eventlet_event.send.called)
event = fake_eventlet_event.send.call_args_list[0][0][0]
self.assertEqual('network-vif-plugged', event.name)
self.assertEqual('bar', event.tag)
self.assertEqual('failed', event.status)
def test_cleanup_cancels_all_events(self):
with mock.patch.object(self.compute, 'instance_events') as mock_ev:
self.compute.cleanup_host()
mock_ev.cancel_all_events.assert_called_once_with()
def test_cleanup_blocks_new_events(self):
instance = objects.Instance(uuid='uuid')
self.compute.instance_events.cancel_all_events()
callback = mock.MagicMock()
body = mock.MagicMock()
with self.compute.virtapi.wait_for_instance_event(
instance, ['network-vif-plugged-bar'],
error_callback=callback):
body()
self.assertTrue(body.called)
callback.assert_called_once_with('network-vif-plugged-bar', instance)
def test_pop_events_fails_gracefully(self):
inst = objects.Instance(uuid='uuid')
event = mock.MagicMock()
self.compute.instance_events._events = None
self.assertIsNone(
self.compute.instance_events.pop_instance_event(inst, event))
def test_clear_events_fails_gracefully(self):
inst = objects.Instance(uuid='uuid')
self.compute.instance_events._events = None
self.assertEqual(
self.compute.instance_events.clear_events_for_instance(inst), {})
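    # The _retry_reboot tests below cover the (task_state, power_state)
    # decision matrix: pending reboots are retried with their original
    # type, while reboots that already started fall back to HARD once the
    # instance is powered off.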
def test_retry_reboot_pending_soft(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING
instance.vm_state = vm_states.ACTIVE
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
                self.context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'SOFT')
def test_retry_reboot_pending_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING_HARD
instance.vm_state = vm_states.ACTIVE
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
                self.context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_starting_soft_off(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_STARTED
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.NOSTATE):
allow_reboot, reboot_type = self.compute._retry_reboot(
                self.context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_starting_hard_off(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_STARTED_HARD
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.NOSTATE):
allow_reboot, reboot_type = self.compute._retry_reboot(
                self.context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_starting_hard_on(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_STARTED_HARD
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
                self.context, instance)
self.assertFalse(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_no_reboot(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = 'bar'
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
                self.context, instance)
self.assertFalse(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
@mock.patch('nova.objects.BlockDeviceMapping.get_by_volume_id')
@mock.patch('nova.compute.manager.ComputeManager._driver_detach_volume')
@mock.patch('nova.objects.Instance._from_db_object')
def test_remove_volume_connection(self, inst_from_db, detach, bdm_get):
bdm = mock.sentinel.bdm
inst_obj = mock.sentinel.inst_obj
bdm_get.return_value = bdm
inst_from_db.return_value = inst_obj
with mock.patch.object(self.compute, 'volume_api'):
self.compute.remove_volume_connection(self.context, 'vol',
inst_obj)
detach.assert_called_once_with(self.context, inst_obj, bdm)
def test_detach_volume(self):
self._test_detach_volume()
def test_detach_volume_not_destroy_bdm(self):
self._test_detach_volume(destroy_bdm=False)
@mock.patch('nova.objects.BlockDeviceMapping.get_by_volume_id')
@mock.patch('nova.compute.manager.ComputeManager._driver_detach_volume')
@mock.patch('nova.compute.manager.ComputeManager.'
'_notify_about_instance_usage')
def _test_detach_volume(self, notify_inst_usage, detach,
bdm_get, destroy_bdm=True):
volume_id = '123'
inst_obj = mock.sentinel.inst_obj
bdm = mock.MagicMock(spec=objects.BlockDeviceMapping)
bdm.device_name = 'vdb'
bdm_get.return_value = bdm
with mock.patch.object(self.compute, 'volume_api') as volume_api:
with mock.patch.object(self.compute, 'driver') as driver:
connector_sentinel = mock.sentinel.connector
driver.get_volume_connector.return_value = connector_sentinel
self.compute._detach_volume(self.context, volume_id,
inst_obj,
destroy_bdm=destroy_bdm)
detach.assert_called_once_with(self.context, inst_obj, bdm)
driver.get_volume_connector.assert_called_once_with(inst_obj)
volume_api.terminate_connection.assert_called_once_with(
self.context, volume_id, connector_sentinel)
volume_api.detach.assert_called_once_with(mock.ANY, volume_id)
notify_inst_usage.assert_called_once_with(
self.context, inst_obj, "volume.detach",
extra_usage_info={'volume_id': volume_id}
)
if destroy_bdm:
bdm.destroy.assert_called_once_with()
else:
self.assertFalse(bdm.destroy.called)
def _test_rescue(self, clean_shutdown=True):
instance = fake_instance.fake_instance_obj(
self.context, vm_state=vm_states.ACTIVE)
fake_nw_info = network_model.NetworkInfo()
rescue_image_meta = {'id': 'fake', 'name': 'fake'}
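        # contextlib.nested patches the whole rescue collaboration in one
        # block: network info, rescue image lookup, notifications,
        # power-off and the driver rescue call itself.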
with contextlib.nested(
mock.patch.object(self.context, 'elevated',
return_value=self.context),
mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
return_value=fake_nw_info),
mock.patch.object(self.compute, '_get_rescue_image',
return_value=rescue_image_meta),
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute, '_power_off_instance'),
mock.patch.object(self.compute.driver, 'rescue'),
mock.patch.object(compute_utils, 'notify_usage_exists'),
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(instance, 'save')
) as (
elevated_context, get_nw_info,
get_rescue_image, notify_instance_usage, power_off_instance,
driver_rescue, notify_usage_exists, get_power_state, instance_save
):
self.compute.rescue_instance(
self.context, instance, rescue_password='verybadpass',
rescue_image_ref=None, clean_shutdown=clean_shutdown)
# assert the field values on the instance object
self.assertEqual(vm_states.RESCUED, instance.vm_state)
self.assertIsNone(instance.task_state)
self.assertEqual(power_state.RUNNING, instance.power_state)
self.assertIsNotNone(instance.launched_at)
# assert our mock calls
get_nw_info.assert_called_once_with(self.context, instance)
get_rescue_image.assert_called_once_with(
self.context, instance, None)
extra_usage_info = {'rescue_image_name': 'fake'}
notify_calls = [
mock.call(self.context, instance, "rescue.start",
extra_usage_info=extra_usage_info,
network_info=fake_nw_info),
mock.call(self.context, instance, "rescue.end",
extra_usage_info=extra_usage_info,
network_info=fake_nw_info)
]
notify_instance_usage.assert_has_calls(notify_calls)
power_off_instance.assert_called_once_with(self.context, instance,
clean_shutdown)
driver_rescue.assert_called_once_with(
self.context, instance, fake_nw_info, rescue_image_meta,
'verybadpass')
notify_usage_exists.assert_called_once_with(self.compute.notifier,
self.context, instance, current_period=True)
instance_save.assert_called_once_with(
expected_task_state=task_states.RESCUING)
def test_rescue(self):
self._test_rescue()
def test_rescue_forced_shutdown(self):
self._test_rescue(clean_shutdown=False)
def test_unrescue(self):
instance = fake_instance.fake_instance_obj(
self.context, vm_state=vm_states.RESCUED)
fake_nw_info = network_model.NetworkInfo()
with contextlib.nested(
mock.patch.object(self.context, 'elevated',
return_value=self.context),
mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
return_value=fake_nw_info),
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute.driver, 'unrescue'),
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(instance, 'save')
) as (
elevated_context, get_nw_info,
notify_instance_usage, driver_unrescue, get_power_state,
instance_save
):
self.compute.unrescue_instance(self.context, instance)
# assert the field values on the instance object
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
self.assertEqual(power_state.RUNNING, instance.power_state)
# assert our mock calls
get_nw_info.assert_called_once_with(self.context, instance)
notify_calls = [
mock.call(self.context, instance, "unrescue.start",
network_info=fake_nw_info),
mock.call(self.context, instance, "unrescue.end",
network_info=fake_nw_info)
]
notify_instance_usage.assert_has_calls(notify_calls)
driver_unrescue.assert_called_once_with(instance, fake_nw_info)
instance_save.assert_called_once_with(
expected_task_state=task_states.UNRESCUING)
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.RUNNING)
@mock.patch.object(objects.Instance, 'save')
@mock.patch('nova.utils.generate_password', return_value='fake-pass')
def test_set_admin_password(self, gen_password_mock,
instance_save_mock, power_state_mock):
# Ensure instance can have its admin password set.
instance = fake_instance.fake_instance_obj(
self.context,
vm_state=vm_states.ACTIVE,
task_state=task_states.UPDATING_PASSWORD)
@mock.patch.object(self.context, 'elevated', return_value=self.context)
@mock.patch.object(self.compute.driver, 'set_admin_password')
def do_test(driver_mock, elevated_mock):
# call the manager method
self.compute.set_admin_password(self.context, instance, None)
# make our assertions
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
power_state_mock.assert_called_once_with(self.context, instance)
driver_mock.assert_called_once_with(instance, 'fake-pass')
instance_save_mock.assert_called_once_with(
expected_task_state=task_states.UPDATING_PASSWORD)
do_test()
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.NOSTATE)
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
def test_set_admin_password_bad_state(self, add_fault_mock,
instance_save_mock,
update_mock,
power_state_mock):
# Test setting password while instance is rebuilding.
instance = fake_instance.fake_instance_obj(self.context)
with mock.patch.object(self.context, 'elevated',
return_value=self.context):
# call the manager method
self.assertRaises(exception.InstancePasswordSetFailed,
self.compute.set_admin_password,
self.context, instance, None)
# make our assertions
power_state_mock.assert_called_once_with(self.context, instance)
instance_save_mock.assert_called_once_with(
expected_task_state=task_states.UPDATING_PASSWORD)
add_fault_mock.assert_called_once_with(
self.context, instance, mock.ANY, mock.ANY)
@mock.patch('nova.utils.generate_password', return_value='fake-pass')
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.RUNNING)
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
def _do_test_set_admin_password_driver_error(self, exc,
expected_vm_state,
expected_task_state,
expected_exception,
add_fault_mock,
instance_save_mock,
update_mock,
power_state_mock,
gen_password_mock):
# Ensure expected exception is raised if set_admin_password fails.
instance = fake_instance.fake_instance_obj(
self.context,
vm_state=vm_states.ACTIVE,
task_state=task_states.UPDATING_PASSWORD)
@mock.patch.object(self.context, 'elevated', return_value=self.context)
@mock.patch.object(self.compute.driver, 'set_admin_password',
side_effect=exc)
def do_test(driver_mock, elevated_mock):
            # an error raised from the driver should not reveal internal
            # information, so a new error is raised instead
self.assertRaises(expected_exception,
self.compute.set_admin_password,
self.context,
instance=instance,
new_pass=None)
if expected_exception == NotImplementedError:
instance_save_mock.assert_called_once_with(
expected_task_state=task_states.UPDATING_PASSWORD)
else:
# setting the instance to error state
instance_save_mock.assert_called_once_with()
self.assertEqual(expected_vm_state, instance.vm_state)
# check revert_task_state decorator
update_mock.assert_called_once_with(
self.context, instance, task_state=expected_task_state)
# check wrap_instance_fault decorator
add_fault_mock.assert_called_once_with(
self.context, instance, mock.ANY, mock.ANY)
do_test()
def test_set_admin_password_driver_not_authorized(self):
# Ensure expected exception is raised if set_admin_password not
# authorized.
exc = exception.Forbidden('Internal error')
expected_exception = exception.InstancePasswordSetFailed
self._do_test_set_admin_password_driver_error(
exc, vm_states.ERROR, None, expected_exception)
def test_set_admin_password_driver_not_implemented(self):
# Ensure expected exception is raised if set_admin_password not
# implemented by driver.
exc = NotImplementedError()
expected_exception = NotImplementedError
self._do_test_set_admin_password_driver_error(
exc, vm_states.ACTIVE, None, expected_exception)
def test_destroy_evacuated_instances(self):
our_host = self.compute.host
instance_1 = objects.Instance(self.context)
instance_1.uuid = 'foo'
instance_1.task_state = None
instance_1.vm_state = vm_states.ACTIVE
instance_1.host = 'not-' + our_host
instance_2 = objects.Instance(self.context)
instance_2.uuid = 'bar'
instance_2.task_state = None
instance_2.vm_state = vm_states.ACTIVE
instance_2.host = 'not-' + our_host
# Only instance 2 has a migration record
migration = objects.Migration(instance_uuid=instance_2.uuid)
with contextlib.nested(
mock.patch.object(self.compute, '_get_instances_on_driver',
return_value=[instance_1,
instance_2]),
mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
return_value=None),
mock.patch.object(self.compute, '_get_instance_block_device_info',
return_value={}),
mock.patch.object(self.compute, '_is_instance_storage_shared',
return_value=False),
mock.patch.object(self.compute.driver, 'destroy'),
mock.patch('nova.objects.MigrationList.get_by_filters'),
mock.patch('nova.objects.Migration.save')
) as (_get_instances_on_driver, get_instance_nw_info,
_get_instance_block_device_info, _is_instance_storage_shared,
destroy, migration_list, migration_save):
migration_list.return_value = [migration]
self.compute._destroy_evacuated_instances(self.context)
# Only instance 2 should be deleted. Instance 1 is still running
# here, but no migration from our host exists, so ignore it
destroy.assert_called_once_with(self.context, instance_2, None,
{}, True)
@mock.patch('nova.compute.manager.ComputeManager.'
'_destroy_evacuated_instances')
@mock.patch('nova.compute.manager.LOG')
def test_init_host_foreign_instance(self, mock_log, mock_destroy):
inst = mock.MagicMock()
inst.host = self.compute.host + '-alt'
self.compute._init_instance(mock.sentinel.context, inst)
self.assertFalse(inst.save.called)
self.assertTrue(mock_log.warning.called)
msg = mock_log.warning.call_args_list[0]
self.assertIn('appears to not be owned by this host', msg[0][0])
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
def test_error_out_instance_on_exception_not_implemented_err(self,
inst_update_mock):
instance = fake_instance.fake_instance_obj(self.context)
def do_test():
with self.compute._error_out_instance_on_exception(
self.context, instance, instance_state=vm_states.STOPPED):
raise NotImplementedError('test')
self.assertRaises(NotImplementedError, do_test)
inst_update_mock.assert_called_once_with(
self.context, instance,
vm_state=vm_states.STOPPED, task_state=None)
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
def test_error_out_instance_on_exception_inst_fault_rollback(self,
inst_update_mock):
instance = fake_instance.fake_instance_obj(self.context)
def do_test():
with self.compute._error_out_instance_on_exception(self.context,
instance):
raise exception.InstanceFaultRollback(
inner_exception=test.TestingException('test'))
self.assertRaises(test.TestingException, do_test)
inst_update_mock.assert_called_once_with(
self.context, instance,
vm_state=vm_states.ACTIVE, task_state=None)
@mock.patch('nova.compute.manager.ComputeManager.'
'_set_instance_obj_error_state')
def test_error_out_instance_on_exception_unknown_with_quotas(self,
set_error):
instance = fake_instance.fake_instance_obj(self.context)
quotas = mock.create_autospec(objects.Quotas, spec_set=True)
def do_test():
with self.compute._error_out_instance_on_exception(
self.context, instance, quotas):
raise test.TestingException('test')
self.assertRaises(test.TestingException, do_test)
self.assertEqual(1, len(quotas.method_calls))
self.assertEqual(mock.call.rollback(), quotas.method_calls[0])
set_error.assert_called_once_with(self.context, instance)
def test_cleanup_volumes(self):
instance = fake_instance.fake_instance_obj(self.context)
bdm_do_not_delete_dict = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id1', 'source_type': 'image',
'delete_on_termination': False})
bdm_delete_dict = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id2', 'source_type': 'image',
'delete_on_termination': True})
bdms = block_device_obj.block_device_make_list(self.context,
[bdm_do_not_delete_dict, bdm_delete_dict])
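        # Only the BDM flagged delete_on_termination=True should result in
        # a volume delete call.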
with mock.patch.object(self.compute.volume_api,
'delete') as volume_delete:
self.compute._cleanup_volumes(self.context, instance.uuid, bdms)
volume_delete.assert_called_once_with(self.context,
bdms[1].volume_id)
def test_cleanup_volumes_exception_do_not_raise(self):
instance = fake_instance.fake_instance_obj(self.context)
bdm_dict1 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id1', 'source_type': 'image',
'delete_on_termination': True})
bdm_dict2 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id2', 'source_type': 'image',
'delete_on_termination': True})
bdms = block_device_obj.block_device_make_list(self.context,
[bdm_dict1, bdm_dict2])
with mock.patch.object(self.compute.volume_api,
'delete',
side_effect=[test.TestingException(), None]) as volume_delete:
self.compute._cleanup_volumes(self.context, instance.uuid, bdms,
raise_exc=False)
calls = [mock.call(self.context, bdm.volume_id) for bdm in bdms]
self.assertEqual(calls, volume_delete.call_args_list)
def test_cleanup_volumes_exception_raise(self):
instance = fake_instance.fake_instance_obj(self.context)
bdm_dict1 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id1', 'source_type': 'image',
'delete_on_termination': True})
bdm_dict2 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id2', 'source_type': 'image',
'delete_on_termination': True})
bdms = block_device_obj.block_device_make_list(self.context,
[bdm_dict1, bdm_dict2])
with mock.patch.object(self.compute.volume_api,
'delete',
side_effect=[test.TestingException(), None]) as volume_delete:
self.assertRaises(test.TestingException,
self.compute._cleanup_volumes, self.context, instance.uuid,
bdms)
calls = [mock.call(self.context, bdm.volume_id) for bdm in bdms]
self.assertEqual(calls, volume_delete.call_args_list)
def test_stop_instance_task_state_none_power_state_shutdown(self):
# Tests that stop_instance doesn't puke when the instance power_state
# is shutdown and the task_state is None.
instance = fake_instance.fake_instance_obj(
self.context, vm_state=vm_states.ACTIVE,
task_state=None, power_state=power_state.SHUTDOWN)
@mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.SHUTDOWN)
@mock.patch.object(self.compute, '_notify_about_instance_usage')
@mock.patch.object(self.compute, '_power_off_instance')
@mock.patch.object(instance, 'save')
def do_test(save_mock, power_off_mock, notify_mock, get_state_mock):
# run the code
self.compute.stop_instance(self.context, instance, True)
# assert the calls
self.assertEqual(2, get_state_mock.call_count)
notify_mock.assert_has_calls([
mock.call(self.context, instance, 'power_off.start'),
mock.call(self.context, instance, 'power_off.end')
])
power_off_mock.assert_called_once_with(
self.context, instance, True)
save_mock.assert_called_once_with(
expected_task_state=[task_states.POWERING_OFF, None])
self.assertEqual(power_state.SHUTDOWN, instance.power_state)
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.STOPPED, instance.vm_state)
do_test()
def test_reset_network_driver_not_implemented(self):
instance = fake_instance.fake_instance_obj(self.context)
@mock.patch.object(self.compute.driver, 'reset_network',
side_effect=NotImplementedError())
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
def do_test(mock_add_fault, mock_reset):
self.assertRaises(messaging.ExpectedException,
self.compute.reset_network,
self.context,
instance)
self.compute = utils.ExceptionHelper(self.compute)
self.assertRaises(NotImplementedError,
self.compute.reset_network,
self.context,
instance)
do_test()
def test_rebuild_default_impl(self):
def _detach(context, bdms):
            # NOTE(rpodolyaka): check that the instance has been powered
            # off by the time we detach block devices; the exact call
            # arguments are checked below
self.assertTrue(mock_power_off.called)
self.assertFalse(mock_destroy.called)
def _attach(context, instance, bdms, do_check_attach=True):
return {'block_device_mapping': 'shared_block_storage'}
def _spawn(context, instance, image_meta, injected_files,
admin_password, network_info=None, block_device_info=None):
self.assertEqual(block_device_info['block_device_mapping'],
'shared_block_storage')
with contextlib.nested(
mock.patch.object(self.compute.driver, 'destroy',
return_value=None),
mock.patch.object(self.compute.driver, 'spawn',
side_effect=_spawn),
mock.patch.object(objects.Instance, 'save',
return_value=None),
mock.patch.object(self.compute, '_power_off_instance',
return_value=None)
        ) as (
mock_destroy,
mock_spawn,
mock_save,
mock_power_off
):
instance = fake_instance.fake_instance_obj(self.context)
instance.task_state = task_states.REBUILDING
instance.save(expected_task_state=[task_states.REBUILDING])
self.compute._rebuild_default_impl(self.context,
instance,
None,
[],
admin_password='new_pass',
bdms=[],
detach_block_devices=_detach,
attach_block_devices=_attach,
network_info=None,
recreate=False,
block_device_info=None,
preserve_ephemeral=False)
self.assertTrue(mock_save.called)
self.assertTrue(mock_spawn.called)
mock_destroy.assert_called_once_with(
self.context, instance,
network_info=None, block_device_info=None)
mock_power_off.assert_called_once_with(
self.context, instance, clean_shutdown=True)
@mock.patch.object(utils, 'last_completed_audit_period',
return_value=(0, 0))
@mock.patch.object(time, 'time', side_effect=[10, 20, 21])
@mock.patch.object(objects.InstanceList, 'get_by_host', return_value=[])
@mock.patch.object(objects.BandwidthUsage, 'get_by_instance_uuid_and_mac')
@mock.patch.object(db, 'bw_usage_update')
def test_poll_bandwidth_usage(self, bw_usage_update, get_by_uuid_mac,
get_by_host, time, last_completed_audit):
bw_counters = [{'uuid': 'fake-uuid', 'mac_address': 'fake-mac',
'bw_in': 1, 'bw_out': 2}]
usage = objects.BandwidthUsage()
usage.bw_in = 3
usage.bw_out = 4
usage.last_ctr_in = 0
usage.last_ctr_out = 0
self.flags(bandwidth_poll_interval=1)
get_by_uuid_mac.return_value = usage
_time = timeutils.utcnow()
bw_usage_update.return_value = {'uuid': '', 'mac': '',
'start_period': _time, 'last_refreshed': _time, 'bw_in': 0,
'bw_out': 0, 'last_ctr_in': 0, 'last_ctr_out': 0, 'deleted': 0,
'created_at': _time, 'updated_at': _time, 'deleted_at': _time}
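        # time.time() is stubbed to advance (10, 20, 21) so the elapsed
        # time exceeds bandwidth_poll_interval=1 and the poll actually
        # collects and records the counters.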
with mock.patch.object(self.compute.driver,
'get_all_bw_counters', return_value=bw_counters):
self.compute._poll_bandwidth_usage(self.context)
get_by_uuid_mac.assert_called_once_with(self.context, 'fake-uuid',
'fake-mac', start_period=0, use_slave=True)
            # NOTE(sdague): bw_usage_update happens at some time in
            # the future, so the exact value of last_refreshed is
            # irrelevant.
bw_usage_update.assert_called_once_with(self.context, 'fake-uuid',
'fake-mac', 0, 4, 6, 1, 2,
last_refreshed=mock.ANY,
update_cells=False)
def test_reverts_task_state_instance_not_found(self):
        # Tests that the reverts_task_state decorator in the compute
        # manager will not log a traceback when an InstanceNotFound is
        # raised.
instance = objects.Instance(uuid='fake')
instance_update_mock = mock.Mock(
side_effect=exception.InstanceNotFound(instance_id=instance.uuid))
self.compute._instance_update = instance_update_mock
log_mock = mock.Mock()
manager.LOG = log_mock
@manager.reverts_task_state
def fake_function(self, context, instance):
raise test.TestingException()
self.assertRaises(test.TestingException, fake_function,
self, self.context, instance)
self.assertFalse(log_mock.called)
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'update_instance_info')
def test_update_scheduler_instance_info(self, mock_update):
instance = objects.Instance(uuid='fake')
self.compute._update_scheduler_instance_info(self.context, instance)
self.assertEqual(mock_update.call_count, 1)
args = mock_update.call_args[0]
self.assertNotEqual(args[0], self.context)
self.assertIsInstance(args[0], self.context.__class__)
self.assertEqual(args[1], self.compute.host)
# Send a single instance; check that the method converts to an
# InstanceList
self.assertIsInstance(args[2], objects.InstanceList)
self.assertEqual(args[2].objects[0], instance)
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'delete_instance_info')
def test_delete_scheduler_instance_info(self, mock_delete):
self.compute._delete_scheduler_instance_info(self.context,
mock.sentinel.inst_uuid)
self.assertEqual(mock_delete.call_count, 1)
args = mock_delete.call_args[0]
self.assertNotEqual(args[0], self.context)
self.assertIsInstance(args[0], self.context.__class__)
self.assertEqual(args[1], self.compute.host)
self.assertEqual(args[2], mock.sentinel.inst_uuid)
@mock.patch.object(nova.context.RequestContext, 'elevated')
@mock.patch.object(nova.objects.InstanceList, 'get_by_host')
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'sync_instance_info')
def test_sync_scheduler_instance_info(self, mock_sync, mock_get_by_host,
mock_elevated):
inst1 = objects.Instance(uuid='fake1')
inst2 = objects.Instance(uuid='fake2')
inst3 = objects.Instance(uuid='fake3')
exp_uuids = [inst.uuid for inst in [inst1, inst2, inst3]]
mock_get_by_host.return_value = objects.InstanceList(
objects=[inst1, inst2, inst3])
fake_elevated = context.get_admin_context()
mock_elevated.return_value = fake_elevated
self.compute._sync_scheduler_instance_info(self.context)
mock_get_by_host.assert_called_once_with(
fake_elevated, self.compute.host, expected_attrs=[],
use_slave=True)
mock_sync.assert_called_once_with(fake_elevated, self.compute.host,
exp_uuids)
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'sync_instance_info')
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'delete_instance_info')
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'update_instance_info')
def test_scheduler_info_updates_off(self, mock_update, mock_delete,
mock_sync):
mgr = self.compute
mgr.send_instance_updates = False
mgr._update_scheduler_instance_info(self.context,
mock.sentinel.instance)
mgr._delete_scheduler_instance_info(self.context,
mock.sentinel.instance_uuid)
mgr._sync_scheduler_instance_info(self.context)
# None of the calls should have been made
self.assertFalse(mock_update.called)
self.assertFalse(mock_delete.called)
self.assertFalse(mock_sync.called)
def test_refresh_instance_security_rules_takes_non_object(self):
inst = fake_instance.fake_db_instance()
with mock.patch.object(self.compute.driver,
'refresh_instance_security_rules') as mock_r:
self.compute.refresh_instance_security_rules(self.context, inst)
self.assertIsInstance(mock_r.call_args_list[0][0][0],
objects.Instance)
def test_set_instance_obj_error_state_with_clean_task_state(self):
instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.BUILDING, task_state=task_states.SPAWNING)
with mock.patch.object(instance, 'save'):
self.compute._set_instance_obj_error_state(self.context, instance,
clean_task_state=True)
self.assertEqual(vm_states.ERROR, instance.vm_state)
self.assertIsNone(instance.task_state)
def test_set_instance_obj_error_state_by_default(self):
instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.BUILDING, task_state=task_states.SPAWNING)
with mock.patch.object(instance, 'save'):
self.compute._set_instance_obj_error_state(self.context, instance)
self.assertEqual(vm_states.ERROR, instance.vm_state)
self.assertEqual(task_states.SPAWNING, instance.task_state)
@mock.patch.object(objects.Instance, 'save')
def test_instance_update(self, mock_save):
instance = objects.Instance(task_state=task_states.SCHEDULING,
vm_state=vm_states.BUILDING)
updates = {'task_state': None, 'vm_state': vm_states.ERROR}
with mock.patch.object(self.compute,
'_update_resource_tracker') as mock_rt:
self.compute._instance_update(self.context, instance, **updates)
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ERROR, instance.vm_state)
mock_save.assert_called_once_with()
mock_rt.assert_called_once_with(self.context, instance)
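
# The following test case drives the compute manager's build-and-run path
# end to end, with the driver, conductor and resource-tracker interactions
# stubbed out.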
class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
def setUp(self):
super(ComputeManagerBuildInstanceTestCase, self).setUp()
self.compute = importutils.import_object(CONF.compute_manager)
self.context = context.RequestContext('fake', 'fake')
self.instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.ACTIVE,
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
self.admin_pass = 'pass'
self.injected_files = []
self.image = {}
self.node = 'fake-node'
self.limits = {}
self.requested_networks = []
self.security_groups = []
self.block_device_mapping = []
self.filter_properties = {'retry': {'num_attempts': 1,
'hosts': [[self.compute.host,
'fake-node']]}}
def fake_network_info():
return network_model.NetworkInfo([{'address': '1.2.3.4'}])
self.network_info = network_model.NetworkInfoAsyncWrapper(
fake_network_info)
        self.block_device_info = self.compute._prep_block_device(
            self.context, self.instance, self.block_device_mapping)
# override tracker with a version that doesn't need the database:
fake_rt = fake_resource_tracker.FakeResourceTracker(self.compute.host,
self.compute.driver, self.node)
self.compute._resource_tracker_dict[self.node] = fake_rt
def _do_build_instance_update(self, reschedule_update=False):
self.mox.StubOutWithMock(self.instance, 'save')
self.instance.save(
expected_task_state=(task_states.SCHEDULING, None)).AndReturn(
self.instance)
if reschedule_update:
self.instance.save().AndReturn(self.instance)
def _build_and_run_instance_update(self):
self.mox.StubOutWithMock(self.instance, 'save')
self._build_resources_instance_update(stub=False)
self.instance.save(expected_task_state=
task_states.BLOCK_DEVICE_MAPPING).AndReturn(self.instance)
def _build_resources_instance_update(self, stub=True):
if stub:
self.mox.StubOutWithMock(self.instance, 'save')
self.instance.save().AndReturn(self.instance)
def _notify_about_instance_usage(self, event, stub=True, **kwargs):
if stub:
self.mox.StubOutWithMock(self.compute,
'_notify_about_instance_usage')
self.compute._notify_about_instance_usage(self.context, self.instance,
event, **kwargs)
def _instance_action_events(self):
self.mox.StubOutWithMock(objects.InstanceActionEvent, 'event_start')
self.mox.StubOutWithMock(objects.InstanceActionEvent,
'event_finish_with_failure')
objects.InstanceActionEvent.event_start(
self.context, self.instance.uuid, mox.IgnoreArg(),
want_result=False)
objects.InstanceActionEvent.event_finish_with_failure(
self.context, self.instance.uuid, mox.IgnoreArg(),
exc_val=mox.IgnoreArg(), exc_tb=mox.IgnoreArg(),
want_result=False)
@staticmethod
def _assert_build_instance_hook_called(mock_hooks, result):
# NOTE(coreywright): we want to test the return value of
# _do_build_and_run_instance, but it doesn't bubble all the way up, so
# mock the hooking, which allows us to test that too, though a little
# too intimately
mock_hooks.setdefault().run_post.assert_called_once_with(
'build_instance', result, mock.ANY, mock.ANY, f=None)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_build_and_run_instance_called_with_proper_args(self, mock_spawn,
mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
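        # Make spawn_n synchronous so the build runs inline and its side
        # effects can be asserted immediately.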
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.ACTIVE)
    # This test verifies that, when an icehouse-compatible RPC call is sent
    # to a juno compute node, the NetworkRequest object can be loaded from
    # a three-item tuple.
@mock.patch('nova.objects.Instance.save')
@mock.patch('nova.compute.manager.ComputeManager._build_and_run_instance')
@mock.patch('nova.utils.spawn_n')
def test_build_and_run_instance_with_icehouse_requested_network(
self, mock_spawn, mock_build_and_run, mock_save):
fake_server_actions.stub_out_action_events(self.stubs)
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
mock_save.return_value = self.instance
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=[objects.NetworkRequest(
network_id='fake_network_id',
address='10.0.0.1',
port_id='fake_port_id')],
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
requested_network = mock_build_and_run.call_args[0][5][0]
self.assertEqual('fake_network_id', requested_network.network_id)
self.assertEqual('10.0.0.1', str(requested_network.address))
self.assertEqual('fake_port_id', requested_network.port_id)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_build_abort_exception(self, mock_spawn, mock_hooks):
def fake_spawn(f, *args, **kwargs):
# NOTE(danms): Simulate the detached nature of spawn so that
# we confirm that the inner task has the fault logic
try:
return f(*args, **kwargs)
except Exception:
pass
mock_spawn.side_effect = fake_spawn
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute, '_cleanup_volumes')
self.mox.StubOutWithMock(compute_utils, 'add_instance_fault_from_exc')
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self.mox.StubOutWithMock(self.compute, '_set_instance_obj_error_state')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.BuildAbortException(reason='',
instance_uuid=self.instance.uuid))
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
self.compute._cleanup_volumes(self.context, self.instance.uuid,
self.block_device_mapping, raise_exc=False)
compute_utils.add_instance_fault_from_exc(self.context,
self.instance, mox.IgnoreArg(), mox.IgnoreArg())
self.compute._nil_out_instance_obj_host_and_node(self.instance)
self.compute._set_instance_obj_error_state(self.context, self.instance,
clean_task_state=True)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.FAILED)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception(self, mock_spawn, mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute, '_set_instance_obj_error_state')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self.mox.StubOutWithMock(self.compute.network_api,
'cleanup_instance_network_on_host')
self._do_build_instance_update(reschedule_update=True)
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute.network_api.cleanup_instance_network_on_host(self.context,
self.instance, self.compute.host)
self.compute._nil_out_instance_obj_host_and_node(self.instance)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.RESCHEDULED)
def test_rescheduled_exception_with_non_ascii_exception(self):
exc = exception.NovaException(u's\xe9quence')
self.mox.StubOutWithMock(self.compute.driver, 'spawn')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self.compute.driver.spawn(self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info).AndRaise(exc)
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.mox.ReplayAll()
with mock.patch.object(self.instance, 'save') as mock_save:
self.assertRaises(exception.RescheduledException,
self.compute._build_and_run_instance,
self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node,
self.limits, self.filter_properties)
mock_save.assert_has_calls([
mock.call(),
mock.call(),
mock.call(expected_task_state='block_device_mapping'),
])
@mock.patch.object(manager.ComputeManager, '_build_and_run_instance')
@mock.patch.object(conductor_api.ComputeTaskAPI, 'build_instances')
@mock.patch.object(network_api.API, 'cleanup_instance_network_on_host')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(objects.InstanceActionEvent, 'event_start')
@mock.patch.object(objects.InstanceActionEvent,
'event_finish_with_failure')
@mock.patch.object(virt_driver.ComputeDriver, 'macs_for_instance')
def test_rescheduled_exception_with_network_allocated(self,
mock_macs_for_instance, mock_event_finish,
mock_event_start, mock_ins_save, mock_cleanup_network,
mock_build_ins, mock_build_and_run):
instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.ACTIVE,
system_metadata={'network_allocated': 'True'},
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
mock_ins_save.return_value = instance
mock_macs_for_instance.return_value = []
mock_build_and_run.side_effect = exception.RescheduledException(
reason='', instance_uuid=self.instance.uuid)
self.compute._do_build_and_run_instance(self.context, instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
mock_build_and_run.assert_called_once_with(self.context,
instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties)
mock_cleanup_network.assert_called_once_with(
self.context, instance, self.compute.host)
mock_build_ins.assert_called_once_with(self.context,
[instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception_without_retry(self, mock_spawn, mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(compute_utils, 'add_instance_fault_from_exc')
self.mox.StubOutWithMock(self.compute, '_set_instance_obj_error_state')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute, '_cleanup_volumes')
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
{}).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
compute_utils.add_instance_fault_from_exc(self.context, self.instance,
mox.IgnoreArg(), mox.IgnoreArg())
self.compute._nil_out_instance_obj_host_and_node(self.instance)
self.compute._set_instance_obj_error_state(self.context, self.instance,
clean_task_state=True)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties={},
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.FAILED)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception_do_not_deallocate_network(self, mock_spawn,
mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute.driver,
'deallocate_networks_on_reschedule')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self.mox.StubOutWithMock(self.compute.network_api,
'cleanup_instance_network_on_host')
self._do_build_instance_update(reschedule_update=True)
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute.driver.deallocate_networks_on_reschedule(
self.instance).AndReturn(False)
self.compute.network_api.cleanup_instance_network_on_host(
self.context, self.instance, self.compute.host)
self.compute._nil_out_instance_obj_host_and_node(self.instance)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.RESCHEDULED)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception_deallocate_network(self, mock_spawn,
mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute.driver,
'deallocate_networks_on_reschedule')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self._do_build_instance_update(reschedule_update=True)
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute.driver.deallocate_networks_on_reschedule(
self.instance).AndReturn(True)
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
self.compute._nil_out_instance_obj_host_and_node(self.instance)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.RESCHEDULED)
def _test_build_and_run_exceptions(self, exc, set_error=False,
cleanup_volumes=False, nil_out_host_and_node=False):
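# Shared harness: raise ``exc`` from _build_and_run_instance and assert the
# cleanup that must follow. set_error -> the instance goes to ERROR and a
# fault is recorded; cleanup_volumes -> _cleanup_volumes is expected;
# nil_out_host_and_node -> the instance's host/node are expected to be
# nilled out before the failure is reported.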
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute, '_cleanup_volumes')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(exc)
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
if cleanup_volumes:
self.compute._cleanup_volumes(self.context, self.instance.uuid,
self.block_device_mapping, raise_exc=False)
if nil_out_host_and_node:
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self.compute._nil_out_instance_obj_host_and_node(self.instance)
if set_error:
self.mox.StubOutWithMock(self.compute,
'_set_instance_obj_error_state')
self.mox.StubOutWithMock(compute_utils,
'add_instance_fault_from_exc')
compute_utils.add_instance_fault_from_exc(self.context,
self.instance, mox.IgnoreArg(), mox.IgnoreArg())
self.compute._set_instance_obj_error_state(self.context,
self.instance, clean_task_state=True)
self._instance_action_events()
self.mox.ReplayAll()
with contextlib.nested(
mock.patch('nova.utils.spawn_n'),
mock.patch('nova.hooks._HOOKS')
) as (
mock_spawn,
mock_hooks
):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.FAILED)
def test_build_and_run_notfound_exception(self):
self._test_build_and_run_exceptions(exception.InstanceNotFound(
instance_id=''))
def test_build_and_run_unexpecteddeleting_exception(self):
self._test_build_and_run_exceptions(
exception.UnexpectedDeletingTaskStateError(
instance_uuid='fake_uuid', expected={}, actual={}))
def test_build_and_run_buildabort_exception(self):
self._test_build_and_run_exceptions(
exception.BuildAbortException(instance_uuid='', reason=''),
set_error=True, cleanup_volumes=True, nil_out_host_and_node=True)
def test_build_and_run_unhandled_exception(self):
self._test_build_and_run_exceptions(test.TestingException(),
set_error=True, cleanup_volumes=True,
nil_out_host_and_node=True)
def test_instance_not_found(self):
exc = exception.InstanceNotFound(instance_id=1)
self.mox.StubOutWithMock(self.compute.driver, 'spawn')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self.compute.driver.spawn(self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info).AndRaise(exc)
self._notify_about_instance_usage('create.end',
fault=exc, stub=False)
self.mox.ReplayAll()
with mock.patch.object(self.instance, 'save') as mock_save:
self.assertRaises(exception.InstanceNotFound,
self.compute._build_and_run_instance,
self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node,
self.limits, self.filter_properties)
mock_save.assert_has_calls([
mock.call(),
mock.call(),
mock.call(expected_task_state='block_device_mapping'),
])
def test_reschedule_on_exception(self):
self.mox.StubOutWithMock(self.compute.driver, 'spawn')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
exc = test.TestingException()
self.compute.driver.spawn(self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info).AndRaise(exc)
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.mox.ReplayAll()
with mock.patch.object(self.instance, 'save') as mock_save:
self.assertRaises(exception.RescheduledException,
self.compute._build_and_run_instance,
self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node,
self.limits, self.filter_properties)
mock_save.assert_has_calls([
mock.call(),
mock.call(),
mock.call(expected_task_state='block_device_mapping'),
])
def test_spawn_network_alloc_failure(self):
# Because network allocation is asynchronous, failures may not present
# themselves until the virt spawn method is called.
self._test_build_and_run_spawn_exceptions(exception.NoMoreNetworks())
def test_build_and_run_no_more_fixedips_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.NoMoreFixedIps("error message"))
def test_build_and_run_flavor_disk_smaller_image_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.FlavorDiskSmallerThanImage(
flavor_size=0, image_size=1))
def test_build_and_run_flavor_disk_smaller_min_disk(self):
self._test_build_and_run_spawn_exceptions(
exception.FlavorDiskSmallerThanMinDisk(
flavor_size=0, image_min_disk=1))
def test_build_and_run_flavor_memory_too_small_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.FlavorMemoryTooSmall())
def test_build_and_run_image_not_active_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.ImageNotActive(image_id=self.image.get('id')))
def test_build_and_run_image_unacceptable_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.ImageUnacceptable(image_id=self.image.get('id'),
reason=""))
def _test_build_and_run_spawn_exceptions(self, exc):
with contextlib.nested(
mock.patch.object(self.compute.driver, 'spawn',
side_effect=exc),
mock.patch.object(self.instance, 'save',
side_effect=[self.instance, self.instance, self.instance]),
mock.patch.object(self.compute,
'_build_networks_for_instance',
return_value=network_model.NetworkInfo()),
mock.patch.object(self.compute,
'_notify_about_instance_usage'),
mock.patch.object(self.compute,
'_shutdown_instance'),
mock.patch.object(self.compute,
'_validate_instance_group_policy')
) as (spawn, save,
_build_networks_for_instance, _notify_about_instance_usage,
_shutdown_instance, _validate_instance_group_policy):
self.assertRaises(exception.BuildAbortException,
self.compute._build_and_run_instance, self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks,
self.security_groups, self.block_device_mapping, self.node,
self.limits, self.filter_properties)
_validate_instance_group_policy.assert_called_once_with(
self.context, self.instance, self.filter_properties)
_build_networks_for_instance.assert_has_calls(
[mock.call(self.context, self.instance,
self.requested_networks, self.security_groups)])
_notify_about_instance_usage.assert_has_calls([
mock.call(self.context, self.instance, 'create.start',
extra_usage_info={'image_name': self.image.get('name')}),
mock.call(self.context, self.instance, 'create.error',
fault=exc)])
save.assert_has_calls([
mock.call(),
mock.call(),
mock.call(
expected_task_state=task_states.BLOCK_DEVICE_MAPPING)])
spawn.assert_has_calls([mock.call(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info)])
_shutdown_instance.assert_called_once_with(self.context,
self.instance, self.block_device_mapping,
self.requested_networks, try_deallocate_networks=True)
@mock.patch('nova.utils.spawn_n')
def test_reschedule_on_resources_unavailable(self, mock_spawn):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
reason = 'resource unavailable'
exc = exception.ComputeResourcesUnavailable(reason=reason)
class FakeResourceTracker(object):
def instance_claim(self, context, instance, limits):
raise exc
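# A resource tracker whose claim always fails: the build must then be
# rescheduled through the conductor instead of spawning locally.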
self.mox.StubOutWithMock(self.compute, '_get_resource_tracker')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self.mox.StubOutWithMock(self.compute.network_api,
'cleanup_instance_network_on_host')
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self.compute._get_resource_tracker(self.node).AndReturn(
FakeResourceTracker())
self._do_build_instance_update(reschedule_update=True)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.compute.network_api.cleanup_instance_network_on_host(
self.context, self.instance, self.compute.host)
self.compute._nil_out_instance_obj_host_and_node(self.instance)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
def test_build_resources_buildabort_reraise(self):
exc = exception.BuildAbortException(
instance_uuid=self.instance.uuid, reason='')
self.mox.StubOutWithMock(self.compute, '_build_resources')
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups, self.image,
self.block_device_mapping).AndRaise(exc)
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.mox.ReplayAll()
with mock.patch.object(self.instance, 'save') as mock_save:
self.assertRaises(exception.BuildAbortException,
self.compute._build_and_run_instance,
self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks,
self.security_groups, self.block_device_mapping,
self.node, self.limits, self.filter_properties)
mock_save.assert_called_once_with()
def test_build_resources_reraises_on_failed_bdm_prep(self):
self.mox.StubOutWithMock(self.compute, '_prep_block_device')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self._build_resources_instance_update()
self.compute._prep_block_device(self.context, self.instance,
self.block_device_mapping).AndRaise(test.TestingException())
self.mox.ReplayAll()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e, exception.BuildAbortException)
def test_failed_bdm_prep_from_delete_raises_unexpected(self):
with contextlib.nested(
mock.patch.object(self.compute,
'_build_networks_for_instance',
return_value=self.network_info),
mock.patch.object(self.instance, 'save',
side_effect=exception.UnexpectedDeletingTaskStateError(
instance_uuid='fake_uuid',
actual={'task_state': task_states.DELETING},
expected={'task_state': None})),
) as (_build_networks_for_instance, save):
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e,
exception.UnexpectedDeletingTaskStateError)
_build_networks_for_instance.assert_has_calls(
[mock.call(self.context, self.instance,
self.requested_networks, self.security_groups)])
save.assert_has_calls([mock.call()])
def test_build_resources_aborts_on_failed_network_alloc(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndRaise(
test.TestingException())
self.mox.ReplayAll()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups, self.image,
self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e, exception.BuildAbortException)
def test_failed_network_alloc_from_delete_raises_unexpected(self):
with mock.patch.object(self.compute,
'_build_networks_for_instance') as _build_networks:
exc = exception.UnexpectedDeletingTaskStateError
_build_networks.side_effect = exc(
instance_uuid='fake_uuid',
actual={'task_state': task_states.DELETING},
expected={'task_state': None})
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e, exc)
_build_networks.assert_has_calls(
[mock.call(self.context, self.instance,
self.requested_networks, self.security_groups)])
def test_build_resources_with_network_info_obj_on_spawn_failure(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
network_model.NetworkInfo([{'address': '1.2.3.4'}]))
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._build_resources_instance_update()
self.mox.ReplayAll()
test_exception = test.TestingException()
def fake_spawn():
raise test_exception
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
fake_spawn()
except Exception as e:
self.assertEqual(test_exception, e)
def test_build_resources_cleans_up_and_reraises_on_spawn_failure(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._build_resources_instance_update()
self.mox.ReplayAll()
test_exception = test.TestingException()
def fake_spawn():
raise test_exception
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
fake_spawn()
except Exception as e:
self.assertEqual(test_exception, e)
@mock.patch('nova.network.model.NetworkInfoAsyncWrapper.wait')
@mock.patch(
'nova.compute.manager.ComputeManager._build_networks_for_instance')
@mock.patch('nova.objects.Instance.save')
def test_build_resources_instance_not_found_before_yield(
self, mock_save, mock_build_network, mock_info_wait):
mock_build_network.return_value = self.network_info
expected_exc = exception.InstanceNotFound(
instance_id=self.instance.uuid)
mock_save.side_effect = expected_exc
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
raise
except Exception as e:
self.assertEqual(expected_exc, e)
mock_build_network.assert_called_once_with(self.context, self.instance,
self.requested_networks, self.security_groups)
mock_info_wait.assert_called_once_with(do_raise=False)
@mock.patch('nova.network.model.NetworkInfoAsyncWrapper.wait')
@mock.patch(
'nova.compute.manager.ComputeManager._build_networks_for_instance')
@mock.patch('nova.objects.Instance.save')
def test_build_resources_unexpected_task_error_before_yield(
self, mock_save, mock_build_network, mock_info_wait):
mock_build_network.return_value = self.network_info
mock_save.side_effect = exception.UnexpectedTaskStateError(
instance_uuid='fake_uuid', expected={}, actual={})
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
raise
except exception.BuildAbortException:
pass
mock_build_network.assert_called_once_with(self.context, self.instance,
self.requested_networks, self.security_groups)
mock_info_wait.assert_called_once_with(do_raise=False)
@mock.patch('nova.network.model.NetworkInfoAsyncWrapper.wait')
@mock.patch(
'nova.compute.manager.ComputeManager._build_networks_for_instance')
@mock.patch('nova.objects.Instance.save')
def test_build_resources_exception_before_yield(
self, mock_save, mock_build_network, mock_info_wait):
mock_build_network.return_value = self.network_info
mock_save.side_effect = Exception()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
raise
except exception.BuildAbortException:
pass
mock_build_network.assert_called_once_with(self.context, self.instance,
self.requested_networks, self.security_groups)
mock_info_wait.assert_called_once_with(do_raise=False)
def test_build_resources_aborts_on_cleanup_failure(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False).AndRaise(
test.TestingException())
self._build_resources_instance_update()
self.mox.ReplayAll()
def fake_spawn():
raise test.TestingException()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
fake_spawn()
except Exception as e:
self.assertIsInstance(e, exception.BuildAbortException)
def test_build_networks_if_not_allocated(self):
instance = fake_instance.fake_instance_obj(self.context,
system_metadata={},
expected_attrs=['system_metadata'])
self.mox.StubOutWithMock(self.compute.network_api,
'get_instance_nw_info')
self.mox.StubOutWithMock(self.compute, '_allocate_network')
self.compute._allocate_network(self.context, instance,
self.requested_networks, None, self.security_groups, None)
self.mox.ReplayAll()
self.compute._build_networks_for_instance(self.context, instance,
self.requested_networks, self.security_groups)
def test_build_networks_if_allocated_false(self):
instance = fake_instance.fake_instance_obj(self.context,
system_metadata=dict(network_allocated='False'),
expected_attrs=['system_metadata'])
self.mox.StubOutWithMock(self.compute.network_api,
'get_instance_nw_info')
self.mox.StubOutWithMock(self.compute, '_allocate_network')
self.compute._allocate_network(self.context, instance,
self.requested_networks, None, self.security_groups, None)
self.mox.ReplayAll()
self.compute._build_networks_for_instance(self.context, instance,
self.requested_networks, self.security_groups)
def test_return_networks_if_found(self):
instance = fake_instance.fake_instance_obj(self.context,
system_metadata=dict(network_allocated='True'),
expected_attrs=['system_metadata'])
def fake_network_info():
return network_model.NetworkInfo([{'address': '123.123.123.123'}])
self.mox.StubOutWithMock(self.compute.network_api,
'get_instance_nw_info')
self.mox.StubOutWithMock(self.compute, '_allocate_network')
self.mox.StubOutWithMock(self.compute.network_api,
'setup_instance_network_on_host')
self.compute.network_api.setup_instance_network_on_host(
self.context, instance, instance.host)
self.compute.network_api.get_instance_nw_info(
self.context, instance).AndReturn(
network_model.NetworkInfoAsyncWrapper(fake_network_info))
self.mox.ReplayAll()
self.compute._build_networks_for_instance(self.context, instance,
self.requested_networks, self.security_groups)
def test_cleanup_allocated_networks_instance_not_found(self):
with contextlib.nested(
mock.patch.object(self.compute, '_deallocate_network'),
mock.patch.object(self.instance, 'save',
side_effect=exception.InstanceNotFound(instance_id=''))
) as (_deallocate_network, save):
# Testing that this doesn't raise an exception
self.compute._cleanup_allocated_networks(self.context,
self.instance, self.requested_networks)
save.assert_called_once_with()
self.assertEqual('False',
self.instance.system_metadata['network_allocated'])
@mock.patch.object(manager.ComputeManager, '_instance_update')
def test_launched_at_in_create_end_notification(self,
mock_instance_update):
def fake_notify(*args, **kwargs):
if args[2] == 'create.end':
# Check that launched_at is set on the instance
self.assertIsNotNone(args[1].launched_at)
with contextlib.nested(
mock.patch.object(self.compute,
'_update_scheduler_instance_info'),
mock.patch.object(self.compute.driver, 'spawn'),
mock.patch.object(self.compute,
'_build_networks_for_instance', return_value=[]),
mock.patch.object(self.instance, 'save'),
mock.patch.object(self.compute, '_notify_about_instance_usage',
side_effect=fake_notify)
) as (mock_upd, mock_spawn, mock_networks, mock_save, mock_notify):
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties)
expected_call = mock.call(self.context, self.instance,
'create.end', extra_usage_info={'message': u'Success'},
network_info=[])
create_end_call = mock_notify.call_args_list[
mock_notify.call_count - 1]
self.assertEqual(expected_call, create_end_call)
@mock.patch.object(manager.ComputeManager, '_instance_update')
def test_create_end_on_instance_delete(self, mock_instance_update):
def fake_notify(*args, **kwargs):
if args[2] == 'create.end':
# Check that launched_at is set on the instance
self.assertIsNotNone(args[1].launched_at)
exc = exception.InstanceNotFound(instance_id='')
with contextlib.nested(
mock.patch.object(self.compute.driver, 'spawn'),
mock.patch.object(self.compute,
'_build_networks_for_instance', return_value=[]),
mock.patch.object(self.instance, 'save',
side_effect=[None, None, None, exc]),
mock.patch.object(self.compute, '_notify_about_instance_usage',
side_effect=fake_notify)
) as (mock_spawn, mock_networks, mock_save, mock_notify):
self.assertRaises(exception.InstanceNotFound,
self.compute._build_and_run_instance, self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks,
self.security_groups, self.block_device_mapping, self.node,
self.limits, self.filter_properties)
expected_call = mock.call(self.context, self.instance,
'create.end', fault=exc)
create_end_call = mock_notify.call_args_list[
mock_notify.call_count - 1]
self.assertEqual(expected_call, create_end_call)
class ComputeManagerMigrationTestCase(test.NoDBTestCase):
def setUp(self):
super(ComputeManagerMigrationTestCase, self).setUp()
self.compute = importutils.import_object(CONF.compute_manager)
self.context = context.RequestContext('fake', 'fake')
self.image = {}
self.instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.ACTIVE,
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
self.migration = objects.Migration(context=self.context.elevated(),
new_instance_type_id=7)
self.migration.status = 'migrating'
fake_server_actions.stub_out_action_events(self.stubs)
@mock.patch.object(objects.Migration, 'save')
@mock.patch.object(objects.Migration, 'obj_as_admin')
def test_errors_out_migration_decorator(self, mock_obj_as_admin,
mock_save):
# Tests that errors_out_migration decorator in compute manager
# sets migration status to 'error' when an exception is raised
# from decorated method
instance = fake_instance.fake_instance_obj(self.context)
migration = objects.Migration()
migration.instance_uuid = instance.uuid
migration.status = 'migrating'
migration.id = 0
@manager.errors_out_migration
def fake_function(self, context, instance, migration):
raise test.TestingException()
mock_obj_as_admin.return_value = mock.MagicMock()
self.assertRaises(test.TestingException, fake_function,
self, self.context, instance, migration)
self.assertEqual('error', migration.status)
mock_save.assert_called_once_with()
mock_obj_as_admin.assert_called_once_with()
def test_finish_resize_failure(self):
with contextlib.nested(
mock.patch.object(self.compute, '_finish_resize',
side_effect=exception.ResizeError(reason='')),
mock.patch.object(db, 'instance_fault_create'),
mock.patch.object(self.compute, '_instance_update'),
mock.patch.object(self.instance, 'save'),
mock.patch.object(self.migration, 'save'),
mock.patch.object(self.migration, 'obj_as_admin',
return_value=mock.MagicMock())
) as (meth, fault_create, instance_update, instance_save,
migration_save, migration_obj_as_admin):
fault_create.return_value = (
test_instance_fault.fake_faults['fake-uuid'][0])
self.assertRaises(
exception.ResizeError, self.compute.finish_resize,
context=self.context, disk_info=[], image=self.image,
instance=self.instance, reservations=[],
migration=self.migration
)
self.assertEqual("error", self.migration.status)
migration_save.assert_called_once_with()
migration_obj_as_admin.assert_called_once_with()
def test_resize_instance_failure(self):
self.migration.dest_host = None
with contextlib.nested(
mock.patch.object(self.compute.driver,
'migrate_disk_and_power_off',
side_effect=exception.ResizeError(reason='')),
mock.patch.object(db, 'instance_fault_create'),
mock.patch.object(self.compute, '_instance_update'),
mock.patch.object(self.migration, 'save'),
mock.patch.object(self.migration, 'obj_as_admin',
return_value=mock.MagicMock()),
mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
return_value=None),
mock.patch.object(self.instance, 'save'),
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute,
'_get_instance_block_device_info',
return_value=None),
mock.patch.object(objects.BlockDeviceMappingList,
'get_by_instance_uuid',
return_value=None),
mock.patch.object(objects.Flavor,
'get_by_id',
return_value=None)
) as (meth, fault_create, instance_update,
migration_save, migration_obj_as_admin, nw_info, save_inst,
notify, vol_block_info, bdm, flavor):
fault_create.return_value = (
test_instance_fault.fake_faults['fake-uuid'][0])
self.assertRaises(
exception.ResizeError, self.compute.resize_instance,
context=self.context, instance=self.instance, image=self.image,
reservations=[], migration=self.migration,
instance_type='type', clean_shutdown=True)
self.assertEqual("error", self.migration.status)
self.assertEqual([mock.call(), mock.call()],
migration_save.mock_calls)
self.assertEqual([mock.call(), mock.call()],
migration_obj_as_admin.mock_calls)
def _test_revert_resize_instance_destroy_disks(self, is_shared=False):
# This test asserts that _is_instance_storage_shared() is called from
# revert_resize() and the return value is passed to driver.destroy().
# Otherwise we could regress this.
@mock.patch.object(self.instance, 'revert_migration_context')
@mock.patch.object(self.compute.network_api, 'get_instance_nw_info')
@mock.patch.object(self.compute, '_is_instance_storage_shared')
@mock.patch.object(self.compute, 'finish_revert_resize')
@mock.patch.object(self.compute, '_instance_update')
@mock.patch.object(self.compute, '_get_resource_tracker')
@mock.patch.object(self.compute.driver, 'destroy')
@mock.patch.object(self.compute.network_api, 'setup_networks_on_host')
@mock.patch.object(self.compute.network_api, 'migrate_instance_start')
@mock.patch.object(compute_utils, 'notify_usage_exists')
@mock.patch.object(self.migration, 'save')
@mock.patch.object(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
def do_test(get_by_instance_uuid,
migration_save,
notify_usage_exists,
migrate_instance_start,
setup_networks_on_host,
destroy,
_get_resource_tracker,
_instance_update,
finish_revert_resize,
_is_instance_storage_shared,
get_instance_nw_info,
revert_migration_context):
self.migration.source_compute = self.instance['host']
# Inform compute that instance uses non-shared or shared storage
_is_instance_storage_shared.return_value = is_shared
self.compute.revert_resize(context=self.context,
migration=self.migration,
instance=self.instance,
reservations=None)
_is_instance_storage_shared.assert_called_once_with(
self.context, self.instance,
host=self.migration.source_compute)
# If instance storage is shared, driver destroy method
# should not destroy disks otherwise it should destroy disks.
destroy.assert_called_once_with(self.context, self.instance,
mock.ANY, mock.ANY, not is_shared)
do_test()
def test_revert_resize_instance_destroy_disks_shared_storage(self):
self._test_revert_resize_instance_destroy_disks(is_shared=True)
def test_revert_resize_instance_destroy_disks_non_shared_storage(self):
self._test_revert_resize_instance_destroy_disks(is_shared=False)
def test_consoles_enabled(self):
self.flags(enabled=False, group='vnc')
self.flags(enabled=False, group='spice')
self.flags(enabled=False, group='rdp')
self.flags(enabled=False, group='serial_console')
self.assertFalse(self.compute._consoles_enabled())
self.flags(enabled=True, group='vnc')
self.assertTrue(self.compute._consoles_enabled())
self.flags(enabled=False, group='vnc')
for console in ['spice', 'rdp', 'serial_console']:
self.flags(enabled=True, group=console)
self.assertTrue(self.compute._consoles_enabled())
self.flags(enabled=False, group=console)
@mock.patch('nova.utils.spawn_n')
@mock.patch('nova.compute.manager.ComputeManager.'
'_do_live_migration')
def _test_max_concurrent_live(self, mock_lm, mock_spawn):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
@mock.patch('nova.objects.Migration.save')
def _do_it(mock_mig_save):
instance = objects.Instance(uuid=str(uuid.uuid4()))
migration = objects.Migration()
self.compute.live_migration(self.context,
mock.sentinel.dest,
instance,
mock.sentinel.block_migration,
migration,
mock.sentinel.migrate_data)
self.assertEqual('queued', migration.status)
migration.save.assert_called_once_with()
with mock.patch.object(self.compute,
'_live_migration_semaphore') as mock_sem:
for i in (1, 2, 3):
_do_it()
self.assertEqual(3, mock_sem.__enter__.call_count)
def test_max_concurrent_live_limited(self):
self.flags(max_concurrent_live_migrations=2)
self._test_max_concurrent_live()
def test_max_concurrent_live_unlimited(self):
self.flags(max_concurrent_live_migrations=0)
self._test_max_concurrent_live()
def test_max_concurrent_live_semaphore_limited(self):
self.flags(max_concurrent_live_migrations=123)
self.assertEqual(
123,
manager.ComputeManager()._live_migration_semaphore.balance)
def test_max_concurrent_live_semaphore_unlimited(self):
self.flags(max_concurrent_live_migrations=0)
compute = manager.ComputeManager()
self.assertEqual(0, compute._live_migration_semaphore.balance)
self.assertIsInstance(compute._live_migration_semaphore,
compute_utils.UnlimitedSemaphore)
def test_max_concurrent_live_semaphore_negative(self):
self.flags(max_concurrent_live_migrations=-2)
compute = manager.ComputeManager()
self.assertEqual(0, compute._live_migration_semaphore.balance)
self.assertIsInstance(compute._live_migration_semaphore,
compute_utils.UnlimitedSemaphore)
| apache-2.0 | 3,144,236,380,760,686,000 | 47.303053 | 79 | 0.589229 | false | 4.173017 | true | false | false |
rcocetta/kano-profile | kano_profile/apps.py | 1 | 3777 | #!/usr/bin/env python
# apps.py
#
# Copyright (C) 2014, 2015 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU General Public License v2
#
import os
from kano.utils import read_json, write_json, get_date_now, ensure_dir, \
chown_path, run_print_output_error
from kano.logging import logger
from .paths import apps_dir, xp_file, kanoprofile_dir, app_profiles_file
def get_app_dir(app_name):
app_dir = os.path.join(apps_dir, app_name)
return app_dir
def get_app_data_dir(app_name):
data_str = 'data'
app_data_dir = os.path.join(get_app_dir(app_name), data_str)
return app_data_dir
def get_app_state_file(app_name):
app_state_str = 'state.json'
app_state_file = os.path.join(get_app_dir(app_name), app_state_str)
return app_state_file
def load_app_state(app_name):
app_state_file = get_app_state_file(app_name)
app_state = read_json(app_state_file)
if not app_state:
app_state = dict()
return app_state
def load_app_state_variable(app_name, variable):
data = load_app_state(app_name)
if variable in data:
return data[variable]
def save_app_state(app_name, data):
""" Save a state of an application to the user's Kano profile.
:param app_name: The application that this data are associated with.
:type app_name: str
:param data: The data to be stored.
:type data: dict
"""
logger.debug('save_app_state {}'.format(app_name))
app_state_file = get_app_state_file(app_name)
data['save_date'] = get_date_now()
ensure_dir(get_app_dir(app_name))
write_json(app_state_file, data)
if 'SUDO_USER' in os.environ:
chown_path(kanoprofile_dir)
chown_path(apps_dir)
chown_path(get_app_dir(app_name))
chown_path(app_state_file)
def save_app_state_variable(app_name, variable, value):
""" Save a state variable to the user's Kano profile.
:param app_name: The application that this variable is associated with.
:type app_name: str
:param variable: The name of the variable.
:type variable: str
:param value: The variable data to be stored.
:type value: any
"""
msg = 'save_app_state_variable {} {} {}'.format(app_name, variable, value)
logger.debug(msg)
data = load_app_state(app_name)
data[variable] = value
save_app_state(app_name, data)
def increment_app_state_variable(app_name, variable, value):
logger.debug(
'increment_app_state_variable {} {} {}'.format(
app_name, variable, value))
data = load_app_state(app_name)
if variable not in data:
data[variable] = 0
data[variable] += value
save_app_state(app_name, data)
def get_app_list():
if not os.path.exists(apps_dir):
return []
else:
return [p for p in os.listdir(apps_dir)
if os.path.isdir(os.path.join(apps_dir, p))]
def get_gamestate_variables(app_name):
allrules = read_json(xp_file)
if not allrules:
return list()
groups = allrules[app_name]
for group, rules in groups.iteritems():
if group == 'multipliers':
return [str(key) for key in rules.keys()]
def launch_project(app, filename, data_dir):
logger.info('launch_project: {} {} {}'.format(app, filename, data_dir))
app_profiles = read_json(app_profiles_file)
fullpath = os.path.join(data_dir, filename)
cmd = app_profiles[app]['cmd'].format(fullpath=fullpath, filename=filename)
_, _, rc = run_print_output_error(cmd)
return rc
def get_app_xp_for_challenge(app, challenge_no):
xp_file_json = read_json(xp_file)
try:
return xp_file_json[app]['level'][challenge_no]
except KeyError:
return 0
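# Minimal usage sketch (assumes an app registered under the name 'make-art'):
#   save_app_state_variable('make-art', 'level', 3)
#   load_app_state_variable('make-art', 'level')          # -> 3
#   increment_app_state_variable('make-art', 'level', 1)  # saved as 4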
| gpl-2.0 | -652,654,093,269,193,900 | 25.787234 | 80 | 0.639396 | false | 3.200847 | false | false | false |
s34rching/python_classes | tests/test_contact_data.py | 1 | 2053 | import re
import random
def test_contact_data_from_home_page(app):
r_index = random.randrange(len(app.contact.get_contact_list()))
data_from_home_page = app.contact.get_contact_list()[r_index]
data_from_edit_page = app.contact.get_contact_info_from_edit_page(r_index)
assert data_from_home_page.firstname == data_from_edit_page.firstname
assert data_from_home_page.lastname == data_from_edit_page.lastname
assert data_from_home_page.address == data_from_edit_page.address
assert data_from_home_page.all_phones_from_homepage == merge_phones_like_on_homepage(data_from_edit_page)
assert data_from_home_page.all_emails_from_homepage == merge_emails_like_on_homepage(data_from_edit_page)
assert data_from_home_page.id == data_from_edit_page.id
def test_phones_from_view_page(app):
r_index = random.randrange(len(app.contact.get_contact_list()))
data_from_view_page = app.contact.get_contact_info_from_view_page(r_index)
data_from_edit_page = app.contact.get_contact_info_from_edit_page(r_index)
assert data_from_view_page.home_number == data_from_edit_page.home_number
assert data_from_view_page.mobile_number == data_from_edit_page.mobile_number
assert data_from_view_page.work_number == data_from_edit_page.work_number
assert data_from_view_page.secondary_number == data_from_edit_page.secondary_number
def clear(s):
return re.sub('[() -]', '', s)
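# The contact home page shows all phone numbers (and all emails) in one cell,
# one per line, with '(', ')', '-' and spaces stripped; the two helpers below
# rebuild that representation from the edit-page fields so the two views can
# be compared directly.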
def merge_phones_like_on_homepage(contact):
return '\n'.join(filter(lambda x: x != '',
map(lambda x: clear(x),
filter(lambda x: x is not None,
[contact.home_number, contact.work_number, contact.mobile_number, contact.secondary_number]))))
def merge_emails_like_on_homepage(contact):
return '\n'.join(filter(lambda x: x!= '',
map(lambda x: clear(x),
filter(lambda x: x is not None,
[contact.email, contact.email2, contact.email3])))) | apache-2.0 | 5,947,474,044,754,886,000 | 50.35 | 131 | 0.656113 | false | 3.343648 | false | false | false |
phbradley/tcr-dist | tcrdist/parse_cdr3.py | 1 | 4891 | import logging
logger = logging.getLogger('parse_cdr3.py')
from .all_genes import all_genes, gap_character
def get_cdr3_and_j_match_counts( organism, ab, qseq, j_gene, min_min_j_matchlen = 3,
extended_cdr3 = False ):
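## Strategy: take a short tag from the germline J protein and look for it
## inside qseq, advancing the tag's start (ntrim) along the J sequence and,
## failing that, shrinking the tag length from min_min_j_matchlen+3 down to
## min_min_j_matchlen. A unique match anchors the J segment within qseq, from
## which the CDR3 end (looplen) and the J match/mismatch counts follow.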
#fasta = all_fasta[organism]
jg = all_genes[organism][j_gene]
errors = []
## qseq starts at CA...
assert qseq[0] == 'C'
num_genome_j_positions_in_loop = len(jg.cdrs[0].replace(gap_character,''))-2
#num_genome_j_positions_in_loop = all_num_genome_j_positions_in_loop[organism][ab][j_gene]
if extended_cdr3: num_genome_j_positions_in_loop += 2 ## up to but not including GXG
## history: was only for alpha
aseq = qseq[:] ## starts at the C position
ja_gene = j_gene
#assert ja_gene in fasta
ja_seq = jg.protseq #fasta[ ja_gene ]
min_j_matchlen = min_min_j_matchlen+3
while min_j_matchlen >= min_min_j_matchlen:
ntrim =0
while ntrim+min_j_matchlen<len(ja_seq) and ja_seq[ntrim:ntrim+min_j_matchlen] not in aseq:
ntrim += 1
jatag = ja_seq[ntrim:ntrim+min_j_matchlen]
if jatag in aseq:
break
else:
min_j_matchlen -= 1
#print 'min_j_matchlen:',min_j_matchlen,'jatag:',jatag,'ntrim:',ntrim,'ja_seq:',ja_seq,'qseq',qseq
if jatag not in aseq:
logger.error('whoah %s %s %s',ab,aseq,ja_seq )
errors.append( 'j{}tag_not_in_aseq'.format(ab) )
return '-',[100,0],errors
elif ja_seq.count( jatag ) != 1:
logger.error( 'whoah2 %s %s %s',ab,aseq,ja_seq )
errors.append( 'multiple_j{}tag_in_jseq'.format(ab) )
return '-',[100,0],errors
else:
pos = aseq.find( jatag )
looplen = pos - ntrim + num_genome_j_positions_in_loop
if not extended_cdr3:
aseq = aseq[3:]
looplen -= 3 ## dont count CAX
if len(aseq)<looplen:
logger.error('short %s %s %s',ab,aseq,ja_seq )
errors.append( ab+'seq_too_short' )
return '-',[100,0],errors
cdrseq = aseq[:looplen ]
## now count mismatches in the J gene, beyond the cdrseq
j_seq = jg.protseq #fasta[ j_gene ] ## not sure why we do this again (old legacy code)
if qseq.count( cdrseq ) > 1:
logger.error('multiple cdrseq occurrences %s %s'%(qseq,cdrseq))
errors.append('multiple_cdrseq_occ')
return '-',[100,0],errors
assert qseq.count(cdrseq) == 1
start_counting_qseq = qseq.find(cdrseq)+len(cdrseq)
start_counting_jseq = num_genome_j_positions_in_loop
j_match_counts = [0,0]
#assert extended_cdr3 ## otherwise I think this count is not right?
#print 'here',start_counting_qseq,start_counting_jseq,len(qseq)
for qpos in range( start_counting_qseq, len(qseq)):
jpos = start_counting_jseq + (qpos-start_counting_qseq)
#print 'here',qpos,jpos
if jpos>= len(j_seq): break
if qseq[qpos] == j_seq[jpos]:
j_match_counts[1] += 1
else:
j_match_counts[0] += 1
return cdrseq, j_match_counts,errors
def parse_cdr3( organism, ab, qseq, v_gene, j_gene, q2v_align, extended_cdr3 = False ):
## v_align is a mapping from 0-indexed qseq positions to 0-indexed v_gene protseq positions
#fasta = all_fasta[ organism ]
#align_fasta = all_align_fasta[ organism ]
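## Flow: find the conserved cysteine column in the aligned V gene, map it
## through the blast alignment onto qseq (cpos_match) while counting V
## matches/mismatches up to that point, then hand qseq[cpos_match:] to
## get_cdr3_and_j_match_counts to delimit the CDR3 and score the J gene.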
vg = all_genes[organism][v_gene]
errors = []
## what is the C position in this v gene?
v_seq = vg.protseq #fasta[ v_gene ]
v_alseq = vg.alseq #align_fasta[ v_gene ]
assert v_seq == v_alseq.replace(gap_character,'')
alseq_cpos = vg.cdr_columns[-1][0] - 1 ## now 0-indexed
#alseq_cpos = alseq_C_pos[organism][ab] - 1 ## now 0-indexed
numgaps = v_alseq[:alseq_cpos].count(gap_character)
cpos = alseq_cpos - numgaps ## 0-indexed
cpos_match = -1
v_match_counts = [0,0]
qseq_len = len(qseq)
for (qpos,vpos) in sorted( q2v_align.iteritems() ):
#print 'q2v-align:',qpos, vpos, cpos
if qpos == len(qseq):
continue ## from a partial codon at the end
if vpos == cpos:
cpos_match = qpos
elif vpos <= cpos:
## only count v mismatches here
if qseq[qpos] == v_seq[vpos]:
v_match_counts[1] += 1
else:
v_match_counts[0] += 1
if cpos_match<0 or qseq[ cpos_match ] != 'C':
## problemo
logger.error('failed to find blast match to C position')
errors.append('no_V{}_Cpos_blastmatch'.format(ab))
return '-',[100,0],[100,0],errors
cdrseq, j_match_counts, other_errors = get_cdr3_and_j_match_counts( organism, ab, qseq[ cpos_match: ], j_gene,
extended_cdr3 = extended_cdr3 )
return cdrseq, v_match_counts, j_match_counts, errors+other_errors
| mit | -8,223,407,647,342,972,000 | 34.963235 | 114 | 0.584339 | false | 2.921744 | false | false | false |
DevangS/CoralNet | accounts/migrations/0004_create_user_called_alleviate.py | 1 | 5492 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.conf import settings
class Migration(DataMigration):
# Create a dummy user called "Alleviate".
# This is the user under which Alleviate-accepted annotations
# will be added.
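# The row is created with a fixed primary key (settings.ALLEVIATE_USER_ID),
# presumably so other code can reference the Alleviate user by a known id.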
def forwards(self, orm):
username = "Alleviate"
print "-----"
try:
orm['auth.User'].objects.get(username=username)
except orm['auth.User'].DoesNotExist:
alleviateUser = orm['auth.User'](id=settings.ALLEVIATE_USER_ID,
username=username,
first_name="",
last_name="",
email="",
password="",
)
alleviateUser.save()
print "Created user with username %s." % username
else:
print "User with username %s already exists; nothing needs to be done." % username
print "-----"
def backwards(self, orm):
username = "Alleviate"
print (
"-----\n"
"NOTE: This migration rollback does nothing. "
"Deleting the %s user would delete all Alleviate annotations, "
"which would be very bad to do accidentally."
"\n-----" % username
)
models = {
'accounts.profile': {
'Meta': {'object_name': 'Profile'},
'about_me': ('django.db.models.fields.CharField', [], {'max_length': '45', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '5'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '45', 'blank': 'True'}),
'mugshot': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'privacy': ('django.db.models.fields.CharField', [], {'default': "'registered'", 'max_length': '15'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'my_profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['accounts']
symmetrical = True
| bsd-2-clause | -8,000,994,628,860,161,000 | 53.376238 | 182 | 0.545157 | false | 3.856742 | false | false | false |
tuanvu216/udacity-course | intro_to_machine_learning/lesson/lesson_14_evaluation_metrics/evaluate_poi_identifier.py | 1 | 2588 | #!/usr/bin/python
"""
starter code for the evaluation mini-project
start by copying your trained/tested POI identifier from
that you built in the validation mini-project
the second step toward building your POI identifier!
start by loading/formatting the data
"""
import pickle
import sys
sys.path.append("C:/Vindico/Projects/Code/Python/Python/Course/Udacity/Intro to Machine Learning/ud120-projects-master/tools/")
from feature_format import featureFormat, targetFeatureSplit
from sklearn.tree import DecisionTreeClassifier
from sklearn import cross_validation
import numpy as np
data_dict = pickle.load(open("C:/Vindico/Projects/Code/Python/Python/Course/Udacity/Intro to Machine Learning/ud120-projects-master/final_project/final_project_dataset.pkl", "r") )
### add more features to features_list!
features_list = ["poi", "salary"]
data = featureFormat(data_dict, features_list)
labels, features = targetFeatureSplit(data)
### your code goes here
features_train,features_test,labels_train,labels_test = cross_validation.train_test_split(features,labels,test_size=0.3,
random_state=42)
clf = DecisionTreeClassifier()
clf.fit(features_train,labels_train)
clf.score(features_test,labels_test)
# How many POIs are in the test set for your POI identifier?
pred = clf.predict(features_test)
sum(pred)
print len([e for e in labels_test if e == 1.0])
# How many people total are in your test set?
len(pred)
# If your identifier predicted 0. (not POI) for everyone in the test set, what would its accuracy be?
1.0 - 5.0/29
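# = 24/29, roughly 0.828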
# Precision and recall can help illuminate your performance better.
# Use the precision_score and recall_score available in sklearn.metrics to compute those quantities.
# What’s the precision?
from sklearn.metrics import *
precision_score(labels_test, pred)
# What’s the recall?
recall_score(labels_test, pred)
# Here are some made-up predictions and true labels for a hypothetical test set;
# fill in the following boxes to practice identifying true positives, false positives, true negatives, and false negatives.
# Let’s use the convention that “1” signifies a positive result, and “0” a negative.
predictions = [0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1]
true_labels = [0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0]
# What's the precision of this classifier?
precision_score(true_labels, predictions)
# What's the recall of this classifier?
recall_score(true_labels, predictions)
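# Worked out by hand from the two lists above: TP = 6 (indices 6, 8, 11, 14,
# 15, 17), FP = 3 (indices 1, 2, 19), FN = 2 (indices 9, 13), TN = 9, so
# precision = 6/9 ~ 0.667 and recall = 6/8 = 0.75.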
| mit | -3,788,225,604,616,323,000 | 34.260274 | 180 | 0.721445 | false | 3.413793 | true | false | false |
mitmedialab/MediaCloud-Web-Tools | server/util/request.py | 1 | 3666 | import logging
import os
from functools import wraps
from flask import jsonify, request
from mediacloud.error import MCException
logger = logging.getLogger(__name__)
def validate_params_exist(form, params):
for param in params:
if param not in form:
raise ValueError('Missing required value for '+param)
def json_error_response(message, status_code=400):
response = jsonify({
'statusCode': status_code,
'message': message,
})
response.status_code = status_code
return response
def filters_from_args(request_args):
"""
Helper to centralize reading filters from url params
"""
timespans_id = safely_read_arg('timespanId')
snapshots_id = safely_read_arg('snapshotId')
foci_id = safely_read_arg('focusId')
q = request_args['q'] if ('q' in request_args) and (request_args['q'] != 'undefined') else None
return snapshots_id, timespans_id, foci_id, q
def arguments_required(*expected_args):
"""
Handy decorator for ensuring that request params exist
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
logger.debug(request.args)
validate_params_exist(request.args, expected_args)
return func(*args, **kwargs)
except ValueError as e:
logger.exception("Missing a required arg")
return json_error_response(e.args[0])
return wrapper
return decorator
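# Usage sketch (hypothetical route):
#   @app.route('/api/media/search')
#   @arguments_required('searchStr')
#   def search(): ...
# A request missing the 'searchStr' query param then gets a 400 JSON error.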
def form_fields_required(*expected_form_fields):
"""
Handy decorator for ensuring that the form has the fields you need
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
logger.debug(request.form)
validate_params_exist(request.form, expected_form_fields)
return func(*args, **kwargs)
except ValueError as e:
logger.exception("Missing a required form field")
return json_error_response(e.args[0])
return wrapper
return decorator
def api_error_handler(func):
"""
Handy decorator that catches any exception from the Media Cloud API and
sends it back to the browser as a nicely formatted JSON error. The idea is
that the client code can catch these at a low level and display error messages.
"""
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except MCException as e:
logger.exception(e)
return json_error_response(e.message, e.status_code)
return wrapper
def is_csv(filename):
filename, file_extension = os.path.splitext(filename)
return file_extension.lower() in ['.csv']
def csv_required(func):
"""
Validates a file is supplied in the request and that it has a csv extension.
"""
@wraps(func)
def wrapper(*args, **kwargs):
try:
if 'file' not in request.files:
return json_error_response('No file part')
uploaded_file = request.files['file']
if uploaded_file.filename == '':
return json_error_response('No selected file')
if not (uploaded_file and is_csv(uploaded_file.filename)):
return json_error_response('Invalid file')
return func(*args, **kwargs)
except MCException as e:
logger.exception(e)
return json_error_response(e.message, e.status_code)
return wrapper
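# Hypothetical sketch showing how csv_required stacks with api_error_handler
# on an upload endpoint (route name is illustrative; the CSV check runs first):
#
# @app.route('/api/sources/upload', methods=['POST'])
# @csv_required
# @api_error_handler
# def upload_sources():
#     uploaded_file = request.files['file']
#     ...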
def safely_read_arg(arg_name, default=None):
return request.args[arg_name] if arg_name in request.args else default
| apache-2.0 | -5,131,921,134,876,014,000 | 30.603448 | 99 | 0.621386 | false | 4.170648 | false | false | false |
iwschris/ezodf2 | tests/test_pages.py | 1 | 3894 | #!/usr/bin/env python
#coding:utf-8
# Purpose: test drawing pages container
# Created: 29.01.2011
# Copyright (C) 2011, Manfred Moitzi
# License: MIT
from __future__ import unicode_literals, print_function, division
__author__ = "mozman <[email protected]>"
# Standard Library
import unittest
# trusted or separately tested modules
from ezodf2.xmlns import CN
from lxml.etree import Element
from ezodf2.drawingpage import DrawingPage as Page
# objects to test
from ezodf2.pages import Pages
class TestPagesManagement(unittest.TestCase):
def setUp(self):
self.pages = Pages(Element(CN('office:drawing')))
def test_empty_body(self):
self.assertEqual(len(self.pages), 0)
def test_has_one_table(self):
self.pages.append(Page(name='Page1'))
self.assertEqual(len(self.pages), 1)
def test_get_page_by_name(self):
self.pages.append(Page(name='Page1'))
page = self.pages['Page1']
self.assertEqual(page.name, 'Page1')
def test_page_not_found_error(self):
with self.assertRaises(KeyError):
self.pages['Morgenstern']
def test_get_page_by_index(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
self.pages += Page(name='Page3')
page = self.pages[2]
self.assertEqual(page.name, 'Page3')
def test_get_last_page_by_index(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
self.pages += Page(name='Page3')
page = self.pages[-1]
self.assertEqual(page.name, 'Page3')
def test_page_index_0_error(self):
with self.assertRaises(IndexError):
self.pages[0]
def test_page_index_1_error(self):
self.pages += Page(name='Page1')
with self.assertRaises(IndexError):
self.pages[1]
def test_set_page_by_index(self):
self.pages += Page(name='Page1')
self.pages[0] = Page(name='Page2')
self.assertEqual(len(self.pages), 1)
self.assertEqual(self.pages[0].name, 'Page2')
def test_set_page_by_name(self):
self.pages += Page(name='Page1')
self.pages['Page1'] = Page(name='Page2')
self.assertEqual(len(self.pages), 1)
self.assertEqual(self.pages[0].name, 'Page2')
def test_remove_page_by_index(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
del self.pages[0]
self.assertEqual(len(self.pages), 1)
self.assertEqual(self.pages[0].name, 'Page2')
    def test_remove_page_by_name(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
del self.pages['Page1']
self.assertEqual(len(self.pages), 1)
self.assertEqual(self.pages[0].name, 'Page2')
def test_is_same_object(self):
self.pages += Page(name='Page1')
object1 = self.pages['Page1']
object2 = self.pages['Page1']
self.assertTrue(object1 is object2)
def test_page_names(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
self.pages += Page(name='Page3')
self.assertEqual(list(self.pages.names()), ['Page1', 'Page2', 'Page3'])
def test_page_index(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
self.pages += Page(name='Page3')
self.assertEqual(self.pages.index(self.pages['Page3']), 2)
def test_page_insert(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
self.pages.insert(1, Page(name='Page3'))
self.assertEqual(self.pages[1].name, 'Page3')
self.assertEqual(len(self.pages), 3)
if __name__=='__main__':
unittest.main()
| mit | -2,706,917,590,960,068,000 | 28.186047 | 79 | 0.592193 | false | 3.403846 | true | false | false |
cbrucks/Federated_Python-Swiftclient | swiftclient/contrib/federated/protocols/rax.py | 1 | 2519 | import urllib
import urllib2
import json
import getpass
import BaseHTTPServer
import os
import webbrowser
from swiftclient.contrib.federated import federated_exceptions, federated_utils
import ssl
## Sends the authentication request to the IdP along
# @param idpEndpoint The IdP address
# @param idpRequest The authentication request returned by Keystone
def getIdPResponse(idpEndpoint, idpRequest, realm=None):
    # Guard against the default None so the realm["name"] lookups below are safe
    if realm is None:
        realm = {"name": idpEndpoint}
    print "\nInitiating Authentication against: "+realm["name"]+"\n"
# Get the unscoped token
# 1. Get the user name
chosen = False
user = None
while not chosen:
try:
user = raw_input("Please enter your username: ")
chosen = True
except:
print "Invalid input, please try again"
# 2. Get the password
chosen = False
password = None
while not chosen:
try:
password = getpass.getpass()
chosen = True
except:
print "Invalid input, please try again"
# Insert creds
req = json.loads(idpRequest)
req['auth']['passwordCredentials']['username'] = user
req['auth']['passwordCredentials']['password'] = password
# Contact Keystone V2
unscoped = json.loads(request(idpEndpoint+'/tokens', method='POST', data=req).read())
print "Successfully Logged In\n"
# Get the list of tenants
tenants = json.loads(request(idpEndpoint+'/tenants', method='GET', header={'X-Auth-Token':unscoped['access']['token']['id']}).read())
# Offer the user the choice of tenants
tenant = federated_utils.selectTenantOrDomain(tenants['tenants'],serverName=realm["name"])
# Get the scoped token
newReq = {"auth":{"tenantName": tenant["name"], "token":{"id":unscoped["access"]["token"]["id"]}}}
scoped = json.loads(request(idpEndpoint+'/tokens', method='POST', data=newReq).read())
print "\nSuccessfully Authorised to access: "+tenant["name"]+"\n"
# Return scoped token
return scoped
## Send a request that will be process by the V2 Keystone
def request(keystoneEndpoint, data={}, method="GET", header={}):
headers = header
if method == "GET":
data = urllib.urlencode(data)
req = urllib2.Request(keystoneEndpoint + data, headers = header)
response = urllib2.urlopen(req)
elif method == "POST":
data = json.dumps(data)
headers['Content-Type'] = 'application/json'
req = urllib2.Request(keystoneEndpoint, data, header)
response = urllib2.urlopen(req)
return response
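# Illustrative call of the helper above (the endpoint and token are made up,
# mirroring how getIdPResponse fetches the tenant list):
#
# resp = request('https://identity.example.com/v2.0/tenants', method='GET',
#                header={'X-Auth-Token': 'abc123'})
# tenants = json.loads(resp.read())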
| apache-2.0 | 9,062,573,529,326,963,000 | 37.166667 | 137 | 0.663358 | false | 4.089286 | false | false | false |
NaturalHistoryMuseum/inselect | inselect/gui/views/boxes/box_item.py | 1 | 8872 | import sys
from itertools import chain
from PyQt5.QtCore import Qt, QRect, QRectF
from PyQt5.QtGui import QPen
from PyQt5.QtWidgets import QGraphicsItem, QGraphicsRectItem
from inselect.lib.utils import debug_print
from inselect.gui.colours import colour_scheme_choice
from inselect.gui.utils import painter_state
from .resize_handle import ResizeHandle
from .reticle import Reticle
class BoxItem(QGraphicsRectItem):
# Might be some relevant stuff here:
# http://stackoverflow.com/questions/10590881/events-and-signals-in-qts-qgraphicsitem-how-is-this-supposed-to-work
# The width of the line (in pixels) drawn around the box.
# A width of 1 on Mac OS X is too thin. 2 is too thick on Windows.
BOX_WIDTH = 2 if 'darwin' == sys.platform else 1
def __init__(self, x, y, w, h, isvalid, parent=None):
super(BoxItem, self).__init__(x, y, w, h, parent)
self.setFlags(QGraphicsItem.ItemIsFocusable |
QGraphicsItem.ItemIsSelectable |
QGraphicsItem.ItemSendsGeometryChanges |
QGraphicsItem.ItemIsMovable)
self.setCursor(Qt.OpenHandCursor)
self.setAcceptHoverEvents(True)
# True if the box has valid metadata
self._isvalid = isvalid
# Points of interest as represented by instances of Reticle
self._pois = []
# Resize handles
positions = (Qt.TopLeftCorner, Qt.TopRightCorner, Qt.BottomLeftCorner,
Qt.BottomRightCorner)
        self._handles = [self._create_handle(pos) for pos in positions]
self._layout_children()
self._set_z_index()
def paint(self, painter, option, widget=None):
"""QGraphicsRectItem virtual
"""
# TODO LH Is there a way to clip to overlapping
# QAbstractGraphicsItems with a larger zorder
# TODO LH Get pixmap without tight coupling to scene
if not self.has_mouse():
painter.drawPixmap(self.boundingRect(),
self.scene().pixmap,
self.sceneBoundingRect())
with painter_state(painter):
outline_colour, fill_colour = self.colours
# Cosmetic pens "...draw strokes that have a constant width
# regardless of any transformations applied to the QPainter they are
# used with."
pen = QPen(outline_colour, self.BOX_WIDTH, Qt.SolidLine)
pen.setCosmetic(True)
painter.setPen(pen)
r = self.boundingRect()
painter.drawRect(r)
if fill_colour:
painter.fillRect(r, fill_colour)
def has_mouse(self):
"""True if self or self._handles has grabbed the mouse
"""
return self.scene().mouseGrabberItem() in chain([self], self._handles)
@property
def colours(self):
"""Tuple of two QColors to use for the box's border and fill
respectively. Fill might be None.
"""
colours = colour_scheme_choice().current['Colours']
has_mouse = self.has_mouse()
if has_mouse:
outline = colours['Resizing']
elif self.isSelected():
outline = colours['Selected']
elif self._isvalid:
outline = colours['Valid']
else:
outline = colours['Invalid']
if not self._isvalid and not has_mouse:
fill = colours['InvalidFill']
else:
fill = None
return outline, fill
def update(self, rect=QRectF()):
"""QGraphicsRectItem function
"""
# TODO LH QGraphicsRectItem::update is not a virtual function - is it
# OK to implement this function and call the base class's
# implementation?
super(BoxItem, self).update(rect)
for item in self._handles:
item.update()
def hoverEnterEvent(self, event):
"""QGraphicsRectItem virtual
"""
debug_print('BoxItem.hoverEnterEvent')
super(BoxItem, self).hoverEnterEvent(event)
self._set_handles_visible(True)
self._set_z_index()
self.update()
def hoverLeaveEvent(self, event):
"""QGraphicsRectItem virtual
"""
debug_print('BoxItem.hoverLeaveEvent')
super(BoxItem, self).hoverLeaveEvent(event)
self._set_handles_visible(False)
self._set_z_index()
self.update()
def _set_handles_visible(self, visible):
for handle in self._handles:
handle.setVisible(visible)
def _create_handle(self, corner):
# Creates and returns a new ResizeHandle at the given Qt.Corner
handle = ResizeHandle(corner, self)
handle.setVisible(False)
handle.setFlags(QGraphicsItem.ItemStacksBehindParent |
QGraphicsItem.ItemIgnoresTransformations)
return handle
def _layout_children(self):
"""Moves child graphics items to the appropriate positions
"""
bounding = self.boundingRect()
for child in chain(self._handles, self._pois):
child.layout(bounding)
def setRect(self, rect):
"""QGraphicsRectItem function
"""
debug_print('BoxItem.setRect')
super(BoxItem, self).setRect(rect)
self._set_z_index()
self._layout_children()
def mousePressEvent(self, event):
"""QGraphicsRectItem virtual
"""
debug_print('BoxItem.mousePressEvent')
super(BoxItem, self).mousePressEvent(event)
self._set_z_index()
if Qt.ShiftModifier == event.modifiers():
# Add a point of interest
self.append_point_of_interest(event.pos())
else:
# Starting a move
self.setCursor(Qt.ClosedHandCursor)
self.update()
def mouseReleaseEvent(self, event):
"""QGraphicsRectItem virtual
"""
debug_print('BoxItem.mouseReleaseEvent')
super(BoxItem, self).mouseReleaseEvent(event)
self.setCursor(Qt.OpenHandCursor)
self._set_z_index()
self.update()
def itemChange(self, change, value):
"""QGraphicsItem virtual
"""
if change == self.ItemSelectedHasChanged:
# Clear points of interest
scene = self.scene()
while self._pois:
scene.removeItem(self._pois.pop())
# Item has gained or lost selection
self._set_z_index()
return super(BoxItem, self).itemChange(change, value)
def set_rect(self, new_rect):
"""Sets a new QRect in integer coordinates
"""
# Cumbersome conversion to ints
current = self.sceneBoundingRect()
current = QRect(current.left(), current.top(),
current.width(), current.height())
if current != new_rect:
msg = 'Update rect for [{0}] from [{1}] to [{2}]'
debug_print(msg.format(self, current, new_rect))
self.prepareGeometryChange()
            # setRect() expects a floating point rect
self.setRect(QRectF(new_rect))
def set_isvalid(self, isvalid):
"""Sets a new 'is valid'
"""
if isvalid != self._isvalid:
self._isvalid = isvalid
self.update()
def _set_z_index(self):
"""Updates the Z-index of the box
This sorts the boxes such that the bigger the area of a box, the lower
        its Z-index is; and boxes that are selected and have mouse or keyboard
focus are always above other boxes.
"""
rect = self.rect()
# Smaller items have a higher z
z = 1.0
if rect.width() and rect.height():
            z += 1.0 / float(rect.width() * rect.height())
if self.isSelected():
z += 1.0
else:
# Newly created items have zero width and height
pass
self.setZValue(z)
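    # Worked example of the formula above (illustrative sizes): a selected
    # 100x50 box gets z = 1 + 1/5000 + 1 = 2.0002, while an unselected
    # 200x100 box gets z = 1 + 1/20000 = 1.00005, so the smaller, selected
    # box always paints on top.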
def adjust_rect(self, dx1, dy1, dx2, dy2):
"""Adjusts rect
"""
r = self.rect()
r.adjust(dx1, dy1, dx2, dy2)
if r.width() > 1.0 and r.height() > 1.0:
self.prepareGeometryChange()
self.setRect(r)
def append_point_of_interest(self, pos):
"""Appends pos (a QPoint relative to the top-left of this box) to the
list of points of interest
"""
debug_print('New point of interest at [{0}]'.format(pos))
self._pois.append(Reticle(pos - self.boundingRect().topLeft(), self))
self._pois[-1].layout(self.boundingRect())
self._pois[-1].setFlags(QGraphicsItem.ItemIgnoresTransformations)
@property
def points_of_interest(self):
"""An iterable of QPointFs in item coordinates
"""
return [poi.offset for poi in self._pois]
| bsd-3-clause | -3,026,487,980,262,549,000 | 32.73384 | 118 | 0.593102 | false | 4.060412 | false | false | false |
ragupta-git/ImcSdk | imcsdk/mometa/huu/HuuFirmwareCatalogComponent.py | 1 | 4134 | """This module contains the general information for HuuFirmwareCatalogComponent ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class HuuFirmwareCatalogComponentConsts:
pass
class HuuFirmwareCatalogComponent(ManagedObject):
"""This is HuuFirmwareCatalogComponent class."""
consts = HuuFirmwareCatalogComponentConsts()
naming_props = set([u'id'])
mo_meta = {
"classic": MoMeta("HuuFirmwareCatalogComponent", "huuFirmwareCatalogComponent", "id-[id]", VersionMeta.Version151f, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], [u'huuFirmwareCatalog'], [], ["Get"]),
"modular": MoMeta("HuuFirmwareCatalogComponent", "huuFirmwareCatalogComponent", "id-[id]", VersionMeta.Version2013e, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], [u'huuFirmwareCatalog'], [], ["Get"])
}
prop_meta = {
"classic": {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version151f, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"component_name": MoPropertyMeta("component_name", "componentName", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"description": MoPropertyMeta("description", "description", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []),
"id": MoPropertyMeta("id", "id", "uint", VersionMeta.Version151f, MoPropertyMeta.NAMING, None, None, None, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x8, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
},
"modular": {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version2013e, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"component_name": MoPropertyMeta("component_name", "componentName", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"description": MoPropertyMeta("description", "description", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []),
"id": MoPropertyMeta("id", "id", "uint", VersionMeta.Version2013e, MoPropertyMeta.NAMING, None, None, None, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x8, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
},
}
prop_map = {
"classic": {
"childAction": "child_action",
"componentName": "component_name",
"description": "description",
"dn": "dn",
"id": "id",
"rn": "rn",
"status": "status",
},
"modular": {
"childAction": "child_action",
"componentName": "component_name",
"description": "description",
"dn": "dn",
"id": "id",
"rn": "rn",
"status": "status",
},
}
def __init__(self, parent_mo_or_dn, id, **kwargs):
self._dirty_mask = 0
self.id = id
self.child_action = None
self.component_name = None
self.description = None
self.status = None
ManagedObject.__init__(self, "HuuFirmwareCatalogComponent", parent_mo_or_dn, **kwargs)
| apache-2.0 | 3,600,132,035,256,602,000 | 50.037037 | 217 | 0.604983 | false | 3.655172 | false | false | false |
rouault/Quantum-GIS | tests/src/python/test_qgsserver_wms_getfeatureinfo.py | 1 | 17012 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer GetFeatureInfo WMS.
From build dir, run: ctest -R PyQgsServerWMSGetFeatureInfo -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Alessandro Pasotti'
__date__ = '11/03/2018'
__copyright__ = 'Copyright 2018, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
# Needed on Qt 5 so that the serialization of XML is consistent among all executions
os.environ['QT_HASH_SEED'] = '1'
import re
import urllib.request
import urllib.parse
import urllib.error
from qgis.testing import unittest
from qgis.PyQt.QtCore import QSize
import osgeo.gdal # NOQA
from test_qgsserver_wms import TestQgsServerWMSTestBase
from qgis.core import QgsProject
class TestQgsServerWMSGetFeatureInfo(TestQgsServerWMSTestBase):
"""QGIS Server WMS Tests for GetFeatureInfo request"""
def testGetFeatureInfo(self):
# Test getfeatureinfo response xml
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fxml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-xml')
self.wms_request_compare('GetFeatureInfo',
'&layers=&styles=&' +
'info_format=text%2Fxml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-xml')
# Test getfeatureinfo response html
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fhtml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-html')
# Test getfeatureinfo response html with geometry
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fhtml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'with_geometry=true',
'wms_getfeatureinfo-text-html-geometry')
# Test getfeatureinfo response html with maptip
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fhtml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'with_maptip=true',
'wms_getfeatureinfo-text-html-maptip')
# Test getfeatureinfo response text
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'info_format=text/plain',
'wms_getfeatureinfo-text-plain')
# Test getfeatureinfo default info_format
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-plain')
# Test getfeatureinfo invalid info_format
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'info_format=InvalidFormat',
'wms_getfeatureinfo-invalid-format')
# Test feature info request with filter geometry
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A4326&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER_GEOM=POLYGON((8.2035381 44.901459,8.2035562 44.901459,8.2035562 44.901418,8.2035381 44.901418,8.2035381 44.901459))',
'wms_getfeatureinfo_geometry_filter')
# Test feature info request with filter geometry in non-layer CRS
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER_GEOM=POLYGON ((913213.6839952 5606021.5399693, 913215.6988780 5606021.5399693, 913215.6988780 5606015.09643322, 913213.6839952 5606015.0964332, 913213.6839952 5606021.5399693))',
'wms_getfeatureinfo_geometry_filter_3857')
# Test feature info request with invalid query_layer
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=InvalidLayer&' +
'FEATURE_COUNT=10&FILTER_GEOM=POLYGON((8.2035381 44.901459,8.2035562 44.901459,8.2035562 44.901418,8.2035381 44.901418,8.2035381 44.901459))',
'wms_getfeatureinfo_invalid_query_layers')
# Test feature info request with '+' instead of ' ' in layers and
# query_layers parameters
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer+%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fxml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer+%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-xml')
# layer1 is a clone of layer0 but with a scale visibility. Thus,
# GetFeatureInfo response contains only a feature for layer0 and layer1
# is ignored for the required bbox. Without the scale visibility option,
# the feature for layer1 would have been in the response too.
mypath = self.testdata_path + "test_project_scalevisibility.qgs"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer0,layer1&styles=&' +
'VERSION=1.1.0&' +
'info_format=text%2Fxml&' +
'width=500&height=500&srs=EPSG%3A4326' +
'&bbox=8.1976,44.8998,8.2100,44.9027&' +
'query_layers=layer0,layer1&X=235&Y=243',
'wms_getfeatureinfo_notvisible',
'test_project_scalevisibility.qgs')
# Test GetFeatureInfo resolves "value map" widget values
mypath = self.testdata_path + "test_project_values.qgs"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer0&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=926&height=787&srs=EPSG%3A4326' +
'&bbox=912217,5605059,914099,5606652' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&QUERY_LAYERS=layer0&I=487&J=308',
'wms_getfeatureinfo-values1-text-xml',
'test_project_values.qgs')
    # TODO fix regression in QGIS 3 where the widget values don't get resolved, then enable the test
@unittest.expectedFailure
def testGetFeatureInfoValueRelation(self):
"""Test GetFeatureInfo resolves "value relation" widget values"""
mypath = self.testdata_path + "test_project_values.qgs"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer1&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=926&height=787&srs=EPSG%3A4326' +
'&bbox=912217,5605059,914099,5606652' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=True' +
'&QUERY_LAYERS=layer1&I=487&J=308',
'wms_getfeatureinfo-values1-text-xml',
'test_project_values.qgs')
# TODO make GetFeatureInfo show the dictionary values and enable test
@unittest.expectedFailure
def testGetFeatureInfoValueRelationArray(self):
"""Test GetFeatureInfo on "value relation" widget with array field (multiple selections)"""
mypath = self.testdata_path + "test_project_values.qgs"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer3&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=926&height=787&srs=EPSG%3A4326' +
'&bbox=912217,5605059,914099,5606652' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=True' +
'&QUERY_LAYERS=layer3&I=487&J=308',
'wms_getfeatureinfo-values3-text-xml',
'test_project_values.qgs')
# TODO make GetFeatureInfo show what's in the display expression and enable test
@unittest.expectedFailure
def testGetFeatureInfoRelationReference(self):
"""Test GetFeatureInfo solves "relation reference" widget "display expression" values"""
mypath = self.testdata_path + "test_project_values.qgs"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer2&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=926&height=787&srs=EPSG%3A4326' +
'&bbox=912217,5605059,914099,5606652' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=True' +
'&QUERY_LAYERS=layer2&I=487&J=308',
'wms_getfeatureinfo-values2-text-xml',
'test_project_values.qgs')
def testGetFeatureInfoFilter(self):
# Test getfeatureinfo response xml
# Regression for #8656
# Mind the gap! (the space in the FILTER expression)
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' + urllib.parse.quote(':"NAME" = \'two\''),
'wms_getfeatureinfo_filter')
# Test a filter with NO condition results
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' + urllib.parse.quote(':"NAME" = \'two\' AND "utf8nameè" = \'no-results\''),
'wms_getfeatureinfo_filter_no_results')
# Test a filter with OR condition results
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' + urllib.parse.quote(':"NAME" = \'two\' OR "NAME" = \'three\''),
'wms_getfeatureinfo_filter_or')
# Test a filter with OR condition and UTF results
# Note that the layer name that contains utf-8 chars cannot be
# to upper case.
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' + urllib.parse.quote(':"NAME" = \'two\' OR "utf8nameè" = \'three èé↓\''),
'wms_getfeatureinfo_filter_or_utf8')
# Regression #18292 Server GetFeatureInfo FILTER search fails when WIDTH, HEIGHT are not specified
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' + urllib.parse.quote(':"NAME" = \'two\''),
'wms_getfeatureinfo_filter_no_width')
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 1,467,075,829,748,212,500 | 57.439863 | 236 | 0.499353 | false | 3.760725 | true | false | false |
oppia/oppia-ml | core/domain/remote_access_services.py | 1 | 4226 | # coding: utf-8
#
# Copyright 2017 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module provides interface to communicate with Oppia remotely."""
import base64
import hashlib
import hmac
import json
import requests
from core.domain.proto import training_job_response_payload_pb2
from core.platform import platform_services
import utils
import vmconf
metadata_services = platform_services.Registry.import_metadata_services()
def _get_url():
if vmconf.DEV_MODE:
return vmconf.DEFAULT_COMMUNICATION_URL
return vmconf.SERVER_COMMUNICATION_URL
def _get_port():
if vmconf.DEV_MODE:
return vmconf.DEFAULT_COMMUNICATION_PORT
return vmconf.SERVER_COMMUNICATION_PORT
def _get_vm_id():
if vmconf.DEV_MODE:
return vmconf.DEFAULT_VM_ID
# Get VMID dynamically from metadata. HMAC module does not
# support unicode string. Hence we need to cast them to str.
return str(metadata_services.get_metadata_param(
vmconf.METADATA_VM_ID_PARAM_NAME))
def _get_shared_secret():
if vmconf.DEV_MODE:
return vmconf.DEFAULT_VM_SHARED_SECRET
# Get shared secret dynamically from metadata. HMAC module does not
# support unicode string. Hence we need to cast them to str.
return str(metadata_services.get_metadata_param(
vmconf.METADATA_SHARED_SECRET_PARAM_NAME))
def generate_signature(message, vm_id):
"""Generates digital signature for given message combined with vm_id.
Args:
message: bytes. Message string.
vm_id: str. ID of the VM that trained the job.
Returns:
str. The digital signature generated from request data.
"""
encoded_vm_id = vm_id.encode(encoding='utf-8')
msg = b'%s|%s' % (base64.b64encode(message), encoded_vm_id)
key = _get_shared_secret().encode(encoding='utf-8')
# Generate signature and return it.
return hmac.new(key, msg, digestmod=hashlib.sha256).hexdigest()
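# Illustrative counterpart (not part of this module): a receiver holding the
# same shared secret could verify a payload by recomputing the digest and
# comparing in constant time. The function name is an assumption.
#
# def verify_signature(message, vm_id, signature):
#     return hmac.compare_digest(generate_signature(message, vm_id), signature)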
def fetch_next_job_request():
"""Returns the next job request to be processed.
Returns:
dict. A dict retrieved remotely from database containing
job request data.
"""
request_url = "%s:%s/%s" % (
_get_url(), _get_port(), vmconf.FETCH_NEXT_JOB_REQUEST_HANDLER)
payload = {
'vm_id': _get_vm_id().encode(encoding='utf-8'),
'message': _get_vm_id().encode(encoding='utf-8'),
}
signature = generate_signature(payload['message'], payload['vm_id'])
payload['signature'] = signature
data = {
'payload': json.dumps(payload)
}
response = requests.post(request_url, data=data)
return utils.parse_data_received_from_server(response.text)
def store_trained_classifier_model(job_result):
"""Stores the result of processed job request.
Args:
job_result: TrainingJobResult. Domain object containing result of
training of classifier along with job_id and algorithm_id.
Returns:
int. Status code of the response.
"""
job_result.validate()
payload = training_job_response_payload_pb2.TrainingJobResponsePayload()
payload.job_result.CopyFrom(job_result.to_proto())
payload.vm_id = _get_vm_id().encode(encoding='utf-8')
message = payload.job_result.SerializeToString().encode(encoding='utf-8')
signature = generate_signature(message, payload.vm_id)
payload.signature = signature
data = payload.SerializeToString()
request_url = "%s:%s/%s" % (
_get_url(), _get_port(), vmconf.STORE_TRAINED_CLASSIFIER_MODEL_HANDLER)
response = requests.post(
request_url, data=data,
headers={'Content-Type': 'application/octet-stream'})
return response.status_code
| apache-2.0 | 7,458,767,984,366,599,000 | 30.537313 | 79 | 0.6938 | false | 3.779964 | false | false | false |
carolFrohlich/nipype | nipype/interfaces/mrtrix3/preprocess.py | 2 | 7501 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# -*- coding: utf-8 -*-
"""
Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname(os.path.realpath(__file__ ))
>>> datadir = os.path.realpath(os.path.join(filepath,
... '../../testing/data'))
>>> os.chdir(datadir)
"""
from __future__ import print_function, division, unicode_literals, absolute_import
import os.path as op
from ..traits_extension import isdefined
from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec,
File)
from .base import MRTrix3BaseInputSpec, MRTrix3Base
class ResponseSDInputSpec(MRTrix3BaseInputSpec):
in_file = File(exists=True, argstr='%s', mandatory=True, position=-2,
desc='input diffusion weighted images')
out_file = File(
'response.txt', argstr='%s', mandatory=True, position=-1,
usedefault=True, desc='output file containing SH coefficients')
# DW Shell selection options
shell = traits.List(traits.Float, sep=',', argstr='-shell %s',
desc='specify one or more dw gradient shells')
in_mask = File(exists=True, argstr='-mask %s',
desc='provide initial mask image')
max_sh = traits.Int(8, argstr='-lmax %d',
desc='maximum harmonic degree of response function')
out_sf = File('sf_mask.nii.gz', argstr='-sf %s',
desc='write a mask containing single-fibre voxels')
test_all = traits.Bool(False, argstr='-test_all',
desc='re-test all voxels at every iteration')
# Optimization
iterations = traits.Int(0, argstr='-max_iters %d',
desc='maximum number of iterations per pass')
max_change = traits.Float(
argstr='-max_change %f',
desc=('maximum percentile change in any response function coefficient;'
' if no individual coefficient changes by more than this '
'fraction, the algorithm is terminated.'))
# Thresholds
vol_ratio = traits.Float(
.15, argstr='-volume_ratio %f',
desc=('maximal volume ratio between the sum of all other positive'
' lobes in the voxel and the largest FOD lobe'))
disp_mult = traits.Float(
1., argstr='-dispersion_multiplier %f',
desc=('dispersion of FOD lobe must not exceed some threshold as '
'determined by this multiplier and the FOD dispersion in other '
'single-fibre voxels. The threshold is: (mean + (multiplier * '
'(mean - min))); default = 1.0. Criterion is only applied in '
'second pass of RF estimation.'))
int_mult = traits.Float(
2., argstr='-integral_multiplier %f',
desc=('integral of FOD lobe must not be outside some range as '
'determined by this multiplier and FOD lobe integral in other'
' single-fibre voxels. The range is: (mean +- (multiplier * '
'stdev)); default = 2.0. Criterion is only applied in second '
'pass of RF estimation.'))
class ResponseSDOutputSpec(TraitedSpec):
out_file = File(exists=True, desc='the output response file')
out_sf = File(desc=('mask containing single-fibre voxels'))
class ResponseSD(MRTrix3Base):
"""
Generate an appropriate response function from the image data for
spherical deconvolution.
.. [1] Tax, C. M.; Jeurissen, B.; Vos, S. B.; Viergever, M. A. and
Leemans, A., Recursive calibration of the fiber response function
for spherical deconvolution of diffusion MRI data. NeuroImage,
2014, 86, 67-80
Example
-------
>>> import nipype.interfaces.mrtrix3 as mrt
>>> resp = mrt.ResponseSD()
>>> resp.inputs.in_file = 'dwi.mif'
>>> resp.inputs.in_mask = 'mask.nii.gz'
>>> resp.inputs.grad_fsl = ('bvecs', 'bvals')
>>> resp.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE
'dwi2response -fslgrad bvecs bvals -mask mask.nii.gz dwi.mif response.txt'
>>> resp.run() # doctest: +SKIP
"""
_cmd = 'dwi2response'
input_spec = ResponseSDInputSpec
output_spec = ResponseSDOutputSpec
def _list_outputs(self):
outputs = self.output_spec().get()
outputs['out_file'] = op.abspath(self.inputs.out_file)
if isdefined(self.inputs.out_sf):
outputs['out_sf'] = op.abspath(self.inputs.out_sf)
return outputs
class ACTPrepareFSLInputSpec(CommandLineInputSpec):
in_file = File(exists=True, argstr='%s', mandatory=True, position=-2,
desc='input anatomical image')
out_file = File(
'act_5tt.mif', argstr='%s', mandatory=True, position=-1,
usedefault=True, desc='output file after processing')
class ACTPrepareFSLOutputSpec(TraitedSpec):
out_file = File(exists=True, desc='the output response file')
class ACTPrepareFSL(CommandLine):
"""
Generate anatomical information necessary for Anatomically
Constrained Tractography (ACT).
Example
-------
>>> import nipype.interfaces.mrtrix3 as mrt
>>> prep = mrt.ACTPrepareFSL()
>>> prep.inputs.in_file = 'T1.nii.gz'
>>> prep.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE
'act_anat_prepare_fsl T1.nii.gz act_5tt.mif'
>>> prep.run() # doctest: +SKIP
"""
_cmd = 'act_anat_prepare_fsl'
input_spec = ACTPrepareFSLInputSpec
output_spec = ACTPrepareFSLOutputSpec
def _list_outputs(self):
outputs = self.output_spec().get()
outputs['out_file'] = op.abspath(self.inputs.out_file)
return outputs
class ReplaceFSwithFIRSTInputSpec(CommandLineInputSpec):
in_file = File(exists=True, argstr='%s', mandatory=True, position=-4,
desc='input anatomical image')
in_t1w = File(exists=True, argstr='%s', mandatory=True, position=-3,
desc='input T1 image')
in_config = File(exists=True, argstr='%s', position=-2,
desc='connectome configuration file')
out_file = File(
'aparc+first.mif', argstr='%s', mandatory=True, position=-1,
usedefault=True, desc='output file after processing')
class ReplaceFSwithFIRSTOutputSpec(TraitedSpec):
out_file = File(exists=True, desc='the output response file')
class ReplaceFSwithFIRST(CommandLine):
"""
Replace deep gray matter structures segmented with FSL FIRST in a
FreeSurfer parcellation.
Example
-------
>>> import nipype.interfaces.mrtrix3 as mrt
>>> prep = mrt.ReplaceFSwithFIRST()
>>> prep.inputs.in_file = 'aparc+aseg.nii'
>>> prep.inputs.in_t1w = 'T1.nii.gz'
>>> prep.inputs.in_config = 'mrtrix3_labelconfig.txt'
>>> prep.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE
'fs_parc_replace_sgm_first aparc+aseg.nii T1.nii.gz \
mrtrix3_labelconfig.txt aparc+first.mif'
>>> prep.run() # doctest: +SKIP
"""
_cmd = 'fs_parc_replace_sgm_first'
input_spec = ReplaceFSwithFIRSTInputSpec
output_spec = ReplaceFSwithFIRSTOutputSpec
def _list_outputs(self):
outputs = self.output_spec().get()
outputs['out_file'] = op.abspath(self.inputs.out_file)
return outputs
| bsd-3-clause | -1,332,776,987,818,199,800 | 36.318408 | 87 | 0.614985 | false | 3.713366 | true | false | false |
sam-m888/gramps | gramps/plugins/drawreport/ancestortree.py | 1 | 42337 | # Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2007-2012 Brian G. Matherly
# Copyright (C) 2010 Jakim Friant
# Copyright (C) 2014 Paul Franklin
# Copyright (C) 2010-2015 Craig J. Anderson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""Reports/Graphical Reports/Ancestor Tree"""
#------------------------------------------------------------------------
#
# Python modules
#
#------------------------------------------------------------------------
#------------------------------------------------------------------------
#
# Gramps modules
#
#------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.sgettext
from gramps.gen.errors import ReportError
from gramps.gen.plug.menu import (TextOption, NumberOption, BooleanOption,
EnumeratedListOption, StringOption,
PersonOption)
from gramps.gen.plug.report import Report, MenuReportOptions, stdoptions
from gramps.gen.plug.docgen import (FontStyle, ParagraphStyle, GraphicsStyle,
FONT_SANS_SERIF, PARA_ALIGN_CENTER)
from gramps.plugins.lib.libtreebase import *
from gramps.plugins.lib.librecurse import AscendPerson
from gramps.gen.proxy import CacheProxyDb
from gramps.gen.display.name import displayer as _nd
PT2CM = utils.pt2cm
#cm2pt = utils.cm2pt
#------------------------------------------------------------------------
#
# Constants
#
#------------------------------------------------------------------------
_BORN = _("birth abbreviation|b.")
_DIED = _("death abbreviation|d.")
_MARR = _("marriage abbreviation|m.")
LVL_GEN, LVL_INDX, LVL_Y = range(3)
#------------------------------------------------------------------------
#
# Box classes
#
#------------------------------------------------------------------------
class PersonBox(BoxBase):
"""
Calculates information about the box that will print on a page
"""
def __init__(self, level):
BoxBase.__init__(self)
self.boxstr = "AC2-box"
#self.level = (level[0]-1, level[1])
self.level = level
def __lt__(self, other):
return self.level[LVL_Y] < other.level[LVL_Y]
class FamilyBox(BoxBase):
"""
Calculates information about the box that will print on a page
"""
def __init__(self, level):
BoxBase.__init__(self)
self.boxstr = "AC2-fam-box"
#self.level = (level[0]-1, level[1])
self.level = level
def __lt__(self, other):
return self.level[LVL_Y] < other.level[LVL_Y]
#------------------------------------------------------------------------
#
# Titles Class(es)
#
#------------------------------------------------------------------------
class TitleN(TitleNoDisplay):
"""No Title class for the report """
def __init__(self, doc, locale):
TitleNoDisplay.__init__(self, doc, "AC2-Title-box")
self._ = locale.translation.sgettext
def calc_title(self, center):
"""Calculate the title of the report"""
#we want no text, but need a text for the TOC in a book!
self.mark_text = self._("Ancestor Graph")
self.text = ''
class TitleA(TitleBox):
"""Title class for the report """
def __init__(self, doc, locale, name_displayer):
self._nd = name_displayer
TitleBox.__init__(self, doc, "AC2-Title-box")
self._ = locale.translation.sgettext
def calc_title(self, center):
"""Calculate the title of the report"""
name = ""
if center is not None:
name = self._nd.display(center)
# feature request 2356: avoid genitive form
self.text = self._("Ancestor Graph for %s") % name
self.set_box_height_width()
#------------------------------------------------------------------------
#
# CalcItems (helper class to calculate text)
# make_ancestor_tree (main recursive functions)
#
#------------------------------------------------------------------------
class CalcItems:
""" A helper class to calculate the default box text
and text for each person / marriage
"""
def __init__(self, dbase):
_gui = GUIConnect()
self._gui = _gui
#calculate the printed lines for each box
#str = ""
#if self.get_val('miss_val'):
# str = "_____"
display_repl = _gui.get_val("replace_list")
self.__calc_l = CalcLines(dbase, display_repl, _gui.locale, _gui.n_d)
self.__blank_father = None
self.__blank_mother = None
self.__blank_father = \
self.__calc_l.calc_lines(None, None, _gui.get_val("father_disp"))
self.__blank_mother = \
self.__calc_l.calc_lines(None, None, _gui.get_val("mother_disp"))
self.center_use = _gui.get_val("center_uses")
self.disp_father = _gui.get_val("father_disp")
self.disp_mother = _gui.get_val("mother_disp")
self.disp_marr = [_gui.get_val("marr_disp")]
self.__blank_marriage = \
self.__calc_l.calc_lines(None, None, self.disp_marr)
def calc_person(self, index, indi_handle, fams_handle):
working_lines = ""
if index[1] % 2 == 0 or (index[1] == 1 and self.center_use == 0):
            if indi_handle == fams_handle is None:  # i.e. both handles are None
working_lines = self.__calc_l.calc_lines(
None, None, self._gui.get_val("father_disp"))
else:
working_lines = self.disp_father
else:
if indi_handle == fams_handle is None:
working_lines = self.__calc_l.calc_lines(
None, None, self._gui.get_val("mother_disp"))
else:
working_lines = self.disp_mother
if indi_handle == fams_handle is None:
return working_lines
else:
return self.__calc_l.calc_lines(indi_handle, fams_handle,
working_lines)
def calc_marriage(self, indi_handle, fams_handle):
if indi_handle == fams_handle is None:
return self.__blank_marriage
else:
return self.__calc_l.calc_lines(indi_handle, fams_handle,
self.disp_marr)
class MakeAncestorTree(AscendPerson):
"""
    The main procedure that uses recursion to build the tree from a person.
    The order in which people are inserted is important;
    this class makes sure that the order is done correctly.
"""
def __init__(self, dbase, canvas):
_gui = GUIConnect()
AscendPerson.__init__(self, dbase, _gui.maxgen(), _gui.fill_out())
self.database = dbase
self.canvas = canvas
self.inlc_marr = _gui.inc_marr()
self.inc_sib = _gui.inc_sib()
self.compress_tree = _gui.compress_tree()
self.center_family = None
self.lines = [None] * (_gui.maxgen() + 1)
self.max_generation = 0
self.calc_items = CalcItems(self.database)
def add_person(self, index, indi_handle, fams_handle):
""" Makes a person box and add that person into the Canvas. """
#print str(index) + " add_person " + str(indi_handle)
myself = PersonBox((index[0] - 1,) + index[1:])
if index[LVL_GEN] == 1: # Center Person
self.center_family = fams_handle
if index[LVL_GEN] > self.max_generation:
self.max_generation = index[LVL_GEN]
myself.text = self.calc_items.calc_person(index,
indi_handle, fams_handle)
# myself.text[0] = myself.text[0] + ' ' + repr(index) # for debugging
if indi_handle is not None: # None is legal for an empty box
myself.add_mark(self.database,
self.database.get_person_from_handle(indi_handle))
self.canvas.add_box(myself)
#make the lines
indx = index[LVL_GEN]
self.lines[indx] = myself
if indx > 1:
if self.lines[indx - 1].line_to is None:
line = LineBase(self.lines[indx - 1])
self.lines[indx - 1].line_to = line
self.canvas.add_line(line)
else:
line = self.lines[indx - 1].line_to
line.add_to(myself)
return myself
def add_person_again(self, index, indi_handle, fams_handle):
self.add_person(index, indi_handle, fams_handle)
def add_marriage(self, index, indi_handle, fams_handle):
""" Makes a marriage box and add that person into the Canvas. """
if not self.inlc_marr:
return
myself = FamilyBox((index[0] - 1,) + index[1:])
#calculate the text.
myself.text = self.calc_items.calc_marriage(indi_handle, fams_handle)
self.canvas.add_box(myself)
def y_index(self, x_level, index):
""" Calculate the column or generation that this person is in.
x_level -> 0 to max_gen-1
index -> 1 to (self.max_generation**2)-1
"""
#Calculate which row in the column of people.
tmp_y = index - (2**x_level)
#Calculate which row in the table (yes table) of people.
delta = (2**self.max_generation) // (2**(x_level))
return int((delta / 2) + (tmp_y * delta)) - 1
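    # Worked example with max_generation = 3: the center person
    # (x_level=0, index=1) gets tmp_y=0, delta=8, so y = 4 - 1 = 3; the
    # father (x_level=1, index=2) gets delta=4, y = 2 - 1 = 1; and the
    # mother (x_level=1, index=3) gets y = 2 + 4 - 1 = 5.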
def do_y_indx(self):
''' Make the y_index for all boxes
        first from a formula, then remove blank areas around the edges,
then compress the tree if desired
'''
min_y = self.y_index(self.canvas.boxes[0].level[LVL_GEN],
self.canvas.boxes[0].level[LVL_INDX])
for box in self.canvas.boxes:
if "fam" in box.boxstr:
box.level = box.level + \
(self.y_index(box.level[LVL_GEN] - 1,
int(box.level[LVL_INDX] / 2)),)
else:
box.level = box.level + \
(self.y_index(box.level[LVL_GEN], box.level[LVL_INDX]),)
min_y = min(min_y, box.level[LVL_Y])
#print (str(box.level))
#if a last father (of fathers) does not have a father/parents
#Then there could be a gap. Remove this gap
if min_y > 0:
for box in self.canvas.boxes:
box.level = box.level[:LVL_Y] + (box.level[LVL_Y] - min_y,)
#Now that we have y_index, lets see if we need to squish the tree
self.canvas.boxes.sort() # Sort them on the y_index
if not self.compress_tree:
return
#boxes are already in top down [LVL_Y] form so lets
#set the box in the correct y level depending on compress_tree
y_level = 0
current_y = self.canvas.boxes[0].level[LVL_Y]
for box in self.canvas.boxes:
y_index = box.level[LVL_Y]
if y_index > current_y:
current_y = y_index
y_level += 1
box.level = box.level[:LVL_Y] + (y_level,)
def do_sibs(self):
if not self.inc_sib or self.center_family is None:
return
family = self.database.get_family_from_handle(self.center_family)
mykids = [kid.ref for kid in family.get_child_ref_list()]
if len(mykids) == 1: # No other siblings. Don't do anything.
return
        # The center person is one of the kids, and he/she already has our information
center = self.canvas.boxes.pop(self.canvas.boxes.index(self.lines[1]))
line = center.line_to
level = center.level[LVL_Y]
move = level - (len(mykids) // 2) + ((len(mykids) + 1) % 2)
if move < 0:
# more kids than parents. ran off the page. Move them all down
for box in self.canvas.boxes:
box.level = (box.level[0], box.level[1], box.level[2] - move)
move = 0
line.start = []
rrr = -1 # if len(mykids)%2 == 1 else 0
for kid in mykids:
rrr += 1
mee = self.add_person((1, 1, move + rrr), kid, self.center_family)
line.add_from(mee)
#mee.level = (0, 1, level - (len(mykids)//2)+rrr)
mee.line_to = line
def start(self, person_id):
""" go ahead and make it happen """
center = self.database.get_person_from_gramps_id(person_id)
if center is None:
raise ReportError(
_("Person %s is not in the Database") % person_id)
center_h = center.get_handle()
#Step 1. Get the people
self.recurse(center_h)
#Step 2. Calculate the y_index for everyone
self.do_y_indx()
#Step 3. Siblings of the center person
self.do_sibs()
#------------------------------------------------------------------------
#
# Transform Classes
#
#------------------------------------------------------------------------
#------------------------------------------------------------------------
# Class lr_Transform
#------------------------------------------------------------------------
class LRTransform:
"""
    set up all of the boxes on the canvas for a left/right report
"""
def __init__(self, canvas, max_generations):
self.canvas = canvas
self.rept_opts = canvas.report_opts
self.y_offset = (self.rept_opts.littleoffset * 2 +
self.canvas.title.height)
def _place(self, box):
""" put the box in it's correct spot """
#1. cm_x
box.x_cm = self.rept_opts.littleoffset
box.x_cm += (box.level[LVL_GEN] *
(self.rept_opts.col_width + self.rept_opts.max_box_width))
#2. cm_y
box.y_cm = self.rept_opts.max_box_height + self.rept_opts.box_pgap
box.y_cm *= box.level[LVL_Y]
box.y_cm += self.y_offset
#if box.height < self.rept_opts.max_box_height:
# box.y_cm += ((self.rept_opts.max_box_height - box.height) /2)
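        # Worked example (illustrative sizes only): with littleoffset=0.25cm,
        # col_width=1cm and max_box_width=5cm, a generation-2 box lands at
        # x_cm = 0.25 + 2*(1+5) = 12.25cm; with max_box_height=2cm and
        # box_pgap=0.5cm, a box on row 3 lands at y_cm = 2.5*3 + y_offset.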
def place(self):
""" Step through boxes so they can be put in the right spot """
#prime the pump
self.__last_y_level = self.canvas.boxes[0].level[LVL_Y]
#go
for box in self.canvas.boxes:
self._place(box)
#------------------------------------------------------------------------
#
# class make_report
#
#------------------------------------------------------------------------
class MakeReport:
def __init__(self, dbase, doc, canvas, font_normal):
self.database = dbase
self.doc = doc
self.canvas = canvas
self.font_normal = font_normal
_gui = GUIConnect()
self.inlc_marr = _gui.inc_marr()
self.compress_tree = _gui.compress_tree()
self.mother_ht = self.father_ht = 0
self.max_generations = 0
def get_height_width(self, box):
"""
obtain width information for each level (x)
obtain height information for each item
"""
self.canvas.set_box_height_width(box)
if box.width > self.canvas.report_opts.max_box_width:
self.canvas.report_opts.max_box_width = box.width # + box.shadow
if box.level[LVL_Y] > 0:
if box.level[LVL_INDX] % 2 == 0 and box.height > self.father_ht:
self.father_ht = box.height
elif box.level[LVL_INDX] % 2 == 1 and box.height > self.mother_ht:
self.mother_ht = box.height
if box.level[LVL_GEN] > self.max_generations:
self.max_generations = box.level[LVL_GEN]
def get_generations(self):
return self.max_generations
def start(self):
# __gui = GUIConnect()
# 1.
#set the sizes for each box and get the max_generations.
self.father_ht = 0.0
self.mother_ht = 0.0
for box in self.canvas.boxes:
self.get_height_width(box)
if self.compress_tree and not self.inlc_marr:
self.canvas.report_opts.max_box_height = \
min(self.father_ht, self.mother_ht)
else:
self.canvas.report_opts.max_box_height = \
max(self.father_ht, self.mother_ht)
#At this point we know everything we need to make the report.
#Size of each column of people - self.rept_opt.box_width
#size of each column (or row) of lines - self.rept_opt.col_width
#size of each row - self.rept_opt.box_height
#go ahead and set it now.
for box in self.canvas.boxes:
box.width = self.canvas.report_opts.max_box_width
# 2.
#setup the transform class to move around the boxes on the canvas
transform = LRTransform(self.canvas, self.max_generations)
transform.place()
class GUIConnect:
""" This is a BORG object. There is ONLY one.
    This gives some common routines that EVERYONE can use, like
    getting the value of a GUI variable.
"""
__shared_state = {}
def __init__(self): # We are BORG!
self.__dict__ = self.__shared_state
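    # Illustrative note: every instance shares __shared_state, so the
    # following (hypothetical) pair of calls operates on the same options:
    #
    #   GUIConnect().set__opts(options, locale, name_displayer)
    #   GUIConnect().get_val("maxgen")    # works from a brand-new instance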
def set__opts(self, options, locale, name_displayer):
""" Set only once as we are BORG. """
self.__opts = options
self.locale = locale
self.n_d = name_displayer
def get_val(self, val):
""" Get a GUI value. """
value = self.__opts.get_option_by_name(val)
if value:
return value.get_value()
else:
            return False
def title_class(self, doc):
""" Return a class that holds the proper title based off of the
GUI options """
title_type = self.get_val('report_title')
if title_type:
return TitleA(doc, self.locale, self.n_d)
else:
return TitleN(doc, self.locale)
def inc_marr(self):
return self.get_val("inc_marr")
def inc_sib(self):
return self.get_val("inc_siblings")
def maxgen(self):
return self.get_val("maxgen")
def fill_out(self):
return self.get_val("fill_out")
def compress_tree(self):
return self.get_val("compress_tree")
#------------------------------------------------------------------------
#
# AncestorTree
#
#------------------------------------------------------------------------
class AncestorTree(Report):
""" AncestorTree Report """
def __init__(self, database, options, user):
"""
Create AncestorTree object that produces the report.
The arguments are:
database - the Gramps database instance
options - instance of the Options class for this report
user - a gen.user.User() instance
"""
Report.__init__(self, database, options, user)
self.options = options
self._user = user
self.set_locale(options.menu.get_option_by_name('trans').get_value())
stdoptions.run_date_format_option(self, options.menu)
stdoptions.run_private_data_option(self, options.menu)
stdoptions.run_living_people_option(self, options.menu, self._locale)
self.database = CacheProxyDb(self.database)
stdoptions.run_name_format_option(self, options.menu)
self._nd = self._name_display
def begin_report(self):
"""
This report needs the following parameters (class variables)
that come in the options class.
max_generations - Maximum number of generations to include.
pagebbg - Whether to include page breaks between generations.
dispf - Display format for the output box.
scale_report - Whether to scale the report to fit the width or all.
indblank - Whether to include blank pages.
compress - Whether to compress chart.
incl_private - Whether to include private data
living_people - How to handle living people
years_past_death - Consider as living this many years after death
        We will:
        1. lay out a canvas in its full one-page size
        2. set up the page that we wish to print on
        then scale up/down either or both of the above as needed/desired.
        Almost all of this should be moved into Canvas!
"""
database = self.database
self.connect = GUIConnect()
self.connect.set__opts(self.options.menu, self._locale, self._nd)
#Set up the canvas that we will print on.
style_sheet = self.doc.get_style_sheet()
font_normal = style_sheet.get_paragraph_style("AC2-Normal").get_font()
#The canvas that we will put our report on and print off of
self.canvas = Canvas(self.doc,
ReportOptions(self.doc, font_normal, 'AC2-line'))
self.canvas.report_opts.box_shadow *= \
self.connect.get_val('shadowscale')
self.canvas.report_opts.box_pgap *= self.connect.get_val('box_Yscale')
self.canvas.report_opts.box_mgap *= self.connect.get_val('box_Yscale')
with self._user.progress(_('Ancestor Tree'),
_('Making the Tree...'), 4) as step:
#make the tree onto the canvas
# inlc_marr = self.connect.get_val("inc_marr")
self.max_generations = self.connect.get_val('maxgen')
tree = MakeAncestorTree(database, self.canvas)
tree.start(self.connect.get_val('pid'))
tree = None
step()
#Title
title = self.connect.title_class(self.doc)
center = self.database.get_person_from_gramps_id(
self.connect.get_val('pid'))
title.calc_title(center)
self.canvas.add_title(title)
#make the report as big as it wants to be.
report = MakeReport(database, self.doc, self.canvas, font_normal)
report.start()
self.max_generations = report.get_generations() # already know
report = None
step()
#Note?
if self.connect.get_val("inc_note"):
note_box = NoteBox(self.doc, "AC2-note-box",
self.connect.get_val("note_place"))
subst = SubstKeywords(self.database, self._locale, self._nd,
None, None)
note_box.text = subst.replace_and_clean(
self.connect.get_val('note_disp'))
self.canvas.add_note(note_box)
#Now we have the report in its full size.
#Do we want to scale the report?
one_page = self.connect.get_val("resize_page")
scale_report = self.connect.get_val("scale_tree")
scale = self.canvas.scale_report(one_page,
scale_report != 0,
scale_report == 2)
step()
if scale != 1 or self.connect.get_val('shadowscale') != 1.0:
self.scale_styles(scale)
def write_report(self):
one_page = self.connect.get_val("resize_page")
#scale_report = self.connect.get_val("scale_tree")
#inlc_marr = self.connect.get_val("inc_marr")
inc_border = self.connect.get_val('inc_border')
incblank = self.connect.get_val("inc_blank")
prnnum = self.connect.get_val("inc_pagenum")
#####################
#Setup page information
colsperpage = self.doc.get_usable_width()
colsperpage += self.canvas.report_opts.col_width
colsperpage = int(
colsperpage / (self.canvas.report_opts.max_box_width +
self.canvas.report_opts.col_width))
colsperpage = colsperpage or 1
#####################
#Vars
if prnnum:
page_num_box = PageNumberBox(self.doc, 'AC2-box', self._locale)
#TODO - Here
#####################
#ok, everyone is now ready to print on the canvas. Paginate?
self.canvas.paginate(colsperpage, one_page)
#####################
#Yeah!!!
#lets finally make some pages!!!
#####################
pages = self.canvas.page_count(incblank)
with self._user.progress(_('Ancestor Tree'),
_('Printing the Tree...'), pages) as step:
for page in self.canvas.page_iter_gen(incblank):
self.doc.start_page()
#do we need to print a border?
if inc_border:
page.draw_border('AC2-line')
#Do we need to print the page number?
if prnnum:
page_num_box.display(page)
#Print the individual people and lines
page.display()
step()
self.doc.end_page()
def scale_styles(self, scale):
"""
Scale the styles for this report.
"""
style_sheet = self.doc.get_style_sheet()
graph_style = style_sheet.get_draw_style("AC2-box")
graph_style.set_shadow(graph_style.get_shadow(),
self.canvas.report_opts.box_shadow * scale)
graph_style.set_line_width(graph_style.get_line_width() * scale)
style_sheet.add_draw_style("AC2-box", graph_style)
graph_style = style_sheet.get_draw_style("AC2-fam-box")
graph_style.set_shadow(graph_style.get_shadow(),
self.canvas.report_opts.box_shadow * scale)
graph_style.set_line_width(graph_style.get_line_width() * scale)
style_sheet.add_draw_style("AC2-fam-box", graph_style)
graph_style = style_sheet.get_draw_style("AC2-note-box")
#graph_style.set_shadow(graph_style.get_shadow(),
# self.canvas.report_opts.box_shadow * scale)
graph_style.set_line_width(graph_style.get_line_width() * scale)
style_sheet.add_draw_style("AC2-note-box", graph_style)
para_style = style_sheet.get_paragraph_style("AC2-Normal")
font = para_style.get_font()
font.set_size(font.get_size() * scale)
para_style.set_font(font)
style_sheet.add_paragraph_style("AC2-Normal", para_style)
para_style = style_sheet.get_paragraph_style("AC2-Note")
font = para_style.get_font()
font.set_size(font.get_size() * scale)
para_style.set_font(font)
style_sheet.add_paragraph_style("AC2-Note", para_style)
para_style = style_sheet.get_paragraph_style("AC2-Title")
font = para_style.get_font()
font.set_size(font.get_size() * scale)
para_style.set_font(font)
style_sheet.add_paragraph_style("AC2-Title", para_style)
graph_style = GraphicsStyle()
width = graph_style.get_line_width()
width = width * scale
graph_style.set_line_width(width)
style_sheet.add_draw_style("AC2-line", graph_style)
self.doc.set_style_sheet(style_sheet)
#------------------------------------------------------------------------
#
# AncestorTreeOptions
#
#------------------------------------------------------------------------
class AncestorTreeOptions(MenuReportOptions):
"""
Defines options and provides handling interface.
"""
def __init__(self, name, dbase):
self.__db = dbase
self.__pid = None
self.box_Y_sf = None
self.box_shadow_sf = None
MenuReportOptions.__init__(self, name, dbase)
def get_subject(self):
""" Return a string that describes the subject of the report. """
gid = self.__pid.get_value()
person = self.__db.get_person_from_gramps_id(gid)
return _nd.display(person)
def add_menu_options(self, menu):
##################
category_name = _("Tree Options")
self.__pid = PersonOption(_("Center Person"))
self.__pid.set_help(_("The center person for the tree"))
menu.add_option(category_name, "pid", self.__pid)
siblings = BooleanOption(
_('Include siblings of the center person'), False)
siblings.set_help(
_("Whether to only display the center person or all "
"of his/her siblings too"))
menu.add_option(category_name, "inc_siblings", siblings)
self.max_gen = NumberOption(_("Generations"), 10, 1, 50)
self.max_gen.set_help(_("The number of generations to include "
"in the tree"))
menu.add_option(category_name, "maxgen", self.max_gen)
self.fillout = EnumeratedListOption(_("Display unknown\ngenerations"),
0)
self.fillout.set_help(_("The number of generations of empty "
"boxes that will be displayed"))
menu.add_option(category_name, "fill_out", self.fillout)
self.max_gen.connect('value-changed', self.__fillout_vals)
self.__fillout_vals()
compress = BooleanOption(_('Compress tree'), True)
compress.set_help(
_("Whether to remove any extra blank spaces set "
"aside for people that are unknown"))
menu.add_option(category_name, "compress_tree", compress)
#better to 'Show siblings of\nthe center person
#Spouse_disp = EnumeratedListOption(_("Show spouses of\nthe center "
# "person"), 0)
#Spouse_disp.add_item(0, _("No. Do not show Spouses"))
#Spouse_disp.add_item(1, _("Yes, and use the Main Display Format"))
#Spouse_disp.add_item(2, _("Yes, and use the Secondary "
# "Display Format"))
#Spouse_disp.set_help(_("Show spouses of the center person?"))
#menu.add_option(category_name, "Spouse_disp", Spouse_disp)
##################
category_name = _("Report Options")
self.title = EnumeratedListOption(_("Report Title"), 0)
self.title.add_item(0, _("Do not include a title"))
self.title.add_item(1, _("Include Report Title"))
self.title.set_help(_("Choose a title for the report"))
menu.add_option(category_name, "report_title", self.title)
border = BooleanOption(_('Include a border'), False)
border.set_help(_("Whether to make a border around the report."))
menu.add_option(category_name, "inc_border", border)
prnnum = BooleanOption(_('Include Page Numbers'), False)
prnnum.set_help(_("Whether to print page numbers on each page."))
menu.add_option(category_name, "inc_pagenum", prnnum)
self.scale = EnumeratedListOption(_("Scale tree to fit"), 0)
self.scale.add_item(0, _("Do not scale tree"))
self.scale.add_item(1, _("Scale tree to fit page width only"))
self.scale.add_item(2, _("Scale tree to fit the size of the page"))
self.scale.set_help(
_("Whether to scale the tree to fit a specific paper size"))
menu.add_option(category_name, "scale_tree", self.scale)
self.scale.connect('value-changed', self.__check_blank)
if "BKI" not in self.name.split(","):
self.__onepage = BooleanOption(
_("Resize Page to Fit Tree size\n"
"\n"
"Note: Overrides options in the 'Paper Option' tab"
),
False)
self.__onepage.set_help(
_("Whether to resize the page to fit the size \n"
"of the tree. Note: the page will have a \n"
"non standard size.\n"
"\n"
"With this option selected, the following will happen:\n"
"\n"
"With the 'Do not scale tree' option the page\n"
" is resized to the height/width of the tree\n"
"\n"
"With 'Scale tree to fit page width only' the height of\n"
" the page is resized to the height of the tree\n"
"\n"
"With 'Scale tree to fit the size of the page' the page\n"
" is resized to remove any gap in either height or width"
))
menu.add_option(category_name, "resize_page", self.__onepage)
self.__onepage.connect('value-changed', self.__check_blank)
else:
self.__onepage = None
self.__blank = BooleanOption(_('Include Blank Pages'), True)
self.__blank.set_help(_("Whether to include pages that are blank."))
menu.add_option(category_name, "inc_blank", self.__blank)
self.__check_blank()
##################
category_name = _("Report Options (2)")
stdoptions.add_name_format_option(menu, category_name)
stdoptions.add_living_people_option(menu, category_name)
stdoptions.add_private_data_option(menu, category_name)
locale_opt = stdoptions.add_localization_option(menu, category_name)
stdoptions.add_date_format_option(menu, category_name, locale_opt)
##################
category_name = _("Display")
disp = TextOption(_("Father\nDisplay Format"),
["$n",
"%s $b" % _BORN,
"-{%s $d}" % _DIED])
disp.set_help(_("Display format for the fathers box."))
menu.add_option(category_name, "father_disp", disp)
#Will add when libsubstkeyword supports it.
#missing = EnumeratedListOption(_("Replace missing\nplaces\\dates \
# with"), 0)
#missing.add_item(0, _("Does not display anything"))
#missing.add_item(1, _("Displays '_____'"))
#missing.set_help(_("What will print when information is not known"))
#menu.add_option(category_name, "miss_val", missing)
disp_mom = TextOption(_("Mother\nDisplay Format"),
["$n",
"%s $b" % _BORN,
"%s $m" % _MARR,
"-{%s $d}" % _DIED])
disp_mom.set_help(_("Display format for the mothers box."))
menu.add_option(category_name, "mother_disp", disp_mom)
center_disp = EnumeratedListOption(_("Center person uses\n"
"which format"), 0)
center_disp.add_item(0, _("Use Fathers Display format"))
center_disp.add_item(1, _("Use Mothers display format"))
center_disp.set_help(_("The display format for the center person"))
menu.add_option(category_name, "center_uses", center_disp)
self.incmarr = BooleanOption(_('Include Marriage box'), False)
self.incmarr.set_help(
_("Whether to include a separate marital box in the report"))
menu.add_option(category_name, "inc_marr", self.incmarr)
self.incmarr.connect('value-changed', self._incmarr_changed)
self.marrdisp = StringOption(_("Marriage\nDisplay Format"),
"%s $m" % _MARR)
self.marrdisp.set_help(_("Display format for the marital box."))
menu.add_option(category_name, "marr_disp", self.marrdisp)
self._incmarr_changed()
##################
category_name = _("Advanced")
repldisp = TextOption(
_("Replace Display Format:\n'Replace this'/' with this'"),
[])
repldisp.set_help(_("i.e.\nUnited States of America/U.S.A"))
menu.add_option(category_name, "replace_list", repldisp)
# TODO this code is never used and so I conclude it is for future use
# self.__include_images = BooleanOption(
# _('Include thumbnail images of people'), False)
# self.__include_images.set_help(
# _("Whether to include thumbnails of people."))
# menu.add_option(category_name, "includeImages",
# self.__include_images)
self.usenote = BooleanOption(_('Include a note'), False)
self.usenote.set_help(_("Whether to include a note on the report."))
menu.add_option(category_name, "inc_note", self.usenote)
self.usenote.connect('value-changed', self._usenote_changed)
self.notedisp = TextOption(_("Note"), [])
self.notedisp.set_help(_("Add a note\n\n"
"$T inserts today's date"))
menu.add_option(category_name, "note_disp", self.notedisp)
locales = NoteType(0, 1)
self.notelocal = EnumeratedListOption(_("Note Location"), 0)
for num, text in locales.note_locals():
self.notelocal.add_item(num, text)
self.notelocal.set_help(_("Where to place the note."))
menu.add_option(category_name, "note_place", self.notelocal)
self._usenote_changed()
self.box_Y_sf = NumberOption(_("inter-box scale factor"),
1.00, 0.10, 2.00, 0.01)
self.box_Y_sf.set_help(
_("Make the inter-box spacing bigger or smaller"))
menu.add_option(category_name, "box_Yscale", self.box_Y_sf)
self.box_shadow_sf = NumberOption(_("box shadow scale factor"),
1.00, 0.00, 2.00, 0.01) # down to 0
self.box_shadow_sf.set_help(_("Make the box shadow bigger or smaller"))
menu.add_option(category_name, "shadowscale", self.box_shadow_sf)
def _incmarr_changed(self):
"""
If Marriage box is not enabled, disable Marriage Display Format box
"""
value = self.incmarr.get_value()
self.marrdisp.set_available(value)
def _usenote_changed(self):
"""
If Note box is not enabled, disable Note Location box
"""
value = self.usenote.get_value()
self.notelocal.set_available(value)
def __check_blank(self):
if self.__onepage:
value = not self.__onepage.get_value()
else:
value = True
off = value and (self.scale.get_value() != 2)
self.__blank.set_available(off)
def __fillout_vals(self):
max_gen = self.max_gen.get_value()
old_val = self.fillout.get_value()
item_list = []
item_list.append([0, _("No generations of empty boxes "
"for unknown ancestors")])
if max_gen > 1:
item_list.append([1, _("One Generation of empty boxes "
"for unknown ancestors")])
item_list.extend(
[itr, str(itr) +
_(" Generations of empty boxes for unknown ancestors")]
for itr in range(2, max_gen))
self.fillout.set_items(item_list)
if old_val + 2 > len(item_list):
self.fillout.set_value(len(item_list) - 2)
def make_default_style(self, default_style):
"""Make the default output style for the Ancestor Tree."""
# Paragraph Styles:
font = FontStyle()
font.set_size(9)
font.set_type_face(FONT_SANS_SERIF)
para_style = ParagraphStyle()
para_style.set_font(font)
para_style.set_description(
_('The basic style used for the text display.'))
default_style.add_paragraph_style("AC2-Normal", para_style)
box_shadow = PT2CM(font.get_size()) * .6
font = FontStyle()
font.set_size(9)
font.set_type_face(FONT_SANS_SERIF)
para_style = ParagraphStyle()
para_style.set_font(font)
para_style.set_description(
_('The basic style used for the note display.'))
default_style.add_paragraph_style("AC2-Note", para_style)
font = FontStyle()
font.set_size(16)
font.set_type_face(FONT_SANS_SERIF)
para_style = ParagraphStyle()
para_style.set_font(font)
para_style.set_alignment(PARA_ALIGN_CENTER)
para_style.set_description(_('The style used for the title.'))
default_style.add_paragraph_style("AC2-Title", para_style)
# Draw styles
graph_style = GraphicsStyle()
graph_style.set_paragraph_style("AC2-Normal")
graph_style.set_shadow(1, box_shadow) # shadow set by text size
graph_style.set_fill_color((255, 255, 255))
default_style.add_draw_style("AC2-box", graph_style)
graph_style = GraphicsStyle()
graph_style.set_paragraph_style("AC2-Normal")
#graph_style.set_shadow(0, PT2CM(9)) # shadow set by text size
graph_style.set_fill_color((255, 255, 255))
default_style.add_draw_style("AC2-fam-box", graph_style)
graph_style = GraphicsStyle()
graph_style.set_paragraph_style("AC2-Note")
graph_style.set_fill_color((255, 255, 255))
default_style.add_draw_style("AC2-note-box", graph_style)
# TODO this seems meaningless, as only the text is displayed
graph_style = GraphicsStyle()
graph_style.set_paragraph_style("AC2-Title")
graph_style.set_color((0, 0, 0))
graph_style.set_fill_color((255, 255, 255))
graph_style.set_line_width(0)
graph_style.set_description(_("Cannot edit this reference"))
default_style.add_draw_style("AC2-Title-box", graph_style)
graph_style = GraphicsStyle()
default_style.add_draw_style("AC2-line", graph_style)
#=====================================
#But even if you should suffer for what is right, you are blessed.
#"Do not fear what they fear ; do not be frightened."
#Take Courage
#1 Peter 3:14
| gpl-2.0 | 6,894,781,434,802,397,000 | 37.038634 | 79 | 0.546709 | false | 3.832096 | false | false | false |
lgfausak/sqlbridge | sqlbridge/twisted/dbengine.py | 1 | 6399 | #!/usr/bin/env python
###############################################################################
##
## Copyright (C) 2014 Greg Fausak
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from __future__ import absolute_import
import sys,os,argparse,six
from twisted.python import log
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from twisted.internet.endpoints import clientFromString
#from myapprunner import MyApplicationRunner
from autobahn.twisted.wamp import ApplicationSession,ApplicationRunner
from autobahn import util
from autobahn.wamp import auth
from autobahn.wamp import types
from autobahn.wamp.exception import ApplicationError
from autobahn.twisted import wamp, websocket
from autobahn.twisted.wamp import ApplicationSession
class DB(ApplicationSession):
"""
An application component providing db access
"""
def __init__(self, *args, **kwargs):
log.msg("__init__")
self.db = {}
self.svar = {}
log.msg("got args {}, kwargs {}".format(args,kwargs))
# reap init variables meant only for us
for i in ( 'engine', 'topic_base', 'dsn', 'authinfo', 'debug', ):
if i in kwargs:
if kwargs[i] is not None:
self.svar[i] = kwargs[i]
del kwargs[i]
log.msg("sending to super.init args {}, kwargs {}".format(args,kwargs))
ApplicationSession.__init__(self, *args, **kwargs)
def onConnect(self):
log.msg("onConnect")
auth_type = 'none'
auth_user = 'anon'
if 'authinfo' in self.svar:
auth_type = self.svar['authinfo']['auth_type']
auth_user = self.svar['authinfo']['auth_user']
log.msg("onConnect with {} {}".format(auth_type, auth_user))
self.join(self.config.realm, [six.u(auth_type)], six.u(auth_user))
def onChallenge(self, challenge):
log.msg("onChallenge - maynard")
password = 'unknown'
if 'authinfo' in self.svar:
password = self.svar['authinfo']['auth_password']
log.msg("onChallenge with password {}".format(password))
if challenge.method == u'wampcra':
if u'salt' in challenge.extra:
key = auth.derive_key(password.encode('utf8'),
challenge.extra['salt'].encode('utf8'),
challenge.extra.get('iterations', None),
challenge.extra.get('keylen', None))
else:
key = password.encode('utf8')
signature = auth.compute_wcs(key, challenge.extra['challenge'].encode('utf8'))
return signature.decode('ascii')
else:
raise Exception("don't know how to compute challenge for authmethod {}".format(challenge.method))
@inlineCallbacks
def onJoin(self, details):
log.msg("db:onJoin session attached {}".format(details))
if 'engine' in self.svar and 'topic_base' in self.svar:
if self.svar['engine'] == 'PG9_4' or self.svar['engine'] == 'PG':
from .db import postgres
dbo = postgres.PG9_4(topic_base = self.svar['topic_base'], app_session = self, debug = self.svar['debug'])
elif self.svar['engine'] == 'MYSQL14_14' or self.svar['engine'] == 'MYSQL':
from .db import mysql
dbo = mysql.MYSQL14_14(topic_base = self.svar['topic_base'], app_session = self, debug = self.svar['debug'])
elif self.svar['engine'] == 'SQLITE3_3_8_2' or self.svar['engine'] == 'SQLITE3' or self.svar['engine'] == 'SQLITE':
from .db import ausqlite3
dbo = ausqlite3.SQLITE3_3_8_2(topic_base = self.svar['topic_base'], app_session = self, debug = self.svar['debug'])
else:
raise Exception("Unsupported dbtype {} ".format(self.svar['engine']))
else:
raise Exception("when instantiating this class DB you must provide engine=X and topic_base=Y")
self.db = { 'instance': dbo }
self.db['registration'] = {}
r = types.RegisterOptions(details_arg = 'details')
self.db['registration']['connect'] = yield self.register(dbo.connect, self.svar['topic_base']+'.connect', options = r)
self.db['registration']['disconnect'] = yield self.register(dbo.disconnect, self.svar['topic_base']+'.disconnect', options = r)
self.db['registration']['query'] = yield self.register(dbo.query, self.svar['topic_base']+'.query', options = r)
self.db['registration']['operation'] = yield self.register(dbo.operation, self.svar['topic_base']+'.operation', options = r)
self.db['registration']['watch'] = yield self.register(dbo.watch, self.svar['topic_base']+'.watch', options = r)
self.db['registration']['info'] = yield self.register(dbo.info, self.svar['topic_base']+'.info', options = r)
if 'dsn' in self.svar:
log.msg("db:onJoin connecting... {}".format(self.svar['dsn']))
yield self.call(self.svar['topic_base'] + '.connect', self.svar['dsn'])
log.msg("db:onJoin connecting established")
log.msg("db bootstrap procedures registered")
    @inlineCallbacks
    def onLeave(self, details):
        print("onLeave: {}".format(details))
yield self.db['registration']['connect'].unregister()
yield self.db['registration']['disconnect'].unregister()
yield self.db['registration']['query'].unregister()
yield self.db['registration']['operation'].unregister()
yield self.db['registration']['watch'].unregister()
yield self.db['registration']['info'].unregister()
del self.db
self.disconnect()
return
def onDisconnect(self):
print("onDisconnect:")
reactor.stop()
| apache-2.0 | 2,118,608,279,539,695,600 | 42.828767 | 135 | 0.606188 | false | 3.928177 | false | false | false |
daniel1yuan/Persist | Persist/webapp/views.py | 1 | 6162 | from django.shortcuts import redirect,render
from django.http import Http404, JsonResponse, HttpResponseForbidden, HttpResponse
from django.contrib.auth import authenticate, login, logout
from webapp.models import User, Customer, Habit
from django.core import serializers
from django.views.decorators.csrf import csrf_exempt
from django.utils import timezone
from datetime import datetime
from webapp.helper import habits_arr, arr_str
import json
import os
# Create your views here.
def index(request):
context = {
'title': 'Persist'
}
if request.user.is_authenticated():
return redirect("home")
else:
return render(request, 'webapp/landing.html', context)
def home(request):
if request.user.is_authenticated():
return render(request, 'webapp/home.html')
else:
return redirect("login_page")
def login_page(request):
if request.user.is_authenticated():
return redirect("home")
context = {
'title': 'Persist'
}
return render(request, 'webapp/login.html', context)
#Authentication Views
@csrf_exempt
def login_user(request):
username = request.POST['username']
password = request.POST['password']
user = authenticate(username=username, password=password)
if user is not None:
login(request, user)
return HttpResponse(json.dumps({"success": True}))
else:
return HttpResponse(json.dumps({"success": False}))
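# Illustrative request (the URL route is an assumption, not from this file):
#   curl -X POST -d 'username=alice&password=secret' http://<host>/login/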
def logout_user(request):
logout(request)
return HttpResponse(json.dumps({"success": True}))
@csrf_exempt
def add_user(request):
username = request.POST['username']
password = request.POST['password']
user = User.objects.create_user(username=username, password=password)
customer = Customer(user=user, habits="")
customer.save()
user.save()
return HttpResponse(json.dumps({"success": True}))
def del_cur_user(request):
if request.user.is_authenticated():
user = request.user
user.delete()
return HttpResponse(json.dumps({"success": True}))
else:
return HttpResponse(json.dumps({"success": False}))
def del_user(request):
user = request.user
#Check if the admin is logged on
if user.is_authenticated() and user.has_perm('webapp'):
username = request.POST['username']
user = User.objects.get(username=username)
user.delete()
return HttpResponse(json.dumps({"success": True}))
return HttpResponse(json.dumps({"success": False}))
def is_logged_in(request):
if (request.user.is_authenticated()):
return HttpResponse(json.dumps({"success": True, "logged_in": True}))
else:
return HttpResponse(json.dumps({"success": True, "logged_in": False}))
return HttpResponse(json.dumps({"success": False}))
def get_habit(request):
habit_id = int(request.POST['habit_id'])
try:
habit_obj = Habit.objects.get(pk=habit_id)
print habit_obj.monetary_amount
habit_serial = serializers.serialize('json', [habit_obj])
        # serializers.serialize returns a JSON list ("[{...}]");
        # [1:-1] strips the brackets to leave a single JSON object
return HttpResponse(json.dumps(habit_serial[1:-1]), content_type='application/json')
except Habit.DoesNotExist:
return HttpResponse(json.dumps({"pk": -1}))
def create_habit(request):
name = request.POST['name']
description = request.POST['description']
monetary_amount = int(request.POST['monetary_amount'])
end_date = int(int((request.POST['end_date']))/(1000.0))
start_date = int((datetime.utcnow()-datetime(1970,1,1)).total_seconds())
last_clicked = int((datetime.utcnow()-datetime(1970,1,1)).total_seconds())
status = int(request.POST['success_status'])
charity = int(request.POST['charity'])
user = request.user
if (not user.is_authenticated()):
return HttpResponse(json.dumps({"success": False}))
habit = Habit(name=name,description=description,monetary_amount=monetary_amount,end_date=end_date,status=status,charity=charity,user=user,start_date=start_date,last_clicked=last_clicked)
print habit.start_date
habit.save()
user.customer.habits += "," + str(habit.pk)
user.customer.save()
return HttpResponse(json.dumps({"success": True,"pk":habit.pk}))
def delete_habit(request):
try:
user = request.user
customer = user.customer
pk = request.POST['id']
habit = Habit.objects.get(pk=pk)
habits = habits_arr(customer.habits)
index = habits.index(int(pk))
del(habits[index])
customer.habits = arr_str(habits)
customer.save()
habit.delete()
return HttpResponse(json.dumps({"success": True}))
except:
return HttpResponse(json.dumps({"success": False}))
def change_habit(request):
pk = request.POST['id']
habit = Habit.objects.get(pk=pk)
if habit is None:
return HttpResponse(json.dumps({"success": False}))
else:
try:
habit.name = request.POST['name']
except:
habit.name = habit.name
try:
habit.description = request.POST['description']
except:
habit.description = habit.description
try:
habit.monetary_amount = request.POST['monetary_amount']
except:
habit.monetary_amount = habit.monetary_amount
try:
            habit.end_date = int(int(request.POST['end_date']) / 1000.0)
except:
habit.end_date = habit.end_date
try:
            habit.last_clicked = int(int(request.POST['last_clicked']) / 1000.0)
except:
habit.last_clicked = habit.last_clicked
try:
habit.status = request.POST['success_status']
except:
habit.status = habit.status
try:
habit.charity = request.POST['charity']
except:
habit.charity = habit.charity
habit.save()
return HttpResponse(json.dumps({"success": True}))
def get_all_habits(request):
if request.user.is_authenticated():
habits = habits_arr(request.user.customer.habits)
json_dict = {}
for idx in habits:
cur_habit = Habit.objects.get(pk=idx)
cur_serial = serializers.serialize('json',[cur_habit])[1:-1]
json_dict[idx] = cur_serial
return HttpResponse(json.dumps(json_dict))
else:
return HttpResponse(json.dumps({"success": False}))
def get_username(request):
if request.user.is_authenticated():
return HttpResponse(json.dumps({"success": True, "username": request.user.username}))
else:
return HttpResponse(json.dumps({"success": False}))
| mit | 303,794,903,503,346,800 | 31.431579 | 188 | 0.693281 | false | 3.485294 | false | false | false |
alertedsnake/pycrust | pycrust/__init__.py | 1 | 5061 | """
Pycrust
A collection of CherryPy extensions
See also the following submodules:
pycrust.auth
pycrust.saplugin
pycrust.satool
pycrust.tools
"""
__author__ = 'Michael Stella <[email protected]>'
__version__ = '1.0.0'
import inspect, logging, os, sys
import cherrypy
import codecs
class BaseHandler(object):
"""A Base class for web handler objects."""
_cp_config = {}
def log(self, msg, severity=logging.INFO, context=None):
"""Logs to the Cherrypy error log but in a much more pretty way,
with the handler name and line number
"""
if not context:
context = inspect.getouterframes(inspect.currentframe())[1]
cherrypy.log.error(msg=msg.strip().replace('\n', '; '), severity=severity,
context='HANDLER ({}:{}:{})'.format(
self.__class__.__name__, context[3], context[2]))
def log_debug(self, msg):
return self.log(msg, severity=logging.DEBUG,
context=inspect.getouterframes(inspect.currentframe())[1])
def log_info(self, msg):
return self.log(msg, severity=logging.INFO,
context=inspect.getouterframes(inspect.currentframe())[1])
def log_warn(self, msg):
return self.log(msg, severity=logging.WARN,
context=inspect.getouterframes(inspect.currentframe())[1])
def log_error(self, msg):
return self.log(msg, severity=logging.ERROR,
context=inspect.getouterframes(inspect.currentframe())[1])
def log_fatal(self, msg):
return self.log(msg, severity=logging.FATAL,
context=inspect.getouterframes(inspect.currentframe())[1])
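    # Illustrative usage in a handler subclass:
    #   class Root(BaseHandler):
    #       @cherrypy.expose
    #       def index(self):
    #           self.log_info("index hit")
    #           return "ok"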
def url(*args, **kwargs):
"""Find the given URL using routes. Throws an exception
if you're not using routes.
"""
import routes
if 'absolute' in kwargs and kwargs['absolute']:
del(kwargs['absolute'])
return cherrypy.url(routes.url_for(*args, **kwargs))
return routes.url_for(*args, **kwargs)
def dump_request(*args, **kwargs):
"""Dumps the request out to a file in /tmp, for debugging
Enable by setting, in your config file:
tools.debug_request.on = True
"""
with codecs.open('/tmp/request.%s.txt' % cherrypy.request.method, 'w', encoding='utf-8') as f:
f.write(cherrypy.request.request_line)
f.write("\n")
# write headers
for (k,v) in cherrypy.request.headers.items():
f.write('%s: %s\n' % (k,v))
f.write("\n")
# dump out the POST data when submitted
if ('Content-Type' in cherrypy.request.headers and
'application/x-www-form-urlencoded' in cherrypy.request.headers['Content-Type']):
for (k,v) in cherrypy.request.params.items():
f.write('%s: %s\n' % (k,v))
# otherwise, dump the body
elif cherrypy.request.body:
with cherrypy.request.body.make_file() as fin:
f.write(str(fin.read()))
def dump_response(*args, **kwargs):
"""Dumps the response out to a file in /tmp, for debugging.
Enable by setting, in your config file:
tools.debug_response.on = True
"""
# when a 500 error is displayed, cherrypy handles this
# differently, and we don't really need to dump it out
if not cherrypy.response.status:
return
status = 200
if isinstance(cherrypy.response.status, int):
status = cherrypy.response.status
elif isinstance(cherrypy.response.status, str):
status = int(cherrypy.response.status.split(' ', 1)[0])
with codecs.open('/tmp/response.%d.txt' % status, 'w', encoding='utf-8') as f:
f.write("HTTP/1.1 %s\n" % cherrypy.response.status)
for (k,v) in cherrypy.response.headers.items():
f.write('%s: %s\n' % (k,v))
f.write("Status: %d\n\n" % status)
if cherrypy.response.body:
if sys.version < '3':
f.write(str(cherrypy.response.collapse_body().decode()))
else:
f.write(str(cherrypy.response.collapse_body()))
cherrypy.tools.debug_request = cherrypy.Tool('before_handler', dump_request, priority=1)
cherrypy.tools.debug_response = cherrypy.Tool('on_end_resource', dump_response)
def load_class(fullname):
"""Loads a class given the full dotted class name"""
assert fullname is not None, "fullname must not be None"
modulename, classname = fullname.rsplit('.', 1)
try:
module = __import__(modulename, globals(), locals(), [classname])
except ImportError as e:
cherrypy.log("Error loading module {}".format(modulename), context='ENGINE', severity=loging.ERROR)
raise
try:
cls = getattr(module, classname)
except AttributeError as e:
cherrypy.log("Error loading class {} from module {}".format(classname, modulename),
context='ENGINE', severity=logging.ERROR)
return None
return cls
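# Illustrative usage ('myapp.handlers.Root' is a hypothetical dotted name):
#   handler_cls = load_class('myapp.handlers.Root')
#   root = handler_cls()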
| mit | 8,430,111,610,056,274,000 | 31.031646 | 107 | 0.610354 | false | 3.902082 | false | false | false |
ayseyo/oclapi | django-nonrel/ocl/oclapi/settings/common.py | 1 | 10523 | import os
from configurations import Configuration
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
class Common(Configuration):
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Jon Payne', '[email protected]'),
('PK Shiu', '[email protected]'),
)
MANAGERS = ADMINS
DEFAULT_FROM_EMAIL = '[email protected]'
EMAIL_HOST = 'openconceptlab.org'
EMAIL_SUBJECT_PREFIX = '[openconceptlab.org] '
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.3/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/New_York'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
DEFAULT_LOCALE = 'en'
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# In the deployment environment, comment out the above line, and uncomment the one below
#STATIC_ROOT = '/usr/local/wsgi/static/'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '+p+lx2*o3ywq+z)%f7929b6)93)^mcc9-0eu9ynq77gc+pe=ck'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'oclapi.middlewares.RequestLogMiddleware',
)
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
'corsheaders',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
# Core OCL app
'oclapi',
# Third-party apps:
'djangotoolbox',
'django_mongodb_engine',
'rest_framework',
'rest_framework.authtoken',
'haystack',
# Project-specific apps:
'users',
'orgs',
'sources',
'concepts',
'collection',
'mappings',
'integration_tests',
)
# Django Rest Framework configuration
REST_FRAMEWORK = {
# Default to token-based authentication; fall back on session-based
# A user gets a unique token upon account creation (residing in the authtoken_token data store).
# To pass an authentication token along with your request, include the following header:
# Authorization: Token [TOKEN_VALUE]
# e.g.
# Authorization: Token ad73f481096c3b6202bce395820199
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.TokenAuthentication',
'rest_framework.authentication.SessionAuthentication',
),
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
'oclapi.renderers.ZippedJSONRenderer',
),
'DEFAULT_CONTENT_NEGOTIATION_CLASS': 'oclapi.negotiation.OptionallyCompressContentNegotiation',
# Use hyperlinked styles by default.
# Only used if the `serializer_class` attribute is not set on a view.
'DEFAULT_MODEL_SERIALIZER_CLASS':
'rest_framework.serializers.HyperlinkedModelSerializer',
'DEFAULT_PAGINATION_SERIALIZER_CLASS':
'oclapi.serializers.HeaderPaginationSerializer',
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
#'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly',
'rest_framework.permissions.IsAuthenticated',
],
'PAGINATE_BY': 10, # Default to 10
'PAGINATE_BY_PARAM': 'limit', # Allow client to override, using `?limit=xxx`.
'MAX_PAGINATE_BY': 100 # Maximum limit allowed when using `?limit=xxx`.
}
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'oclapi.search_backends.OCLSolrEngine',
'URL': 'http://solr.openconceptlab.org:8983/solr/collection1'
# ...or for multicore...
# 'URL': 'http://127.0.0.1:8983/solr/mysite',
},
}
DATABASES = {
'default': {
'ENGINE': 'django_mongodb_engine',
'HOST': 'mongo.openconceptlab.org',
'NAME': 'ocl',
}
}
BROKER_URL = 'redis://redis.openconceptlab.org:6379/0'
CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_METHODS = (
'GET',
)
# CORS_ORIGIN_WHITELIST = (
# 'google.com',
# 'hostname.example.com',
# )
# Haystack processor determines when/how updates to mongo are indexed by Solr
# RealtimeSignalProcessor will update the index for every mongo update, sometimes at
# the cost of performance. BaseSignalProcessor does not update the index at all, which
# means the index must be updated manually (e.g. using the haystack update_index command).
HAYSTACK_SIGNAL_PROCESSOR = 'haystack.signals.RealtimeSignalProcessor'
HAYSTACK_ITERATOR_LOAD_PER_QUERY = 25
HAYSTACK_SEARCH_RESULTS_PER_PAGE = 25
# Celery settings
CELERY_RESULT_BACKEND = 'redis://redis.openconceptlab.org:6379/0'
# Set these in your postactivate hook if you use virtualenvwrapper
AWS_ACCESS_KEY_ID=os.environ.get('AWS_ACCESS_KEY_ID', '')
AWS_SECRET_ACCESS_KEY=os.environ.get('AWS_SECRET_ACCESS_KEY', '')
AWS_STORAGE_BUCKET_NAME=os.environ.get('AWS_STORAGE_BUCKET_NAME', '')
# Model that stores auxiliary user profile attributes.
# A user must have a profile in order to access the system.
# (A profile is created automatically for any user created using the 'POST /users' endpoint.)
AUTH_PROFILE_MODULE = 'users.UserProfile'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'formatters': {
'normal': {
'format': "[%(asctime)s] %(levelname)-8s: %(message)s",
'datefmt': "%Y/%m/%d %H:%M:%S"
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'null': {
'class': 'django.utils.log.NullHandler',
},
'console': {
'class': 'logging.StreamHandler',
'formatter': 'normal',
},
'logfile': {
'level': 'DEBUG',
'class': 'logging.handlers.TimedRotatingFileHandler',
'when': 'midnight',
'filename': os.path.join(BASE_DIR, 'ocl_api.log'),
'formatter': 'normal',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'oclapi': {
'handlers': ['console', 'logfile'],
'level': 'DEBUG',
},
'request_logger': {
'handlers': ['console', 'logfile'],
'level': 'INFO',
},
}
}
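    # Usage sketch for django-configurations (assumption, not from this file):
    #   DJANGO_CONFIGURATION=Common DJANGO_SETTINGS_MODULE=oclapi.settings.common ./manage.py runserver
    # together with configurations.setup() being called in manage.py/wsgi.py.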
| mpl-2.0 | -5,339,695,786,148,565,000 | 35.922807 | 104 | 0.615699 | false | 3.982967 | false | false | false |
stuart-knock/tvb-framework | tvb_test/adapters/visualizers/eegmonitor_test.py | 1 | 4412 | # -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need do download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Bogdan Neacsa <[email protected]>
"""
import os
import unittest
import demo_data.sensors as sensors_dataset
from tvb.core.entities.file.files_helper import FilesHelper
from tvb.adapters.visualizers.eeg_monitor import EegMonitor
from tvb.datatypes.surfaces import CorticalSurface
from tvb.datatypes.connectivity import Connectivity
from tvb.datatypes.sensors import SensorsEEG
from tvb_test.core.test_factory import TestFactory
from tvb_test.datatypes.datatypes_factory import DatatypesFactory
from tvb_test.core.base_testcase import TransactionalTestCase
class EEGMonitorTest(TransactionalTestCase):
"""
Unit-tests for EEG Viewer.
"""
def setUp(self):
"""
Sets up the environment for running the tests;
creates a test user, a test project, a connectivity and a surface;
imports a CFF data-set
"""
self.datatypeFactory = DatatypesFactory()
self.test_project = self.datatypeFactory.get_project()
self.test_user = self.datatypeFactory.get_user()
TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
self.assertTrue(self.connectivity is not None)
self.surface = TestFactory.get_entity(self.test_project, CorticalSurface())
self.assertTrue(self.surface is not None)
def tearDown(self):
"""
Clean-up tests data
"""
FilesHelper().remove_project_structure(self.test_project.name)
def test_launch(self):
"""
Check that all required keys are present in output from BrainViewer launch.
"""
zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__),
'EEG_unit_vectors_BrainProducts_62.txt.bz2')
TestFactory.import_sensors(self.test_user, self.test_project, zip_path, 'EEG Sensors')
sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
time_series = self.datatypeFactory.create_timeseries(self.connectivity, 'EEG', sensors)
viewer = EegMonitor()
result = viewer.launch(time_series)
expected_keys = ['tsStateVars', 'tsModes', 'translationStep', 'total_length', 'title',
'timeSetPaths', 'number_of_visible_points', 'normalizedSteps', 'noOfChannels',
'labelsForCheckBoxes', 'label_x', 'graphLabels', 'entities', 'channelsPage']
for key in expected_keys:
self.assertTrue(key in result)
def suite():
"""
Gather all the tests in a test suite.
"""
test_suite = unittest.TestSuite()
test_suite.addTest(unittest.makeSuite(EEGMonitorTest))
return test_suite
if __name__ == "__main__":
#So you can run tests from this package individually.
TEST_RUNNER = unittest.TextTestRunner()
TEST_SUITE = suite()
TEST_RUNNER.run(TEST_SUITE) | gpl-2.0 | -6,400,699,188,668,995,000 | 41.028571 | 103 | 0.694923 | false | 3.707563 | true | false | false |
lilydjwg/you-get | src/you_get/extractors/netease.py | 1 | 9738 | #!/usr/bin/env python
from json import loads
import hashlib
import base64
import os
import binascii
try:
from Crypto.Cipher import AES
import xml.etree.ElementTree as ET
has_crypto = True
except ImportError:
has_crypto = False
from ..common import *
from ..extractor import VideoExtractor
from ..util import log
def netease_hymn():
return """
player's Game Over,
u can abandon.
u get pissed,
get pissed,
Hallelujah my King!
errr oh! fuck ohhh!!!!
"""
def encrypted_id(dfsId):
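    # Derive key bytes from the hymn text, XOR them cyclically over the
    # dfsId digits, then MD5 and URL-safe base64 the digest (NetEase's
    # URL obfuscation scheme).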
x = [ord(i[0]) for i in netease_hymn().split()]
y = ''.join([chr(i - 61) if i > 96 else chr(i + 32) for i in x])
byte1 = bytearray(y, encoding='ascii')
byte2 = bytearray(str(dfsId), encoding='ascii')
for i in range(len(byte2)):
byte2[i] ^= byte1[i % len(byte1)]
m = hashlib.md5()
m.update(byte2)
result = base64.b64encode(m.digest()).decode('ascii')
result = result.replace('/', '_')
result = result.replace('+', '-')
return result
def make_url(songNet, dfsId):
encId = encrypted_id(dfsId)
mp3_url = "http://%s/%s/%s.mp3" % (songNet, encId, dfsId)
return mp3_url
# for http://open.163.com/movie/2014/12/I/9/MAD7EMDVE_MAD7K95I9.html
keys = ["4fxGZqoGmesXqg2o", "3fxVNqoPmesAqg2o"]
def decrypto_video_url(data, whichkey):
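    # AES-ECB decrypt of a hex-encoded ciphertext; the last byte holds the
    # padding length to strip (PKCS#7-style).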
key = keys[whichkey - 1]
cipher = AES.new(key, mode=AES.MODE_ECB)
ciphertext = binascii.a2b_hex(data)
cleartext = cipher.decrypt(ciphertext)
padding = cleartext[-1]
cleartext = cleartext[:-padding]
return cleartext.decode('ascii')
class NetEase(VideoExtractor):
# test URLs:
# http://live.ws.126.net/movie/I/9/2_MAD7EMDVE_MAD7K95I9.xml
# http://live.ws.126.net/movie/V/H/2_MB3M6LDG1_MB3OBKTVH.xml
name = '网易'
if has_crypto:
stream_types = [
{'id': 'SHD', 'video_profile': '超清'},
{'id': 'HD', 'video_profile': '高清'},
{'id': 'SD', 'video_profile': '标清'},
]
else:
stream_types = [
{'id': 'default'},
]
def prepare(self, **kwargs):
# compatibility for _cloud_music_prepare
self.output_dir = kwargs.get('output_dir')
self.info_only = kwargs.get('info_only')
self.subs = []
self.lyrics = None
url = self.url
if "163.fm" in url:
url = get_location(url)
if "music.163.com" in url:
            self._cloud_music_prepare(url, **kwargs)
elif has_crypto:
self._crypto_prepare(url)
else:
log.w('PyCrypto not found, '
'high resolution videos may be unavailable.')
self._legacy_prepare(url)
def _crypto_prepare(self, url):
if url.startswith('http://swf.ws.126.net/openplayer/'):
video_id = url.split('-')[2]
assert video_id.startswith('2_')
video_id = video_id[2:]
else:
# http://open.163.com/movie/2015/10/V/H/MB3M6LDG1_MB3OBKTVH.html
video_id = url.split('/')[-1].split('.')[0]
xml = self._get_xml_for_video_id(video_id)
encrypt_key = int(xml.find('encrypt').text)
playurl = xml.find('playurl_origin')
if len(playurl) == 0:
playurl = xml.find('playurl')
streams = {}
for stream in self.stream_types:
e = playurl.find('./%s/mp4' % stream['id'])
if e is not None:
url = decrypto_video_url(e.text, encrypt_key)
streams[stream['id']] = {
'url': url,
'video_profile': stream['video_profile'],
'size': url_size(url),
}
self.streams = streams
for sub in xml.findall('subs/*'):
name = sub.find('name').text
url = sub.find('url').text
self.subs.append((name, url))
def _legacy_prepare(self, url):
if url.startswith('http://swf.ws.126.net/openplayer/'):
video_id = url.split('-')[2]
assert video_id.startswith('2_')
video_id = video_id[2:]
xml = self._get_xml_for_video_id(video_id)
url = xml.find('pageUrl').text
html = get_decoded_html(url)
title = match1(html, "movieDescription='([^']+)'") or \
match1(html, '<title>(.+)</title>')
self.title = title.strip()
src = match1(html, r'<source src="([^"]+)"') or \
match1(html, r'<source type="[^"]+" src="([^"]+)"')
if src:
url = src
else:
url = (match1(html, r'["\'](.+)-list.m3u8["\']') or
match1(html, r'["\'](.+).m3u8["\']')) + ".mp4"
self.streams['default'] = {
'url': url,
}
    def _cloud_music_prepare(self, url, **kwargs):
rid = match1(url, r'id=(.*)')
output_dir = self.output_dir
info_only = self.info_only
if rid is None:
rid = match1(url, r'/(\d+)/?$')
if "album" in url:
# FIXME: only last is processed
j = loads(get_content("http://music.163.com/api/album/%s?id=%s&csrf_token=" % (rid, rid), headers={"Referer": "http://music.163.com/"}))
artist_name = j['album']['artists'][0]['name']
album_name = j['album']['name']
new_dir = output_dir + '/' + "%s - %s" % (artist_name, album_name)
if not os.path.exists(new_dir):
os.mkdir(new_dir)
if not info_only:
cover_url = j['album']['picUrl']
download_urls([cover_url], "cover", "jpg", 0, new_dir)
for i in j['album']['songs']:
self._song_prepare(i)
try: # download lyrics
assert kwargs['caption']
l = loads(get_content("http://music.163.com/api/song/lyric/?id=%s&lv=-1&csrf_token=" % i['id'], headers={"Referer": "http://music.163.com/"}))
self._lyrics_prepare(i, l["lrc"]["lyric"])
except: pass
elif "playlist" in url:
# FIXME: only last is processed
j = loads(get_content("http://music.163.com/api/playlist/detail?id=%s&csrf_token=" % rid, headers={"Referer": "http://music.163.com/"}))
new_dir = output_dir + '/' + j['result']['name']
if not os.path.exists(new_dir):
os.mkdir(new_dir)
if not info_only:
cover_url = j['result']['coverImgUrl']
download_urls([cover_url], "cover", "jpg", 0, new_dir)
for i in j['result']['tracks']:
self._song_prepare(i)
try: # download lyrics
assert kwargs['caption']
l = loads(get_content("http://music.163.com/api/song/lyric/?id=%s&lv=-1&csrf_token=" % i['id'], headers={"Referer": "http://music.163.com/"}))
self._lyrics_prepare(i, l["lrc"]["lyric"])
except: pass
elif "song" in url:
j = loads(get_content("http://music.163.com/api/song/detail/?id=%s&ids=[%s]&csrf_token=" % (rid, rid), headers={"Referer": "http://music.163.com/"}))
self._song_prepare(j["songs"][0])
try: # download lyrics
l = loads(get_content("http://music.163.com/api/song/lyric/?id=%s&lv=-1&csrf_token=" % rid, headers={"Referer": "http://music.163.com/"}))
self._lyrics_prepare(j["songs"][0], l["lrc"]["lyric"])
except:
pass
elif "mv" in url:
j = loads(get_content("http://music.163.com/api/mv/detail/?id=%s&ids=[%s]&csrf_token=" % (rid, rid), headers={"Referer": "http://music.163.com/"}))
self._video_prepare(j['data'])
def _song_prepare(self, song):
# test URL: http://music.163.com/#/song?id=29043459
self.title = "%s. %s" % (song['position'], song['name'])
songNet = 'p' + song['mp3Url'].split('/')[2][1:]
s = self.streams
if 'hMusic' in song and song['hMusic'] is not None:
s['hMusic'] = {'url': make_url(songNet, song['hMusic']['dfsId'])}
if 'mp3Url' in song:
s['mp3Url'] = {'url': song['mp3Url']}
if 'bMusic' in song:
s['bMusic'] = {'url': make_url(songNet, song['bMusic']['dfsId'])}
self.stream_types = [
{'id': x} for x in ['hMusic', 'mp3Url', 'bMusic']
]
def _video_prepare(self, vinfo):
# test URL: http://music.163.com/#/mv/343100/
self.title = "%s - %s" % (vinfo['name'], vinfo['artistName'])
s = self.streams
for bitrate, url in vinfo['brs'].items():
s[bitrate] = {'url': url}
self.stream_types = [
{'id': x} for x in sorted(s, key=int, reverse=True)
]
def _lyrics_prepare(self, song, lyrics):
# test URL: http://music.163.com/#/song?id=29043459
title = "%s. %s" % (song['position'], song['name'])
filename = '%s.lrc' % get_filename(title)
self.plain_files.append({
'filename': filename,
'content': lyrics,
})
def _get_xml_for_video_id(self, vid):
xml_url = 'http://live.ws.126.net/movie/%s/%s/2_%s.xml' % (
vid[-2], vid[-1], vid)
xml = get_content(xml_url)
e = ET.fromstring(xml)
self.title = e.find('title').text
return e
def extract(self, **kwargs):
for i in self.streams:
s = self.streams[i]
_, s['container'], s['size'] = url_info(s['url'])
s['src'] = [s['url']]
for name, url in self.subs:
self.caption_tracks[name] = get_content(url)
site = NetEase()
download = site.download_by_url
| mit | -3,439,591,755,401,592,300 | 35.548872 | 162 | 0.518103 | false | 3.292245 | false | false | false |
jlaunonen/kirppu | kirppu/views/csv_utils.py | 1 | 1469 | # -*- coding: utf-8 -*-
import functools
import html
import io
from urllib.parse import quote
from django.conf import settings
from django.http import HttpResponse, StreamingHttpResponse
def strip_generator(fn):
@functools.wraps(fn)
def inner(output, event, generator=False):
if generator:
# Return the generator object only when using StringIO.
return fn(output, event)
for _ in fn(output, event):
pass
return inner
def csv_streamer_view(request, generator, filename_base):
debug = settings.DEBUG and request.GET.get("debug") is not None
def streamer():
if debug:
yield "<!DOCTYPE html>\n<html>\n<body>\n<pre>"
output = io.StringIO()
for a_string in generator(output):
val = output.getvalue()
if debug:
yield html.escape(val, quote=False)
else:
yield val
output.truncate(0)
output.seek(0)
if debug:
yield "</pre>\n</body>\n</html>"
if debug:
response = HttpResponse("".join(streamer()))
else:
response = StreamingHttpResponse(streamer(), content_type="text/plain; charset=utf-8")
if request.GET.get("download") is not None:
response["Content-Disposition"] = 'attachment; filename="%s.csv"' % quote(filename_base, safe="")
response["Content-Type"] = "text/csv; charset=utf-8"
return response
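# Usage sketch (illustrative; `write_items` is a hypothetical generator that
# writes CSV rows to `out` and yields after each flush):
#   def items_csv(request, event):
#       return csv_streamer_view(
#           request, lambda out: write_items(out, event), "items")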
| mit | 4,648,484,677,998,597,000 | 28.979592 | 105 | 0.605174 | false | 4.161473 | false | false | false |
wbrp/dnsimple-zoneimport | setup.py | 1 | 1349 | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from dnsimple_zoneimport import meta
f = open('requirements.txt', 'r')
lines = f.readlines()
requirements = [l.strip().strip('\n') for l in lines if l.strip() and not l.strip().startswith('#')]
readme = open('README.rst').read()
setup(name='dnsimple-zoneimport',
version=meta.version,
description=meta.description,
author=meta.author,
author_email=meta.author_email,
url='https://github.com/wbrp/dnsimple-zoneimport',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
license=meta.license,
keywords='dnsimple dns "zone files" bind import api',
long_description=readme,
install_requires=requirements,
entry_points={
'console_scripts': [
'%s = dnsimple_zoneimport.importer:main' % meta.title.replace('-', '_'),
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: Name Service (DNS)',
'Topic :: Terminals',
],
)
| mit | -6,276,857,376,273,034,000 | 32.725 | 100 | 0.594514 | false | 4.026866 | false | false | false |
thorwhalen/ut | ml/stream/sequences.py | 1 | 6137 |
from sklearn.base import BaseEstimator
from collections import Counter
import pandas as pd
from numpy import sum, nan, isnan
from ut.util.uiter import window
class NextElementPredictor(BaseEstimator):
def predict(self, seqs):
preds = self.predict_proba(seqs)
return [max(pred, key=lambda key: pred[key]) for pred in preds]
def predict_proba(self, seqs):
return list(map(self._predict_proba_conditioned_on_recent_subseq, seqs))
def _predict_proba_conditioned_on_recent_subseq(self, recent_subseq):
raise NotImplementedError("Need to implement this method")
class MarkovNextElementPred(NextElementPredictor):
_list_of_attributes_to_display = ['markov_window', 'empty_element', 'keep_stats_in_memory']
def __init__(self, markov_window=2, empty_element=-1, keep_stats_in_memory=True):
self.markov_window = markov_window
self.keep_stats_in_memory = keep_stats_in_memory
self.empty_element = empty_element
self._empty_element_padding = [empty_element] * (self.markov_window - 1)
@property
def total_tuple_count(self):
"""
:return: Number of observed window tuples (sum of values in self.snip_tuples_counter_)
"""
if self.total_tuple_count_ is not None:
return self.total_tuple_count_
else:
total_tuple_count_ = sum(self.snip_tuples_counter_.values())
if self.keep_stats_in_memory:
self.total_tuple_count_ = total_tuple_count_
return total_tuple_count_
@property
def pair_prob(self):
"""
:return: Series of probabilities (unsmoothed count ratios) indexed by snip pairs
"""
if self.pair_prob_ is not None:
return self.pair_prob_
else:
pair_prob_ = pd.Series(self.snip_tuples_counter_) / self.total_tuple_count
if self.keep_stats_in_memory:
                self.pair_prob_ = pair_prob_
return pair_prob_
@property
def element_prob(self):
"""
:return: Series of snips probabilities (unsmoothed count ratios)
"""
if self.element_prob_ is not None:
return self.element_prob_
else:
element_prob_ = (self.pair_prob * self.total_tuple_count)
element_prob_ = element_prob_.groupby(level=0).sum()
element_prob_ = element_prob_.drop(labels=self.empty_element)
# element_prob_ = element_prob_.iloc[
# element_prob_.index.get_level_values(level=0) != self.empty_element]
element_prob_ /= element_prob_.sum()
if self.keep_stats_in_memory:
self.element_prob_ = element_prob_
return element_prob_
@property
def conditional_prob(self):
"""
:return: Series of probabilities of last element (level) conditional on previous ones (including empty elements)
"""
if self.conditional_prob_ is not None:
return self.conditional_prob_
else:
conditional_prob_ = self._drop_empty_elements_of_sr(self.pair_prob, levels=[self.markov_window - 1])
conditional_levels = list(range(self.markov_window - 1))
conditional_prob_ = conditional_prob_.div(
conditional_prob_.groupby(level=conditional_levels).sum(), level=0) # TODO: Only works for two levels
if self.keep_stats_in_memory:
self.conditional_prob_ = conditional_prob_
return conditional_prob_
@property
def initial_element_prob(self):
"""
:return: Series of snips probabilities (unsmoothed count ratios)
"""
if self.initial_element_prob_ is not None:
return self.initial_element_prob_
else:
initial_element_prob_ = self.pair_prob.xs(self.empty_element, level=0, drop_level=True)
initial_element_prob_ /= initial_element_prob_.sum()
if self.keep_stats_in_memory:
self.initial_element_prob_ = initial_element_prob_
return initial_element_prob_
def fit(self, snips_list):
# reset anything previously learned
self._initialize_params()
return self.partial_fit(snips_list)
def partial_fit(self, snips_list):
        if 'snip_tuples_counter_' not in self.__dict__:
self._initialize_params()
for snips in snips_list:
self._partial_fit_of_a_single_snips(snips)
return self
def _initialize_params(self):
"""
Initializes model params (the snip_tuples_counter_, etc.)
:return: None
"""
self.snip_tuples_counter_ = Counter()
self._reset_properties()
def _reset_properties(self):
"""
Resets some properties that depend on snip_tuples_counter_ to be computed (is used when the later changes)
These will be recomputed when requested.
:return: None
"""
self.total_tuple_count_ = None
self.pair_prob_ = None
self.element_prob_ = None
self.initial_element_prob_ = None
self.conditional_prob_ = None
def _partial_fit_of_a_single_snips(self, snips):
self._reset_properties()
self.snip_tuples_counter_.update(window(self._empty_element_padding + list(snips) + self._empty_element_padding,
n=self.markov_window))
def _drop_empty_elements_of_sr(self, sr, levels=None, renormalize=False):
if levels is None:
levels = list(range(self.markov_window))
for level in levels:
sr = sr.drop(labels=self.empty_element, level=level)
if renormalize:
sr /= sr.sum()
return sr
    def _predict_proba_conditioned_on_recent_subseq(self, recent_subseq):
        # Minimal sketch (assumes markov_window == 2; the original left this unimplemented).
        last = recent_subseq[-1] if recent_subseq else self.empty_element
        return self.conditional_prob.xs(last).to_dict()
def __repr__(self):
d = {attr: getattr(self, attr) for attr in self._list_of_attributes_to_display if attr in self.__dict__}
d['total_tuple_count'] = self.total_tuple_count
return self.__class__.__name__ + '\n' + str(d)
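
# Usage sketch (illustrative sequences; not part of the original module):
#
#   model = MarkovNextElementPred(markov_window=2)
#   model.fit([['a', 'b', 'a', 'c'], ['a', 'b', 'b']])
#   model.element_prob       # marginal snip probabilities (padding removed)
#   model.conditional_prob   # P(last element | preceding window)
#   model.partial_fit([['c', 'a']])   # counts update incrementally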
| mit | 5,309,313,774,185,950,000 | 37.118012 | 120 | 0.608604 | false | 3.838024 | false | false | false |
kivhift/qmk | src/commands/help.py | 1 | 1442 | #
# Copyright (c) 2009-2012 Joshua Hughes <[email protected]>
#
import atexit
import os
import tempfile
import urllib
import webbrowser
import qmk
class HelpCommand(qmk.Command):
'''
View help for all available commands. A new tab will be opened in the
default web browser that contains the help for all of the commands that are
registered.
'''
def __init__(self):
self._name = 'help'
self._help = self.__doc__
h, self.__filename = tempfile.mkstemp(suffix = '.html',
prefix = 'qmkhelp')
os.close(h)
atexit.register(os.remove, self.__filename)
def action(self, arg):
# For now, ignore help requests for specific commands.
# if arg is not None: pass
        f = open(self.__filename, 'wb')
f.write('<html><head><title>QMK Help</title></head><body>')
f.write('<h1>QMK Command Help</h1>')
cm = qmk.CommandManager()
f.write('<table border="1"><tr><th>Name</th><th>Help</th></tr>')
for name in cm.commandNames():
cmd = cm.command(name)
ht = cmd.help
f.write('<tr><td><pre>%s</pre></td><td><pre>%s</pre></td></tr>' % (
name, ht.encode('ascii', 'xmlcharrefreplace')))
f.write('</table></body></html>\n')
f.close()
webbrowser.open_new_tab('file:%s' % urllib.pathname2url(
f.name))
def commands(): return [ HelpCommand() ]
| mit | -3,162,210,888,576,345,000 | 31.772727 | 79 | 0.576976 | false | 3.560494 | false | false | false |
demisto/content | Packs/WindowsForensics/Scripts/RegistryParse/RegistryParse_test.py | 1 | 1112 | import json
import RegistryParse as reg_parse
def util_load_json(path):
with open(path, mode='r', encoding='utf-8') as f:
return json.loads(f.read())
def test_get_sub_keys():
key = 'HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\ProfileList'
folder_output_key = 'Sid'
mock_reg = util_load_json('./test_data/mock_reg_users.json')
expected = util_load_json('./test_data/mock_reg_users_result.json')
actual = reg_parse.get_sub_keys(mock_reg, key, folder_output_key)
for actual_items in actual:
for actual_item in actual_items:
assert actual_item in expected[0] or actual_item in expected[1]
def test_parse_reg_values():
expected = 'C:\\Windows\\ServiceProfiles\\LocalService'
hex_value = 'hex(2):43,00,3a,00,5c,00,57,00,69,00,6e,00,64,00,6f,00,77,\
00,73,00,5c,00,53,00,65,00,72,00,76,00,69,00,63,00,65,00,50,00,72,00,6f,00,\
66,00,69,00,6c,00,65,00,73,00,5c,00,4c,00,6f,00,63,00,61,00,6c,00,53,00,65,\
00,72,00,76,00,69,00,63,00,65,00,00,00'
actual = reg_parse.parse_reg_value(hex_value)
assert actual == expected
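
# Note on the fixture above: "hex(2):" marks a REG_EXPAND_SZ registry export;
# each comma-separated pair is one byte of a UTF-16LE string ("43,00" is "C"),
# and the trailing "00,00" is the NUL terminator.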
| mit | 7,210,574,577,760,528,000 | 38.714286 | 92 | 0.670863 | false | 2.562212 | false | false | false |
KDE/twine2 | kdelibs.py | 1 | 37607 | # -*- coding: utf-8 -*-
# Copyright 2009-2010 Simon Edwards <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import re
import toolkit
from kbindinggenerator import qtkdemacros
import os.path
from kbindinggenerator import sipsymboldata
outputBaseDirectory = "/home/sbe/devel/git/kde/kdebindings/pykde4"
cmakelistBaseDirectory = "/home/sbe/devel/git/kde/kdelibs"
cmakelistPimlibsBaseDirectory = "/home/sbe/devel/git/kde/kdepimlibs"
cmakelistPhononBaseDirectory = "/home/sbe/devel/git/phonon"
kdelibsBuildDirectory = "/home/sbe/devel/git_build/kde/kdelibs"
kdepimlibsBuildDirectory = "/home/sbe/devel/git_build/kde/kdepimlibs"
cmakelistGitBaseDirectory = "/home/sbe/devel/git"
polkitqtBaseDirectory = "/home/sbe/devel/git/polkit-qt"
sipImportDir = "/home/sbe/devel/kdesvninstall/share/sip/PyQt4"
###########################################################################
kdecore = toolkit.ModuleGenerator(
module="PyKDE4.kdecore",
outputDirectory=os.path.join(outputBaseDirectory, "sip/kdecore"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/kdecore"),
mainDocs=os.path.join(cmakelistBaseDirectory,"kdecore/Mainpage.dox"),
# .h file extraction
cmakelists=os.path.join(cmakelistBaseDirectory,"kdecore/CMakeLists.txt"),
ignoreHeaders="""conversion_check.h kallocator.h kdebug.h kcodecs.h kgenericfactory.h ksortablelist.h ktrader.h ktypelist.h kmulticastsocket.h kmulticastsocketdevice.h kdecore_export.h kde_file.h ksocks.h kde_file.h ksharedptr.h klauncher_iface.h k3bufferedsocket.h k3clientsocketbase.h k3datagramsocket.h k3httpproxysocketdevice.h k3iobuffer.h k3processcontroller.h k3process.h k3procio.h k3resolver.h k3reverseresolver.h k3serversocket.h k3socketaddress.h k3socketbase.h k3socketdevice.h k3socks.h k3sockssocketdevice.h k3streamsocket.h qtest_kde.h kdefakes.h kdeversion.h kauth.h ktypelistutils.h ktypetraits.h karchive.h kar.h ktar.h kzip.h kshareddatacache.h kmountpoint.h kdirwatch.h karchive_export.h""".split(" "),
noUpdateSip=["typedefs.sip"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["KDECORE_EXPORT","KDE_EXPORT","KIO_EXPORT","KDE_DEPRECATED", "KDECORE_EXPORT_DEPRECATED", "KARCHIVE_EXPORT"]),
# Sip generation
sipImportDirs=[sipImportDir],
sipImports=["QtCore/QtCoremod.sip","QtGui/QtGuimod.sip","QtNetwork/QtNetworkmod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["KDECORE_EXPORT","KDE_EXPORT","KIO_EXPORT","KDECORE_EXPORT_DEPRECATED","KARCHIVE_EXPORT"],
ignoreBases=[],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="*",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="pParent",
annotations="TransferThis")
]
)
###########################################################################
kdeui = toolkit.ModuleGenerator(
module="PyKDE4.kdeui",
outputDirectory=os.path.join(outputBaseDirectory,"sip/kdeui"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/kdeui"),
mainDocs=os.path.join(cmakelistBaseDirectory,"kdeui/Mainpage.dox"),
# .h file extraction
cmakelists=[
os.path.join(cmakelistBaseDirectory,"kdeui/CMakeLists.txt")
#os.path.join(cmakelistBaseDirectory,"kdeui/dialogs/CMakeLists.txt"),
#os.path.join(cmakelistBaseDirectory,"kdeui/util/CMakeLists.txt"),
#os.path.join(cmakelistBaseDirectory,"kdeui/widgets/CMakeLists.txt")
],
ignoreHeaders="""kxerrorhandler.h k3iconview.h k3iconviewsearchline.h k3listview.h k3listviewlineedit.h k3listviewsearchline.h netwm_p.h k3mimesourcefactory.h kdeui_export.h fixx11h.h kglobalshortcutinfo_p.h kkeyserver_mac.h kkeyserver_win.h kimagecache.h""".split(" "),
#noUpdateSip=["typedefs.sip"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
preprocessorValues={"Q_WS_X11": 1},
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["KDEUI_EXPORT","KDE_EXPORT","KDE_DEPRECATED","KDEUI_EXPORT_DEPRECATED"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=["QtCore/QtCoremod.sip","QtGui/QtGuimod.sip","QtXml/QtXmlmod.sip","QtSvg/QtSvgmod.sip","kdecore/kdecoremod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["KDEUI_EXPORT","KDE_EXPORT","KDEUI_EXPORT_DEPRECATED"],
ignoreBases=["Q3GridView"],
noCTSCC=["KWindowSystem","NETRootInfo","NETWinInfo"],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="Transfer"),
toolkit.PySlotRule(className="KDialogButtonBox",arg1Name="receiver",arg2Name="slot"),
toolkit.PySlotRule(namespaceName="KStandardAction",arg1Name="recvr",arg2Name="slot")
]
)
###########################################################################
kio = toolkit.ModuleGenerator(
module="PyKDE4.kio",
outputDirectory=os.path.join(outputBaseDirectory,"sip/kio"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/kio"),
mainDocs=os.path.join(cmakelistBaseDirectory,"kio/Mainpage.dox"),
# .h file extraction
cmakelists=[
os.path.join(cmakelistBaseDirectory,"kio/CMakeLists.txt"),
os.path.join(cmakelistBaseDirectory,"kfile/CMakeLists.txt")
],
headers=[os.path.join(cmakelistBaseDirectory,"kdecore/io/karchive.h"),
os.path.join(cmakelistBaseDirectory,"kdecore/io/kar.h"),
os.path.join(cmakelistBaseDirectory,"kdecore/io/ktar.h"),
os.path.join(cmakelistBaseDirectory,"kdecore/io/kzip.h")],
ignoreHeaders="""http_slave_defaults.h ioslave_defaults.h kmimetyperesolver.h k3mimetyperesolver.h kfiledetailview.h kfileiconview.h kfiletreeview.h kfiletreeviewitem.h ksslpemcallback.h kpropsdialog.h kio_export.h kdirnotify.h k3filedetailview.h k3fileiconview.h k3filetreeview.h k3filetreeviewitem.h k3mimetyperesolver.h kfiletreebranch.h kfile_export.h kurlbar.h kdebug.h kdebugdbusiface_p.h kdirwatch_p.h klimitediodevice_p.h kprocess_p.h""".split(" "),
#noUpdateSip=["typedefs.sip"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
preprocessorValues={"Q_WS_X11": 1,"Q_OS_UNIX": 1},
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["KDECORE_EXPORT","KDECORE_EXPORT_DEPRECATED","KIO_EXPORT",
"KFILE_EXPORT","KIO_EXPORT_DEPRECATED","KDE_NO_EXPORT","KDE_EXPORT","KDE_DEPRECATED",
"KDEUI_EXPORT_DEPRECATED","KIO_CONNECTION_EXPORT"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=["QtCore/QtCoremod.sip","QtGui/QtGuimod.sip","QtXml/QtXmlmod.sip","kdecore/kdecoremod.sip","kdeui/kdeuimod.sip","solid/solidmod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["KDECORE_EXPORT","KDECORE_EXPORT_DEPRECATED","KIO_EXPORT","KFILE_EXPORT","KDE_EXPORT","KDEUI_EXPORT_DEPRECATED",
"KIO_CONNECTION_EXPORT","KIO_EXPORT_DEPRECATED"],
#ignoreBases=["Q3GridView"],
noCTSCC=["KonqBookmarkContextMenu","KImportedBookmarkMenu","KBookmark","KBookmarkGroup"],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="Transfer")
]
)
###########################################################################
kutils = toolkit.ModuleGenerator(
module="PyKDE4.kutils",
outputDirectory=os.path.join(outputBaseDirectory,"sip/kutils"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/kutils"),
mainDocs=os.path.join(cmakelistBaseDirectory,"kutils/Mainpage.dox"),
# .h file extraction
cmakelists=[os.path.join(cmakelistBaseDirectory,"kutils/CMakeLists.txt")],
ignoreHeaders="""kcmodulecontainer.h kutils_export.h kcmutils_export.h kemoticons_export.h kidletime_export.h kprintutils_export.h""".split(" "),
#noUpdateSip=["typedefs.sip"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
preprocessorValues={"Q_WS_X11": 1},
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["KUTILS_EXPORT","KDE_EXPORT","KDE_DEPRECATED","KCMUTILS_EXPORT","KEMOTICONS_EXPORT","KIDLETIME_EXPORT","KPRINTUTILS_EXPORT"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=["QtCore/QtCoremod.sip","QtGui/QtGuimod.sip","QtXml/QtXmlmod.sip","kdecore/kdecoremod.sip","kdeui/kdeuimod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["KUTILS_EXPORT","KDE_EXPORT","KCMUTILS_EXPORT","KEMOTICONS_EXPORT","KIDLETIME_EXPORT","KPRINTUTILS_EXPORT"],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="Transfer")
]
)
###########################################################################
solid = toolkit.ModuleGenerator(
module="PyKDE4.solid",
outputDirectory=os.path.join(outputBaseDirectory,"sip/solid"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/solid"),
mainDocs=os.path.join(cmakelistBaseDirectory,"solid/Mainpage.dox"),
# .h file extraction
cmakelists=[os.path.join(cmakelistBaseDirectory,"solid/solid/CMakeLists.txt")],
ignoreHeaders="""solid_export.h""".split(" "),
#noUpdateSip=["typedefs.sip"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
preprocessorValues={"Q_WS_X11": 1},
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["SOLID_EXPORT","KDE_EXPORT","KDE_DEPRECATED"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=["QtCore/QtCoremod.sip","QtGui/QtGuimod.sip","kdecore/kdecoremod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["SOLID_EXPORT","KDE_EXPORT"],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="Transfer")
]
)
###########################################################################
kparts = toolkit.ModuleGenerator(
module="PyKDE4.kparts",
outputDirectory=os.path.join(outputBaseDirectory,"sip/kparts"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/kparts"),
mainDocs=os.path.join(cmakelistBaseDirectory,"kparts/Mainpage.dox"),
# .h file extraction
cmakelists=[os.path.join(cmakelistBaseDirectory,"kparts/CMakeLists.txt")],
ignoreHeaders="""componentfactory.h genericfactory.h kparts_export.h""".split(" "),
#noUpdateSip=["typedefs.sip"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
preprocessorValues={"Q_WS_X11": 1},
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["KPARTS_EXPORT","KDE_EXPORT","KDE_DEPRECATED"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=["QtCore/QtCoremod.sip","QtGui/QtGuimod.sip","QtXml/QtXmlmod.sip","kdecore/kdecoremod.sip","kdeui/kdeuimod.sip","kio/kiomod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["KPARTS_EXPORT","KDE_EXPORT"],
noCTSCC=["GenericFactoryBase"],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="Transfer")
]
)
###########################################################################
plasma = toolkit.ModuleGenerator(
module="PyKDE4.plasma",
outputDirectory=os.path.join(outputBaseDirectory,"sip/plasma"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/plasma"),
mainDocs=os.path.join(cmakelistBaseDirectory,"plasma/Mainpage.dox"),
# .h file extraction
cmakelists=[os.path.join(cmakelistBaseDirectory,"plasma/CMakeLists.txt")],
ignoreHeaders="""plasma_export.h credentials.h """.split(" "),
#noUpdateSip=["typedefs.sip"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
preprocessorValues={"Q_WS_X11": 1, "QT_VERSION": 0x040600},
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["PLASMA_EXPORT","PLASMA_EXPORT_DEPRECATED","KDE_EXPORT",
"KDE_DEPRECATED","Q_INVOKABLE"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=[
"QtCore/QtCoremod.sip",
"QtGui/QtGuimod.sip",
"QtNetwork/QtNetworkmod.sip",
"QtSvg/QtSvgmod.sip",
"QtWebKit/QtWebKitmod.sip",
"QtXml/QtXmlmod.sip",
"QtDeclarative/QtDeclarativemod.sip",
"QtScript/QtScriptmod.sip",
"kdecore/kdecoremod.sip",
"kdeui/kdeuimod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["PLASMA_EXPORT","PLASMA_EXPORT_DEPRECATED","KDE_EXPORT"],
#noCTSCC=["GenericFactoryBase"],
ignoreBases=["QSharedData","KShared","QList<KUrl>"],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*","QGraphicsWidget*"],
parameterNameMatch=["parent","pParent"],
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*","QGraphicsWidget*"],
parameterNameMatch="parent",
annotations="Transfer")
]
)
###########################################################################
khtml = toolkit.ModuleGenerator(
module="PyKDE4.khtml",
outputDirectory=os.path.join(outputBaseDirectory,"sip/khtml"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/khtml"),
mainDocs=os.path.join(cmakelistBaseDirectory,"khtml/Mainpage.dox"),
# .h file extraction
cmakelists=[os.path.join(cmakelistBaseDirectory,"khtml/CMakeLists.txt"),
#os.path.join(cmakelistBaseDirectory,"khtml/dom/CMakeLists.txt")
],
ignoreHeaders="""khtmldefaults.h dom_core.h dom_html.h khtml_events.h khtml_export.h""".split(" "),
#noUpdateSip=["typedefs.sip"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
preprocessorValues={"Q_WS_X11": 1},
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["KHTML_EXPORT","KDE_EXPORT","KDE_DEPRECATED","Q_INVOKABLE"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=[
"QtCore/QtCoremod.sip",
"QtGui/QtGuimod.sip",
"QtXml/QtXmlmod.sip",
"kdecore/kdecoremod.sip",
"kdeui/kdeuimod.sip",
"kio/kiomod.sip",
"kutils/kutilsmod.sip",
"kparts/kpartsmod.sip",],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["KHTML_EXPORT","KDE_EXPORT"],
noCTSCC=["CSSRule","CSSCharsetRule","CSSFontFaceRule","CSSImportRule","CSSMediaRule","CSSPageRule",
"CSSStyleRule","CSSUnknownRule","CSSStyleSheet","CSSPrimitiveValue","CSSValueList","CSSNamespaceRule"],
ignoreBases=["khtml::KHTMLWidget"],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch=["parent"],
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="Transfer")
]
)
###########################################################################
def KNewStuffMapper(mod,headerName):
print("KNewStuffMapper: "+headerName)
filename = os.path.basename(headerName)
if filename.endswith(".h"):
sipName = filename[:-2]+".sip"
if "knewstuff3" in headerName:
return "knewstuff3_"+sipName
else:
return sipName
return filename
def KNewStuffCppHeaderMapper(mod,filename):
if "knewstuff3" in filename:
return "knewstuff3/" + os.path.basename(filename)
else:
return os.path.basename(filename)
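# For example (hypothetical paths): a header ".../knewstuff3/entry.h" maps to
# "knewstuff3_entry.sip" and "knewstuff3/entry.h", while a knewstuff2 header
# such as ".../knewstuff2/engine.h" maps to plain "engine.sip" and "engine.h".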
knewstuff = toolkit.ModuleGenerator(
module="PyKDE4.knewstuff",
outputDirectory=os.path.join(outputBaseDirectory,"sip/knewstuff"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/knewstuff"),
mainDocs=os.path.join(cmakelistBaseDirectory,"knewstuff/Mainpage.dox"),
# .h file extraction
cmakelists=[os.path.join(cmakelistBaseDirectory,"knewstuff/CMakeLists.txt"),
os.path.join(cmakelistBaseDirectory,"knewstuff/knewstuff2/CMakeLists.txt"),
os.path.join(cmakelistBaseDirectory,"knewstuff/knewstuff3/CMakeLists.txt")],
ignoreHeaders="""knewstuff_export.h""".split(" "),
#noUpdateSip=["typedefs.sip"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
preprocessorValues={"Q_WS_X11": 1},
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["KNEWSTUFF_EXPORT","KNEWSTUFF_EXPORT_DEPRECATED","KDE_EXPORT","KDE_DEPRECATED","Q_INVOKABLE"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=[
"QtCore/QtCoremod.sip",
"QtGui/QtGuimod.sip",
"QtXml/QtXmlmod.sip",
"kdecore/kdecoremod.sip",
"kdeui/kdeuimod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["KNEWSTUFF_EXPORT","KNEWSTUFF_EXPORT_DEPRECATED","KDE_EXPORT"],
#noCTSCC=[],
#ignoreBases=["khtml::KHTMLWidget"],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch=["parent"],
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="Transfer")
],
filenameMappingFunction=KNewStuffMapper,
cppHeaderMappingFunction=KNewStuffCppHeaderMapper
)
###########################################################################
dnssd = toolkit.ModuleGenerator(
module="PyKDE4.dnssd",
outputDirectory=os.path.join(outputBaseDirectory,"sip/dnssd"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/dnssd"),
mainDocs=os.path.join(cmakelistBaseDirectory,"dnssd/Mainpage.dox"),
# .h file extraction
cmakelists=[os.path.join(cmakelistBaseDirectory,"dnssd/CMakeLists.txt")],
ignoreHeaders="""dnssd_export.h settings.h""".split(" "),
#noUpdateSip=["typedefs.sip"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
preprocessorValues={"Q_WS_X11": 1},
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["KDNSSD_EXPORT","KDE_EXPORT","KDE_DEPRECATED","Q_INVOKABLE"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=[
"QtCore/QtCoremod.sip",
"QtGui/QtGuimod.sip",
"kdecore/kdecoremod.sip",
"kdeui/kdeuimod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["KDNSSD_EXPORT","KDE_EXPORT"],
#noCTSCC=[],
#ignoreBases=["khtml::KHTMLWidget"],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch=["parent"],
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="Transfer")
]
)
###########################################################################
nepomuk = toolkit.ModuleGenerator(
module="PyKDE4.nepomuk",
outputDirectory=os.path.join(outputBaseDirectory,"sip/nepomuk"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/nepomuk"),
mainDocs=os.path.join(cmakelistBaseDirectory,"nepomuk/Mainpage.dox"),
# .h file extraction
cmakelists=[os.path.join(cmakelistBaseDirectory,"nepomuk/CMakeLists.txt"),
os.path.join(cmakelistBaseDirectory,"nepomuk/query/CMakeLists.txt")],
headers = [os.path.join(kdelibsBuildDirectory,"nepomuk",x)
for x in "ncal.h nco.h ndo.h nfo.h nie.h nmm.h nuao.h pimo.h tmo.h".split(" ")],
ignoreHeaders="""nepomuk_export.h ontologyloader.h desktopontologyloader.h fileontologyloader.h ontologymanager.h nepomukontologyloader.h nepomukquery_export.h kmetadatatagwidget.h ncal.h nco.h ndo.h nexif.h nfo.h nie.h nmm.h nmo.h nuao.h pimo.h tmo.h""".split(" "),
#noUpdateSip=["typedefs.sip"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
preprocessorValues={"Q_WS_X11": 1},
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["NEPOMUK_EXPORT","KDE_EXPORT","KDE_DEPRECATED","Q_INVOKABLE","NEPOMUKQUERY_EXPORT"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=[
"QtCore/QtCoremod.sip",
"kdecore/kdecoremod.sip",
"soprano/sopranomod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["NEPOMUK_EXPORT","KDE_EXPORT","NEPOMUKQUERY_EXPORT"],
noCTSCC=["Term","GroupTerm","AndTerm","OrTerm","LiteralTerm","ResourceTerm","SimpleTerm","ComparisonTerm","ResourceTypeTerm","NegationTerm","OptionalTerm","FileQuery"],
#ignoreBases=["khtml::KHTMLWidget"],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch=["parent"],
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="Transfer")
]
)
###########################################################################
soprano = toolkit.ModuleGenerator(
module="PyKDE4.soprano",
outputDirectory=os.path.join(outputBaseDirectory,"sip/soprano"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/soprano"),
mainDocs=os.path.join(cmakelistGitBaseDirectory,"soprano/Mainpage.dox"),
# .h file extraction
cmakelists=[os.path.join(cmakelistGitBaseDirectory,"soprano/CMakeLists.txt"),
os.path.join(cmakelistGitBaseDirectory,"soprano/soprano/CMakeLists.txt"),
os.path.join(cmakelistGitBaseDirectory,"soprano/server/CMakeLists.txt"),
#os.path.join(cmakelistGitBaseDirectory,"soprano/server/sparql/CMakeLists.txt"),
os.path.join(cmakelistGitBaseDirectory,"soprano/server/dbus/CMakeLists.txt")],
ignoreHeaders="""soprano_export.h sopranomacros.h soprano.h vocabulary.h iterator.h version.h iteratorbackend.h""".split(" "),
#noUpdateSip=["iterator.sip"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
preprocessorValues={"Q_WS_X11": 1, "USING_SOPRANO_NRLMODEL_UNSTABLE_API":1, "QT_VERSION": 0x040700},
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["SOPRANO_EXPORT","SOPRANO_CLIENT_EXPORT","SOPRANO_SERVER_EXPORT",
"USING_SOPRANO_NRLMODEL_UNSTABLE_API","KDE_EXPORT","KDE_DEPRECATED","Q_INVOKABLE",
"SOPRANO_DEPRECATED"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=["QtCore/QtCoremod.sip","QtGui/QtGuimod.sip","QtNetwork/QtNetworkmod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["SOPRANO_EXPORT","SOPRANO_CLIENT_EXPORT","SOPRANO_SERVER_EXPORT","KDE_EXPORT"],
#noCTSCC=[],
ignoreBases=["IteratorBackend<BindingSet>","Iterator<Node>","Iterator<BindingSet>","Iterator<Statement>"],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch=["parent"],
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="Transfer")
]
)
###########################################################################
akonadi = toolkit.ModuleGenerator(
module="PyKDE4.akonadi",
outputDirectory=os.path.join(outputBaseDirectory,"sip/akonadi"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/akonadi"),
mainDocs=os.path.join(cmakelistPimlibsBaseDirectory,"akonadi/Mainpage.dox"),
# .h file extraction
cmakelists=[os.path.join(cmakelistPimlibsBaseDirectory,"akonadi/CMakeLists.txt"),
os.path.join(cmakelistPimlibsBaseDirectory,"akonadi/kmime/CMakeLists.txt"),
os.path.join(cmakelistPimlibsBaseDirectory,"akonadi/kabc/CMakeLists.txt")],
ignoreHeaders="""akonadi_export.h akonadi-kmime_export.h akonadi-kabc_export.h itempayloadinternals_p.h collectionpathresolver_p.h qtest_akonadi.h exception.h contactparts.h cachepolicypage.h resourcebasesettings.h dbusconnectionpool.h """.split(" "),
#addressee.h kabc_export.h
headers=[os.path.join(kdepimlibsBuildDirectory,"akonadi/resourcebasesettings.h")],
# headers=[
# os.path.join(kdepimlibsBuildDirectory, "addressee.h")],
#resourcebase.h agentbase.h
#noUpdateSip=["iterator.sip"],
ignoreBases=["QDBusContext"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros( \
[(re.compile(r'Latin1\( "ISO-8859-1" \)'),r'Latin1'),
(re.compile(r'kmime_mk_trivial_ctor\(\s*(\w+)\s*\)'),r'public: explicit \1( Content *parent = 0 ); \1( Content *parent, const QByteArray &s ); \1( Content *parent, const QString &s, const QByteArray &charset ); ~\1();'),
(re.compile(r'kmime_mk_dptr_ctor\(\s*(\w+)\s*\)'), r'protected: explicit \1( \1::Private *d, KMime::Content *parent = 0 );'),
(re.compile(r'kmime_mk_trivial_ctor_with_name\(\s*(\w+)\s*\)'),r'public: explicit \1( Content *parent = 0 ); \1( Content *parent, const QByteArray &s ); \1( Content *parent, const QString &s, const QByteArray &charset ); ~\1();const char *type() const; static const char *staticType();'),
]),
#[(re.compile(r'AKONADI_COLLECTION_PROPERTIES_PAGE_FACTORY\s*\(\s*(\S+)\s*,\s*(\w+)\s*\)'),r'']),
preprocessorValues={"Q_WS_X11": 1},
macros=qtkdemacros.QtMacros(["AKONADI_DECLARE_PRIVATE"]),
bareMacros=qtkdemacros.QtBareMacros(["AKONADI_EXPORT","AKONADI_EXPORT_DEPRECATED","KDE_EXPORT",
"KDE_DEPRECATED","Q_INVOKABLE","KABC_EXPORT","KABC_EXPORT_DEPRECATED","AKONADI_KABC_EXPORT","AKONADI_KMIME_EXPORT","AKONADI_KMIME_EXPORT_DEPRECATED","KMIME_EXPORT","KMIME_EXPORT_DEPRECATED"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=["QtCore/QtCoremod.sip","QtGui/QtGuimod.sip","kdeui/kdeuimod.sip","kdecore/kdecoremod.sip","kio/kiomod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["AKONADI_EXPORT","AKONADI_KABC_EXPORT","AKONADI_KMIME_EXPORT","KDE_EXPORT","AKONADI_EXPORT_DEPRECATED","AKONADI_KMIME_EXPORT_DEPRECATED","KABC_EXPORT","KABC_EXPORT_DEPRECATED","KMIME_EXPORT","KMIME_EXPORT_DEPRECATED"],
noCTSCC=["Collection","Entity","Item"],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch=["parent"],
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="Transfer")
]
)
###########################################################################
polkitqt = toolkit.ModuleGenerator(
module="PyKDE4.polkitqt",
outputDirectory=os.path.join(outputBaseDirectory,"sip/polkitqt"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/polkitqt"),
mainDocs=os.path.join(polkitqtBaseDirectory,"Mainpage.dox"),
# .h file extraction
cmakelists=[os.path.join(polkitqtBaseDirectory,"CMakeLists.txt")],
ignoreHeaders="""export.h polkitqtversion.h""".split(" "),
#resourcebase.h agentbase.h
#noUpdateSip=["iterator.sip"],
#ignoreBases=["QDBusContext"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
preprocessorValues={"Q_WS_X11": 1},
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["POLKIT_QT_EXPORT","POLKITQT1_EXPORT"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=["QtCore/QtCoremod.sip","QtGui/QtGuimod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["POLKIT_QT_EXPORT","KDE_EXPORT"],
#noCTSCC=[],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch=["parent"],
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="Transfer")
]
)
###########################################################################
phonon = toolkit.ModuleGenerator(
module="PyKDE4.phonon",
outputDirectory=os.path.join(outputBaseDirectory,"sip/phonon"),
docsOutputDirectory=os.path.join(outputBaseDirectory, "docs/html/phonon"),
mainDocs=os.path.join(cmakelistPhononBaseDirectory,"Mainpage.dox"),
# .h file extraction
cmakelists=[os.path.join(cmakelistPhononBaseDirectory,"phonon/CMakeLists.txt")],
ignoreHeaders="""phonondefs.h phonon_export.h export.h kaudiodevicelist_export.h phononnamespace.h addoninterface.h volumefaderinterface.h backendinterface.h effectinterface.h mediaobjectinterface.h platformplugin.h audiodataoutputinterface.h audiooutputinterface.h""".split(" "),
noUpdateSip=["phononnamespace.sip"],
ignoreBases=["QSharedData"],
#ignoreBases=["AbstractAudioOutput", "Phonon::AbstractAudioOutput", "QSharedData", "AbstractVideoOutput",
# "Phonon::AbstractVideoOutput"],
# Cpp parsing
preprocessSubstitutionMacros=qtkdemacros.QtPreprocessSubstitutionMacros(),
preprocessorValues={"Q_WS_X11": 1, "QT_VERSION": "0x040400", "_MSC_VER": 0},
macros=qtkdemacros.QtMacros(),
bareMacros=qtkdemacros.QtBareMacros(["PHONON_EXPORT","PHONONEXPERIMENTAL_EXPORT", "PHONON_DEPRECATED",
"PHONON_EXPORT_DEPRECATED", "KAUDIODEVICELIST_EXPORT"]),
# Sip generation
sipImportDirs=[sipImportDir,os.path.join(outputBaseDirectory,"sip")],
sipImports=["QtCore/QtCoremod.sip","QtGui/QtGuimod.sip","QtXml/QtXmlmod.sip","solid/solidmod.sip"],
copyrightNotice=qtkdemacros.copyrightNotice(),
exportMacros=["PHONON_EXPORT", "KDE_EXPORT", "PHONONEXPERIMENTAL_EXPORT", "KAUDIODEVICELIST_EXPORT", "PHONON_DEPRECATED", "PHONON_EXPORT_DEPRECATED"],
#noCTSCC=[],
annotationRules=[
toolkit.AnnotationRule(
methodTypeMatch="ctor",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch=["parent"],
annotations="TransferThis"),
toolkit.AnnotationRule(
methodTypeMatch="function",
parameterTypeMatch=["QWidget*","QObject*"],
parameterNameMatch="parent",
annotations="Transfer")
]
)
###########################################################################
def updateSIP():
kdecore.run()
plasma.run()
kdeui.run()
kio.run()
kutils.run()
solid.run()
kparts.run()
khtml.run()
knewstuff.run()
dnssd.run()
nepomuk.run()
soprano.run()
akonadi.run()
polkitqt.run()
phonon.run()
def updateDocs():
classNames = []
nsNames = []
def UpdateClassNamespaceList(moduleName,sipScopes):
nsNames.append( (moduleName,'global', 'global') )
def ExtractClassNamespace(scope):
for item in scope:
if isinstance(item,sipsymboldata.SymbolData.SipClass):
classNames.append( (moduleName, item.fqPythonName(), item.fqPythonName()) )
ExtractClassNamespace(item)
elif isinstance(item,sipsymboldata.SymbolData.Namespace):
nsTuple = (moduleName,item.fqPythonName(),item.fqPythonName())
if nsTuple not in nsNames:
nsNames.append( nsTuple )
ExtractClassNamespace(item)
for scope in sipScopes:
ExtractClassNamespace(scope)
UpdateClassNamespaceList('kdecore',kdecore.docs())
UpdateClassNamespaceList('plasma',plasma.docs())
UpdateClassNamespaceList('kdeui',kdeui.docs())
UpdateClassNamespaceList('kio',kio.docs())
UpdateClassNamespaceList('kutils',kutils.docs())
UpdateClassNamespaceList('solid',solid.docs())
UpdateClassNamespaceList('kparts',kparts.docs())
UpdateClassNamespaceList('khtml',khtml.docs())
UpdateClassNamespaceList('knewstuff',knewstuff.docs())
UpdateClassNamespaceList('dnssd',dnssd.docs())
UpdateClassNamespaceList('nepomuk',nepomuk.docs())
UpdateClassNamespaceList('soprano',soprano.docs())
UpdateClassNamespaceList('akonadi',akonadi.docs())
UpdateClassNamespaceList('polkitqt',polkitqt.docs())
UpdateClassNamespaceList('phonon',phonon.docs())
print("Writing all classes index:")
toolkit.ModuleGenerator.WriteAllClasses(os.path.join(outputBaseDirectory,"docs/html"),nsNames,classNames)
print("Done")
def main():
updateSIP()
updateDocs()
if __name__=="__main__":
main()
| lgpl-3.0 | -8,934,794,075,207,159,000 | 43.505325 | 738 | 0.658468 | false | 3.653648 | false | false | false |
josiah-wolf-oberholtzer/supriya | tests/nonrealtime/test_nonrealtime_Session_zero_duration.py | 1 | 2475 | import pytest
import supriya.assets.synthdefs
import supriya.nonrealtime
import supriya.synthdefs
import supriya.ugens
def test_manual_with_gate():
session = supriya.nonrealtime.Session(0, 2)
with session.at(0):
group = session.add_group(duration=4)
for i in range(4):
with session.at(i):
group.add_synth(duration=0)
d_recv_commands = pytest.helpers.build_d_recv_commands(
[supriya.assets.synthdefs.default]
)
assert session.to_lists(duration=5) == [
[
0.0,
[
*d_recv_commands,
["/g_new", 1000, 0, 0],
["/s_new", "da0982184cc8fa54cf9d288a0fe1f6ca", 1001, 0, 1000],
["/n_set", 1001, "gate", 0],
],
],
[
1.0,
[
["/s_new", "da0982184cc8fa54cf9d288a0fe1f6ca", 1002, 0, 1000],
["/n_set", 1002, "gate", 0],
],
],
[
2.0,
[
["/s_new", "da0982184cc8fa54cf9d288a0fe1f6ca", 1003, 0, 1000],
["/n_set", 1003, "gate", 0],
],
],
[
3.0,
[
["/s_new", "da0982184cc8fa54cf9d288a0fe1f6ca", 1004, 0, 1000],
["/n_set", 1004, "gate", 0],
],
],
[4.0, [["/n_free", 1000]]],
[5.0, [[0]]],
]
def test_manual_without_gate():
with supriya.synthdefs.SynthDefBuilder() as builder:
source = supriya.ugens.DC.ar(1)
supriya.ugens.Out.ar(bus=0, source=source)
source_synthdef = builder.build()
session = supriya.nonrealtime.Session(0, 1)
with session.at(0):
group = session.add_group(duration=4)
for i in range(4):
with session.at(i):
group.add_synth(duration=0, synthdef=source_synthdef)
assert session.to_lists(duration=10) == [
[
0.0,
[
["/d_recv", bytearray(source_synthdef.compile())],
["/g_new", 1000, 0, 0],
["/s_new", "7839f99c38c2ac4326388a013cdd643c", 1001, 0, 1000],
],
],
[1.0, [["/s_new", "7839f99c38c2ac4326388a013cdd643c", 1002, 0, 1000]]],
[2.0, [["/s_new", "7839f99c38c2ac4326388a013cdd643c", 1003, 0, 1000]]],
[3.0, [["/s_new", "7839f99c38c2ac4326388a013cdd643c", 1004, 0, 1000]]],
[4.0, [["/n_free", 1000]]],
[10.0, [[0]]],
]
| mit | 6,077,712,018,084,925,000 | 29.9375 | 79 | 0.486465 | false | 2.908343 | false | false | false |
pombredanne/hitch | hitch/commandline.py | 1 | 10374 | """High level command line interface to hitch."""
from subprocess import call, PIPE, STDOUT, CalledProcessError, Popen
from hitch.click import command, group, argument, option
from os import path, makedirs, listdir, kill, remove
from sys import stderr, exit, modules, argv
from functools import partial
from hitch import hitchdir
import shutil
import signal
import copy
def check_output(command, stdout=PIPE, stderr=PIPE):
"""Re-implemented subprocess.check_output since it is not available < python 2.7."""
return Popen(command, stdout=stdout, stderr=stderr).communicate()[0]
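# e.g. check_output(["which", "python3"]) returns the captured stdout as bytes;
# unlike the stdlib version, it does not raise on a non-zero exit status.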
@group()
def cli():
pass
@command()
@option(
'-p', '--python', default=None,
help="""Create hitch virtualenv using specific python version"""
""" (e.g. /usr/bin/python3). Defaults to using python3 on the system path."""
)
@option(
'-v', '--virtualenv', default=None,
help="""Create hitch virtualenv using specific virtualenv"""
""" (e.g. /usr/bin/virtualenv). Defaults to using virtualenv on the system path."""
)
def init(python, virtualenv):
"""Initialize hitch in this directory."""
if virtualenv is None:
if call(["which", "virtualenv"], stdout=PIPE, stderr=PIPE):
stderr.write("You must have virtualenv installed to use hitch.\n")
stderr.flush()
exit(1)
virtualenv = check_output(["which", "virtualenv"]).decode('utf8').replace("\n", "")
else:
if path.exists(virtualenv):
if python is None:
python = path.join(path.dirname(virtualenv), "python")
else:
stderr.write("{} not found.\n".format(virtualenv))
if python is None:
if call(["which", "python3"], stdout=PIPE, stderr=PIPE):
stderr.write(
"To use Hitch, you must have python 3 installed on your system "
"and available. If your python3 is not on the system path with "
"the name python3, specify its exact location using --python.\n"
)
stderr.flush()
exit(1)
python3 = check_output(["which", "python3"]).decode('utf8').replace("\n", "")
else:
if path.exists(python):
python3 = python
else:
stderr.write("{} not found.\n".format(python))
exit(1)
str_version = check_output([python3, "-V"], stderr=STDOUT).decode('utf8').replace('\n', '')
tuple_version = tuple([int(v) for v in str_version.replace('Python ', '').split('.')])
if tuple_version < (3, 3):
stderr.write(
"The hitch environment must have python >=3.3 installed to be built.\n Your "
"app can run with earlier versions of python, but the testing environment can't.\n"
)
exit(1)
if hitchdir.hitch_exists():
stderr.write("Hitch has already been initialized in this directory or a directory above it.\n")
stderr.write("If you wish to re-initialize hitch in this directory, run 'hitch clean' in the")
stderr.write("directory containing the .hitch directory and run hitch init here again.\n")
stderr.flush()
exit(1)
makedirs(".hitch")
pip = path.abspath(path.join(".hitch", "virtualenv", "bin", "pip"))
call([virtualenv, ".hitch/virtualenv", "--no-site-packages", "--distribute", "-p", python3])
call([pip, "install", "-U", "pip"])
if path.exists("hitchreqs.txt"):
call([pip, "install", "-r", "hitchreqs.txt"])
else:
call([pip, "install", "hitchtest"])
pip_freeze = check_output([pip, "freeze"]).decode('utf8')
with open("hitchreqs.txt", "w") as hitchreqs_handle:
hitchreqs_handle.write(pip_freeze)
def update_requirements():
"""Check hitchreqs.txt match what's installed via pip freeze. If not, update."""
pip = path.join(hitchdir.get_hitch_directory_or_fail(), "virtualenv", "bin", "pip")
hitchreqs_filename = path.join(hitchdir.get_hitch_directory_or_fail(), "..", "hitchreqs.txt")
pip_freeze = check_output([pip, "freeze"]).decode('utf8').split('\n')
hitchreqs_handle = ""
with open(hitchreqs_filename, "r") as hitchreqs_handle:
hitchreqs = hitchreqs_handle.read().split('\n')
if not sorted(pip_freeze) == sorted(hitchreqs):
call([pip, "install", "-r", "hitchreqs.txt"])
pip_freeze = check_output([pip, "freeze"]).decode('utf8')
with open("hitchreqs.txt", "w") as hitchreqs_handle:
hitchreqs_handle.write(pip_freeze)
def get_pip():
"""Get the file path to the hitch pip."""
return path.join(hitchdir.get_hitch_directory_or_fail(), "virtualenv", "bin", "pip")
@command(context_settings={'help_option_names':[],'ignore_unknown_options':True}, help="Run the named hitch package from the virtualenv.")
@argument('arguments', nargs=-1)
def runpackage(arguments):
# Generic method to run any installed app in the virtualenv whose name starts with hitch*
update_requirements()
binfile = path.join(hitchdir.get_hitch_directory(), "virtualenv", "bin", "hitch{}".format(argv[1]))
command = [binfile, ] + argv[2:]
    # When receiving an exit signal, just forward it to the child process.
def forward_signal_to_child(pid, signum, frame):
kill(pid, signum)
process = Popen(command)
signal.signal(signal.SIGINT, partial(forward_signal_to_child, process.pid))
signal.signal(signal.SIGTERM, partial(forward_signal_to_child, process.pid))
signal.signal(signal.SIGHUP, partial(forward_signal_to_child, process.pid))
signal.signal(signal.SIGQUIT, partial(forward_signal_to_child, process.pid))
return_code = process.wait()
exit(return_code)
@command()
@argument('package', required=True)
def uninstall(package):
"""Uninstall hitch package."""
pip = get_pip()
call([pip, "uninstall", package] )
pip_freeze = check_output([pip, "freeze"]).decode('utf8')
with open("hitchreqs.txt", "w") as hitchreqs_handle:
hitchreqs_handle.write(pip_freeze)
@command()
@argument('package', required=True)
def install(package):
"""Install hitch package."""
pip = get_pip()
call([pip, "install", package, "-U", ])
pip_freeze = check_output([pip, "freeze"]).decode('utf8')
with open("hitchreqs.txt", "w") as hitchreqs_handle:
hitchreqs_handle.write(pip_freeze)
@command()
def upgrade():
"""Upgrade all installed hitch packages."""
pip = get_pip()
package_list = [
p for p in check_output([pip, "freeze"]).decode('utf8').split('\n')
if p != "" and "==" in p
]
version_fixed_package_list = [p.split("==")[0] for p in package_list]
for package in version_fixed_package_list:
call([pip, "install", package, "-U", ])
pip_freeze = check_output([pip, "freeze"]).decode('utf8')
with open("hitchreqs.txt", "w") as hitchreqs_handle:
hitchreqs_handle.write(pip_freeze)
@command()
def freeze():
"""List installed hitch packages."""
pip = path.join(hitchdir.get_hitch_directory_or_fail(), "virtualenv", "bin", "pip")
call([pip, "freeze", ])
@command()
def clean():
"""Remove the hitch directory entirely."""
if hitchdir.hitch_exists():
hitch_directory = hitchdir.get_hitch_directory_or_fail()
shutil.rmtree(hitch_directory)
else:
stderr.write("No hitch directory found. Doing nothing.\n")
stderr.flush()
@command()
@option(
'-p', '--packages', default=None, help=(
"Specify precise packages to remove - "
"e.g. postgresql, postgresql-9.3.9, python, python2.6.8"
)
)
def cleanpkg(packages):
"""Remove installed packages from the .hitchpkg directory."""
hitchpkg = path.join(path.expanduser("~"), ".hitchpkg")
if path.exists(hitchpkg):
if packages is None:
shutil.rmtree(hitchpkg)
else:
            for file_or_dir in listdir(hitchpkg):
                if file_or_dir.startswith(packages):
                    full_path = path.join(hitchpkg, file_or_dir)
                    if path.isdir(full_path):
                        shutil.rmtree(full_path)
                    else:
                        remove(full_path)
def run():
"""Run hitch bootstrap CLI"""
def stop_everything(sig, frame):
"""Exit hitch."""
exit(1)
signal.signal(signal.SIGINT, stop_everything)
signal.signal(signal.SIGTERM, stop_everything)
signal.signal(signal.SIGHUP, stop_everything)
signal.signal(signal.SIGQUIT, stop_everything)
if hitchdir.hitch_exists():
if not path.exists(path.join(hitchdir.get_hitch_directory(), "virtualenv", "bin")):
stderr.write("Hitch was initialized in this directory (or one above it), but something.\n")
stderr.write("was corrupted. Try running 'hitch clean' and then run 'hitch init' again.")
stderr.flush()
exit(1)
# Get packages from bin folder that are hitch related
python_bin = path.join(hitchdir.get_hitch_directory(), "virtualenv", "bin", "python")
packages = [
package.replace("hitch", "") for package in listdir(
path.join(hitchdir.get_hitch_directory(), "virtualenv", "bin")
)
if package.startswith("hitch") and package != "hitch"
]
# Add packages that start with hitch* to the list of commands available
for package in packages:
cmd = copy.deepcopy(runpackage)
cmd.name = package
try:
description = check_output([
python_bin, '-c',
'import sys;sys.stdout.write(__import__("hitch{}").commandline.cli.help)'.format(
package
)
]).decode('utf8')
except CalledProcessError:
description = ""
cmd.help = description
cmd.short_help = description
cli.add_command(cmd)
cli.add_command(install)
cli.add_command(uninstall)
cli.add_command(upgrade)
cli.add_command(clean)
cli.add_command(freeze)
cli.add_command(init)
cli.help = "Hitch test runner for:\n\n {0}.".format(hitchdir.get_hitch_directory())
else:
cli.add_command(init)
cli.add_command(clean)
cli.help = "Hitch bootstrapper - '.hitch' directory not detected here."
cli()
if __name__ == '__main__':
run()
| agpl-3.0 | -3,730,368,201,072,467,000 | 36.182796 | 103 | 0.613264 | false | 3.732997 | false | false | false |
locaweb/simplenet | src/simplenet/common/event.py | 1 | 4587 | # Copyright 2012 Locaweb.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author: Thiago Morello, Locaweb.
# @author: Willian Molinari, Locaweb.
# @author: Juliano Martinez, Locaweb.
import socket
from kombu import BrokerConnection, Exchange, Queue
from simplenet.common.config import config, get_logger
logger = get_logger()
class EventManager(object):
def __init__(self):
self.url = config.get("event", "broker")
def raise_fanout_event(self, exchange, event_type, params, **kwargs):
logger.debug("Raising event %s with params: %s" % (event_type, params))
with BrokerConnection(self.url) as conn:
conn.ensure_connection()
media_exchange = Exchange(
"dhcp:fanout:%s" % exchange,
type="fanout",
durable=True)
if 'route' in kwargs:
routing_key = kwargs['route']
else:
queue = Queue(
event_type,
exchange=media_exchange,
routing_key=event_type
)
if params['action'] == 'new' or params['action'] == 'rebuild_queues':
queue(conn.channel()).declare()
return
elif params['action'] == 'remove':
try:
queue(conn.channel()).unbind()
except AttributeError:
queue(conn.channel()).unbind_from(exchange=media_exchange, routing_key=event_type)
return
else:
routing_key = event_type
with conn.Producer(exchange=media_exchange, serializer="json",
routing_key=routing_key) as producer:
logger.debug("Publishing %s" % params)
producer.publish(params)
def raise_event(self, event_type, params, **kwargs):
logger.debug("Raising event %s with params: %s" % (event_type, params))
with BrokerConnection(self.url) as conn:
conn.ensure_connection()
media_exchange = Exchange(
"simplenet",
type="direct",
durable=True)
if 'route' in kwargs:
routing_key = kwargs['route']
else:
queue = Queue(
event_type,
exchange=media_exchange,
routing_key=event_type
)
queue(conn.channel()).declare()
routing_key = event_type
with conn.Producer(exchange=media_exchange, serializer="json",
routing_key=routing_key) as producer:
logger.debug("Publishing %s" % params)
producer.publish(params)
def listen_event(self, queue_name, callback):
with BrokerConnection(self.url) as conn:
conn.ensure_connection()
media_exchange = Exchange(
"simplenet",
type="direct",
durable=True
)
queue = Queue(
queue_name,
exchange=media_exchange,
routing_key=queue_name
)
logger.info("Listening for data...")
with conn.Consumer([queue], callbacks=[callback]) as consumer:
while True:
conn.drain_events()
def bind_queue(self, queue_name, routing_key):
with BrokerConnection(self.url) as conn:
conn.ensure_connection()
media_exchange = Exchange(
"simplenet",
type="direct",
durable=True
)
queue = Queue(
queue_name,
exchange=media_exchange,
routing_key=routing_key
)
queue(conn.channel()).declare()
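
# Usage sketch (queue name and payload are illustrative; the broker URL is read
# from the [event] section of the simplenet config):
#
#   manager = EventManager()
#   manager.raise_event('dhcp_entry', {'action': 'new', 'mac': 'aa:bb:cc'})
#   manager.listen_event('dhcp_entry',
#                        lambda body, message: message.ack())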
| mit | 479,172,603,894,329,150 | 33.75 | 106 | 0.51755 | false | 4.699795 | false | false | false |
certik/sfepy | sfepy/base/conf.py | 1 | 10020 | import re
from base import Struct, IndexedStruct, dict_to_struct, pause, output, copy,\
import_file, assert_, get_default
from reader import Reader
_required = ['filename_mesh', 'field_[0-9]+|fields',
'ebc_[0-9]+|ebcs', 'fe', 'equations',
'region_[0-9]+|regions', 'variable_[0-9]+|variables',
'material_[0-9]+|materials', 'integral_[0-9]+|integrals',
'solver_[0-9]+|solvers']
_other = ['epbc_[0-9]+|epbcs', 'lcbc_[0-9]+|lcbcs', 'nbc_[0-9]+|nbcs',
'ic_[0-9]+|ics', 'options']
##
# c: 19.02.2008, r: 19.02.2008
def get_standard_keywords():
return copy( _required ), copy( _other )
##
# c: 10.04.2008, r: 10.04.2008
def tuple_to_conf( name, vals, order ):
conf = Struct( name = name )
for ii, key in enumerate( order ):
setattr( conf, key, vals[ii] )
return conf
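# e.g. tuple_to_conf('u', ('unknown field', 'temperature'), ['kind', 'field'])
# returns Struct(name='u', kind='unknown field', field='temperature').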
##
# Short syntax: key is suffixed with '__<number>' to prevent collisions with
# long syntax keys -> both cases can be used in a single input.
def transform_variables( adict ):
d2 = {}
for ii, (key, conf) in enumerate( adict.iteritems() ):
if isinstance( conf, tuple ):
c2 = tuple_to_conf( key, conf, ['kind', 'field'] )
if len( conf ) >= 3:
kind = c2.kind.split()[0]
if kind == 'unknown':
c2.order = conf[2]
elif kind == 'test':
c2.dual = conf[2]
elif kind == 'parameter':
c2.like = conf[2]
if len( conf ) == 4:
c2.history = conf[3]
d2['variable_%s__%d' % (c2.name, ii)] = c2
else:
c2 = transform_to_struct_1( conf )
d2['variable_'+c2.name] = c2
return d2
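# Short-syntax example: {'u' : ('unknown field', 'temperature', 1)} becomes
# Struct(name='u', kind='unknown field', field='temperature', order=1) under
# the collision-safe key 'variable_u__0'.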
##
# c: 10.04.2008, r: 06.05.2008
def transform_ebcs( adict ):
d2 = {}
for ii, (key, conf) in enumerate( adict.iteritems() ):
if isinstance( conf, tuple ):
c2 = tuple_to_conf( key, conf, ['region', 'dofs'] )
d2['ebc_%s__%d' % (c2.name, ii)] = c2
else:
c2 = transform_to_struct_1( conf )
d2['ebc_'+c2.name] = c2
return d2
def transform_ics( adict ):
d2 = {}
for ii, (key, conf) in enumerate( adict.iteritems() ):
if isinstance( conf, tuple ):
c2 = tuple_to_conf( key, conf, ['region', 'dofs'] )
d2['ic_%s__%d' % (c2.name, ii)] = c2
else:
c2 = transform_to_struct_1( conf )
d2['ic_'+c2.name] = c2
return d2
##
# c: 02.05.2008, r: 06.05.2008
def transform_regions( adict ):
d2 = {}
for ii, (key, conf) in enumerate( adict.iteritems() ):
if isinstance( conf, tuple ):
c2 = tuple_to_conf( key, conf, ['select', 'flags'] )
for flag, val in c2.flags.iteritems():
setattr( c2, flag, val )
delattr( c2, 'flags' )
d2['region_%s__%d' % (c2.name, ii)] = c2
else:
c2 = transform_to_struct_1( conf )
d2['region_'+c2.name] = c2
    return d2
##
# c: 20.06.2007, r: 18.02.2008
def transform_to_struct_1( adict ):
return dict_to_struct( adict, flag = (1,) )
def transform_to_i_struct_1( adict ):
return dict_to_struct( adict, flag = (1,), constructor = IndexedStruct )
def transform_to_struct_01( adict ):
return dict_to_struct( adict, flag = (0,1) )
def transform_to_struct_10( adict ):
return dict_to_struct( adict, flag = (1,0) )
transforms = {
'options' : transform_to_i_struct_1,
'solvers' : transform_to_struct_01,
'integrals' : transform_to_struct_01,
'opt' : transform_to_struct_1,
'fe' : transform_to_struct_1,
'regions' : transform_regions,
'shape_opt' : transform_to_struct_10,
'fields' : transform_to_struct_01,
'variables' : transform_variables,
'ebcs' : transform_ebcs,
'epbcs' : transform_to_struct_01,
'nbcs' : transform_to_struct_01,
'lcbcs' : transform_to_struct_01,
'ics' : transform_ics,
}
##
# 27.10.2005, c
class ProblemConf( Struct ):
"""
Problem configuration, corresponding to an input (problem description
file). It validates the input using lists of required and other keywords
that have to/can appear in the input. Default keyword lists can be obtained
by sfepy.base.conf.get_standard_keywords().
ProblemConf instance is used to construct a ProblemDefinition instance via
ProblemDefinition.from_conf( conf ).
"""
##
# c: 25.07.2006, r: 10.07.2008
def from_file( filename, required = None, other = None ):
"""
Loads the problem definition from a file.
The filename can either contain plain definitions, or it can contain
the define() function, in which case it will be called to return the
input definitions.
The job of the define() function is to return a dictionary of
parameters. How the dictionary is constructed is not our business, but
the usual way is to simply have a function define() along these lines
in the input file:
def define():
options = {
'save_eig_vectors' : None,
'eigen_solver' : 'eigen1',
}
region_2 = {
'name' : 'Surface',
'select' : 'nodes of surface',
}
...
return locals()
"""
funmod = import_file( filename )
obj = ProblemConf()
if "define" in funmod.__dict__:
define_dict = funmod.__dict__["define"]()
else:
define_dict = funmod.__dict__
obj.__dict__.update( define_dict )
obj.setup( define_dict, funmod, filename, required, other )
return obj
from_file = staticmethod( from_file )
def from_module( module, required = None, other = None ):
obj = ProblemConf()
obj.__dict__.update( module.__dict__ )
obj.setup( funmod = module, required = required, other = other )
return obj
from_module = staticmethod( from_module )
def from_dict( dict_, funmod, required = None, other = None ):
obj = ProblemConf()
obj.__dict__.update( dict_ )
obj.setup( funmod = funmod, required = required, other = other )
return obj
from_dict = staticmethod( from_dict )
def setup( self, define_dict = None, funmod = None, filename = None,
required = None, other = None ):
define_dict = get_default( define_dict, self.__dict__ )
self._filename = filename
other_missing = self.validate( required = required, other = other )
for name in other_missing:
setattr( self, name, None )
self.transform_input_trivial()
self._raw = {}
for key, val in define_dict.iteritems():
if isinstance( val, dict ):
self._raw[key] = copy( val )
self.transform_input()
self.funmod = funmod
##
# 27.10.2005, c
# 19.09.2006
# 05.06.2007
def _validate_helper( self, items, but_nots ):
keys = self.__dict__.keys()
left_over = keys[:]
if but_nots is not None:
for item in but_nots:
match = re.compile( '^' + item + '$' ).match
for key in keys:
if match( key ):
left_over.remove( key )
missing = []
if items is not None:
for item in items:
found = False
match = re.compile( '^' + item + '$' ).match
for key in keys:
if match( key ):
found = True
left_over.remove( key )
if not found:
missing.append( item )
return left_over, missing
##
# c: 27.10.2005, r: 11.07.2008
def validate( self, required = None, other = None ):
required_left_over, required_missing \
= self._validate_helper( required, other )
other_left_over, other_missing \
= self._validate_helper( other, required )
assert_( required_left_over == other_left_over )
err = False
if required_missing:
err = True
output( 'error: required missing:', required_missing )
if other_left_over:
output( 'left over:', other_left_over )
if err:
raise ValueError
return other_missing
##
# c: 31.10.2005, r: 10.07.2008
def transform_input_trivial( self ):
"""Trivial input transformations."""
##
# Unordered inputs.
tr_list = ['([a-zA-Z0-9]+)_[0-9]+']
# Keywords not in 'required', but needed even empty (e.g. for run_tests).
for key in transforms.keys():
if not self.__dict__.has_key( key ):
self.__dict__[key] = {}
keys = self.__dict__.keys()
for item in tr_list:
match = re.compile( item ).match
for key in keys:
obj = match( key )
if obj:
new = obj.group( 1 ) + 's'
result = {key : self.__dict__[key]}
try:
self.__dict__[new].update( result )
                    except KeyError:
self.__dict__[new] = result
del self.__dict__[key]
def transform_input( self ):
keys = self.__dict__.keys()
for key, transform in transforms.iteritems():
if not key in keys: continue
self.__dict__[key] = transform( self.__dict__[key] )
def get_raw( self, key = None ):
if key is None:
return self._raw
else:
return self._raw[key]
def edit( self, key, newval ):
self.__dict__[key] = transforms[key]( newval )
| bsd-3-clause | 5,469,896,234,037,747,000 | 31.960526 | 81 | 0.518762 | false | 3.724907 | false | false | false |
asposeforcloud/Aspose_Cloud_SDK_For_Python | asposecloud/email/__init__.py | 1 | 7149 |
__author__ = 'assadmahmood'
import requests
import json
from asposecloud import Product
from asposecloud import AsposeApp
from asposecloud.common import Utils
# ========================================================================
# DOCUMENT CLASS
# ========================================================================
class Document:
def __init__(self, filename):
self.filename = filename
if not filename:
raise ValueError("filename not specified")
self.base_uri = Product.product_uri + 'email/' + self.filename
def get_property(self, property_name, remote_folder='', storage_type='Aspose', storage_name=None):
"""
        :param property_name: name of the email property to read
        :param remote_folder: storage path to operate
        :param storage_type: type of storage e.g Aspose, S3
        :param storage_name: name of storage e.g. MyAmazonS3
        :return: the value of the requested property
"""
str_uri = self.base_uri + '/properties/' + property_name
str_uri = Utils.append_storage(str_uri, remote_folder, storage_type, storage_name)
signed_uri = Utils.sign(str_uri)
response = None
try:
response = requests.get(signed_uri, headers={
'content-type': 'application/json', 'accept': 'application/json', 'x-aspose-client' : 'PYTHONSDK/v1.0'
})
response.raise_for_status()
response = response.json()
except requests.HTTPError as e:
print e
print response.content
exit(1)
return response['EmailProperty']['Value']
def set_property(self, property_name, property_value, remote_folder='', storage_type='Aspose', storage_name=None):
"""
        :param property_name: name of the email property to set
        :param property_value: new value to store in the property
:param remote_folder: storage path to operate
:param storage_type: type of storage e.g Aspose, S3
:param storage_name: name of storage e.g. MyAmazonS3
:return:
"""
str_uri = self.base_uri + '/properties/' + property_name
str_uri = Utils.append_storage(str_uri, remote_folder, storage_type, storage_name)
json_data = json.dumps({'Value': property_value})
signed_uri = Utils.sign(str_uri)
response = None
try:
response = requests.put(signed_uri, json_data, headers={
'content-type': 'application/json', 'accept': 'application/json', 'x-aspose-client' : 'PYTHONSDK/v1.0'
})
response.raise_for_status()
response = response.json()
except requests.HTTPError as e:
print e
print response.content
exit(1)
return response['EmailProperty']['Value']
def get_attachment(self, attachment_name, remote_folder='', storage_type='Aspose', storage_name=None):
"""
:param attachment_name:
:param remote_folder: storage path to operate
:param storage_type: type of storage e.g Aspose, S3
:param storage_name: name of storage e.g. MyAmazonS3
:return:
"""
if not attachment_name:
raise ValueError("attachment_name not specified")
str_uri = self.base_uri + '/attachments/' + attachment_name
str_uri = Utils.append_storage(str_uri, remote_folder, storage_type, storage_name)
signed_uri = Utils.sign(str_uri)
response = None
try:
response = requests.get(signed_uri, headers={
'content-type': 'application/json', 'accept': 'application/json', 'x-aspose-client' : 'PYTHONSDK/v1.0'
}, stream=True)
response.raise_for_status()
except requests.HTTPError as e:
print e
print response.content
exit(1)
validate_output = Utils.validate_result(response)
if not validate_output:
output_path = AsposeApp.output_path + attachment_name
Utils.save_file(response, output_path)
return output_path
else:
return validate_output
def add_attachment(self, attachment_name, remote_folder='', storage_type='Aspose', storage_name=None):
"""
:param attachment_name:
:param remote_folder: storage path to operate
:param storage_type: type of storage e.g Aspose, S3
:param storage_name: name of storage e.g. MyAmazonS3
:return:
"""
str_uri = self.base_uri + '/attachments/' + attachment_name
str_uri = Utils.append_storage(str_uri, remote_folder, storage_type, storage_name)
signed_uri = Utils.sign(str_uri)
response = None
try:
response = requests.post(signed_uri, None, headers={
'content-type': 'application/json', 'accept': 'application/json'
})
response.raise_for_status()
response = response.json()
except requests.HTTPError as e:
print e
print response.content
exit(1)
return response
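# Hedged usage sketch (illustrative only; assumes AsposeApp credentials are
# configured elsewhere and that 'email.msg' already exists in the remote
# storage; the property name and output format below are assumptions):
#
#   doc = Document('email.msg')
#   sender = doc.get_property('From')
#   Converter('email.msg').convert('eml')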
# ========================================================================
# CONVERTER CLASS
# ========================================================================
class Converter:
def __init__(self, filename):
self.filename = filename
if not filename:
raise ValueError("filename not specified")
self.base_uri = Product.product_uri + 'email/' + self.filename
def convert(self, save_format, stream_out=False, output_filename=None,
remote_folder='', storage_type='Aspose', storage_name=None):
"""
convert an email message document to a different format
        :param save_format: target output format (used as the file extension)
        :param stream_out: if True, return the converted bytes instead of saving a file
        :param output_filename: name for the saved output file; defaults to the source filename
:param remote_folder: storage path to operate
:param storage_type: type of storage e.g Aspose, S3
:param storage_name: name of storage e.g. MyAmazonS3
:return:
"""
if not save_format:
raise ValueError("save_format not specified")
str_uri = self.base_uri + '?format=' + save_format
str_uri = Utils.append_storage(str_uri, remote_folder, storage_type, storage_name)
signed_uri = Utils.sign(str_uri)
response = None
try:
response = requests.get(signed_uri, headers={
'content-type': 'application/json', 'accept': 'application/json', 'x-aspose-client' : 'PYTHONSDK/v1.0'
}, stream=True)
response.raise_for_status()
except requests.HTTPError as e:
print e
print response.content
exit(1)
validate_output = Utils.validate_result(response)
if not validate_output:
if not stream_out:
if output_filename is None:
output_filename = self.filename
output_path = AsposeApp.output_path + Utils.get_filename(output_filename) + '.' + save_format
Utils.save_file(response, output_path)
return output_path
else:
return response.content
else:
return validate_output
| mit | -1,981,531,942,469,692,000 | 34.567164 | 118 | 0.566093 | false | 4.280838 | false | false | false |
swprojects/wxPieTool | pyimager.py | 1 | 8720 |
"""
wxPieTool - wxPython Image Embedding Tool
Copyright 2016 Simon Wu <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import base64
import binascii
import multiprocessing
import os
import tempfile
import wx
import importlib.util
from wx.lib.embeddedimage import PyEmbeddedImage
#------------------------------------------------------------------------------
PROCESS_COUNT = multiprocessing.cpu_count() - 1
def WritePyImageFile(output_file, pyfiledata):
""" writes a new pyImages file from pyfiledata """
py_images_file = open(output_file, 'w') # delete any existing file.
"""
Write the relevant header portion and the import statements
Writes Python statements to the output pyImages file.
"""
py_images_file.write('#' + '-'*69 + '\n\n')
line = '# This file was generated by %s\n\n' %("wxImage Embedding Tool")
py_images_file.write(line)
py_images_file.write('import wx\n')
py_images_file.write('from wx.lib.embeddedimage import PyEmbeddedImage\n')
py_images_file.write('\n')
py_images_file.write('image_index = {}\n')
py_images_file.write('image_catalog = {}\n')
"""
Writes the Python code to the output pyImages file that both define an image
and to be able to generate raw data, wx.Image, wx.Bitmap and wx.Icon objects
when its pyImmages file is imported by any Python application.
"""
for index in sorted(pyfiledata.keys()):
values = pyfiledata[index]
name = values["name"]
data = values["data"]
py_images_file.write('#' + '-'*69 + '\n\n')
py_images_file.write('image_catalog["%s"] = PyEmbeddedImage(\n%s\n' % (name, data))
py_images_file.write(' )\n\n')
# When the PyImages file is imported,
# the following dictionary idName value will become a function name.
py_images_file.write('image_index[%s] = "%s"\n' % (str(index), name))
py_images_file.write('\n')
"""
Writes the Get functions at the end of the file
"""
py_images_file.write('#' + '-'*69 + '\n\n')
# get data function
py_images_file.write('def GetData(name):\n')
py_images_file.write(' ')
py_images_file.write('return image_catalog[name].GetData()\n')
py_images_file.write('\n')
# scale image function
py_images_file.write('def ScaleImage(name, width, height):\n')
py_images_file.write(' ')
py_images_file.write('image = image_catalog[name].GetImage()\n')
py_images_file.write(' ')
py_images_file.write('image = image.Scale(width, height, wx.IMAGE_QUALITY_HIGH)\n')
py_images_file.write(' ')
py_images_file.write('return image\n')
py_images_file.write('\n')
for func_name in ["Image","Bitmap","Icon"]:
py_images_file.write('def Get%s(name, width=-1, height=-1):\n' % func_name)
py_images_file.write(' ')
py_images_file.write('if (width,height) == (-1,-1):\n')
py_images_file.write(' ')
py_images_file.write('return image_catalog[name].Get%s()\n' % func_name)
py_images_file.write(' ')
py_images_file.write('else:\n')
py_images_file.write(' ')
py_images_file.write('image = ScaleImage(name, width, height)\n')
py_images_file.write(' ')
py_images_file.write('image = wx.%s(image)\n' % func_name)
py_images_file.write(' ')
py_images_file.write('return image\n')
py_images_file.write('\n')
py_images_file.close()
#end WritePyImageFile def
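# Sketch of how a generated PyImages module is consumed (the module name
# "icons" and the image name "logo" are assumptions for illustration; the
# Get* signatures match the functions written above):
#
#   import icons
#   bmp = icons.GetBitmap("logo")            # native size
#   small = icons.GetBitmap("logo", 16, 16)  # scaled copy
#   raw = icons.GetData("logo")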
#------------------------------------------------------------------------------
def B64EncodeBinaryData(image_data):
"""
B64 encodes a binary byte string. Returns a list of lines of strings
suitable for embedding in a Python file.
"""
# Encode the PNG file's lossless-compressed binary image data into a single, big b64 string.
encoded_data = binascii.b2a_base64(image_data)[:-1]
# Chop the b64 character-encoded encoded_data into manageable
# line lengths for writing to a file.
data_list = [] # b64 linesOfEncPngImgData list.
while encoded_data:
        line_of_data = encoded_data[:57] # 57 chars of b64 data per line (~72 columns once quoted and indented)
encoded_data = encoded_data[57:] # The remainder of data to be encoded.
# extract the string from b"<str>"
line_of_data = line_of_data.decode("utf8")
line_of_data = ' "%s"' %(line_of_data)
data_list.append(line_of_data)
image_data_list = '\n'.join(data_list)
return image_data_list
#end B64EncodeBinaryData def
#------------------------------------------------------------------------------
def BitmapToPngFile(bitmap, tmp_file) :
"""
Save a wx.Bitmap to a PNG file. The contents of this file is intended
to be b64 encoded in order to finally save it to the output pyImages file.
"""
if bitmap.SaveFile(tmp_file, wx.BITMAP_TYPE_PNG): # SaveFile() success
return True
elif wx.Image(bitmap).SaveFile(tmp_file, wx.BITMAP_TYPE_PNG):
# wx.Bitmap.SaveFile() has failed.
# Try a different save method.
return True
else:
return None
#end BitmapToPngFile def
def CreatePngFileData(path) :
"""
return data of image file, which can than be passed to B64EncodeBinaryData
"""
if not os.path.exists(path):
return None #"File no longer exists. Cancel import"
try:
bitmap = wx.Bitmap(path, wx.BITMAP_TYPE_ANY)
except:
return None #"File no longer exists. Cancel import"
# Is image file bad?
if not bitmap.IsOk():
return None #"File no longer exists. Cancel import"
# Read the original image file and write it to a new PNG file.
    # TemporaryFile() does not expose a usable filesystem path on every
    # platform, so create a named temporary file and keep only its path.
    tmp_fd, tmp_file = tempfile.mkstemp(suffix='.png')
    os.close(tmp_fd)
bmp_to_png = BitmapToPngFile(bitmap, tmp_file)
    if not bmp_to_png:
        print("cannot write to temporary file")
        os.remove(tmp_file)
        return None
# Encode the PNG file's lossless-compressed binary image data into a single, big b64 string.
png_file = open(tmp_file, 'rb')
image_data = png_file.read()
# b64 = image_data.encode ('base64')
png_file.close()
os.remove(tmp_file)
# print("creating temporary file",tmp_file, image_data )
from wx.lib.embeddedimage import PyEmbeddedImage
return image_data
#end CreatePngFileData def
def GetPyImageData(pyimage):
"""
Import the embedded_image_file and add its images to image_dict{}.
The file's existance is expected to have been verified.
"""
file_name, file_ext = os.path.splitext(os.path.basename(pyimage))
print(file_name, file_ext)
# import using the full path of the filename
# """http://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path"""
try:
spec = importlib.util.spec_from_file_location("file_name", pyimage)
foo = importlib.util.module_from_spec(spec)
spec.loader.exec_module(foo)
    except Exception:
print("Failed to load file. Is it a python file?")
return
# check if the python file is actually a PyImages file.
try:
image_index = foo.image_index # should have been defined
# image_catalog = foo.image_catalog
data = {}
for index, image_name in image_index.items():
data[index] = {"name":image_name,
"data":foo.GetData(image_name),
"bitmap":foo.GetBitmap(image_name)}
    except AttributeError:  # a missing module attribute raises AttributeError, not NameError
print("Failed to load file. Is it a valid PyEmbeddedImage File?" )
return
return data
#end GetPyImageData def | gpl-2.0 | 3,023,426,488,338,498,600 | 34.741803 | 96 | 0.607798 | false | 3.789657 | false | false | false |
pli3/e2-openwbif | plugin/controllers/views/web/powerstate.py | 1 | 5042 |
#!/usr/bin/env python
##################################################
## DEPENDENCIES
import sys
import os
import os.path
try:
import builtins as builtin
except ImportError:
import __builtin__ as builtin
from os.path import getmtime, exists
import time
import types
from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion
from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple
from Cheetah.Template import Template
from Cheetah.DummyTransaction import *
from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList
from Cheetah.CacheRegion import CacheRegion
import Cheetah.Filters as Filters
import Cheetah.ErrorCatchers as ErrorCatchers
##################################################
## MODULE CONSTANTS
VFFSL=valueFromFrameOrSearchList
VFSL=valueFromSearchList
VFN=valueForName
currentTime=time.time
__CHEETAH_version__ = '2.4.4'
__CHEETAH_versionTuple__ = (2, 4, 4, 'development', 0)
__CHEETAH_genTime__ = 1447321436.286449
__CHEETAH_genTimestamp__ = 'Thu Nov 12 18:43:56 2015'
__CHEETAH_src__ = '/home/knuth/openpli-oe-core/build/tmp/work/fusionhd-oe-linux/enigma2-plugin-extensions-openwebif/1+gitAUTOINC+5837c87afc-r0/git/plugin/controllers/views/web/powerstate.tmpl'
__CHEETAH_srcLastModified__ = 'Thu Nov 12 18:43:41 2015'
__CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine'
if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple:
raise AssertionError(
'This template was compiled with Cheetah version'
' %s. Templates compiled before version %s must be recompiled.'%(
__CHEETAH_version__, RequiredCheetahVersion))
##################################################
## CLASSES
class powerstate(Template):
##################################################
## CHEETAH GENERATED METHODS
def __init__(self, *args, **KWs):
super(powerstate, self).__init__(*args, **KWs)
if not self._CHEETAH__instanceInitialized:
cheetahKWArgs = {}
allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split()
for k,v in KWs.items():
if k in allowedKWs: cheetahKWArgs[k] = v
self._initCheetahInstance(**cheetahKWArgs)
def respond(self, trans=None):
## CHEETAH: main method generated for this template
if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
trans = self.transaction # is None unless self.awake() was called
if not trans:
trans = DummyTransaction()
_dummyTrans = True
else: _dummyTrans = False
write = trans.response().write
SL = self._CHEETAH__searchList
_filter = self._CHEETAH__currentFilter
########################################
## START - generated method body
_orig_filter_30375654 = _filter
filterName = u'WebSafe'
if self._CHEETAH__filters.has_key("WebSafe"):
_filter = self._CHEETAH__currentFilter = self._CHEETAH__filters[filterName]
else:
_filter = self._CHEETAH__currentFilter = \
self._CHEETAH__filters[filterName] = getattr(self._CHEETAH__filtersLib, filterName)(self).filter
write(u'''<?xml version="1.0" encoding="UTF-8"?>
<e2powerstate>
\t<e2instandby>
''')
if VFFSL(SL,"instandby",True) : # generated from line 5, col 3
_v = "true"
if _v is not None: write(_filter(_v))
else:
_v = "false"
if _v is not None: write(_filter(_v))
write(u'''\t</e2instandby>
</e2powerstate>
''')
_filter = self._CHEETAH__currentFilter = _orig_filter_30375654
########################################
## END - generated method body
return _dummyTrans and trans.response().getvalue() or ""
##################################################
## CHEETAH GENERATED ATTRIBUTES
_CHEETAH__instanceInitialized = False
_CHEETAH_version = __CHEETAH_version__
_CHEETAH_versionTuple = __CHEETAH_versionTuple__
_CHEETAH_genTime = __CHEETAH_genTime__
_CHEETAH_genTimestamp = __CHEETAH_genTimestamp__
_CHEETAH_src = __CHEETAH_src__
_CHEETAH_srcLastModified = __CHEETAH_srcLastModified__
_mainCheetahMethod_for_powerstate= 'respond'
## END CLASS DEFINITION
if not hasattr(powerstate, '_initCheetahAttributes'):
templateAPIClass = getattr(powerstate, '_CHEETAH_templateClass', Template)
templateAPIClass._addCheetahPlumbingCodeToClass(powerstate)
# CHEETAH was developed by Tavis Rudd and Mike Orr
# with code, advice and input from many other volunteers.
# For more information visit http://www.CheetahTemplate.org/
##################################################
## if run from command line:
if __name__ == '__main__':
from Cheetah.TemplateCmdLineIface import CmdLineIface
CmdLineIface(templateObj=powerstate()).run()
| gpl-2.0 | 7,429,331,706,846,930,000 | 32.838926 | 192 | 0.626934 | false | 3.674927 | false | false | false |
erigones/esdc-ce | api/mon/backends/abstract/server.py | 1 | 1705 |
from django.utils.text import Truncator
from django.utils.translation import ugettext_lazy as _
# noinspection PyProtectedMember
from vms.models.base import _DummyModel, _UserTasksModel
from vms.models import Dc
class AbstractMonitoringServer(_DummyModel, _UserTasksModel):
"""
Abstract model for representing a monitoring server in a DC.
"""
_pk_key = 'mon_server_id'
uri = NotImplemented
name = NotImplemented
address = NotImplemented
connection_id = NotImplemented
# noinspection PyPep8Naming
class Meta:
# Required for api.exceptions.ObjectNotFound
verbose_name_raw = _('Monitoring Server')
# noinspection PyUnusedLocal
def __init__(self, dc):
self.dc = dc
super(AbstractMonitoringServer, self).__init__()
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.name)
@property
def id(self):
return self.dc.id
@property
def owner(self): # Required by _UserTasksModel
return self.dc.owner
@property
def pk(self): # Required by task_log
return str(self.id)
@property
def log_name(self): # Required by task_log
return Truncator(self.uri).chars(32)
@property
def log_alias(self): # Required by task_log
return self.name
@classmethod
def get_content_type(cls): # Required by task_log
return None
@classmethod
def get_object_type(cls, content_type=None): # Required by task_log
return 'monitoringserver'
@classmethod
def get_object_by_pk(cls, pk):
dc = Dc.objects.get_by_id(pk)
return cls(dc)
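# Illustrative only (not part of this module): a concrete backend would
# subclass the abstract model and replace the NotImplemented attributes.
#
#   class DummyMonitoringServer(AbstractMonitoringServer):
#       uri = 'http://mon.example.com'    # assumed values
#       name = 'dummy'
#       address = '127.0.0.1'
#       connection_id = 'dummy-mon'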
MonitoringServerClass = AbstractMonitoringServer
| apache-2.0 | 4,198,058,031,038,698,500 | 24.833333 | 72 | 0.653959 | false | 3.892694 | false | false | false |
umlfri/umlfri2 | umlfri2/qtgui/canvas/scrolledcanvaswidget.py | 1 | 1410 |
from PyQt5.QtCore import QPoint
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QWheelEvent
from PyQt5.QtWidgets import QScrollArea
from .canvaswidget import CanvasWidget
class ScrolledCanvasWidget(QScrollArea):
def __init__(self, main_window, drawing_area):
super().__init__()
self.__canvas = CanvasWidget(main_window, drawing_area)
self.setWidget(self.__canvas)
self.setWidgetResizable(True)
def wheelEvent(self, event):
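        # Shift+wheel scrolls horizontally: swap the x/y components of the
        # wheel deltas and re-dispatch the event without the modifier.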
if event.modifiers() == Qt.ShiftModifier:
pixelDelta = event.pixelDelta()
angleDelta = event.angleDelta()
if angleDelta.x() == 0 and angleDelta.y() != 0:
delta = angleDelta.y()
orientation = Qt.Horizontal
else:
delta = angleDelta.x()
orientation = Qt.Vertical
super().wheelEvent(QWheelEvent(event.pos(), event.globalPos(),
QPoint(pixelDelta.y(), pixelDelta.x()),
QPoint(angleDelta.y(), angleDelta.x()),
delta, orientation,
event.buttons(), Qt.NoModifier))
else:
super().wheelEvent(event)
@property
def diagram(self):
return self.__canvas.diagram
| gpl-3.0 | 4,295,992,312,250,899,000 | 34.25 | 82 | 0.52766 | false | 4.812287 | false | false | false |
carbureted/shavar-prod-lists | scripts/json_verify.py | 1 | 6963 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import glob
import json
import re
from types import DictType, ListType, UnicodeType
from urlparse import urlparse
parser = argparse.ArgumentParser(description='Verify json files for shavar.')
parser.add_argument("-f", "--file", help="filename to verify")
bad_uris = []
dupe_hosts = {
"properties": [],
"resources": []
}
block_host_uris = []
entity_host_uris = []
errors = []
file_contents = []
file_name = ""
result = 0
def run(file):
global file_name
file_name = file
try:
verify(file)
    except Exception:
errors.append("\tError: Problem handling file")
finish()
def verify(file):
try:
with open(file) as f:
raw_data = f.readlines()
# save contents of file, including line numbers
for x in range(0, len(raw_data)):
line_number = x+1
file_contents.append([raw_data[x], line_number])
# attempt to parse file as json
json_obj = json.loads("".join(raw_data))
try:
# determine which schema this file uses
if ("categories" in json_obj):
# google_mapping.json
# disconnect_blacklist.json
find_uris(json_obj["categories"])
else:
# disconnect_entitylist.json
find_uris_in_entities(json_obj)
        except Exception:
errors.append("\tError: Can't parse file")
except ValueError as e:
# invalid json formatting
errors.append("\tError: %s" % e)
return
except IOError as e:
# non-existent file
errors.append("\tError: Can't open file: %s" % e)
return
"""
categories_json is expected to match this format:
"categories": {
"Disconnect": [
{
"Facebook": {
"http://www.facebook.com/": [
"facebook.com",
...
]
}
},
{
"Google": {
"http://www.google.com/": [
"2mdn.net",
...
]
}
},
...
],
"Advertising": [
{
"[x+1]": {
"http://www.xplusone.com/": [
"ru4.com",
...
]
}
},
]
...
}
"""
def find_uris(categories_json):
assert type(categories_json) is DictType
for category, category_json in categories_json.iteritems():
assert type(category) is UnicodeType
assert type(category_json) is ListType
for entity in category_json:
assert type(entity) is DictType
for entity_name, entity_json in entity.iteritems():
assert type(entity_name) is UnicodeType
assert type(entity_json) is DictType
# pop dnt out of the dict, so we can iteritems() over the rest
try:
dnt_value = entity_json.pop('dnt', '')
assert dnt_value in ["w3c", "eff", ""]
except AssertionError:
errors.append("%s has bad DNT value: %s" % (entity_name,
dnt_value))
for domain, uris in entity_json.iteritems():
assert type(domain) is UnicodeType
assert type(uris) is ListType
for uri in uris:
check_uri(uri)
block_host_uris.append(uri)
def find_uris_in_entities(entitylist_json):
checked_uris = {
"properties": [],
"resources": []
}
assert len(entitylist_json.items()) > 0
assert type(entitylist_json) is DictType
for entity, types in entitylist_json.iteritems():
assert type(entity) is UnicodeType
assert type(types) is DictType
for host_type, uris in types.iteritems():
assert host_type in ["properties", "resources"]
assert type(uris) is ListType
for uri in uris:
if uri in checked_uris[host_type]:
dupe_hosts[host_type].append(uri)
check_uri(uri)
entity_host_uris.append(uri)
checked_uris[host_type].append(uri)
def check_uri(uri):
# Valid URI:
# no scheme, port, fragment, path or query string
# no disallowed characters
# no leading/trailing garbage
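    # e.g. "ads.example.com" passes the checks below, while
    # "http://example.com/" (scheme) and "example.com:80" (port) are rejected.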
try:
uri.decode('ascii')
except UnicodeEncodeError:
bad_uris.append(uri)
parsed_uri = urlparse(uri)
try:
assert parsed_uri.scheme == ''
# domains of urls without schemes are parsed into 'path' so check path
# for port
assert ':' not in parsed_uri.path
assert parsed_uri.netloc == ''
assert parsed_uri.params == ''
assert parsed_uri.query == ''
assert parsed_uri.fragment == ''
assert len(parsed_uri.path) < 128
except AssertionError:
bad_uris.append(uri)
return
def find_line_number(uri):
line = 0
try:
for x in range(0, len(file_contents)):
temp = file_contents[x][0].decode("utf-8", "ignore")
            if re.search(re.escape(uri), temp):  # escape so the URI is matched literally
line = file_contents[x][1]
file_contents.pop(x)
break
except ValueError as e:
print e
line = -1
return str(line)
def make_errors_from_bad_uris():
for bad_uri in bad_uris:
errors.append("\tError: Bad URI: %s\t: in line %s" %
(bad_uri, find_line_number(bad_uri)))
for host_type, hosts in dupe_hosts.iteritems():
for host in hosts:
errors.append("\tDupe: Dupe host: %s\t in line %s" %
(host, find_line_number(host)))
def finish():
make_errors_from_bad_uris()
if (len(errors) == 0):
print "\n" + file_name + " : valid"
else:
global result
result = 1
print "\n" + file_name + " : invalid"
for error in errors:
print error
reset()
def reset():
global bad_uris
bad_uris = []
global dupe_hosts
dupe_hosts = {
"properties": [],
"resources": []
}
global errors
errors = []
global file_contents
file_contents = []
global file_name
file_name = ""
def start(filename=None):
if (filename):
run(filename)
else:
for f in glob.glob("*.json"):
run(f)
args = parser.parse_args()
start(args.file)
print "\n block_host_uris: %s " % len(block_host_uris)
print "\n entity_host_uris: %s " % len(entity_host_uris)
assert "itisatracker.com" in block_host_uris
exit(result)
| gpl-3.0 | -808,360,885,678,404,500 | 27.892116 | 78 | 0.507827 | false | 4.112817 | false | false | false |
kamalx/edx-platform | lms/djangoapps/discussion_api/tests/test_api.py | 1 | 90651 |
"""
Tests for Discussion API internal interface
"""
from datetime import datetime, timedelta
import itertools
from urlparse import parse_qs, urlparse, urlunparse
from urllib import urlencode
import ddt
import httpretty
import mock
from pytz import UTC
from django.core.exceptions import ValidationError
from django.http import Http404
from django.test.client import RequestFactory
from rest_framework.exceptions import PermissionDenied
from opaque_keys.edx.locator import CourseLocator
from courseware.tests.factories import BetaTesterFactory, StaffFactory
from discussion_api.api import (
create_comment,
create_thread,
delete_comment,
delete_thread,
get_comment_list,
get_course,
get_course_topics,
get_thread_list,
update_comment,
update_thread,
)
from discussion_api.tests.utils import (
CommentsServiceMockMixin,
make_minimal_cs_comment,
make_minimal_cs_thread,
)
from django_comment_common.models import (
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_STUDENT,
Role,
)
from openedx.core.djangoapps.course_groups.models import CourseUserGroupPartitionGroup
from openedx.core.djangoapps.course_groups.tests.helpers import CohortFactory
from student.tests.factories import CourseEnrollmentFactory, UserFactory
from util.testing import UrlResetMixin
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.partitions.partitions import Group, UserPartition
def _remove_discussion_tab(course, user_id):
"""
Remove the discussion tab for the course.
user_id is passed to the modulestore as the editor of the module.
"""
course.tabs = [tab for tab in course.tabs if not tab.type == 'discussion']
modulestore().update_item(course, user_id)
@ddt.ddt
class GetCourseTest(UrlResetMixin, ModuleStoreTestCase):
"""Test for get_course"""
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(GetCourseTest, self).setUp()
self.course = CourseFactory.create(org="x", course="y", run="z")
self.user = UserFactory.create()
self.request = RequestFactory().get("/dummy")
self.request.user = self.user
CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
def test_nonexistent_course(self):
with self.assertRaises(Http404):
get_course(self.request, CourseLocator.from_string("non/existent/course"))
def test_not_enrolled(self):
unenrolled_user = UserFactory.create()
self.request.user = unenrolled_user
with self.assertRaises(Http404):
get_course(self.request, self.course.id)
def test_discussions_disabled(self):
_remove_discussion_tab(self.course, self.user.id)
with self.assertRaises(Http404):
get_course(self.request, self.course.id)
def test_basic(self):
self.assertEqual(
get_course(self.request, self.course.id),
{
"id": unicode(self.course.id),
"blackouts": [],
"thread_list_url": "http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz",
"topics_url": "http://testserver/api/discussion/v1/course_topics/x/y/z",
}
)
def test_blackout(self):
# A variety of formats is accepted
self.course.discussion_blackouts = [
["2015-06-09T00:00:00Z", "6-10-15"],
[1433980800000, datetime(2015, 6, 12)],
]
modulestore().update_item(self.course, self.user.id)
result = get_course(self.request, self.course.id)
self.assertEqual(
result["blackouts"],
[
{"start": "2015-06-09T00:00:00+00:00", "end": "2015-06-10T00:00:00+00:00"},
{"start": "2015-06-11T00:00:00+00:00", "end": "2015-06-12T00:00:00+00:00"},
]
)
@ddt.data(None, "not a datetime", "2015", [])
def test_blackout_errors(self, bad_value):
self.course.discussion_blackouts = [
[bad_value, "2015-06-09T00:00:00Z"],
["2015-06-10T00:00:00Z", "2015-06-11T00:00:00Z"],
]
modulestore().update_item(self.course, self.user.id)
result = get_course(self.request, self.course.id)
self.assertEqual(result["blackouts"], [])
@mock.patch.dict("django.conf.settings.FEATURES", {"DISABLE_START_DATES": False})
class GetCourseTopicsTest(UrlResetMixin, ModuleStoreTestCase):
"""Test for get_course_topics"""
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(GetCourseTopicsTest, self).setUp()
self.maxDiff = None # pylint: disable=invalid-name
self.partition = UserPartition(
0,
"partition",
"Test Partition",
[Group(0, "Cohort A"), Group(1, "Cohort B")],
scheme_id="cohort"
)
self.course = CourseFactory.create(
org="x",
course="y",
run="z",
start=datetime.now(UTC),
discussion_topics={"Test Topic": {"id": "non-courseware-topic-id"}},
user_partitions=[self.partition],
cohort_config={"cohorted": True},
days_early_for_beta=3
)
self.user = UserFactory.create()
self.request = RequestFactory().get("/dummy")
self.request.user = self.user
CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
def make_discussion_module(self, topic_id, category, subcategory, **kwargs):
"""Build a discussion module in self.course"""
ItemFactory.create(
parent_location=self.course.location,
category="discussion",
discussion_id=topic_id,
discussion_category=category,
discussion_target=subcategory,
**kwargs
)
def get_thread_list_url(self, topic_id_list):
"""
Returns the URL for the thread_list_url field, given a list of topic_ids
"""
path = "http://testserver/api/discussion/v1/threads/"
query_list = [("course_id", unicode(self.course.id))] + [("topic_id", topic_id) for topic_id in topic_id_list]
return urlunparse(("", "", path, "", urlencode(query_list), ""))
def get_course_topics(self):
"""
Get course topics for self.course, using the given user or self.user if
not provided, and generating absolute URIs with a test scheme/host.
"""
return get_course_topics(self.request, self.course.id)
def make_expected_tree(self, topic_id, name, children=None):
"""
Build an expected result tree given a topic id, display name, and
children
"""
topic_id_list = [topic_id] if topic_id else [child["id"] for child in children]
children = children or []
node = {
"id": topic_id,
"name": name,
"children": children,
"thread_list_url": self.get_thread_list_url(topic_id_list)
}
return node
def test_nonexistent_course(self):
with self.assertRaises(Http404):
get_course_topics(self.request, CourseLocator.from_string("non/existent/course"))
def test_not_enrolled(self):
unenrolled_user = UserFactory.create()
self.request.user = unenrolled_user
with self.assertRaises(Http404):
self.get_course_topics()
def test_discussions_disabled(self):
_remove_discussion_tab(self.course, self.user.id)
with self.assertRaises(Http404):
self.get_course_topics()
def test_without_courseware(self):
actual = self.get_course_topics()
expected = {
"courseware_topics": [],
"non_courseware_topics": [
self.make_expected_tree("non-courseware-topic-id", "Test Topic")
],
}
self.assertEqual(actual, expected)
def test_with_courseware(self):
self.make_discussion_module("courseware-topic-id", "Foo", "Bar")
actual = self.get_course_topics()
expected = {
"courseware_topics": [
self.make_expected_tree(
None,
"Foo",
[self.make_expected_tree("courseware-topic-id", "Bar")]
),
],
"non_courseware_topics": [
self.make_expected_tree("non-courseware-topic-id", "Test Topic")
],
}
self.assertEqual(actual, expected)
def test_many(self):
self.course.discussion_topics = {
"A": {"id": "non-courseware-1"},
"B": {"id": "non-courseware-2"},
}
modulestore().update_item(self.course, self.user.id)
self.make_discussion_module("courseware-1", "A", "1")
self.make_discussion_module("courseware-2", "A", "2")
self.make_discussion_module("courseware-3", "B", "1")
self.make_discussion_module("courseware-4", "B", "2")
self.make_discussion_module("courseware-5", "C", "1")
actual = self.get_course_topics()
expected = {
"courseware_topics": [
self.make_expected_tree(
None,
"A",
[
self.make_expected_tree("courseware-1", "1"),
self.make_expected_tree("courseware-2", "2"),
]
),
self.make_expected_tree(
None,
"B",
[
self.make_expected_tree("courseware-3", "1"),
self.make_expected_tree("courseware-4", "2"),
]
),
self.make_expected_tree(
None,
"C",
[self.make_expected_tree("courseware-5", "1")]
),
],
"non_courseware_topics": [
self.make_expected_tree("non-courseware-1", "A"),
self.make_expected_tree("non-courseware-2", "B"),
],
}
self.assertEqual(actual, expected)
def test_sort_key(self):
self.course.discussion_topics = {
"W": {"id": "non-courseware-1", "sort_key": "Z"},
"X": {"id": "non-courseware-2"},
"Y": {"id": "non-courseware-3", "sort_key": "Y"},
"Z": {"id": "non-courseware-4", "sort_key": "W"},
}
modulestore().update_item(self.course, self.user.id)
self.make_discussion_module("courseware-1", "First", "A", sort_key="D")
self.make_discussion_module("courseware-2", "First", "B", sort_key="B")
self.make_discussion_module("courseware-3", "First", "C", sort_key="E")
self.make_discussion_module("courseware-4", "Second", "A", sort_key="F")
self.make_discussion_module("courseware-5", "Second", "B", sort_key="G")
self.make_discussion_module("courseware-6", "Second", "C")
self.make_discussion_module("courseware-7", "Second", "D", sort_key="A")
actual = self.get_course_topics()
expected = {
"courseware_topics": [
self.make_expected_tree(
None,
"First",
[
self.make_expected_tree("courseware-2", "B"),
self.make_expected_tree("courseware-1", "A"),
self.make_expected_tree("courseware-3", "C"),
]
),
self.make_expected_tree(
None,
"Second",
[
self.make_expected_tree("courseware-7", "D"),
self.make_expected_tree("courseware-6", "C"),
self.make_expected_tree("courseware-4", "A"),
self.make_expected_tree("courseware-5", "B"),
]
),
],
"non_courseware_topics": [
self.make_expected_tree("non-courseware-4", "Z"),
self.make_expected_tree("non-courseware-2", "X"),
self.make_expected_tree("non-courseware-3", "Y"),
self.make_expected_tree("non-courseware-1", "W"),
],
}
self.assertEqual(actual, expected)
def test_access_control(self):
"""
Test that only topics that a user has access to are returned. The
ways in which a user may not have access are:
* Module is visible to staff only
* Module has a start date in the future
* Module is accessible only to a group the user is not in
Also, there is a case that ensures that a category with no accessible
subcategories does not appear in the result.
"""
beta_tester = BetaTesterFactory.create(course_key=self.course.id)
CourseEnrollmentFactory.create(user=beta_tester, course_id=self.course.id)
staff = StaffFactory.create(course_key=self.course.id)
for user, group_idx in [(self.user, 0), (beta_tester, 1)]:
cohort = CohortFactory.create(
course_id=self.course.id,
name=self.partition.groups[group_idx].name,
users=[user]
)
CourseUserGroupPartitionGroup.objects.create(
course_user_group=cohort,
partition_id=self.partition.id,
group_id=self.partition.groups[group_idx].id
)
self.make_discussion_module("courseware-1", "First", "Everybody")
self.make_discussion_module(
"courseware-2",
"First",
"Cohort A",
group_access={self.partition.id: [self.partition.groups[0].id]}
)
self.make_discussion_module(
"courseware-3",
"First",
"Cohort B",
group_access={self.partition.id: [self.partition.groups[1].id]}
)
self.make_discussion_module("courseware-4", "Second", "Staff Only", visible_to_staff_only=True)
self.make_discussion_module(
"courseware-5",
"Second",
"Future Start Date",
start=datetime.now(UTC) + timedelta(days=1)
)
student_actual = self.get_course_topics()
student_expected = {
"courseware_topics": [
self.make_expected_tree(
None,
"First",
[
self.make_expected_tree("courseware-2", "Cohort A"),
self.make_expected_tree("courseware-1", "Everybody"),
]
),
],
"non_courseware_topics": [
self.make_expected_tree("non-courseware-topic-id", "Test Topic"),
],
}
self.assertEqual(student_actual, student_expected)
self.request.user = beta_tester
beta_actual = self.get_course_topics()
beta_expected = {
"courseware_topics": [
self.make_expected_tree(
None,
"First",
[
self.make_expected_tree("courseware-3", "Cohort B"),
self.make_expected_tree("courseware-1", "Everybody"),
]
),
self.make_expected_tree(
None,
"Second",
[self.make_expected_tree("courseware-5", "Future Start Date")]
),
],
"non_courseware_topics": [
self.make_expected_tree("non-courseware-topic-id", "Test Topic"),
],
}
self.assertEqual(beta_actual, beta_expected)
self.request.user = staff
staff_actual = self.get_course_topics()
staff_expected = {
"courseware_topics": [
self.make_expected_tree(
None,
"First",
[
self.make_expected_tree("courseware-2", "Cohort A"),
self.make_expected_tree("courseware-3", "Cohort B"),
self.make_expected_tree("courseware-1", "Everybody"),
]
),
self.make_expected_tree(
None,
"Second",
[
self.make_expected_tree("courseware-5", "Future Start Date"),
self.make_expected_tree("courseware-4", "Staff Only"),
]
),
],
"non_courseware_topics": [
self.make_expected_tree("non-courseware-topic-id", "Test Topic"),
],
}
self.assertEqual(staff_actual, staff_expected)
@ddt.ddt
class GetThreadListTest(CommentsServiceMockMixin, UrlResetMixin, ModuleStoreTestCase):
"""Test for get_thread_list"""
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(GetThreadListTest, self).setUp()
httpretty.reset()
httpretty.enable()
self.addCleanup(httpretty.disable)
self.maxDiff = None # pylint: disable=invalid-name
self.user = UserFactory.create()
self.register_get_user_response(self.user)
self.request = RequestFactory().get("/test_path")
self.request.user = self.user
self.course = CourseFactory.create()
CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
self.author = UserFactory.create()
self.cohort = CohortFactory.create(course_id=self.course.id)
def get_thread_list(
self,
threads,
page=1,
page_size=1,
num_pages=1,
course=None,
topic_id_list=None,
):
"""
Register the appropriate comments service response, then call
get_thread_list and return the result.
"""
course = course or self.course
self.register_get_threads_response(threads, page, num_pages)
ret = get_thread_list(self.request, course.id, page, page_size, topic_id_list)
return ret
def test_nonexistent_course(self):
with self.assertRaises(Http404):
get_thread_list(self.request, CourseLocator.from_string("non/existent/course"), 1, 1)
def test_not_enrolled(self):
self.request.user = UserFactory.create()
with self.assertRaises(Http404):
self.get_thread_list([])
def test_discussions_disabled(self):
_remove_discussion_tab(self.course, self.user.id)
with self.assertRaises(Http404):
self.get_thread_list([])
def test_empty(self):
self.assertEqual(
self.get_thread_list([]),
{
"results": [],
"next": None,
"previous": None,
"text_search_rewrite": None,
}
)
def test_get_threads_by_topic_id(self):
self.get_thread_list([], topic_id_list=["topic_x", "topic_meow"])
self.assertEqual(urlparse(httpretty.last_request().path).path, "/api/v1/threads")
self.assert_last_query_params({
"course_id": [unicode(self.course.id)],
"sort_key": ["date"],
"sort_order": ["desc"],
"page": ["1"],
"per_page": ["1"],
"recursive": ["False"],
"commentable_ids": ["topic_x,topic_meow"]
})
def test_basic_query_params(self):
self.get_thread_list([], page=6, page_size=14)
self.assert_last_query_params({
"course_id": [unicode(self.course.id)],
"sort_key": ["date"],
"sort_order": ["desc"],
"page": ["6"],
"per_page": ["14"],
"recursive": ["False"],
})
def test_thread_content(self):
source_threads = [
{
"type": "thread",
"id": "test_thread_id_0",
"course_id": unicode(self.course.id),
"commentable_id": "topic_x",
"group_id": None,
"user_id": str(self.author.id),
"username": self.author.username,
"anonymous": False,
"anonymous_to_peers": False,
"created_at": "2015-04-28T00:00:00Z",
"updated_at": "2015-04-28T11:11:11Z",
"thread_type": "discussion",
"title": "Test Title",
"body": "Test body",
"pinned": False,
"closed": False,
"abuse_flaggers": [],
"votes": {"up_count": 4},
"comments_count": 5,
"unread_comments_count": 3,
},
{
"type": "thread",
"id": "test_thread_id_1",
"course_id": unicode(self.course.id),
"commentable_id": "topic_y",
"group_id": self.cohort.id,
"user_id": str(self.author.id),
"username": self.author.username,
"anonymous": False,
"anonymous_to_peers": False,
"created_at": "2015-04-28T22:22:22Z",
"updated_at": "2015-04-28T00:33:33Z",
"thread_type": "question",
"title": "Another Test Title",
"body": "More content",
"pinned": False,
"closed": True,
"abuse_flaggers": [],
"votes": {"up_count": 9},
"comments_count": 18,
"unread_comments_count": 0,
},
]
expected_threads = [
{
"id": "test_thread_id_0",
"course_id": unicode(self.course.id),
"topic_id": "topic_x",
"group_id": None,
"group_name": None,
"author": self.author.username,
"author_label": None,
"created_at": "2015-04-28T00:00:00Z",
"updated_at": "2015-04-28T11:11:11Z",
"type": "discussion",
"title": "Test Title",
"raw_body": "Test body",
"rendered_body": "<p>Test body</p>",
"pinned": False,
"closed": False,
"following": False,
"abuse_flagged": False,
"voted": False,
"vote_count": 4,
"comment_count": 5,
"unread_comment_count": 3,
"comment_list_url": "http://testserver/api/discussion/v1/comments/?thread_id=test_thread_id_0",
"endorsed_comment_list_url": None,
"non_endorsed_comment_list_url": None,
"editable_fields": ["following", "voted"],
},
{
"id": "test_thread_id_1",
"course_id": unicode(self.course.id),
"topic_id": "topic_y",
"group_id": self.cohort.id,
"group_name": self.cohort.name,
"author": self.author.username,
"author_label": None,
"created_at": "2015-04-28T22:22:22Z",
"updated_at": "2015-04-28T00:33:33Z",
"type": "question",
"title": "Another Test Title",
"raw_body": "More content",
"rendered_body": "<p>More content</p>",
"pinned": False,
"closed": True,
"following": False,
"abuse_flagged": False,
"voted": False,
"vote_count": 9,
"comment_count": 18,
"unread_comment_count": 0,
"comment_list_url": None,
"endorsed_comment_list_url": (
"http://testserver/api/discussion/v1/comments/?thread_id=test_thread_id_1&endorsed=True"
),
"non_endorsed_comment_list_url": (
"http://testserver/api/discussion/v1/comments/?thread_id=test_thread_id_1&endorsed=False"
),
"editable_fields": ["following", "voted"],
},
]
self.assertEqual(
self.get_thread_list(source_threads),
{
"results": expected_threads,
"next": None,
"previous": None,
"text_search_rewrite": None,
}
)
@ddt.data(
*itertools.product(
[
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_STUDENT,
],
[True, False]
)
)
@ddt.unpack
def test_request_group(self, role_name, course_is_cohorted):
cohort_course = CourseFactory.create(cohort_config={"cohorted": course_is_cohorted})
CourseEnrollmentFactory.create(user=self.user, course_id=cohort_course.id)
CohortFactory.create(course_id=cohort_course.id, users=[self.user])
role = Role.objects.create(name=role_name, course_id=cohort_course.id)
role.users = [self.user]
self.get_thread_list([], course=cohort_course)
actual_has_group = "group_id" in httpretty.last_request().querystring
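        # A group_id filter is expected only for students in cohorted courses;
        # privileged roles (and non-cohorted courses) query across all groups.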
expected_has_group = (course_is_cohorted and role_name == FORUM_ROLE_STUDENT)
self.assertEqual(actual_has_group, expected_has_group)
def test_pagination(self):
# N.B. Empty thread list is not realistic but convenient for this test
self.assertEqual(
self.get_thread_list([], page=1, num_pages=3),
{
"results": [],
"next": "http://testserver/test_path?page=2",
"previous": None,
"text_search_rewrite": None,
}
)
self.assertEqual(
self.get_thread_list([], page=2, num_pages=3),
{
"results": [],
"next": "http://testserver/test_path?page=3",
"previous": "http://testserver/test_path?page=1",
"text_search_rewrite": None,
}
)
self.assertEqual(
self.get_thread_list([], page=3, num_pages=3),
{
"results": [],
"next": None,
"previous": "http://testserver/test_path?page=2",
"text_search_rewrite": None,
}
)
# Test page past the last one
self.register_get_threads_response([], page=3, num_pages=3)
with self.assertRaises(Http404):
get_thread_list(self.request, self.course.id, page=4, page_size=10)
@ddt.data(None, "rewritten search string")
def test_text_search(self, text_search_rewrite):
self.register_get_threads_search_response([], text_search_rewrite)
self.assertEqual(
get_thread_list(
self.request,
self.course.id,
page=1,
page_size=10,
text_search="test search string"
),
{
"results": [],
"next": None,
"previous": None,
"text_search_rewrite": text_search_rewrite,
}
)
self.assert_last_query_params({
"course_id": [unicode(self.course.id)],
"sort_key": ["date"],
"sort_order": ["desc"],
"page": ["1"],
"per_page": ["10"],
"recursive": ["False"],
"text": ["test search string"],
})
@ddt.ddt
class GetCommentListTest(CommentsServiceMockMixin, ModuleStoreTestCase):
"""Test for get_comment_list"""
def setUp(self):
super(GetCommentListTest, self).setUp()
httpretty.reset()
httpretty.enable()
self.addCleanup(httpretty.disable)
self.maxDiff = None # pylint: disable=invalid-name
self.user = UserFactory.create()
self.register_get_user_response(self.user)
self.request = RequestFactory().get("/test_path")
self.request.user = self.user
self.course = CourseFactory.create()
CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
self.author = UserFactory.create()
def make_minimal_cs_thread(self, overrides=None):
"""
Create a thread with the given overrides, plus the course_id if not
already in overrides.
"""
overrides = overrides.copy() if overrides else {}
overrides.setdefault("course_id", unicode(self.course.id))
return make_minimal_cs_thread(overrides)
def get_comment_list(self, thread, endorsed=None, page=1, page_size=1):
"""
Register the appropriate comments service response, then call
get_comment_list and return the result.
"""
self.register_get_thread_response(thread)
return get_comment_list(self.request, thread["id"], endorsed, page, page_size)
def test_nonexistent_thread(self):
thread_id = "nonexistent_thread"
self.register_get_thread_error_response(thread_id, 404)
with self.assertRaises(Http404):
get_comment_list(self.request, thread_id, endorsed=False, page=1, page_size=1)
def test_nonexistent_course(self):
with self.assertRaises(Http404):
self.get_comment_list(self.make_minimal_cs_thread({"course_id": "non/existent/course"}))
def test_not_enrolled(self):
self.request.user = UserFactory.create()
with self.assertRaises(Http404):
self.get_comment_list(self.make_minimal_cs_thread())
def test_discussions_disabled(self):
_remove_discussion_tab(self.course, self.user.id)
with self.assertRaises(Http404):
self.get_comment_list(self.make_minimal_cs_thread())
@ddt.data(
*itertools.product(
[
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_STUDENT,
],
[True, False],
[True, False],
["no_group", "match_group", "different_group"],
)
)
@ddt.unpack
def test_group_access(
self,
role_name,
course_is_cohorted,
topic_is_cohorted,
thread_group_state
):
cohort_course = CourseFactory.create(
discussion_topics={"Test Topic": {"id": "test_topic"}},
cohort_config={
"cohorted": course_is_cohorted,
"cohorted_discussions": ["test_topic"] if topic_is_cohorted else [],
}
)
CourseEnrollmentFactory.create(user=self.user, course_id=cohort_course.id)
cohort = CohortFactory.create(course_id=cohort_course.id, users=[self.user])
role = Role.objects.create(name=role_name, course_id=cohort_course.id)
role.users = [self.user]
thread = self.make_minimal_cs_thread({
"course_id": unicode(cohort_course.id),
"commentable_id": "test_topic",
"group_id": (
None if thread_group_state == "no_group" else
cohort.id if thread_group_state == "match_group" else
cohort.id + 1
),
})
expected_error = (
role_name == FORUM_ROLE_STUDENT and
course_is_cohorted and
topic_is_cohorted and
thread_group_state == "different_group"
)
try:
self.get_comment_list(thread)
self.assertFalse(expected_error)
except Http404:
self.assertTrue(expected_error)
@ddt.data(True, False)
def test_discussion_endorsed(self, endorsed_value):
with self.assertRaises(ValidationError) as assertion:
self.get_comment_list(
self.make_minimal_cs_thread({"thread_type": "discussion"}),
endorsed=endorsed_value
)
self.assertEqual(
assertion.exception.message_dict,
{"endorsed": ["This field may not be specified for discussion threads."]}
)
def test_question_without_endorsed(self):
with self.assertRaises(ValidationError) as assertion:
self.get_comment_list(
self.make_minimal_cs_thread({"thread_type": "question"}),
endorsed=None
)
self.assertEqual(
assertion.exception.message_dict,
{"endorsed": ["This field is required for question threads."]}
)
def test_empty(self):
discussion_thread = self.make_minimal_cs_thread(
{"thread_type": "discussion", "children": [], "resp_total": 0}
)
self.assertEqual(
self.get_comment_list(discussion_thread),
{"results": [], "next": None, "previous": None}
)
question_thread = self.make_minimal_cs_thread({
"thread_type": "question",
"endorsed_responses": [],
"non_endorsed_responses": [],
"non_endorsed_resp_total": 0
})
self.assertEqual(
self.get_comment_list(question_thread, endorsed=False),
{"results": [], "next": None, "previous": None}
)
self.assertEqual(
self.get_comment_list(question_thread, endorsed=True),
{"results": [], "next": None, "previous": None}
)
def test_basic_query_params(self):
self.get_comment_list(
self.make_minimal_cs_thread({
"children": [make_minimal_cs_comment()],
"resp_total": 71
}),
page=6,
page_size=14
)
self.assert_query_params_equal(
httpretty.httpretty.latest_requests[-2],
{
"recursive": ["True"],
"user_id": [str(self.user.id)],
"mark_as_read": ["True"],
"resp_skip": ["70"],
"resp_limit": ["14"],
}
)
def test_discussion_content(self):
source_comments = [
{
"type": "comment",
"id": "test_comment_1",
"thread_id": "test_thread",
"user_id": str(self.author.id),
"username": self.author.username,
"anonymous": False,
"anonymous_to_peers": False,
"created_at": "2015-05-11T00:00:00Z",
"updated_at": "2015-05-11T11:11:11Z",
"body": "Test body",
"endorsed": False,
"abuse_flaggers": [],
"votes": {"up_count": 4},
"children": [],
},
{
"type": "comment",
"id": "test_comment_2",
"thread_id": "test_thread",
"user_id": str(self.author.id),
"username": self.author.username,
"anonymous": True,
"anonymous_to_peers": False,
"created_at": "2015-05-11T22:22:22Z",
"updated_at": "2015-05-11T33:33:33Z",
"body": "More content",
"endorsed": False,
"abuse_flaggers": [str(self.user.id)],
"votes": {"up_count": 7},
"children": [],
}
]
expected_comments = [
{
"id": "test_comment_1",
"thread_id": "test_thread",
"parent_id": None,
"author": self.author.username,
"author_label": None,
"created_at": "2015-05-11T00:00:00Z",
"updated_at": "2015-05-11T11:11:11Z",
"raw_body": "Test body",
"rendered_body": "<p>Test body</p>",
"endorsed": False,
"endorsed_by": None,
"endorsed_by_label": None,
"endorsed_at": None,
"abuse_flagged": False,
"voted": False,
"vote_count": 4,
"children": [],
"editable_fields": ["voted"],
},
{
"id": "test_comment_2",
"thread_id": "test_thread",
"parent_id": None,
"author": None,
"author_label": None,
"created_at": "2015-05-11T22:22:22Z",
"updated_at": "2015-05-11T33:33:33Z",
"raw_body": "More content",
"rendered_body": "<p>More content</p>",
"endorsed": False,
"endorsed_by": None,
"endorsed_by_label": None,
"endorsed_at": None,
"abuse_flagged": True,
"voted": False,
"vote_count": 7,
"children": [],
"editable_fields": ["voted"],
},
]
actual_comments = self.get_comment_list(
self.make_minimal_cs_thread({"children": source_comments})
)["results"]
self.assertEqual(actual_comments, expected_comments)
def test_question_content(self):
thread = self.make_minimal_cs_thread({
"thread_type": "question",
"endorsed_responses": [make_minimal_cs_comment({"id": "endorsed_comment"})],
"non_endorsed_responses": [make_minimal_cs_comment({"id": "non_endorsed_comment"})],
"non_endorsed_resp_total": 1,
})
endorsed_actual = self.get_comment_list(thread, endorsed=True)
self.assertEqual(endorsed_actual["results"][0]["id"], "endorsed_comment")
non_endorsed_actual = self.get_comment_list(thread, endorsed=False)
self.assertEqual(non_endorsed_actual["results"][0]["id"], "non_endorsed_comment")
def test_endorsed_by_anonymity(self):
"""
Ensure thread anonymity is properly considered in serializing
endorsed_by.
"""
thread = self.make_minimal_cs_thread({
"anonymous": True,
"children": [
make_minimal_cs_comment({
"endorsement": {"user_id": str(self.author.id), "time": "2015-05-18T12:34:56Z"}
})
]
})
actual_comments = self.get_comment_list(thread)["results"]
self.assertIsNone(actual_comments[0]["endorsed_by"])
@ddt.data(
("discussion", None, "children", "resp_total"),
("question", False, "non_endorsed_responses", "non_endorsed_resp_total"),
)
@ddt.unpack
def test_cs_pagination(self, thread_type, endorsed_arg, response_field, response_total_field):
"""
Test cases in which pagination is done by the comments service.
thread_type is the type of thread (question or discussion).
endorsed_arg is the value of the endorsed argument.
        response_field is the field in which responses are returned for the
given thread type.
response_total_field is the field in which the total number of responses
is returned for the given thread type.
"""
# N.B. The mismatch between the number of children and the listed total
# number of responses is unrealistic but convenient for this test
thread = self.make_minimal_cs_thread({
"thread_type": thread_type,
response_field: [make_minimal_cs_comment()],
response_total_field: 5,
})
# Only page
actual = self.get_comment_list(thread, endorsed=endorsed_arg, page=1, page_size=5)
self.assertIsNone(actual["next"])
self.assertIsNone(actual["previous"])
# First page of many
actual = self.get_comment_list(thread, endorsed=endorsed_arg, page=1, page_size=2)
self.assertEqual(actual["next"], "http://testserver/test_path?page=2")
self.assertIsNone(actual["previous"])
# Middle page of many
actual = self.get_comment_list(thread, endorsed=endorsed_arg, page=2, page_size=2)
self.assertEqual(actual["next"], "http://testserver/test_path?page=3")
self.assertEqual(actual["previous"], "http://testserver/test_path?page=1")
# Last page of many
actual = self.get_comment_list(thread, endorsed=endorsed_arg, page=3, page_size=2)
self.assertIsNone(actual["next"])
self.assertEqual(actual["previous"], "http://testserver/test_path?page=2")
# Page past the end
thread = self.make_minimal_cs_thread({
"thread_type": thread_type,
response_field: [],
response_total_field: 5
})
with self.assertRaises(Http404):
self.get_comment_list(thread, endorsed=endorsed_arg, page=2, page_size=5)
def test_question_endorsed_pagination(self):
thread = self.make_minimal_cs_thread({
"thread_type": "question",
"endorsed_responses": [
make_minimal_cs_comment({"id": "comment_{}".format(i)}) for i in range(10)
]
})
def assert_page_correct(page, page_size, expected_start, expected_stop, expected_next, expected_prev):
"""
Check that requesting the given page/page_size returns the expected
output
"""
actual = self.get_comment_list(thread, endorsed=True, page=page, page_size=page_size)
result_ids = [result["id"] for result in actual["results"]]
self.assertEqual(
result_ids,
["comment_{}".format(i) for i in range(expected_start, expected_stop)]
)
self.assertEqual(
actual["next"],
"http://testserver/test_path?page={}".format(expected_next) if expected_next else None
)
self.assertEqual(
actual["previous"],
"http://testserver/test_path?page={}".format(expected_prev) if expected_prev else None
)
# Only page
assert_page_correct(
page=1,
page_size=10,
expected_start=0,
expected_stop=10,
expected_next=None,
expected_prev=None
)
# First page of many
assert_page_correct(
page=1,
page_size=4,
expected_start=0,
expected_stop=4,
expected_next=2,
expected_prev=None
)
# Middle page of many
assert_page_correct(
page=2,
page_size=4,
expected_start=4,
expected_stop=8,
expected_next=3,
expected_prev=1
)
# Last page of many
assert_page_correct(
page=3,
page_size=4,
expected_start=8,
expected_stop=10,
expected_next=None,
expected_prev=2
)
# Page past the end
with self.assertRaises(Http404):
self.get_comment_list(thread, endorsed=True, page=2, page_size=10)
class CreateThreadTest(CommentsServiceMockMixin, UrlResetMixin, ModuleStoreTestCase):
"""Tests for create_thread"""
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(CreateThreadTest, self).setUp()
httpretty.reset()
httpretty.enable()
self.addCleanup(httpretty.disable)
self.user = UserFactory.create()
self.register_get_user_response(self.user)
self.request = RequestFactory().get("/test_path")
self.request.user = self.user
self.course = CourseFactory.create()
CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
self.minimal_data = {
"course_id": unicode(self.course.id),
"topic_id": "test_topic",
"type": "discussion",
"title": "Test Title",
"raw_body": "Test body",
}
@mock.patch("eventtracking.tracker.emit")
def test_basic(self, mock_emit):
self.register_post_thread_response({
"id": "test_id",
"username": self.user.username,
"created_at": "2015-05-19T00:00:00Z",
"updated_at": "2015-05-19T00:00:00Z",
})
actual = create_thread(self.request, self.minimal_data)
expected = {
"id": "test_id",
"course_id": unicode(self.course.id),
"topic_id": "test_topic",
"group_id": None,
"group_name": None,
"author": self.user.username,
"author_label": None,
"created_at": "2015-05-19T00:00:00Z",
"updated_at": "2015-05-19T00:00:00Z",
"type": "discussion",
"title": "Test Title",
"raw_body": "Test body",
"rendered_body": "<p>Test body</p>",
"pinned": False,
"closed": False,
"following": False,
"abuse_flagged": False,
"voted": False,
"vote_count": 0,
"comment_count": 0,
"unread_comment_count": 0,
"comment_list_url": "http://testserver/api/discussion/v1/comments/?thread_id=test_id",
"endorsed_comment_list_url": None,
"non_endorsed_comment_list_url": None,
"editable_fields": ["following", "raw_body", "title", "topic_id", "type", "voted"],
}
self.assertEqual(actual, expected)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"course_id": [unicode(self.course.id)],
"commentable_id": ["test_topic"],
"thread_type": ["discussion"],
"title": ["Test Title"],
"body": ["Test body"],
"user_id": [str(self.user.id)],
}
)
event_name, event_data = mock_emit.call_args[0]
self.assertEqual(event_name, "edx.forum.thread.created")
self.assertEqual(
event_data,
{
"commentable_id": "test_topic",
"group_id": None,
"thread_type": "discussion",
"title": "Test Title",
"anonymous": False,
"anonymous_to_peers": False,
"options": {"followed": False},
"id": "test_id",
"truncated": False,
"body": "Test body",
"url": "",
"user_forums_roles": [FORUM_ROLE_STUDENT],
"user_course_roles": [],
}
)
def test_following(self):
self.register_post_thread_response({"id": "test_id"})
self.register_subscription_response(self.user)
data = self.minimal_data.copy()
data["following"] = "True"
result = create_thread(self.request, data)
self.assertEqual(result["following"], True)
cs_request = httpretty.last_request()
self.assertEqual(
urlparse(cs_request.path).path,
"/api/v1/users/{}/subscriptions".format(self.user.id)
)
self.assertEqual(cs_request.method, "POST")
self.assertEqual(
cs_request.parsed_body,
{"source_type": ["thread"], "source_id": ["test_id"]}
)
def test_voted(self):
self.register_post_thread_response({"id": "test_id"})
self.register_thread_votes_response("test_id")
data = self.minimal_data.copy()
data["voted"] = "True"
result = create_thread(self.request, data)
self.assertEqual(result["voted"], True)
cs_request = httpretty.last_request()
self.assertEqual(urlparse(cs_request.path).path, "/api/v1/threads/test_id/votes")
self.assertEqual(cs_request.method, "PUT")
self.assertEqual(
cs_request.parsed_body,
{"user_id": [str(self.user.id)], "value": ["up"]}
)
def test_course_id_missing(self):
with self.assertRaises(ValidationError) as assertion:
create_thread(self.request, {})
self.assertEqual(assertion.exception.message_dict, {"course_id": ["This field is required."]})
def test_course_id_invalid(self):
with self.assertRaises(ValidationError) as assertion:
create_thread(self.request, {"course_id": "invalid!"})
self.assertEqual(assertion.exception.message_dict, {"course_id": ["Invalid value."]})
def test_nonexistent_course(self):
with self.assertRaises(ValidationError) as assertion:
create_thread(self.request, {"course_id": "non/existent/course"})
self.assertEqual(assertion.exception.message_dict, {"course_id": ["Invalid value."]})
def test_not_enrolled(self):
self.request.user = UserFactory.create()
with self.assertRaises(ValidationError) as assertion:
create_thread(self.request, self.minimal_data)
self.assertEqual(assertion.exception.message_dict, {"course_id": ["Invalid value."]})
def test_discussions_disabled(self):
_remove_discussion_tab(self.course, self.user.id)
with self.assertRaises(ValidationError) as assertion:
create_thread(self.request, self.minimal_data)
self.assertEqual(assertion.exception.message_dict, {"course_id": ["Invalid value."]})
def test_invalid_field(self):
data = self.minimal_data.copy()
data["type"] = "invalid_type"
with self.assertRaises(ValidationError):
create_thread(self.request, data)
@ddt.ddt
class CreateCommentTest(CommentsServiceMockMixin, UrlResetMixin, ModuleStoreTestCase):
"""Tests for create_comment"""
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(CreateCommentTest, self).setUp()
httpretty.reset()
httpretty.enable()
self.addCleanup(httpretty.disable)
self.user = UserFactory.create()
self.register_get_user_response(self.user)
self.request = RequestFactory().get("/test_path")
self.request.user = self.user
self.course = CourseFactory.create()
CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
self.register_get_thread_response(
make_minimal_cs_thread({
"id": "test_thread",
"course_id": unicode(self.course.id),
"commentable_id": "test_topic",
})
)
self.minimal_data = {
"thread_id": "test_thread",
"raw_body": "Test body",
}
@ddt.data(None, "test_parent")
@mock.patch("eventtracking.tracker.emit")
def test_success(self, parent_id, mock_emit):
if parent_id:
self.register_get_comment_response({"id": parent_id, "thread_id": "test_thread"})
self.register_post_comment_response(
{
"id": "test_comment",
"username": self.user.username,
"created_at": "2015-05-27T00:00:00Z",
"updated_at": "2015-05-27T00:00:00Z",
},
thread_id="test_thread",
parent_id=parent_id
)
data = self.minimal_data.copy()
if parent_id:
data["parent_id"] = parent_id
actual = create_comment(self.request, data)
expected = {
"id": "test_comment",
"thread_id": "test_thread",
"parent_id": parent_id,
"author": self.user.username,
"author_label": None,
"created_at": "2015-05-27T00:00:00Z",
"updated_at": "2015-05-27T00:00:00Z",
"raw_body": "Test body",
"rendered_body": "<p>Test body</p>",
"endorsed": False,
"endorsed_by": None,
"endorsed_by_label": None,
"endorsed_at": None,
"abuse_flagged": False,
"voted": False,
"vote_count": 0,
"children": [],
"editable_fields": ["raw_body", "voted"]
}
self.assertEqual(actual, expected)
expected_url = (
"/api/v1/comments/{}".format(parent_id) if parent_id else
"/api/v1/threads/test_thread/comments"
)
self.assertEqual(
urlparse(httpretty.last_request().path).path,
expected_url
)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"course_id": [unicode(self.course.id)],
"body": ["Test body"],
"user_id": [str(self.user.id)]
}
)
expected_event_name = (
"edx.forum.comment.created" if parent_id else
"edx.forum.response.created"
)
expected_event_data = {
"discussion": {"id": "test_thread"},
"commentable_id": "test_topic",
"options": {"followed": False},
"id": "test_comment",
"truncated": False,
"body": "Test body",
"url": "",
"user_forums_roles": [FORUM_ROLE_STUDENT],
"user_course_roles": [],
}
if parent_id:
expected_event_data["response"] = {"id": parent_id}
actual_event_name, actual_event_data = mock_emit.call_args[0]
self.assertEqual(actual_event_name, expected_event_name)
self.assertEqual(actual_event_data, expected_event_data)
def test_voted(self):
self.register_post_comment_response({"id": "test_comment"}, "test_thread")
self.register_comment_votes_response("test_comment")
data = self.minimal_data.copy()
data["voted"] = "True"
result = create_comment(self.request, data)
self.assertEqual(result["voted"], True)
cs_request = httpretty.last_request()
self.assertEqual(urlparse(cs_request.path).path, "/api/v1/comments/test_comment/votes")
self.assertEqual(cs_request.method, "PUT")
self.assertEqual(
cs_request.parsed_body,
{"user_id": [str(self.user.id)], "value": ["up"]}
)
def test_thread_id_missing(self):
with self.assertRaises(ValidationError) as assertion:
create_comment(self.request, {})
self.assertEqual(assertion.exception.message_dict, {"thread_id": ["This field is required."]})
def test_thread_id_not_found(self):
self.register_get_thread_error_response("test_thread", 404)
with self.assertRaises(ValidationError) as assertion:
create_comment(self.request, self.minimal_data)
self.assertEqual(assertion.exception.message_dict, {"thread_id": ["Invalid value."]})
def test_nonexistent_course(self):
self.register_get_thread_response(
make_minimal_cs_thread({"id": "test_thread", "course_id": "non/existent/course"})
)
with self.assertRaises(ValidationError) as assertion:
create_comment(self.request, self.minimal_data)
self.assertEqual(assertion.exception.message_dict, {"thread_id": ["Invalid value."]})
def test_not_enrolled(self):
self.request.user = UserFactory.create()
with self.assertRaises(ValidationError) as assertion:
create_comment(self.request, self.minimal_data)
self.assertEqual(assertion.exception.message_dict, {"thread_id": ["Invalid value."]})
def test_discussions_disabled(self):
_remove_discussion_tab(self.course, self.user.id)
with self.assertRaises(ValidationError) as assertion:
create_comment(self.request, self.minimal_data)
self.assertEqual(assertion.exception.message_dict, {"thread_id": ["Invalid value."]})
@ddt.data(
*itertools.product(
[
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_STUDENT,
],
[True, False],
["no_group", "match_group", "different_group"],
)
)
@ddt.unpack
def test_group_access(self, role_name, course_is_cohorted, thread_group_state):
cohort_course = CourseFactory.create(cohort_config={"cohorted": course_is_cohorted})
CourseEnrollmentFactory.create(user=self.user, course_id=cohort_course.id)
cohort = CohortFactory.create(course_id=cohort_course.id, users=[self.user])
role = Role.objects.create(name=role_name, course_id=cohort_course.id)
role.users = [self.user]
self.register_get_thread_response(make_minimal_cs_thread({
"id": "cohort_thread",
"course_id": unicode(cohort_course.id),
"group_id": (
None if thread_group_state == "no_group" else
cohort.id if thread_group_state == "match_group" else
cohort.id + 1
),
}))
self.register_post_comment_response({}, thread_id="cohort_thread")
data = self.minimal_data.copy()
data["thread_id"] = "cohort_thread"
expected_error = (
role_name == FORUM_ROLE_STUDENT and
course_is_cohorted and
thread_group_state == "different_group"
)
try:
create_comment(self.request, data)
self.assertFalse(expected_error)
except ValidationError as err:
self.assertTrue(expected_error)
self.assertEqual(
err.message_dict,
{"thread_id": ["Invalid value."]}
)
def test_invalid_field(self):
data = self.minimal_data.copy()
del data["raw_body"]
with self.assertRaises(ValidationError):
create_comment(self.request, data)
@ddt.ddt
class UpdateThreadTest(CommentsServiceMockMixin, UrlResetMixin, ModuleStoreTestCase):
"""Tests for update_thread"""
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(UpdateThreadTest, self).setUp()
httpretty.reset()
httpretty.enable()
self.addCleanup(httpretty.disable)
self.user = UserFactory.create()
self.register_get_user_response(self.user)
self.request = RequestFactory().get("/test_path")
self.request.user = self.user
self.course = CourseFactory.create()
CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
def register_thread(self, overrides=None):
"""
Make a thread with appropriate data overridden by the overrides
parameter and register mock responses for both GET and PUT on its
endpoint.
"""
cs_data = make_minimal_cs_thread({
"id": "test_thread",
"course_id": unicode(self.course.id),
"commentable_id": "original_topic",
"username": self.user.username,
"user_id": str(self.user.id),
"created_at": "2015-05-29T00:00:00Z",
"updated_at": "2015-05-29T00:00:00Z",
"thread_type": "discussion",
"title": "Original Title",
"body": "Original body",
})
cs_data.update(overrides or {})
self.register_get_thread_response(cs_data)
self.register_put_thread_response(cs_data)
def test_empty(self):
"""Check that an empty update does not make any modifying requests."""
# Ensure that the default following value of False is not applied implicitly
self.register_get_user_response(self.user, subscribed_thread_ids=["test_thread"])
self.register_thread()
update_thread(self.request, "test_thread", {})
for request in httpretty.httpretty.latest_requests:
self.assertEqual(request.method, "GET")
def test_basic(self):
self.register_thread()
actual = update_thread(self.request, "test_thread", {"raw_body": "Edited body"})
expected = {
"id": "test_thread",
"course_id": unicode(self.course.id),
"topic_id": "original_topic",
"group_id": None,
"group_name": None,
"author": self.user.username,
"author_label": None,
"created_at": "2015-05-29T00:00:00Z",
"updated_at": "2015-05-29T00:00:00Z",
"type": "discussion",
"title": "Original Title",
"raw_body": "Edited body",
"rendered_body": "<p>Edited body</p>",
"pinned": False,
"closed": False,
"following": False,
"abuse_flagged": False,
"voted": False,
"vote_count": 0,
"comment_count": 0,
"unread_comment_count": 0,
"comment_list_url": "http://testserver/api/discussion/v1/comments/?thread_id=test_thread",
"endorsed_comment_list_url": None,
"non_endorsed_comment_list_url": None,
"editable_fields": ["following", "raw_body", "title", "topic_id", "type", "voted"],
}
self.assertEqual(actual, expected)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"course_id": [unicode(self.course.id)],
"commentable_id": ["original_topic"],
"thread_type": ["discussion"],
"title": ["Original Title"],
"body": ["Edited body"],
"user_id": [str(self.user.id)],
"anonymous": ["False"],
"anonymous_to_peers": ["False"],
"closed": ["False"],
"pinned": ["False"],
}
)
def test_nonexistent_thread(self):
self.register_get_thread_error_response("test_thread", 404)
with self.assertRaises(Http404):
update_thread(self.request, "test_thread", {})
def test_nonexistent_course(self):
self.register_thread({"course_id": "non/existent/course"})
with self.assertRaises(Http404):
update_thread(self.request, "test_thread", {})
def test_not_enrolled(self):
self.register_thread()
self.request.user = UserFactory.create()
with self.assertRaises(Http404):
update_thread(self.request, "test_thread", {})
def test_discussions_disabled(self):
_remove_discussion_tab(self.course, self.user.id)
self.register_thread()
with self.assertRaises(Http404):
update_thread(self.request, "test_thread", {})
@ddt.data(
*itertools.product(
[
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_STUDENT,
],
[True, False],
["no_group", "match_group", "different_group"],
)
)
@ddt.unpack
def test_group_access(self, role_name, course_is_cohorted, thread_group_state):
cohort_course = CourseFactory.create(cohort_config={"cohorted": course_is_cohorted})
CourseEnrollmentFactory.create(user=self.user, course_id=cohort_course.id)
cohort = CohortFactory.create(course_id=cohort_course.id, users=[self.user])
role = Role.objects.create(name=role_name, course_id=cohort_course.id)
role.users = [self.user]
self.register_thread({
"course_id": unicode(cohort_course.id),
"group_id": (
None if thread_group_state == "no_group" else
cohort.id if thread_group_state == "match_group" else
cohort.id + 1
),
})
expected_error = (
role_name == FORUM_ROLE_STUDENT and
course_is_cohorted and
thread_group_state == "different_group"
)
try:
update_thread(self.request, "test_thread", {})
self.assertFalse(expected_error)
except Http404:
self.assertTrue(expected_error)
@ddt.data(
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_STUDENT,
)
def test_author_only_fields(self, role_name):
role = Role.objects.create(name=role_name, course_id=self.course.id)
role.users = [self.user]
self.register_thread({"user_id": str(self.user.id + 1)})
data = {field: "edited" for field in ["topic_id", "title", "raw_body"]}
data["type"] = "question"
expected_error = role_name == FORUM_ROLE_STUDENT
try:
update_thread(self.request, "test_thread", data)
self.assertFalse(expected_error)
except ValidationError as err:
self.assertTrue(expected_error)
self.assertEqual(
err.message_dict,
{field: ["This field is not editable."] for field in data.keys()}
)
@ddt.data(*itertools.product([True, False], [True, False]))
@ddt.unpack
def test_following(self, old_following, new_following):
"""
Test attempts to edit the "following" field.
old_following indicates whether the thread should be followed at the
start of the test. new_following indicates the value for the "following"
field in the update. If old_following and new_following are the same, no
update should be made. Otherwise, a subscription should be POSTed or
DELETEd according to the new_following value.
"""
if old_following:
self.register_get_user_response(self.user, subscribed_thread_ids=["test_thread"])
self.register_subscription_response(self.user)
self.register_thread()
data = {"following": new_following}
result = update_thread(self.request, "test_thread", data)
self.assertEqual(result["following"], new_following)
last_request_path = urlparse(httpretty.last_request().path).path
subscription_url = "/api/v1/users/{}/subscriptions".format(self.user.id)
if old_following == new_following:
self.assertNotEqual(last_request_path, subscription_url)
else:
self.assertEqual(last_request_path, subscription_url)
self.assertEqual(
httpretty.last_request().method,
"POST" if new_following else "DELETE"
)
request_data = (
httpretty.last_request().parsed_body if new_following else
parse_qs(urlparse(httpretty.last_request().path).query)
)
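            # The underlying comments service client adds a request_id to
            # every request; drop it so only the fields under test compare.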
request_data.pop("request_id", None)
self.assertEqual(
request_data,
{"source_type": ["thread"], "source_id": ["test_thread"]}
)
@ddt.data(*itertools.product([True, False], [True, False]))
@ddt.unpack
def test_voted(self, old_voted, new_voted):
"""
Test attempts to edit the "voted" field.
old_voted indicates whether the thread should be upvoted at the start of
the test. new_voted indicates the value for the "voted" field in the
update. If old_voted and new_voted are the same, no update should be
made. Otherwise, a vote should be PUT or DELETEd according to the
new_voted value.
"""
if old_voted:
self.register_get_user_response(self.user, upvoted_ids=["test_thread"])
self.register_thread_votes_response("test_thread")
self.register_thread()
data = {"voted": new_voted}
result = update_thread(self.request, "test_thread", data)
self.assertEqual(result["voted"], new_voted)
last_request_path = urlparse(httpretty.last_request().path).path
votes_url = "/api/v1/threads/test_thread/votes"
if old_voted == new_voted:
self.assertNotEqual(last_request_path, votes_url)
else:
self.assertEqual(last_request_path, votes_url)
self.assertEqual(
httpretty.last_request().method,
"PUT" if new_voted else "DELETE"
)
actual_request_data = (
httpretty.last_request().parsed_body if new_voted else
parse_qs(urlparse(httpretty.last_request().path).query)
)
actual_request_data.pop("request_id", None)
expected_request_data = {"user_id": [str(self.user.id)]}
if new_voted:
expected_request_data["value"] = ["up"]
self.assertEqual(actual_request_data, expected_request_data)
def test_invalid_field(self):
self.register_thread()
with self.assertRaises(ValidationError) as assertion:
update_thread(self.request, "test_thread", {"raw_body": ""})
self.assertEqual(
assertion.exception.message_dict,
{"raw_body": ["This field is required."]}
)
@ddt.ddt
class UpdateCommentTest(CommentsServiceMockMixin, UrlResetMixin, ModuleStoreTestCase):
"""Tests for update_comment"""
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(UpdateCommentTest, self).setUp()
httpretty.reset()
httpretty.enable()
self.addCleanup(httpretty.disable)
self.user = UserFactory.create()
self.register_get_user_response(self.user)
self.request = RequestFactory().get("/test_path")
self.request.user = self.user
self.course = CourseFactory.create()
CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
def register_comment(self, overrides=None, thread_overrides=None):
"""
Make a comment with appropriate data overridden by the overrides
parameter and register mock responses for both GET and PUT on its
endpoint. Also mock GET for the related thread with thread_overrides.
"""
cs_thread_data = make_minimal_cs_thread({
"id": "test_thread",
"course_id": unicode(self.course.id)
})
cs_thread_data.update(thread_overrides or {})
self.register_get_thread_response(cs_thread_data)
cs_comment_data = make_minimal_cs_comment({
"id": "test_comment",
"course_id": cs_thread_data["course_id"],
"thread_id": cs_thread_data["id"],
"username": self.user.username,
"user_id": str(self.user.id),
"created_at": "2015-06-03T00:00:00Z",
"updated_at": "2015-06-03T00:00:00Z",
"body": "Original body",
})
cs_comment_data.update(overrides or {})
self.register_get_comment_response(cs_comment_data)
self.register_put_comment_response(cs_comment_data)
def test_empty(self):
"""Check that an empty update does not make any modifying requests."""
self.register_comment()
update_comment(self.request, "test_comment", {})
for request in httpretty.httpretty.latest_requests:
self.assertEqual(request.method, "GET")
@ddt.data(None, "test_parent")
def test_basic(self, parent_id):
self.register_comment({"parent_id": parent_id})
actual = update_comment(self.request, "test_comment", {"raw_body": "Edited body"})
expected = {
"id": "test_comment",
"thread_id": "test_thread",
"parent_id": parent_id,
"author": self.user.username,
"author_label": None,
"created_at": "2015-06-03T00:00:00Z",
"updated_at": "2015-06-03T00:00:00Z",
"raw_body": "Edited body",
"rendered_body": "<p>Edited body</p>",
"endorsed": False,
"endorsed_by": None,
"endorsed_by_label": None,
"endorsed_at": None,
"abuse_flagged": False,
"voted": False,
"vote_count": 0,
"children": [],
"editable_fields": ["raw_body", "voted"]
}
self.assertEqual(actual, expected)
self.assertEqual(
httpretty.last_request().parsed_body,
{
"body": ["Edited body"],
"course_id": [unicode(self.course.id)],
"user_id": [str(self.user.id)],
"anonymous": ["False"],
"anonymous_to_peers": ["False"],
"endorsed": ["False"],
}
)
def test_nonexistent_comment(self):
self.register_get_comment_error_response("test_comment", 404)
with self.assertRaises(Http404):
update_comment(self.request, "test_comment", {})
def test_nonexistent_course(self):
self.register_comment(thread_overrides={"course_id": "non/existent/course"})
with self.assertRaises(Http404):
update_comment(self.request, "test_comment", {})
def test_unenrolled(self):
self.register_comment()
self.request.user = UserFactory.create()
with self.assertRaises(Http404):
update_comment(self.request, "test_comment", {})
def test_discussions_disabled(self):
_remove_discussion_tab(self.course, self.user.id)
self.register_comment()
with self.assertRaises(Http404):
update_comment(self.request, "test_comment", {})
@ddt.data(
*itertools.product(
[
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_STUDENT,
],
[True, False],
["no_group", "match_group", "different_group"],
)
)
@ddt.unpack
def test_group_access(self, role_name, course_is_cohorted, thread_group_state):
cohort_course = CourseFactory.create(cohort_config={"cohorted": course_is_cohorted})
CourseEnrollmentFactory.create(user=self.user, course_id=cohort_course.id)
cohort = CohortFactory.create(course_id=cohort_course.id, users=[self.user])
role = Role.objects.create(name=role_name, course_id=cohort_course.id)
role.users = [self.user]
self.register_get_thread_response(make_minimal_cs_thread())
self.register_comment(
{"thread_id": "test_thread"},
thread_overrides={
"id": "test_thread",
"course_id": unicode(cohort_course.id),
"group_id": (
None if thread_group_state == "no_group" else
cohort.id if thread_group_state == "match_group" else
cohort.id + 1
),
}
)
expected_error = (
role_name == FORUM_ROLE_STUDENT and
course_is_cohorted and
thread_group_state == "different_group"
)
try:
update_comment(self.request, "test_comment", {})
self.assertFalse(expected_error)
except Http404:
self.assertTrue(expected_error)
@ddt.data(*itertools.product(
[
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_STUDENT,
],
[True, False],
[True, False],
))
@ddt.unpack
def test_raw_body_access(self, role_name, is_thread_author, is_comment_author):
role = Role.objects.create(name=role_name, course_id=self.course.id)
role.users = [self.user]
self.register_comment(
{"user_id": str(self.user.id if is_comment_author else (self.user.id + 1))},
thread_overrides={
"user_id": str(self.user.id if is_thread_author else (self.user.id + 1))
}
)
expected_error = role_name == FORUM_ROLE_STUDENT and not is_comment_author
try:
update_comment(self.request, "test_comment", {"raw_body": "edited"})
self.assertFalse(expected_error)
except ValidationError as err:
self.assertTrue(expected_error)
self.assertEqual(
err.message_dict,
{"raw_body": ["This field is not editable."]}
)
@ddt.data(*itertools.product(
[
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_STUDENT,
],
[True, False],
["question", "discussion"],
[True, False],
))
@ddt.unpack
def test_endorsed_access(self, role_name, is_thread_author, thread_type, is_comment_author):
role = Role.objects.create(name=role_name, course_id=self.course.id)
role.users = [self.user]
self.register_comment(
{"user_id": str(self.user.id if is_comment_author else (self.user.id + 1))},
thread_overrides={
"thread_type": thread_type,
"user_id": str(self.user.id if is_thread_author else (self.user.id + 1)),
}
)
expected_error = (
role_name == FORUM_ROLE_STUDENT and
(thread_type == "discussion" or not is_thread_author)
)
try:
update_comment(self.request, "test_comment", {"endorsed": True})
self.assertFalse(expected_error)
except ValidationError as err:
self.assertTrue(expected_error)
self.assertEqual(
err.message_dict,
{"endorsed": ["This field is not editable."]}
)
@ddt.data(*itertools.product([True, False], [True, False]))
@ddt.unpack
def test_voted(self, old_voted, new_voted):
"""
Test attempts to edit the "voted" field.
old_voted indicates whether the comment should be upvoted at the start of
the test. new_voted indicates the value for the "voted" field in the
update. If old_voted and new_voted are the same, no update should be
made. Otherwise, a vote should be PUT or DELETEd according to the
new_voted value.
"""
if old_voted:
self.register_get_user_response(self.user, upvoted_ids=["test_comment"])
self.register_comment_votes_response("test_comment")
self.register_comment()
data = {"voted": new_voted}
result = update_comment(self.request, "test_comment", data)
self.assertEqual(result["voted"], new_voted)
last_request_path = urlparse(httpretty.last_request().path).path
votes_url = "/api/v1/comments/test_comment/votes"
if old_voted == new_voted:
self.assertNotEqual(last_request_path, votes_url)
else:
self.assertEqual(last_request_path, votes_url)
self.assertEqual(
httpretty.last_request().method,
"PUT" if new_voted else "DELETE"
)
actual_request_data = (
httpretty.last_request().parsed_body if new_voted else
parse_qs(urlparse(httpretty.last_request().path).query)
)
actual_request_data.pop("request_id", None)
expected_request_data = {"user_id": [str(self.user.id)]}
if new_voted:
expected_request_data["value"] = ["up"]
self.assertEqual(actual_request_data, expected_request_data)
@ddt.ddt
class DeleteThreadTest(CommentsServiceMockMixin, UrlResetMixin, ModuleStoreTestCase):
"""Tests for delete_thread"""
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(DeleteThreadTest, self).setUp()
httpretty.reset()
httpretty.enable()
self.addCleanup(httpretty.disable)
self.user = UserFactory.create()
self.register_get_user_response(self.user)
self.request = RequestFactory().get("/test_path")
self.request.user = self.user
self.course = CourseFactory.create()
self.thread_id = "test_thread"
CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
def register_thread(self, overrides=None):
"""
Make a thread with appropriate data overridden by the overrides
parameter and register mock responses for both GET and DELETE on its
endpoint.
"""
cs_data = make_minimal_cs_thread({
"id": self.thread_id,
"course_id": unicode(self.course.id),
"user_id": str(self.user.id),
})
cs_data.update(overrides or {})
self.register_get_thread_response(cs_data)
self.register_delete_thread_response(cs_data["id"])
def test_basic(self):
self.register_thread()
self.assertIsNone(delete_thread(self.request, self.thread_id))
self.assertEqual(
urlparse(httpretty.last_request().path).path,
"/api/v1/threads/{}".format(self.thread_id)
)
self.assertEqual(httpretty.last_request().method, "DELETE")
def test_thread_id_not_found(self):
self.register_get_thread_error_response("missing_thread", 404)
with self.assertRaises(Http404):
delete_thread(self.request, "missing_thread")
def test_nonexistent_course(self):
self.register_thread({"course_id": "non/existent/course"})
with self.assertRaises(Http404):
delete_thread(self.request, self.thread_id)
def test_not_enrolled(self):
self.register_thread()
self.request.user = UserFactory.create()
with self.assertRaises(Http404):
delete_thread(self.request, self.thread_id)
def test_discussions_disabled(self):
self.register_thread()
_remove_discussion_tab(self.course, self.user.id)
with self.assertRaises(Http404):
delete_thread(self.request, self.thread_id)
@ddt.data(
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_STUDENT,
)
def test_non_author_delete_allowed(self, role_name):
role = Role.objects.create(name=role_name, course_id=self.course.id)
role.users = [self.user]
self.register_thread({"user_id": str(self.user.id + 1)})
expected_error = role_name == FORUM_ROLE_STUDENT
try:
delete_thread(self.request, self.thread_id)
self.assertFalse(expected_error)
except PermissionDenied:
self.assertTrue(expected_error)
@ddt.data(
*itertools.product(
[
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_STUDENT,
],
[True, False],
["no_group", "match_group", "different_group"],
)
)
@ddt.unpack
def test_group_access(self, role_name, course_is_cohorted, thread_group_state):
"""
Tests group access for deleting a thread
        All privileged roles are able to delete a thread. A student role can
        only delete a thread if the student is the author and the thread
        either is not assigned to a cohort or is in the author's cohort.
"""
cohort_course = CourseFactory.create(cohort_config={"cohorted": course_is_cohorted})
CourseEnrollmentFactory.create(user=self.user, course_id=cohort_course.id)
cohort = CohortFactory.create(course_id=cohort_course.id, users=[self.user])
role = Role.objects.create(name=role_name, course_id=cohort_course.id)
role.users = [self.user]
self.register_thread({
"course_id": unicode(cohort_course.id),
"group_id": (
None if thread_group_state == "no_group" else
cohort.id if thread_group_state == "match_group" else
cohort.id + 1
),
})
expected_error = (
role_name == FORUM_ROLE_STUDENT and
course_is_cohorted and
thread_group_state == "different_group"
)
try:
delete_thread(self.request, self.thread_id)
self.assertFalse(expected_error)
except Http404:
self.assertTrue(expected_error)
@ddt.ddt
class DeleteCommentTest(CommentsServiceMockMixin, UrlResetMixin, ModuleStoreTestCase):
"""Tests for delete_comment"""
@mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(DeleteCommentTest, self).setUp()
httpretty.reset()
httpretty.enable()
self.addCleanup(httpretty.disable)
self.user = UserFactory.create()
self.register_get_user_response(self.user)
self.request = RequestFactory().get("/test_path")
self.request.user = self.user
self.course = CourseFactory.create()
self.thread_id = "test_thread"
self.comment_id = "test_comment"
CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
def register_comment_and_thread(self, overrides=None, thread_overrides=None):
"""
Make a comment with appropriate data overridden by the override
parameters and register mock responses for both GET and DELETE on its
endpoint. Also mock GET for the related thread with thread_overrides.
"""
cs_thread_data = make_minimal_cs_thread({
"id": self.thread_id,
"course_id": unicode(self.course.id)
})
cs_thread_data.update(thread_overrides or {})
self.register_get_thread_response(cs_thread_data)
cs_comment_data = make_minimal_cs_comment({
"id": self.comment_id,
"course_id": cs_thread_data["course_id"],
"thread_id": cs_thread_data["id"],
"username": self.user.username,
"user_id": str(self.user.id),
})
cs_comment_data.update(overrides or {})
self.register_get_comment_response(cs_comment_data)
self.register_delete_comment_response(self.comment_id)
def test_basic(self):
self.register_comment_and_thread()
self.assertIsNone(delete_comment(self.request, self.comment_id))
self.assertEqual(
urlparse(httpretty.last_request().path).path,
"/api/v1/comments/{}".format(self.comment_id)
)
self.assertEqual(httpretty.last_request().method, "DELETE")
def test_comment_id_not_found(self):
self.register_get_comment_error_response("missing_comment", 404)
with self.assertRaises(Http404):
delete_comment(self.request, "missing_comment")
def test_nonexistent_course(self):
self.register_comment_and_thread(
thread_overrides={"course_id": "non/existent/course"}
)
with self.assertRaises(Http404):
delete_comment(self.request, self.comment_id)
def test_not_enrolled(self):
self.register_comment_and_thread()
self.request.user = UserFactory.create()
with self.assertRaises(Http404):
delete_comment(self.request, self.comment_id)
def test_discussions_disabled(self):
self.register_comment_and_thread()
_remove_discussion_tab(self.course, self.user.id)
with self.assertRaises(Http404):
delete_comment(self.request, self.comment_id)
@ddt.data(
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_STUDENT,
)
def test_non_author_delete_allowed(self, role_name):
role = Role.objects.create(name=role_name, course_id=self.course.id)
role.users = [self.user]
self.register_comment_and_thread(
overrides={"user_id": str(self.user.id + 1)}
)
expected_error = role_name == FORUM_ROLE_STUDENT
try:
delete_comment(self.request, self.comment_id)
self.assertFalse(expected_error)
except PermissionDenied:
self.assertTrue(expected_error)
@ddt.data(
*itertools.product(
[
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_STUDENT,
],
[True, False],
["no_group", "match_group", "different_group"],
)
)
@ddt.unpack
def test_group_access(self, role_name, course_is_cohorted, thread_group_state):
"""
Tests group access for deleting a comment
        All privileged roles are able to delete a comment. A student role can
        only delete a comment if the student is the author and the comment
        either is not in a cohorted thread or is in the author's cohort.
"""
cohort_course = CourseFactory.create(cohort_config={"cohorted": course_is_cohorted})
CourseEnrollmentFactory.create(user=self.user, course_id=cohort_course.id)
cohort = CohortFactory.create(course_id=cohort_course.id, users=[self.user])
role = Role.objects.create(name=role_name, course_id=cohort_course.id)
role.users = [self.user]
self.register_comment_and_thread(
overrides={"thread_id": "test_thread"},
thread_overrides={
"course_id": unicode(cohort_course.id),
"group_id": (
None if thread_group_state == "no_group" else
cohort.id if thread_group_state == "match_group" else
cohort.id + 1
),
}
)
expected_error = (
role_name == FORUM_ROLE_STUDENT and
course_is_cohorted and
thread_group_state == "different_group"
)
try:
delete_comment(self.request, self.comment_id)
self.assertFalse(expected_error)
except Http404:
self.assertTrue(expected_error)
| agpl-3.0 | -6,897,488,860,505,443,000 | 38.259853 | 118 | 0.553916 | false | 3.988867 | true | false | false |
att-comdev/deckhand | deckhand/common/validation_message.py | 1 | 2620 | # Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Indicates document sanity-check validation failure pre- or post-rendering.
DOCUMENT_SANITY_CHECK_FAILURE = 'D001'
# Indicates document post-rendering validation failure.
DOCUMENT_POST_RENDERING_FAILURE = 'D002'
class ValidationMessage(object):
"""ValidationMessage per UCP convention:
https://github.com/att-comdev/ucp-integration/blob/master/docs/source/api-conventions.rst#output-structure # noqa
Construction of ``ValidationMessage`` message:
:param string message: Validation failure message.
:param boolean error: True or False, if this is an error message.
:param string name: Identifying name of the validation.
:param string level: The severity of validation result, as "Error",
"Warning", or "Info"
:param string schema: The schema of the document being validated.
:param string doc_name: The name of the document being validated.
:param string diagnostic: Information about what lead to the message,
or details for resolution.
"""
def __init__(self,
message='Document validation error.',
error=True,
name='Deckhand validation error',
level='Error',
doc_schema='',
doc_name='',
doc_layer='',
diagnostic=''):
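        # Note: the ``level`` argument is recomputed from the ``error`` flag
        # below, so only 'Error' and 'Info' are ever emitted.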
level = 'Error' if error else 'Info'
self._output = {
'message': message,
'error': error,
'name': name,
'documents': [],
'level': level,
'kind': self.__class__.__name__
}
self._output['documents'].append(
dict(schema=doc_schema, name=doc_name, layer=doc_layer))
if diagnostic:
self._output.update(diagnostic=diagnostic)
def format_message(self):
"""Return ``ValidationMessage`` message.
:returns: The ``ValidationMessage`` for the Validation API response.
:rtype: dict
"""
return self._output
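

if __name__ == '__main__':
    # Illustrative sketch only -- not part of the Deckhand API. The document
    # fields below are hypothetical; this just shows how a message is built
    # and what format_message() returns.
    example = ValidationMessage(
        message='Sanity check failed.',
        error=True,
        name=DOCUMENT_SANITY_CHECK_FAILURE,
        doc_schema='deckhand/Certificate/v1',
        doc_name='example-cert',
        doc_layer='site',
        diagnostic='Check the rendered document for missing fields.')
    print(example.format_message())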
| apache-2.0 | 4,281,113,961,153,711,000 | 38.104478 | 118 | 0.646183 | false | 4.418212 | false | false | false |
gwct/grampa | lib/spec_tree.py | 1 | 5143 | import sys, os, reconcore as RC, recontree as RT, global_vars as globs
#############################################################################
def readSpecTree(spec_tree_input, starttime):
if os.path.isfile(spec_tree_input):
spec_tree = open(spec_tree_input, "r").read().replace("\n", "").replace("\r","");
else:
spec_tree = spec_tree_input;
# If the input string is a filename, read the file. Otherwise, just try it as a newick string.
hybrid_spec = "";
spec_tree = RT.remBranchLength(spec_tree);
tips = spec_tree.replace("(","").replace(")","").replace(";","").split(",");
# Remove the branch lengths from the tree string and get the tip labels.
if any(tip.isdigit() for tip in tips):
RC.errorOut(6, "Tip labels cannot be purely numbers. Please add another character.");
if globs.spec_type == 's' and any(tips.count(tip) > 1 for tip in tips):
RC.errorOut(7, "You have entered a tree type (-t) of 's' but there are labels in your tree that appear more than once!");
if globs.spec_type == 'm' and any(tips.count(tip) not in [1,2] for tip in tips):
RC.errorOut(8, "You have entered a tree type (-t) of 'm', species in your tree should appear exactly once or twice.");
# Some error checking based on the tip labels in the tree.
if globs.spec_type == 'm':
hybrid_spec = list(set([tip for tip in tips if tips.count(tip) != 1]));
for h in hybrid_spec:
spec_tree = spec_tree.replace(h, h+"*", 1);
# If the user entered a MUL-tree, some internal re-labeling must be done to those labels that appear twice.
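	# e.g. for the MUL-tree ((x,(a,b)),(a,b)); the duplicated tips a and b
	# have their first occurrences starred, yielding ((x,(a*,b*)),(a,b));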
try:
sinfo, st = RT.treeParse(spec_tree);
# Parsing of the species tree.
except:
RC.errorOut(9, "Error reading species tree!");
# Reading the species tree file.
if globs.label_opt:
if globs.v != -1:
print();
print("# The input species tree with internal nodes labeled:");
print(st + "\n");
RC.endProg(starttime);
# The output if --labeltree is set.
return sinfo, st;
#############################################################################
def hInParse(sinfo, st, h1_input, h2_input):
if globs.spec_type == 's':
hybrid_clades, hybrid_nodes = getHClades(h1_input, sinfo, "h1");
copy_clades, copy_nodes = getHClades(h2_input, sinfo, "h2");
# If the input tree is singly-labeled, use the input info from -h1 and -h2 to get the hybrid clades and nodes.
elif globs.spec_type == 'm':
mul_copy_clade = [n for n in sinfo if sinfo[n][2] == 'tip' and '*' in n];
mul_hybrid_clade = [n.replace("*","") for n in mul_copy_clade];
mul_hybrid_node, mul_hybrid_mono = RT.LCA(mul_hybrid_clade, sinfo);
mul_copy_node, mul_copy_mono = RT.LCA(mul_copy_clade, sinfo);
if not mul_hybrid_mono or not mul_copy_mono:
RC.errorOut(13, "All hybrid clades specified in your MUL-tree must be monophyletic! Hybrid clade identified as: " + ",".join(mul_copy_clade));
hybrid_clades, hybrid_nodes, copy_clades, copy_nodes = [mul_hybrid_clade], [mul_hybrid_node], [mul_copy_clade], [mul_copy_node];
# If the input tree is a MUL-tree, we have to determine what the hybrid clades and nodes are.
return hybrid_clades, hybrid_nodes, copy_clades, copy_nodes;
# Parses the input h nodes.
#############################################################################
def getHClades(h_list, sinfo, h_type):
# This function takes a list of lists of -h1 or -h2 inputs and determines if they are clades or node labels. It then retrieves
# the complete lists of hybrid clades and nodes.
if h_list:
if " " in h_list:
h_clades = h_list.split(" ");
h_clades = list(map(set, [tmp_h.split(",") for tmp_h in h_clades]));
else:
h_clades = list(map(set, [h_list.split(",")]));
# Split up the input info. If there is a space, multiple nodes/clades have been specified.
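		# e.g. "a,b c,d" yields [{'a','b'}, {'c','d'}], while "a,b" yields [{'a','b'}]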
if not all(h in sinfo for hybrid_list in h_clades for h in hybrid_list if not h.isdigit()):
RC.errorOut(10, "Not all -" + h_type + " species are present in your species tree!");
if not all("<" + h + ">" in sinfo for hybrid_list in h_clades for h in hybrid_list if h.isdigit()):
RC.errorOut(11, "Not all -" + h_type + " nodes are present in your species tree!");
# Some error checking to make sure everything the user input is actually in the tree.
h_nodes = [];
for hybrid_clade in h_clades:
hybrid_clade = list(hybrid_clade);
if hybrid_clade[0].isdigit():
h_node = "<" + hybrid_clade[0] + ">";
# If the input was an internal node, add it to the node list here.
else:
h_node, h_mono = RT.LCA(hybrid_clade, sinfo);
if not h_mono:
RC.errorOut(12, "All hybrid clades specified h1 and h2 must be monophyletic!");
# If the input was a clade, retrieve the ancestral node and check if it is monophyletic here.
if h_node not in h_nodes:
h_nodes.append(h_node);
# Add the hybrid node to the nodes list.
# If the user input anything as -h1 or -h2 this parses it.
else:
h_nodes = list(sinfo.keys());
h_clades = [RT.getClade(node, sinfo) for node in h_nodes];
# If the user did not specify -h1 or -h2, this adds all possible nodes to the list.
return h_clades, h_nodes;
#############################################################################
| gpl-3.0 | -548,104,545,586,755,300 | 43.336207 | 145 | 0.634649 | false | 3.030642 | false | false | false |
beeftornado/sentry | src/sentry/roles/manager.py | 1 | 2011 | from __future__ import absolute_import
import six
from collections import OrderedDict
class Role(object):
def __init__(self, priority, id, name, desc="", scopes=(), is_global=False):
assert len(id) <= 32, "Role id must be no more than 32 characters"
self.priority = priority
self.id = id
self.name = name
self.desc = desc
self.scopes = frozenset(scopes)
self.is_global = bool(is_global)
def __str__(self):
return self.name.encode("utf-8")
def __unicode__(self):
return six.text_type(self.name)
def __repr__(self):
return u"<Role: {}>".format(self.id)
def has_scope(self, scope):
return scope in self.scopes
class RoleManager(object):
def __init__(self, config, default=None):
role_list = []
self._roles = OrderedDict()
for idx, role in enumerate(config):
role = Role(idx, **role)
role_list.append(role)
self._roles[role.id] = role
self._choices = tuple((r.id, r.name) for r in role_list)
if default:
self._default = self._roles[default]
else:
self._default = role_list[0]
self._top_dog = role_list[-1]
def __iter__(self):
return six.itervalues(self._roles)
def can_manage(self, role, other):
return self.get(role).priority >= self.get(other).priority
def get(self, id):
return self._roles[id]
def get_all(self):
return list(self._roles.values())
def get_choices(self):
return self._choices
def get_default(self):
return self._default
def get_top_dog(self):
return self._top_dog
def with_scope(self, scope):
for role in self.get_all():
if role.has_scope(scope):
yield role
def with_any_scope(self, scopes):
for role in self.get_all():
if any(role.has_scope(scope) for scope in scopes):
yield role
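

if __name__ == "__main__":
    # Illustrative sketch only: the role definitions below are hypothetical,
    # not Sentry's real configuration; they just exercise the manager API.
    manager = RoleManager(
        [
            {"id": "member", "name": "Member", "scopes": ["event:read"]},
            {
                "id": "admin",
                "name": "Admin",
                "scopes": ["event:read", "org:write"],
                "is_global": True,
            },
        ],
        default="member",
    )
    assert manager.can_manage("admin", "member")
    assert [r.id for r in manager.with_scope("org:write")] == ["admin"]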
| bsd-3-clause | 2,672,312,002,791,817,700 | 24.455696 | 80 | 0.567379 | false | 3.815939 | false | false | false |
akrherz/iem | htdocs/DCP/ahpsxml2wxc.py | 1 | 2188 | """Convert the AHPS XML into WXC format"""
import datetime
from paste.request import parse_formvars
from twisted.words.xish import domish, xpath
import requests
def do(nwsli):
"""work"""
res = ""
xml = requests.get(
(
"https://water.weather.gov/ahps2/"
"hydrograph_to_xml.php?gage=%s&output=xml"
)
% (nwsli,)
).content
elementStream = domish.elementStream()
roots = []
results = []
elementStream.DocumentStartEvent = roots.append
elementStream.ElementEvent = lambda elem: roots[0].addChild(elem)
elementStream.DocumentEndEvent = lambda: results.append(roots[0])
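    # WXC header: a product identification line, the number of data columns
    # (5), and then one "<width> <column name>" line per column.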
res += """IEM %s AHPS2WXC host=0 TimeStamp=%s
5
15 Station
6 UTCDate
4 UTCTime
7 Stage
7 CFS\n""" % (
nwsli,
datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"),
)
elementStream.parse(xml)
elem = results[0]
nodes = xpath.queryForNodes("/site/forecast/datum", elem)
if nodes is None:
return res
i = 0
maxval = {"val": 0, "time": None}
for node in nodes:
utc = datetime.datetime.strptime(
str(node.valid)[:15], "%Y-%m-%dT%H:%M"
)
res += ("%12s%03i %6s %4s %7s %7s\n") % (
nwsli,
i,
utc.strftime("%b %-d"),
utc.strftime("%H%M"),
node.primary,
node.secondary,
)
if float(str(node.primary)) > maxval["val"]:
maxval["val"] = float(str(node.primary))
maxval["time"] = utc
maxval["cfs"] = float(str(node.secondary))
i += 1
if maxval["time"] is not None:
utc = maxval["time"]
res += ("%12sMAX %6s %4s %7s %7s\n") % (
nwsli,
utc.strftime("%b %-d"),
utc.strftime("%H%M"),
maxval["val"],
maxval["cfs"],
)
return res
def application(environ, start_response):
"""Do Fun Things"""
fields = parse_formvars(environ)
nwsli = fields.get("nwsli", "MROI4")[:5]
start_response("200 OK", [("Content-type", "text/plain")])
return [do(nwsli).encode("ascii")]
| mit | -4,720,805,943,521,046,000 | 27.051282 | 69 | 0.528793 | false | 3.445669 | false | false | false |
tictail/claw | claw/utils.py | 1 | 2061 | # -*- coding: utf-8 -*-
import logging
from random import shuffle
from claw.constants import RE_DELIMITER
log = logging.getLogger(__name__)
def safe_format(format_string, *args, **kwargs):
"""
Helper: formats string with any combination of bytestrings/unicode
strings without raising exceptions
"""
try:
if not args and not kwargs:
return format_string
else:
return format_string.format(*args, **kwargs)
# catch encoding errors and transform everything into utf-8 string
# before logging:
except (UnicodeEncodeError, UnicodeDecodeError):
format_string = to_utf8(format_string)
args = [to_utf8(p) for p in args]
kwargs = {k: to_utf8(v) for k, v in kwargs.iteritems()}
return format_string.format(*args, **kwargs)
# ignore other errors
except:
return u''
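

def detect_encoding(string):
    """
    Try to detect the encoding of a bytestring.

    The original helper is not included in this excerpt, so this is a
    minimal sketch: it assumes the third-party ``chardet`` package and
    falls back to utf-8 when chardet is unavailable or inconclusive.
    """
    if isinstance(string, unicode):
        return 'utf-8'
    try:
        import chardet
    except ImportError:
        return 'utf-8'
    detected = chardet.detect(string) or {}
    return detected.get('encoding') or 'utf-8'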
def to_unicode(str_or_unicode, precise=False):
"""
Safely returns a unicode version of a given string
>>> utils.to_unicode('привет')
u'привет'
>>> utils.to_unicode(u'привет')
u'привет'
If `precise` flag is True, tries to guess the correct encoding first.
"""
encoding = detect_encoding(str_or_unicode) if precise else 'utf-8'
if isinstance(str_or_unicode, str):
return unicode(str_or_unicode, encoding, 'replace')
return str_or_unicode
def to_utf8(str_or_unicode):
"""
Safely returns a UTF-8 version of a given string
>>> utils.to_utf8(u'hi')
'hi'
"""
if isinstance(str_or_unicode, unicode):
return str_or_unicode.encode("utf-8", "ignore")
return str(str_or_unicode)
def random_token(length=7):
vals = ("a b c d e f g h i j k l m n o p q r s t u v w x y z "
"0 1 2 3 4 5 6 7 8 9").split(' ')
shuffle(vals)
return ''.join(vals[:length])
def get_delimiter(msg_body):
delimiter = RE_DELIMITER.search(msg_body)
if delimiter:
delimiter = delimiter.group()
else:
delimiter = '\n'
return delimiter
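

if __name__ == '__main__':
    # Quick illustrative checks; the delimiter result depends on
    # RE_DELIMITER in claw.constants, so nothing is asserted here.
    print(safe_format(u'{0}, {1}!', u'привет', 'world'))
    print(random_token())
    print(repr(get_delimiter('a\r\nb')))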
| apache-2.0 | -4,425,286,584,638,680,600 | 25.802632 | 73 | 0.621993 | false | 3.536458 | false | false | false |
pjh/vm-analyze | analyze/oldscripts/gather_proc.py | 1 | 3609 | #! /usr/bin/env python3.3
# Virtual memory analysis scripts.
# Developed 2012-2014 by Peter Hornyack, [email protected]
# Copyright (c) 2012-2014 Peter Hornyack and University of Washington
from vm_regex import *
from pjh_utils import *
import vm_common as vm
import errno
import os
import re
import stat
import sys
proc_files_we_care_about = ("cmdline", "maps", "smaps", "comm", "status")
'''
output_subdir should have just been created, and should be empty.
'''
def copy_proc_files(pid_dir, output_subdir):
tag = "copy_proc_files"
# pid_dir is a /proc/[pid] directory, and output_subdir is a corresponding
# [pid] subdirectory in the output directory. Scan through the list of
# files that we care about and copy the contents of each one to the output
# directory. Because /proc files are not normal file system files, we
# don't use a copy command, but instead open every file for reading and
# then write every line to the output file.
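	# e.g. /proc/1234/maps is copied to <output-dir>/1234/maps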
for fname in proc_files_we_care_about:
proc_fname = "{0}/{1}".format(pid_dir, fname)
out_fname = "{0}/{1}".format(output_subdir, fname)
print_debug(tag, ("copying '{0}' to '{1}'".format(
proc_fname, out_fname)))
vm.copy_proc_file_old(proc_fname, out_fname)
def gather_proc_files(output_dir):
tag = "gather_proc_files"
proc_root = "/proc"
# Scan through all of the files under /proc, and for every process
# subdirectory (names with just a PID), copy the files that we care
# about to a corresponding directory in the output directory.
if not os.path.exists(proc_root):
print_error_exit(tag, ("proc_root directory '{0}' does not "
"exist!").format(proc_root))
dir_contents = os.listdir(proc_root)
for item in dir_contents:
match = valid_pid_dir.match(item)
if match:
pid = match.group(1)
pid_dir = "{0}/{1}".format(proc_root, pid)
if os.path.isdir(pid_dir):
output_subdir = "{0}/{1}".format(output_dir, pid)
os.mkdir(output_subdir)
copy_proc_files(pid_dir, output_subdir)
return
def create_output_dir(output_dir):
tag = "create_output_dir"
if os.path.exists(output_dir):
print_error_exit(tag, "Output directory '{0}' already exists".format(
output_dir))
else:
os.mkdir(output_dir)
print(("Output will be created in directory '{0}'").format(output_dir))
return
def check_requirements(output_dir):
tag = "check_requirements"
# Check for super-user permissions: try to open a /proc file that should
# not be readable by normal users.
kernel_fname = "/proc/kcore"
try:
f = open(kernel_fname, 'r')
f.close()
except IOError as e:
#if (e == errno.EACCES):
print_error_exit(tag, "must be run as root")
if os.path.exists(output_dir):
print_error_exit(tag, ("output directory '{0}' already exists").format(
output_dir))
return
def usage():
print("usage: {0} <output-dir> <user>[:<group>]".format(sys.argv[0]))
print(" <output-dir> will be created, its owner will be set to <user>, ")
print(" and its group will optionally be set to <group>.")
print(" This script must be run with root privilege (in order to read "
"/proc)!")
sys.exit(1)
def parse_args(argv):
tag = "parse_args"
if len(argv) != 3:
usage()
print_debug(tag, 'argv: {0}'.format(argv))
output_dir = argv[1]
usrgrp = argv[2]
return (output_dir, usrgrp)
# Main:
if __name__ == "__main__":
tag = "main"
print_debug(tag, "entered")
(output_dir, usrgrp) = parse_args(sys.argv)
check_requirements(output_dir)
create_output_dir(output_dir)
gather_proc_files(output_dir)
set_owner_group(output_dir, usrgrp)
sys.exit(0)
else:
print("Must run stand-alone")
usage()
sys.exit(1)
| bsd-3-clause | -7,003,235,764,294,394,000 | 27.195313 | 75 | 0.688556 | false | 2.931763 | false | false | false |
andreasvc/disco-dop | web/browse.py | 1 | 12449 | """Web interface to browse a corpus with various visualizations."""
# stdlib
import os
import re
import sys
import glob
import math
import logging
from collections import OrderedDict
from functools import wraps
import matplotlib
matplotlib.use('AGG')
import matplotlib.cm as cm
import pandas
# Flask & co
from flask import Flask, Response
from flask import request, render_template
# disco-dop
from discodop import treebank, treebanktransforms
from discodop.tree import DrawTree
DEBUG = False # when True: enable debugging interface, disable multiprocessing
PASSWD = None # optionally, dict with user=>pass strings
HEADRULES = '../alpino.headrules'
logging.basicConfig(
format='%(asctime)s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.DEBUG)
APP = Flask(__name__)
log = APP.logger
STANDALONE = __name__ == '__main__'
CORPUS_DIR = "corpus/"
COLORS = dict(enumerate('''
Black Red Green Orange Blue Turquoise SlateGray Peru Teal Aqua
Aquamarine BlanchedAlmond Brown Burlywood CadetBlue Chartreuse
Chocolate Coral Crimson Cyan Firebrick ForestGreen Fuchsia Gainsboro
Gold Goldenrod Gray GreenYellow HotPink IndianRed Indigo Khaki Lime
YellowGreen Magenta Maroon Yellow MidnightBlue Moccasin NavyBlue Olive
OliveDrab Orchid PapayaWhip Pink Plum PowderBlue Purple RebeccaPurple
RoyalBlue SaddleBrown Salmon SandyBrown SeaGreen Sienna Silver SkyBlue
SlateBlue Tan Thistle Tomato Violet Wheat'''.split()))
WORDLIST = pandas.read_table('sonar-word.freqsort.lower.gz',
encoding='utf8', index_col=0, header=None, names=['word', 'count'],
nrows=20000).index
def getdeplen(item):
"""Compute dependency length."""
tree = item.tree.copy(True)
deps = treebank.dependencies(tree)
a, b = treebank.deplen(deps)
return ([abs(x - y) > 7 for x, _, y in deps], a / b if b else 0)
# cannot highlight due to removing punct
# return (None, a / b if b else 0)
def getmodifiers(item):
"""Count and highlight REL/PP-modifiers."""
nodes = list(item.tree.subtrees(lambda n: n.label in ('REL', 'PP')
and treebanktransforms.function(n) == 'mod'))
return toboolvec(len(item.sent), {a for x in nodes
for a in x.leaves()}), len(nodes)
def toboolvec(length, indices):
"""Convert a list of indices into a list of booleans."""
return [n in indices for n in range(length)]
# Functions that accept item object with item.tree and item.sent members;
# return tuple (wordhighlights, sentweight).
FILTERS = {
'average dependency length': getdeplen,
'd-level': lambda i: (None, treebanktransforms.dlevel(i.tree)),
'rare words': lambda i: (list(~pandas.Index(
t.lower() for t in i.sent
).isin(WORDLIST)
& pandas.Series([ # filter names
'eigen' not in n.source[treebank.MORPH]
for n in
sorted(i.tree.subtrees(lambda n: isinstance(n[0], int)),
key=lambda n: n[0])])
), None),
'PP/REL modifiers': getmodifiers,
'punctuation': lambda i:
(None, max('.,\'"?!(:;'.find(t) + 1 for t in i.sent)),
'direct speech': lambda i:
(None, re.match(r"^- .*$|(?:^|.* )['\"](?: .*|$)",
' '.join(i.sent)) is not None),
}
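# For example, FILTERS['d-level'](item) returns (None, dlevel); for filters
# that return word highlights with sentweight None, addsentweight() below
# falls back to summing the highlights.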
def torgb(val, mappable):
"""Return hexadecimal HTML color string."""
return '#%02x%02x%02x' % mappable.to_rgba(val, bytes=True)[:3]
def charvalues(sent, values):
"""Project token values to character values.
>>> sorted(charvalues(['The', 'cat', 'is', 'on', 'the', 'mat'],
... [0, 0, 1, 1, 0, 1]))
[0, 1, 2, 3, 8, 9, 10, 14, 15, 16, 17]
"""
assert len(sent) == len(values)
result = []
for a, b in zip(sent, values):
result.extend([b] * (len(a) + 1))
return result
# http://flask.pocoo.org/snippets/8/
def check_auth(username, password):
"""This function is called to check if a username / password
combination is valid."""
return PASSWD is None or (username in PASSWD
and password == PASSWD[username])
def authenticate():
"""Sends a 401 response that enables basic auth."""
return Response(
'Could not verify your access level for that URL.\n'
'You have to login with proper credentials', 401,
{'WWW-Authenticate': 'Basic realm="Login Required"'})
def requires_auth(f):
"""Decorator to require basic authentication for route."""
@wraps(f)
def decorated(*args, **kwargs):
"""This docstring intentionally left blank."""
auth = request.authorization
if not auth or not check_auth(auth.username, auth.password):
return authenticate()
return f(*args, **kwargs)
return decorated
# end snipppet
def applyhighlight(sent, high1, high2, colorvec=None):
"""Return a version of sent where given char. indices are highlighted."""
cur = None
start = 0
out = []
for n, _ in enumerate(sent):
if colorvec is not None:
if cur != COLORS.get(colorvec[n], 'gray'):
out.append(sent[start:n])
if cur is not None:
out.append('</font>')
out.append('<font color=%s>' % COLORS.get(colorvec[n], 'gray'))
start = n
cur = COLORS.get(colorvec[n], 'gray')
elif n in high1:
if cur != 'red':
out.append(sent[start:n])
if cur is not None:
out.append('</span>')
out.append('<span class=r>')
start = n
cur = 'red'
elif n in high2:
if cur != 'blue':
out.append(sent[start:n])
if cur is not None:
out.append('</span>')
out.append('<span class=b>')
start = n
cur = 'blue'
else:
if cur is not None:
out.append(sent[start:n])
out.append('</span>')
start = n
cur = None
out.append(sent[start:])
	if cur is not None:
		out.append('</font>' if colorvec is not None else '</span>')
return ''.join(out)
def addsentweight(x):
wordhighlights, sentweight = x
if sentweight is None:
return wordhighlights, sum(wordhighlights)
return x
@APP.route('/browse')
@requires_auth
def browsetrees():
"""Browse through trees in a file."""
chunk = 20 # number of trees to fetch for one request
if 'text' in request.args and 'sent' in request.args:
textno = int(request.args['text'])
sentno = int(request.args['sent'])
start = max(1, sentno - sentno % chunk)
stop = start + chunk
nofunc = 'nofunc' in request.args
nomorph = 'nomorph' in request.args
filename = os.path.join(CORPUS_DIR, TEXTS[textno] + '.export')
trees = CORPORA[filename].itertrees(start, stop)
results = ['<pre id="t%s"%s>%s\n%s</pre>' % (n,
' style="display: none; "' if 'ajax' in request.args else '',
', '.join('%s: %.3g' % (f, addsentweight(FILTERS[f](item))[1])
for f in sorted(FILTERS)),
DrawTree(item.tree, item.sent).text(
unicodelines=True, html=True))
for n, (_key, item) in enumerate(trees, start)]
if 'ajax' in request.args:
return '\n'.join(results)
prevlink = '<a id=prev>prev</a>'
if sentno > chunk:
prevlink = '<a href="browse?text=%d;sent=%d" id=prev>prev</a>' % (
textno, sentno - chunk + 1)
nextlink = '<a id=next>next</a>'
nextlink = '<a href="browse?text=%d;sent=%d" id=next>next</a>' % (
textno, sentno + chunk + 1)
return render_template('browse.html', textno=textno, sentno=sentno,
text=TEXTS[textno], totalsents=1000,
trees=results, prevlink=prevlink, nextlink=nextlink,
chunk=chunk, nofunc=nofunc, nomorph=nomorph,
mintree=start, maxtree=stop)
return '<h1>Browse through trees</h1>\n<ol>\n%s</ol>\n' % '\n'.join(
'<li><a href="browse?text=%d;sent=1;nomorph">%s</a> ' % (n, text)
for n, text in enumerate(TEXTS))
@APP.route('/')
@APP.route('/browsesents')
@requires_auth
def browsesents():
"""Browse through sentences in a file; highlight selectable features."""
chunk = 20 # number of sentences per page
if 'text' in request.args and 'sent' in request.args:
textno = int(request.args['text'])
sentno = int(request.args['sent'])
sentno = max(chunk // 2 + 1, sentno)
start = max(1, sentno - chunk // 2)
stop = start + chunk
filename = os.path.join(CORPUS_DIR, TEXTS[textno] + '.export')
feat = request.args.get('feat', next(iter(FILTERS)))
trees = list(CORPORA[filename].itertrees(start, stop))
results = []
values = [addsentweight(FILTERS[feat](item))
for n, (_key, item) in enumerate(trees, start)]
norm = matplotlib.colors.Normalize(
vmin=0, vmax=max(a for _, a in values) * 2)
mappable = cm.ScalarMappable(norm, 'YlOrBr')
for n, ((_key, item), (wordhighlights, sentweight)) in enumerate(
zip(trees, values), start):
if sentweight is None:
sentweight = sum(wordhighlights)
if wordhighlights is not None:
xsent = applyhighlight(
' '.join(item.sent), None, None,
colorvec=charvalues(item.sent, wordhighlights))
else:
xsent = ' '.join(item.sent)
results.append(
'<a href="browse?text=%d;sent=%d" '
'style="text-decoration: none; color: black;">'
'<span style="background: %s; " title="%s: %.3g">'
' %s </span></a>' % (textno, n,
torgb(sentweight, mappable), feat, sentweight, xsent))
legend = 'Feature: [ %s ]<br>' % ', '.join(f if f == feat
else ('<a href="browsesents?text=%d;sent=%d;feat=%s">'
'%s</a>' % (textno, sentno, f, f))
for f in sorted(FILTERS))
legend += 'Legend: ' + ''.join(
'<span style="background-color: %s; width: 30px; '
'display: inline-block; text-align: center; ">'
'%d</span>' % (torgb(n, mappable), n)
for n in range(0,
int(math.ceil(max(a for _, a in values))) + 1))
prevlink = '<a id=prev>prev</a>'
if sentno > chunk:
prevlink = (
'<a href="browsesents?text=%d;sent=%d;feat=%s" id=prev>'
'prev</a>' % (textno, sentno - chunk, feat))
nextlink = '<a id=next>next</a>'
nextlink = ('<a href="browsesents?text=%d;sent=%d;feat=%s" id=next>'
'next</a>' % (textno, sentno + chunk, feat))
return render_template('browsesents.html', textno=textno,
sentno=sentno, text=TEXTS[textno],
totalsents='??', # FIXME
sents=results, prevlink=prevlink, nextlink=nextlink,
chunk=chunk, mintree=start, legend=legend,
query=request.args.get('query', ''),
engine='')
return render_template('browsemain.html',
texts=TEXTS)
def querydict(queries):
"""Return an OrderedDict of names and queries.
name is abbreviated query if not given."""
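	# e.g. querydict('np: NP < NN') -> OrderedDict([('np', (None, ' NP < NN'))])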
result = OrderedDict()
for line in (x for x in queries.splitlines() if x.strip()):
if ':' in line and line[:line.index(':')].isalnum():
name, query = line.split(':', 1)
else:
name = line[:100] + ('' if len(line) < 100 else '...')
query = line
if '\t' in query:
normquery, query = query.split('\t')
else:
normquery = None
result[name] = normquery, query
return result
def getcorpus():
"""Get list of files and number of lines in them."""
files = sorted(glob.glob(os.path.join(CORPUS_DIR, '*.export')))
assert files, ('no corpus files with extension .export '
'found.')
texts = [os.path.splitext(os.path.basename(a))[0] for a in files]
corpora = {filename: treebank.NegraCorpusReader(filename,
headrules=HEADRULES, punct='move')
for filename in files}
if os.path.exists('metadata.csv'):
metadata = pandas.read_csv('metadata.csv', index_col=0)
assert set(metadata.index) == set(texts), (
'metadata.csv does not match list of files.\n'
'only in metadata: %s\nonly in files: %s' % (
set(metadata.index) - set(texts),
set(texts) - set(metadata.index)))
metadata = metadata.loc[texts]
else:
metadata = None
return texts, corpora, metadata
class QueryStringRedirectMiddleware(object):
"""Support ; as query delimiter.
http://flask.pocoo.org/snippets/43/"""
def __init__(self, application):
self.application = application
def __call__(self, environ, start_response):
qs = environ.get('QUERY_STRING', '')
environ['QUERY_STRING'] = qs.replace(';', '&')
return self.application(environ, start_response)
APP.wsgi_app = QueryStringRedirectMiddleware(APP.wsgi_app)
log.info('loading corpus.')
if STANDALONE:
from getopt import gnu_getopt, GetoptError
try:
opts, _args = gnu_getopt(sys.argv[1:], '',
['port=', 'ip=', 'numproc=', 'debug'])
opts = dict(opts)
except GetoptError as err:
print('error: %r' % err, file=sys.stderr)
sys.exit(2)
DEBUG = '--debug' in opts
# NB: load corpus regardless of whether running standalone:
(TEXTS, CORPORA, METADATA) = getcorpus()
log.info('corpus loaded.')
try:
with open('treesearchpasswd.txt', 'rt') as fileobj:
PASSWD = {a.strip(): b.strip() for a, b
in (line.split(':', 1) for line in fileobj)}
log.info('password protection enabled.')
except IOError:
log.info('no password protection.')
if STANDALONE:
APP.run(use_reloader=False,
host=opts.get('--ip', '0.0.0.0'),
port=int(opts.get('--port', 5003)),
debug=DEBUG)
| gpl-2.0 | -5,266,283,117,673,447,000 | 31.674541 | 79 | 0.660053 | false | 2.921615 | false | false | false |
MERegistro/meregistro | meregistro/apps/seguridad/models/Rol.py | 1 | 1354 | # -*- coding: UTF-8 -*-
from django.db import models
from apps.seguridad.models import Credencial, TipoAmbito
class Rol(models.Model):
ROL_ADMIN_NACIONAL = 'AdminNacional'
ROL_ADMIN_SEGURIDAD = 'AdminSeguridad'
ROL_REFERENTE_JURISDICCIONAL = 'ReferenteJurisdiccional'
ROL_REFERENTE_INSTITUCIONAL = 'ReferenteInstitucional'
nombre = models.CharField(max_length=40)
descripcion = models.CharField(max_length=255)
credenciales = models.ManyToManyField(Credencial, related_name='roles')
tipos_ambito_asignable = models.ManyToManyField(TipoAmbito, related_name='roles')
roles_asignables = models.ManyToManyField('self', related_name='roles_asignadores', symmetrical=False)
path = models.CharField(max_length=255)
padre = models.ForeignKey('self', null=True, blank=True)
class Meta:
app_label = 'seguridad'
def __unicode__(self):
return self.descripcion
def save(self):
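        # e.g. with padre.path == '/3/' and self.id == 7, path becomes '/3/7/'
        # (note: on the very first save self.id may still be None, which
        # yields a path like '/None/')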
if self.padre is None:
padre_path = '/'
else:
padre_path = self.padre.path
self.path = padre_path + str(self.id) + '/'
models.Model.save(self)
def asigna(self, rol):
return bool(self.roles_asignables.filter(id=rol.id))
def asignableAAmbito(self, ambito):
return bool(self.tipos_ambito_asignable.filter(id=ambito.tipo.id))
| bsd-3-clause | 5,661,577,332,660,196,000 | 31.238095 | 106 | 0.679468 | false | 3.127021 | false | false | false |
osspeak/osspeak | osspeak/recognition/commands/monitor.py | 1 | 3535 | import threading
import collections
import log
import copy
import asyncio
import settings
import clargs
from recognition.actions.library import pywindow
from recognition.commands import loader
from recognition.actions import perform
from communication import topics, pubsub
import time
def create_message_subscriptions(msg_list, command_module_controller):
pubsub.subscribe(topics.RELOAD_COMMAND_MODULE_FILES, lambda: set_message(msg_list, topics.RELOAD_COMMAND_MODULE_FILES))
pubsub.subscribe(topics.RELOAD_GRAMMAR, lambda: set_message(msg_list, topics.RELOAD_GRAMMAR))
pubsub.subscribe(topics.PERFORM_COMMANDS,
lambda grammar_id, words: perform_commands(command_module_controller, grammar_id, words))
def start_watching_user_state():
msg_list = [None]
command_module_file_pattern = settings.settings['file_pattern']
module_loader = loader.StaticFileCommandModuleLoader(settings.settings['command_directory'], command_module_file_pattern)
command_module_controller = loader.CommandModuleController(module_loader)
command_module_controller.command_modules = command_module_controller.initialize_command_modules()
engine_status_history = collections.deque([], 10)
create_message_subscriptions(msg_list, command_module_controller)
fut = watch_user_system_state(msg_list, command_module_controller)
asyncio.ensure_future(fut)
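# Illustrative driver (an assumption -- the real entry point lives elsewhere):
#     loop = asyncio.get_event_loop()
#     start_watching_user_state()
#     loop.run_forever()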
async def watch_user_system_state(msg_list, command_module_controller):
from recognition.actions.library.stdlib import namespace, engine
previous_window = None
previous_state = None
previous_engine_settings = copy.copy(engine.settings)
initial_load_done = False
while True:
current_state = copy.copy(namespace['state'])
current_window = pywindow.foreground_window().title.lower()
current_engine_settings = copy.copy(engine.settings)
is_different_window = current_window != previous_window
is_different_state = current_state != previous_state
is_different_engine_settings = current_engine_settings != previous_engine_settings
msg = msg_list[0]
if is_different_window or is_different_state or msg:
msg_list[0] = None
new_active_modules = command_module_controller.get_active_modules(current_window)
reload_files = msg == topics.RELOAD_COMMAND_MODULE_FILES
if new_active_modules != command_module_controller.active_command_modules or reload_files:
initialize_modules = not initial_load_done or reload_files
                command_module_controller.load_modules(current_window, initialize_modules=initialize_modules)
initial_load_done = True
elif msg == topics.RELOAD_GRAMMAR:
raise NotImplementedError
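            # NOTE: the next line is unreachable because of the raise above.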
command_module_controller.load_and_send_grammar()
previous_window = current_window
previous_state = current_state
if is_different_engine_settings:
pubsub.publish(topics.SET_ENGINE_SETTINGS, current_engine_settings)
previous_engine_settings = current_engine_settings
await asyncio.sleep(1)
def set_message(msg_list, msg):
msg_list[0] = msg
def perform_commands(command_module_controller: loader.CommandModuleController, grammar_id: str, words):
try:
grammar_context = command_module_controller.grammars[grammar_id]
except KeyError:
log.logger.warning(f'Grammar {grammar_id} no longer exists')
return
    perform.perform_commands(grammar_context, words)
| mit | 532,628,761,851,594,700 | 47.438356 | 125 | 0.722772 | false | 4.067894 | false | false | false
jamielennox/django-openstack-auth-websso | openstack_auth_websso/plugin.py | 1 | 2014 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from keystoneclient.auth.identity import v2 as v2_auth
from keystoneclient.auth.identity import v3 as v3_auth
from keystoneclient.v3 import client as v3_client
from openstack_auth.plugin import base
from openstack_auth import exceptions
from openstack_auth import utils
__all__ = ['FederatedTokenPlugin']
class FederatedTokenPlugin(base.BasePlugin):
"""Authenticate against keystone with an existing token."""
def get_plugin(self, auth_url=None, token=None, project_id=None,
**kwargs):
if not all((auth_url, token)):
return None
if utils.get_keystone_version() >= 3:
return v3_auth.Token(auth_url=auth_url,
token=token,
project_id=project_id,
reauthenticate=False)
else:
return v2_auth.Token(auth_url=auth_url,
token=token,
tenant_id=project_id,
reauthenticate=False)
def list_projects(self, session, auth_plugin, auth_ref=None):
if utils.get_keystone_version() < 3:
msg = _('Cannot list federated tokens from v2 API')
raise exceptions.KeystoneAuthException(msg)
client = v3_client.Client(session=session, auth=auth_plugin)
return client.federation.projects.list()
| apache-2.0 | -5,599,959,279,583,941,000 | 37.730769 | 75 | 0.644985 | false | 4.266949 | false | false | false |
0ps/wfuzz | src/wfuzz/mixins.py | 1 | 1871 | from .plugin_api.urlutils import parse_url
from .exception import FuzzExceptBadInstall
# python 2 and 3
import sys
if sys.version_info >= (3, 0):
from urllib.parse import urljoin
else:
from urlparse import urljoin
class FuzzRequestSoupMixing(object):
def get_soup(self):
try:
from bs4 import BeautifulSoup
except ImportError:
raise FuzzExceptBadInstall("You need to install beautifulsoup4 first!")
soup = BeautifulSoup(self.content, 'html.parser')
return soup
class FuzzRequestUrlMixing(object):
# urlparse functions
@property
def urlparse(self):
return parse_url(self.url)
@property
def urlp(self):
return parse_url(self.url)
@property
def pstrip(self):
return self.to_cache_key()
@property
def is_path(self):
if self.code == 200 and self.url[-1] == '/':
return True
elif self.code >= 300 and self.code < 400:
if "Location" in self.headers.response and self.headers.response["Location"][-1] == '/':
return True
elif self.code == 401:
if self.url[-1] == '/':
return True
return False
@property
def recursive_url(self):
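        # e.g. a 301 from http://t/x with a Location header of "/admin"
        # recurses into "http://t/admin/FUZZ" (illustrative).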
if self.code >= 300 and self.code < 400 and "Location" in self.headers.response:
new_url = self.headers.response["Location"]
if not new_url[-1] == '/':
new_url += "/"
# taking into consideration redirections to /xxx/ without full URL
new_url = urljoin(self.url, new_url)
elif self.code == 401 or self.code == 200:
new_url = self.url
if not self.url[-1] == '/':
new_url = "/"
else:
raise Exception("Error generating recursive url")
return new_url + "FUZZ"
| gpl-2.0 | 6,004,915,391,877,477,000 | 27.348485 | 100 | 0.5783 | false | 4.049784 | false | false | false |
google-research/google-research | summae/human_and_extractive.py | 1 | 8614 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Human and extractive baseline evaluation.
human_and_extractive \
--data_dir=$ROCSTORIES_DATA \
--eval_subset=test
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
from absl import logging
import numpy as np
import six
from six.moves import range
import tensorflow.compat.v1 as tf
from rouge import rouge_scorer
from rouge import scoring
from summae import p2s_eval
from summae import util
FLAGS = flags.FLAGS
flags.DEFINE_string('data_dir', '.', 'Data directory.')
flags.DEFINE_string('eval_subset', 'test',
'which subset (valid/test) to eval/decode.')
flags.DEFINE_string('output_dir', '/tmp/12342',
'local directory to save extractive oracle')
flags.DEFINE_string('vocab_file', '',
'Subword vocab file.') # for detok first sentence
my_rouge_scorer = rouge_scorer.RougeScorer(['rouge1', 'rouge2', 'rougeL'],
use_stemmer=True)
def get_extracts(s):
# get 5 sentences as the extractive baselines
sents = s.feature_lists.feature_list['untokenized_sentences'].feature
assert len(sents) == 5
return tuple([sents[i].bytes_list.value[0] for i in range(5)])
def human_ave(summ_list):
"""Average pairwise rouge between two human summaries."""
agg = scoring.BootstrapAggregator()
for s1_id, s1 in enumerate(summ_list):
for s2_id, s2 in enumerate(summ_list):
if s1_id >= s2_id: # only compute for s1_id < s2_id
continue
s2_trunc = p2s_eval.get_summary_truncated(
p2s_eval.get_summary_first_sentence(s2), p2s_eval.TRUNC_LEN)
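      # s2 is reduced to its first sentence and truncated to TRUNC_LEN tokens
      # before scoring, so full s1 is compared against a truncated reference.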
s1_s2_trunc_score = my_rouge_scorer.score(s1, s2_trunc)
agg.add_scores(s1_s2_trunc_score)
agg_ave = agg.aggregate()
score_ave = {
rouge_type: agg_ave[rouge_type].mid for rouge_type in agg_ave # mid=mean
}
nwords_ave = np.mean([p2s_eval.count_words(s) for s in summ_list])
return (score_ave, nwords_ave)
def human_max(summ_list):
"""Maximum pairwise rouge between any two human summaries."""
score_max = None
rouge_1r_trunc_max = 0
for s1_id, s1 in enumerate(summ_list):
for s2_id, s2 in enumerate(summ_list):
if s1_id >= s2_id:
continue
s2_trunc = p2s_eval.get_summary_truncated(
p2s_eval.get_summary_first_sentence(s2), p2s_eval.TRUNC_LEN)
s1_s2_trunc_score = my_rouge_scorer.score(s1, s2_trunc)
if s1_s2_trunc_score['rouge1'].recall >= rouge_1r_trunc_max:
score_max = s1_s2_trunc_score
rouge_1r_trunc_max = s1_s2_trunc_score['rouge1'].recall
nwords_max = np.max([p2s_eval.count_words(s) for s in summ_list])
return (score_max, nwords_max)
def extract_ave(e, summ_list):
"""Average rouge between ith sentence and human summaries."""
agg = scoring.BootstrapAggregator()
e_trunc = p2s_eval.get_summary_truncated(
p2s_eval.get_summary_first_sentence(e),
p2s_eval.TRUNC_LEN) # get_summary_first_sentence may not be necessary
for s in summ_list:
s_e_trunc_score = my_rouge_scorer.score(s, e_trunc)
agg.add_scores(s_e_trunc_score)
agg_ave = agg.aggregate()
score_ave = {
rouge_type: agg_ave[rouge_type].mid for rouge_type in agg_ave # mid=mean
}
nwords_e = p2s_eval.count_words(e)
return (score_ave, nwords_e)
def extract_oracle(extract_list, summ_list):
"""Choose sentence with maximum average rouge."""
  # Accumulate ROUGE-1 recall of each truncated sentence against all summaries.
score_accum = []
for e in extract_list:
e_trunc = p2s_eval.get_summary_truncated(
p2s_eval.get_summary_first_sentence(e),
p2s_eval.TRUNC_LEN) # get_summary_first_sentence may not be necessary
accum_rouge_1r_trunc = 0
for s in summ_list:
s_e_trunc_score = my_rouge_scorer.score(s, e_trunc)
# for computing accumulative rouge
accum_rouge_1r_trunc += s_e_trunc_score['rouge1'].recall
score_accum.append(accum_rouge_1r_trunc)
e_id_o = np.argmax(score_accum)
e_o = extract_list[e_id_o]
# Compute average rouge for the oracle sentence
agg = scoring.BootstrapAggregator()
e_o_trunc = p2s_eval.get_summary_truncated(
p2s_eval.get_summary_first_sentence(e_o),
p2s_eval.TRUNC_LEN) # get_summary_first_sentence may not be necessary
for s in summ_list:
e_o_trunc_score = my_rouge_scorer.score(s, e_o_trunc)
agg.add_scores(e_o_trunc_score)
agg_o = agg.aggregate()
score_o = {
rouge_type: agg_o[rouge_type].mid for rouge_type in agg_o # mid=mean
}
nwords_o = p2s_eval.count_words(e_o)
return (score_o, nwords_o, e_o)
def print_agg_score(label, agg, nwords):
print(
'%s: \n\t rouge-1r-trunc20=%.3f \t rouge-Lr-trunc20=%.3f \t nwords=%.1f' %
(label, agg.aggregate()['rouge1'].mid.recall,
agg.aggregate()['rougeL'].mid.recall, np.mean(nwords)))
def main(argv):
if len(argv) > 1:
raise app.UsageError('Too many command-line arguments.')
tf.io.gfile.mkdir(FLAGS.output_dir)
data_file = os.path.join(
FLAGS.data_dir,
'rocstories_gt.' + six.ensure_str(FLAGS.eval_subset) + '.tfrecord')
seq_ex_list = util.get_seq_exs(data_file)
print('Input data %s' % data_file)
# Human summary baselines.
# We have 3 human summaries for each example, and
# 2 human performance variants:
# 1. 'a': average pairwise rouge between two summaries
# 2. 'm': maximum pairwise rouge between any two summaries
agg_human = {}
nwords_human = {}
for h in ['a', 'm']:
agg_human[h] = scoring.BootstrapAggregator()
nwords_human[h] = []
# Extractive baselines
# 1. '1','2','3','4','5': rouge between ith sentence and human summary
# 2. 'o': for each example, choose sentence with maximum average rouge
agg_extract = {}
nwords_extract = {}
for e in [str(x) for x in list(range(5))] + ['o']:
agg_extract[e] = scoring.BootstrapAggregator()
nwords_extract[e] = []
  # compute human and extractive baselines for every example
sent2oracle = {}
for ex in seq_ex_list:
summ_list = p2s_eval.get_summaries(ex)
summ_list = [x.decode('utf-8') for x in summ_list]
# human eval
score, nwords = human_ave(summ_list)
agg_human['a'].add_scores(score)
nwords_human['a'].append(nwords)
score, nwords = human_max(summ_list)
agg_human['m'].add_scores(score)
nwords_human['m'].append(nwords)
# extractive eval
extract_list = get_extracts(ex)
extract_list = [x.decode('utf-8') for x in extract_list]
for e_id, e in enumerate(extract_list):
score, nwords = extract_ave(e, summ_list)
agg_extract[str(e_id)].add_scores(score)
nwords_extract[str(e_id)].append(nwords)
score, nwords, e_o = extract_oracle(extract_list, summ_list)
agg_extract['o'].add_scores(score)
nwords_extract['o'].append(nwords)
# save story and oracle sentence for future use
first = p2s_eval.get_first_sentence(ex)
if first in sent2oracle:
logging.fatal('duplicate first sentence: %s', str(first))
sent2oracle[first] = (' '.join(extract_list), e_o) # (story, oracle)
# write each example and the corresponding oracle to disk
tk, _ = util.get_tokenizer_with_special(FLAGS.vocab_file, [])
def detok(s):
return tk.decode(util.strip_after_eos(s))
keys_sorted = sorted(sent2oracle.keys(), key=detok)
out_file = os.path.join(
FLAGS.output_dir, 'rocstories_gt.' + six.ensure_str(FLAGS.eval_subset) +
'.firstsent2oracle.txt')
with tf.gfile.Open(out_file, 'w') as f:
for k in keys_sorted:
f.write('%s\n' % (sent2oracle[k][1]))
# print out rouge scores for human performance
print_agg_score('human average', agg_human['a'], nwords_human['a'])
print_agg_score('human max', agg_human['m'], nwords_human['m'])
for e_id in range(5):
print_agg_score('extractive baseline{}'.format(e_id),
agg_extract[str(e_id)], nwords_extract[str(e_id)])
print_agg_score('extractive oracle', agg_extract['o'], nwords_extract['o'])
if __name__ == '__main__':
app.run(main)
| apache-2.0 | -9,174,223,925,574,483,000 | 34.01626 | 80 | 0.666241 | false | 3.016106 | false | false | false |
jpvanhal/cloudsizzle | cloudsizzle/asi/server.py | 1 | 11139 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2009-2010 CloudSizzle Team
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import logging
from cloudsizzle.kp import SIBConnection, Triple, bnode, uri
from cloudsizzle import settings
from cloudsizzle.asi.importer import user_to_rdf
from cloudsizzle.asi.service import AbstractService, \
ASIServiceKnowledgeProcessor
from cloudsizzle.asi.asi_friends_connection import \
ASIFriendsConnection as ASIConnection
LOG = logging.getLogger('cloudsizzle.asi.server')
PEOPLE_BASE_URI = 'http://cos.alpha.sizl.org/people/'
class SessionStore(object):
def __init__(self):
self._sessions = {}
def __del__(self):
for ac in self._sessions.itervalues():
ac.close()
def __getitem__(self, key):
return self._sessions[key]
def login(self, username, password):
msg = "Logging in to ASI with username '{0}' and password '{1}'."
LOG.debug(msg.format(username, password))
ac = ASIConnection(
base_url=settings.ASI_BASE_URL,
app_name=settings.ASI_APP_NAME,
app_password=settings.ASI_APP_PASSWORD,
username=username,
password=password)
response = ac.open()
try:
user_id = response['entry']['user_id']
self._sessions[user_id] = ac
LOG.debug("Logged in with user_id {0}!".format(user_id))
return ac.session['entry']
except KeyError:
ac.close()
LOG.warning("Logging in failed: {0}".format(response['messages']))
return response
def logout(self, user_id):
LOG.debug('Logging out user with user_id {0}.'.format(user_id))
try:
ac = self._sessions[user_id]
ac.close()
del self._sessions[user_id]
except KeyError:
msg = 'Logging out failed: user {0} was not logged in.'
LOG.warning(msg.format(user_id))
class AbstractServer(AbstractService):
"""Abstract base class for building the server side of a request-response
type service.
AbstractServer subscribes to service requests and provides a method for
responding to these requests.
"""
def __init__(self, sc):
super(AbstractServer, self).__init__(sc)
@property
def query_triple(self):
return Triple(None, 'rdf:type', self.request_type)
def respond(self, request_id, response):
"""Respond to a service request.
request_id -- The ID of the service request.
response -- A dict containing the response data.
"""
response['rdf:type'] = self.response_type
response['response_to'] = uri(request_id)
LOG.debug(
'Responding to request {0} with {1}.'.format(request_id, response))
response_triples = []
for key, values in response.iteritems():
if not isinstance(values, list):
values = [values]
for value in values:
response_triples.append(Triple(bnode('id'), key, value))
self.sc.insert(response_triples)
class LoginServer(AbstractServer):
def __init__(self, sc, session_store):
super(LoginServer, self).__init__(sc)
self.session_store = session_store
@property
def name(self):
return 'Login'
def process(self, id_, data):
response = self.session_store.login(data['username'],
data['password'])
self.respond(id_, response)
class LogoutServer(AbstractServer):
def __init__(self, sc, session_store):
super(LogoutServer, self).__init__(sc)
self.session_store = session_store
@property
def name(self):
return 'Logout'
def process(self, id_, data):
self.session_store.logout(data['user_id'])
class RegisterServer(AbstractServer):
def __init__(self, sc):
super(RegisterServer, self).__init__(sc)
@property
def name(self):
return 'Register'
def process(self, id_, data):
with ASIConnection(
base_url=settings.ASI_BASE_URL,
app_name=settings.ASI_APP_NAME,
app_password=settings.ASI_APP_PASSWORD) as ac:
user_info = ac.create_user(
username=data['username'],
password=data['password'],
email=data['email'])
if 'messages' not in user_info:
# Copy user info from ASI to SIB.
triples = user_to_rdf(user_info)
self.sc.insert(triples)
user_id = user_info['id']
response = {'user_id': user_id}
else:
messages = user_info['messages']
response = {'messages': messages}
self.respond(id_, response)
class RejectFriendRequestServer(AbstractServer):
def __init__(self, sc, session_store):
super(RejectFriendRequestServer, self).__init__(sc)
self.session_store = session_store
@property
def name(self):
return 'RejectFriendRequest'
def process(self, id_, data):
user_id = str(data['user_id'])
friend_id = str(data['friend_id'])
try:
ac = self.session_store[user_id]
except KeyError, e:
print e
            response = {'messages': 'not logged in to ASI'}
else:
result = ac.reject_friend_request(friend_id)
user_uri = '%sID#%s' % (PEOPLE_BASE_URI, user_id)
friend_uri = '%sID#%s' % (PEOPLE_BASE_URI, friend_id)
# Remove from my view
remove_triple = Triple(
user_uri,
uri('http://cos.alpha.sizl.org/people#PendingFriend'),
friend_uri)
self.sc.remove(remove_triple)
response = {'result': str(result)}
self.respond(id_, response)
class RemoveFriendsServer(AbstractServer):
def __init__(self, sc, session_store):
super(RemoveFriendsServer, self).__init__(sc)
self.session_store = session_store
@property
def name(self):
return 'RemoveFriends'
def process(self, id_, data):
user_id = str(data['user_id'])
friend_id = str(data['friend_id'])
try:
ac = self.session_store[user_id]
except KeyError, e:
print e
            response = {'messages': 'not logged in to ASI'}
else:
ac.remove_friend(friend_id)
user_uri = '%sID#%s' % (PEOPLE_BASE_URI, user_id)
friend_uri = '%sID#%s' % (PEOPLE_BASE_URI, friend_id)
# Remove from my view
remove_triple1 = Triple(
user_uri,
uri('http://cos.alpha.sizl.org/people#Friend'),
friend_uri)
# Remove from my friend's view
remove_triple2 = Triple(
friend_uri,
uri('http://cos.alpha.sizl.org/people#Friend'),
user_uri)
result = self.sc.remove([remove_triple1, remove_triple2])
response = {'result': str(result)}
self.respond(id_, response)
class AddFriendsServer(AbstractServer):
def __init__(self, sc, session_store):
super(AddFriendsServer, self).__init__(sc)
self.session_store = session_store
@property
def name(self):
return 'AddFriends'
def process(self, id_, data):
user_id = str(data['user_id'])
friend_id = str(data['friend_id'])
try:
ac = self.session_store[user_id]
except KeyError, e:
print e
            response = {'messages': 'not logged in to ASI'}
else:
pending_friends = ac.get_pending_friend_requests()
my_pending_friend_list = []
try:
for pending_friend in pending_friends['entry']:
my_pending_friend_list.append(pending_friend['id'])
except KeyError, e:
print e
result = ac.add_friend(friend_id)
response = {'result': str(result)}
if friend_id in my_pending_friend_list:
user_uri = '%sID#%s' % (PEOPLE_BASE_URI, user_id)
friend_uri = '%sID#%s' % (PEOPLE_BASE_URI, friend_id)
# Remove from my view
remove_triple = Triple(
user_uri,
uri('http://cos.alpha.sizl.org/people#PendingFriend'),
friend_uri)
self.sc.remove(remove_triple)
# Add to friend's view
insert_triple1 = Triple(
friend_uri,
uri('http://cos.alpha.sizl.org/people#Friend'),
user_uri)
# Add to my view
insert_triple2 = Triple(
user_uri,
uri('http://cos.alpha.sizl.org/people#Friend'),
friend_uri)
self.sc.insert([insert_triple1, insert_triple2])
else:
user_uri = '%sID#%s' % (PEOPLE_BASE_URI, user_id)
friend_uri = '%sID#%s' % (PEOPLE_BASE_URI, friend_id)
# Add to friend's view
insert_triple = Triple(
friend_uri,
uri('http://cos.alpha.sizl.org/people#PendingFriend'),
user_uri)
self.sc.insert(insert_triple)
self.respond(id_, response)
def main():
session_store = SessionStore()
with SIBConnection('ASI service server', method='preconfigured') as sc:
services = (
LoginServer(sc, session_store),
LogoutServer(sc, session_store),
RegisterServer(sc),
AddFriendsServer(sc, session_store),
RemoveFriendsServer(sc, session_store),
RejectFriendRequestServer(sc, session_store),
)
asi_server_kp = ASIServiceKnowledgeProcessor(services)
asi_server_kp.start()
try:
raw_input('Press enter to stop.\n')
finally:
asi_server_kp.stop()
if __name__ == '__main__':
main()
| mit | -7,420,965,687,537,019,000 | 30.735043 | 79 | 0.573301 | false | 3.988185 | false | false | false |
diplomacy/research | diplomacy_research/__init__.py | 1 | 1696 | # ==============================================================================
# Copyright 2019 - Philip Paquette
#
# NOTICE: Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# ==============================================================================
""" Diplomacy Research """
# Setting up root logger
import os
import logging
import sys
# Adding path to proto/ dir
sys.path.append(os.path.join(os.path.dirname(__file__), 'proto'))
LOGGING_LEVEL = {'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG}.get(os.environ.get('DIPLOMACY_LOGGING', 'INFO'), logging.INFO)
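# e.g. running with DIPLOMACY_LOGGING=DEBUG selects logging.DEBUG above.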
# Defining root logger
ROOT_LOGGER = logging.getLogger('diplomacy_research')
ROOT_LOGGER.setLevel(LOGGING_LEVEL)
ROOT_LOGGER.propagate = False
# Adding output to stdout by default
STREAM_HANDLER = logging.StreamHandler(sys.stdout)
STREAM_HANDLER.setLevel(logging.DEBUG)
FORMATTER = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
STREAM_HANDLER.setFormatter(FORMATTER)
ROOT_LOGGER.addHandler(STREAM_HANDLER)
| mit | 6,087,254,447,154,937,000 | 42.487179 | 103 | 0.655071 | false | 4.229426 | false | true | false |
MiczFlor/Booktype | lib/booktype/constants.py | 1 | 22740 | # This file is part of Booktype.
# Copyright (c) 2012 Aleksandar Erkalovic <[email protected]>
#
# Booktype is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Booktype is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Booktype. If not, see <http://www.gnu.org/licenses/>.
import os
from django.utils.translation import ugettext_noop
# SSL cert verification during request using 'requests' lib
REQUESTS_VERIFY_SSL_CERT = True
# SECURITY CLASS
BOOKTYPE_BASE_SECURITY_CLASS = 'booktype.utils.security.base.BaseSecurity'
# Should track changes be turned on for the book
BOOK_TRACK_CHANGES = False
# CHAPTER STATUS RELATED
CHAPTER_STATUS_LIST = [
{'name': ugettext_noop('new'), 'color': '#3a87ad'},
{'name': ugettext_noop('needs content'), 'color': '#ff0000'},
{'name': ugettext_noop('completed'), 'color': '#5cb85c'},
{'name': ugettext_noop('to be proofed'), 'color': '#f0ad4e'}
]
CHAPTER_STATUS_DEFAULT = CHAPTER_STATUS_LIST[0]['name']
# IMPORTERS RELATED STUFF
BOOKTYPE_IMPORTERS = {
'epub': ('booktype.importer.epub', 'import_epub'),
'docx': ('booktype.importer.docx', 'import_docx')
}
# Default styles matched so far; more will be added in the future.
# These constants are used in docimporter.py to correctly
# assign classes to imported elements.
DOCX_PARAGRAPH_STYLES_MAP = {
'AuthorName': 'authorname',
'Reference': 'reference',
'Citation': 'bk-cite'
}
# Which elements are considered <h1> style
H1_STYLES = ['heading1']
# Which elements are considered <h2> style
H2_STYLES = ['heading2']
# Which elements are considered <h3> style
H3_STYLES = ['heading3']
# Which elements are considered <h4> style
H4_STYLES = ['heading4']
# Which elements are considered <h5> style
H5_STYLES = ['heading5']
# Which elements are considered <h6> style
H6_STYLES = ['heading6']
# All of our Heading styles
DOCX_HEADING_STYLES = H1_STYLES + H2_STYLES + H3_STYLES + H4_STYLES + H5_STYLES + H6_STYLES
DOCX_HEADING_STYLES_TUPLE = (
('h1', H1_STYLES),
('h2', H2_STYLES),
('h3', H3_STYLES),
('h4', H4_STYLES),
('h5', H5_STYLES),
('h6', H6_STYLES)
)
# This will allow settings custom class on clients
DOCX_IMPORTER_CLASS = 'booktype.importer.WordImporter'
# END IMPORTERS STUFF
# SERVER RELATED
THIS_BOOKI_SERVER = os.environ.get('HTTP_HOST', 'booktype-demo.sourcefabric.org')
# ADMINISTRATIVE RELATED
CREATE_BOOK_VISIBLE = True
CREATE_BOOK_LICENSE = ""
FREE_REGISTRATION = True
ADMIN_CREATE_BOOKS = False
ADMIN_IMPORT_BOOKS = False
BOOKTYPE_MAX_USERS = 0
BOOKTYPE_MAX_BOOKS = 0
BOOKTYPE_BOOKS_PER_USER = -1
GROUP_LIST_PAGE_SIZE = 20
USER_LIST_PAGE_SIZE = 20
BOOK_LIST_PAGE_SIZE = 20
# google analytics
USE_GOOGLE_ANALYTICS = False
GOOGLE_ANALYTICS_ID = ''
# reports
REPORTS_EMAIL_FROM = '[email protected]'
REPORTS_EMAIL_USERS = ['[email protected]']
REPORTS_CUSTOM_FONT_PATH = False
MAX_ADDITIONAL_METADATA = 3
# IMPORT RELATED
EPUB_COVER_MIN_DPI = 300
EPUB_COVER_MIN_SIZE = 500
EPUB_COVER_MAX_SIZE = 2800
EPUB_COVER_MAX_PIXELS = 3200000
# PUBLISHING RELATED
PUBLISH_OPTIONS = ['mpdf', 'screenpdf', 'epub3', 'epub2', 'icml', 'docx', 'mobi', 'xhtml']
# mobi conversion
# Options are "kindlegen" or "calibre"
MOBI_CONVERT = "calibre"
KINDLEGEN_PATH = "kindlegen"
CALIBRE_PATH = "ebook-convert"
CALIBRE_ARGS = ""
OBJAVI_URL = "http://objavi.booktype.pro/objavi.cgi"
ESPRI_URL = "http://objavi.booktype.pro/espri.cgi"
# theme plugins
BOOKTYPE_THEME_PLUGINS = {
'custom': 'booktype.apps.themes.convert.custom',
'academic': 'booktype.apps.themes.convert.academic'
}
# define path to module where class ExportBook is located
BOOKTYPE_EXPORT_CLASS_MODULE = 'booktype.apps.export.utils'
EXPORT_WAIT_FOR = 90
# convert constants
CONVERT_EDITOR_WIDTH = 898
XHTML_DOCUMENT_WIDTH = 2480
MOBI_DOCUMENT_WIDTH = 1500
EPUB_DOCUMENT_WIDTH = 1500
# editor stuff here
EDITOR_AUTOSAVE_ENABLED = False # disabled by default
EDITOR_AUTOSAVE_DELAY = 60 # time in seconds
EDITOR_SETTINGS_ROLES_SHOW_PERMISSIONS = 0
# end editor stuff
EPUB_NOT_ALLOWED_TAGS = (
# 'strip' - drop tag, leave content
# 'drop' - drop tag, drop content
# 'replace' - replace tag with 'replacement'
# EXAMPLES:
# {'tag': 'i', 'action': 'strip'},
# {'tag': 'b', 'action': 'drop'},
# {
# 'tag': 'u',
# 'action': 'replace',
# 'replacement': {
# 'tag': 'span',
# 'attrs': (
# ('style', 'text-decoration: underline;'),
# ('class', 'happy'),
# )
# }
# },
)
# According to epubcheck, only the following tags are allowed at the first
# level of nesting inside the body tag.
# A tag that does not meet this requirement is replaced with "<p>".
EPUB_AVAILABLE_INBODY_ROOT_TAGS = (
'address', 'blockquote', 'del', 'div', 'dl', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
'hr', 'ins', 'noscript', 'ns:svg', 'ol', 'p', 'pre', 'script', 'table', 'ul'
)
# Maps each tag to the attributes allowed on it (required by epubcheck).
EPUB_ALLOWED_TAG_ATTRS = (
('ol', ('class', 'dir', 'id', 'lang', 'style', 'title', 'xml:lang')),
)
EXPORT_SETTINGS = {
'mpdf': [
{u'name': u'size', u'value': u'A4'}, {u'name': u'custom_width', u'value': u''},
{u'name': u'custom_height', u'value': u''}, {u'name': u'top_margin', u'value': u'20'},
{u'name': u'side_margin', u'value': u'20'}, {u'name': u'bottom_margin', u'value': u'20'},
{u'name': u'gutter', u'value': u'20'}, {u'name': u'show_header', u'value': u'on'},
{u'name': u'header_margin', u'value': u'10'}, {u'name': u'show_footer', u'value': u'on'},
{u'name': u'footer_margin', u'value': u'10'}, {u'name': u'bleed_size', u'value': u''},
{u'name': u'styling', u'value': u''}, {u'name': u'crop_marks', u'value': u'off'}],
'screenpdf': [
{u'name': u'size', u'value': u'A4'}, {u'name': u'custom_width', u'value': u''},
{u'name': u'custom_height', u'value': u''}, {u'name': u'top_margin', u'value': u'20'},
{u'name': u'side_margin', u'value': u'20'}, {u'name': u'bottom_margin', u'value': u'20'},
{u'name': u'gutter', u'value': u'20'}, {u'name': u'show_header', u'value': u'on'},
{u'name': u'header_margin', u'value': u'10'}, {u'name': u'show_footer', u'value': u'on'},
{u'name': u'footer_margin', u'value': u'10'}, {u'name': u'cover_image', u'value': u' '},
{u'name': u'styling', u'value': u''}],
'epub2': [{u'name': u'cover_image', u'value': u' '}, {u'name': u'styling', u'value': u''}],
'epub3': [{u'name': u'cover_image', u'value': u' '}, {u'name': u'styling', u'value': u''}],
'icml': [{u'name': u'cover_image', u'value': u' '}, {u'name': u'styling', u'value': u''}],
'docx': [{u'name': u'cover_image', u'value': u' '}, {u'name': u'styling', u'value': u''}],
'mobi': [{u'name': u'cover_image', u'value': u' '}, {u'name': u'styling', u'value': u''}],
'xhtml': [{u'name': u'styling', u'value': u''}]
}
INCH_TO_MM = 25.4
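# e.g. "pocket" is 4.25in x 6.875in, i.e. (107.95, 174.625) in millimeters.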
PAGE_SIZE_DATA = {
'comicbook': (6.625 * INCH_TO_MM, 10.25 * INCH_TO_MM),
"pocket": (4.25 * INCH_TO_MM, 6.875 * INCH_TO_MM),
"usletter": (8.5 * INCH_TO_MM, 11 * INCH_TO_MM),
"ustrade6x9": (6 * INCH_TO_MM, 9 * INCH_TO_MM),
"ustrade": (6 * INCH_TO_MM, 9 * INCH_TO_MM),
"landscape9x7": (9 * INCH_TO_MM, 7 * INCH_TO_MM),
"square7.5": (7.5 * INCH_TO_MM, 7.5 * INCH_TO_MM),
"royal": (6.139 * INCH_TO_MM, 9.21 * INCH_TO_MM),
"crownquarto": (7.444 * INCH_TO_MM, 9.681 * INCH_TO_MM),
"square8.5": (8.5 * INCH_TO_MM, 8.5 * INCH_TO_MM),
"us5.5x8.5": (5.5 * INCH_TO_MM, 8.5 * INCH_TO_MM),
"digest": (5.5 * INCH_TO_MM, 8.5 * INCH_TO_MM),
"us5x8": (5 * INCH_TO_MM, 8 * INCH_TO_MM),
"us7x10": (7 * INCH_TO_MM, 10 * INCH_TO_MM),
"a5": (148, 210),
"a4": (210, 297),
"a3 (nz tabloid)": (297, 420),
"a2 (nz broadsheet)": (420, 594),
"a1": (594, 841),
"b5": (176, 250),
"b4": (250, 353),
"b3": (353, 500),
"b2": (500, 707),
"b1": (707, 1000),
# Not so sure about next 3
"uk tabloid": (11 * INCH_TO_MM, 17 * INCH_TO_MM),
"uk broadsheet": (18 * INCH_TO_MM, 24 * INCH_TO_MM),
"us broadsheet": (15 * INCH_TO_MM, 22.75 * INCH_TO_MM),
"berliner" : (315, 470),
"foolscap (f4)": (210, 330),
"oamaru broadsheet" :(382, 540),
"oamaru tabloid": (265, 380),
}
# These are default options for CSS settings
BOOKTYPE_CSS_BOOK = ('.objavi-chapter{ color: #000; }'
'a { text-decoration:none; color:#000; } '
'h1 .initial{ color: #000; } '
'.objavi-subsection{ display: block; '
'page-break-before: always; '
'/* page-break-after: always;*/ '
'text-transform: uppercase; font-size: 20pt; }'
'body .objavi-subsection:first-child{ '
'page-break-before: avoid; } '
'.objavi-subsection .initial { '
'font-size: 1em; color: #000; }'
'.objavi-subsection-heading { font-size: 20pt; '
'text-align: center; '
'line-height: 300px; font-weight: normal; } '
'h1 { page-break-before: always; } '
'table { float: none; }'
'h1.frontpage{ page-break-after:always; margin-top:70%; '
'font-size: 20pt; '
'text-align: center; page-break-before: avoid; '
'font-weight: normal; }'
'div.copyright{ padding: 1em; } '
'/* TOC ******************************/ '
'table { float: none; } '
'table.toc { font-size: 1.1em; width: 95%; } '
                     'table.toc td{ vertical-align:top; padding-left: 0.5em; } '
'td.chapter { padding: 0 0.5em; text-align: right; } '
'table.toc td.pagenumber { text-align: right; '
'vertical-align:bottom; } '
'td.section { padding-top: 1.1em; font-weight: bold; } '
'/* End TOC **************************/ '
'pre { overflow: hidden; white-space: pre-wrap; } '
'h1 h2 h3 h4 h5 h6{ page-break-after: avoid; '
'page-break-inside: avoid; } '
'.page-break{ page-break-before: always; height: 7em; '
'display: block; } '
'#right-footer { text-align: right; } '
'#left-footer { text-align: left; } '
'a { word-wrap: break-word; } '
'.objavi-no-page-break { page-break-inside: avoid; } '
'.unseen{ z-index: -66; margin-left: -1000pt; }'
'sup {vertical-align:text-top;font-size:0.7em; }'
'img { max-width: 95%; }'
'p { word-wrap: break-word; }'
'li { word-wrap: break-word; }'
'#InsertNote_NoteList { word-wrap: break-word; }')
BOOKTYPE_CSS_BOOKJS = ('/* DOCUMENT */ @page { size: auto;}'
'body { word-break: break-word; -webkit-hyphens: auto;'
'hyphens: auto; font-family: "Liberation Serif";'
'background-color: white;}' '/* CONTENT */'
'img { max-width: 90%; height: auto;'
'image-resolution: from-image;}'
'sup { font-size: 80%;}'
'p { line-height: 130%; word-break: break-word;'
'/* text-align: justify; */'
'text-align: left;}'
'a { color: #000; text-decoration: none; '
'word-wrap: break-word;}'
'ol ul { text-align: justify;}'
'li { margin-left: 1em; word-wrap: break-word; '
'page-break-inside: avoid; windows: 4; orphans: 4;}'
'/* HEADINGS */'
'h1 {}'
'h1 .initial { display: none;}'
'h1 .subtitle {}'
'h1 .author { display: block; margin-top: 0.2in; '
'font-weight: normal;}'
'h1 .comma { font-size: 22pt; display: none;}'
'h2 { page-break-after: avoid;}'
'h3 { page-break-after: avoid;}'
'h4 { page-break-after: avoid;}'
'h5 { font-weight: normal; text-align: left;'
'page-break-after: avoid;}'
'/* CODE BLOCKS */'
'pre { white-space: pre-wrap; /* css-3 */ '
'white-space: -moz-pre-wrap; /* Mozilla since 1999 */'
'white-space: -pre-wrap;/* Opera 4-6 */'
'white-space: -o-pre-wrap; /* Opera 7 */'
'word-wrap: break-word; /* Internet Explorer 5.5+ */'
'widows:4; orphans:4;}'
'code {}'
'/* TOC */'
'#pagination-toc-title { font-size: 20pt; '
'font-weight: 700; text-align: left; '
'padding-bottom: .4in;}'
'.pagination-toc-entry {/* width: 6.2in; */ '
'width: 90%; display: block; padding-bottom: .3in; '
'font-size: 16pt;}'
'.pagination-toc-entry .pagination-toc-pagenumber { '
'font-weight: 400; display: inline-block; '
'vertical-align: text-bottom; font-size: 16pt; '
'float:right; '
'/* SET AUTOMATICALLY */}'
'.pagination-toc-entry.section { font-weight:700; '
'font-size: 16pt; text-transform: uppercase; '
'padding-bottom: .3in;}'
'/* FRONT MATTER */'
'#booktitle { margin-top: 1.7in; font-size: 26pt; '
'font-weight: normal; text-align: center; '
'text-transform: uppercase;}'
'#booksubtitle { font-size: 22px; margin-top: 0.2in; '
'font-weight: normal; text-align: center;}'
'#bookeditors { padding-top: 1.5in; '
'font-weight: normal; text-align: center; '
'font-size: 24pt;}'
'#bookpress { padding-top: 1.8in; font-weight: normal;'
'text-align: center; font-size: 24pt;}'
'#copyrightpage { font-weight: normal; '
'font-size: 18pt; padding-top: 0.2in;}'
'/* HEADER */'
'.pagination-header {font-size: 12pt;'
'font-weight: light;}'
'.pagination-pagenumber {font-size: 12pt;}'
'.pagination-header '
'.pagination-section { display: none; }'
'.pagination-toc-text .initial { display: none; }'
'.pagination-chapter .initial { display: none; }'
'/* MISC */'
'.imagecaption { font-size: 9pt; padding-left: 0.2in;'
'line-height: 18px; text-align: justify;'
'font-weight: normal; display: block;}'
'.pagebreak { -webkit-region-break-after: always;}'
'.pagebreakbefore{'
' -webkit-region-break-before: always;}'
'.objavi-chapter .initial { display: none;}'
'.objavi-subsection { display: none;}'
'.objavi-subsection-heading { '
'line-height: 120px !important; '
'/* work-around to make section title pages no longer '
'than one page */ font-size: 22px; font-weight: bold;'
' text-align: left; display: none;}'
'@media screen { .page { border: solid 1px #000;'
' margin-bottom: .2in; }'
'body { background-color: #efefef; }}'
'#InsertNote_NoteList { word-wrap: break-word;}')
BOOKTYPE_CSS_EBOOK = ('.objavi-chapter{ color: #000; display:none;} '
'a { text-decoration:none; color:#000;} '
'h1 .initial{ color: #000; display:none;} '
'.objavi-subsection{ display: block; '
'page-break-before: always;} '
'body .objavi-subsection:first-child{ '
'page-break-before: avoid;} '
'.objavi-subsection .initial { color: #000; '
'display:none;} .objavi-subsection-heading {'
'font-size: 20pt; text-align: center; '
'line-height: 300px; font-weight: normal;}'
'table { float: none;} h1.frontpage{'
'page-break-after:always; margin-top:70%; '
'font-size: 20pt; text-align: center;'
'page-break-before: avoid; max-width: 700pt; '
'font-weight: normal;} div.copyright{padding: 1em;}'
'/* TOC ******************************/'
'table { float: none;}'
'table.toc { font-size: 1.1em; width: 95%;}'
'table.toc td{ vertical-align:top; padding-left: 0.5em;}'
'td.chapter { padding: 0 0.5em; text-align: right;} '
'table.toc td.pagenumber { text-align: right; '
'vertical-align:bottom;} '
'td.section { padding-top: 1.1em; font-weight: bold;}'
'/* End TOC **************************/ '
'img { max-width: 500px; height: auto;}'
'.objavi-no-page-break {page-break-inside: avoid;} '
'.unseen { z-index: -66; margin-left: -1000pt;} '
'.objavi-subsection-heading{ height:860px; '
'font-size:0px; display:block;}')
BOOKTYPE_CSS_PDF = ('.objavi-subsection{ display: block; '
'page-break-before: always; /* page-break-after: always;*/'
'text-transform: uppercase; font-size: 20pt; } '
'body .objavi-subsection:first-child{ '
'page-break-before: avoid; } '
'.objavi-subsection .initial { font-size: 1em;'
'color: #000; } .objavi-subsection-heading {'
'font-size: 20pt; text-align: center; line-height: 300px;'
'font-weight: normal;} h1 { page-break-before: always; } '
'table { float: none; } '
'h1.frontpage{ page-break-after:always; margin-top:70%; '
'font-size: 20pt; text-align: center; '
'page-break-before: avoid; font-weight: normal; } '
'div.copyright{ padding: 1em; } '
'/* TOC ******************************/ '
'table { float: none; } '
'table.toc { font-size: 1.1em; width: 95%; } '
'table.toc td{ vertical-align:top; padding-left: 0.5em; } '
'td.chapter { padding: 0 0.5em; text-align: right; } '
'table.toc td.pagenumber { text-align: right; '
'vertical-align:bottom; } td.section { padding-top: 1.1em;'
'font-weight: bold; } '
'/* End TOC **************************/ '
'pre { overflow: hidden; white-space: pre-wrap; } '
'h1, h2, h3, h4, h5, h6{ page-break-after: avoid; '
'page-break-inside: avoid; } '
'.page-break{ page-break-before: always; height: 7em;'
'display: block; } a { word-wrap: break-word; } '
'.objavi-no-page-break { page-break-inside: avoid; } '
'/*To force a blank page it is sometimes necessary to '
'add unseen content. Display:none and visibility: hidden'
' do not work -- the renderer realises that they are not '
'there and skips the page. So we add a tiny bit of text '
'beyond the margin of the page. */ '
'.unseen{ z-index: -66; margin-left: -1000pt; }'
'img { max-width: 95%; } p { word-wrap: break-word; }'
'li { word-wrap: break-word; }'
'#InsertNote_NoteList { word-wrap: break-word; } ')
BOOKTYPE_CSS_ODT = ('body {} #book-title { font-size: 64pt; '
'page-break-before: avoid; margin-bottom: 12em; '
'max-width: 700px;} .unseen { display: none;}'
'.chapter { color: #000;} h1 .initial { color: #000; '
'font-size: 2em;} body .subsection:first-child {} '
'h1 { page-break-before: always;} '
'.objavi-subsection{ text-transform: uppercase; '
'font-size: 20pt;} .objavi-subsection .initial { '
'font-size: 1em; color: #000;}'
'.objavi-subsection-heading{ font-size: 36pt; '
'font-weight: bold; page-break-before: always;} '
'table { float: none;} h1.frontpage{ font-size: 64pt; '
'text-align: center; max-width: 700px;} '
'div.copyright{ padding: 1em;} pre { max-width:700px; '
'overflow: hidden;} '
'img { max-width: 700px; height: auto;}')
| agpl-3.0 | 6,846,859,636,964,900,000 | 45.790123 | 97 | 0.497845 | false | 3.351511 | false | false | false |
pepitogithub/PythonScripts | musica/drumExFachade.py | 1 | 2964 |
import pygame  # required unconditionally by the graphical engine (_DXFGrafico)
import threading
import drumExMachina
class Fasade:
"""
Matrix
-> reproducir()
-> pausar()
-> salir()
-> volumen-general()
-> tempo()
-> figura()
-> agregar-pista()
-> quitar-pista()
-> activar-pista()
-> desactivar-pista()
Pista
-> nombre()
-> setear-instrumento()
-> canal-midi()
-> volumen()
-> duracion()
-> activar()
-> desactivar()
-> editar-partitura()
-> agregar-seccion()
-> quitar-seccion()
-> modificar-seccion()
Partitura
-> agregar-seccion()
-> quitar-seccion()
-> modificar-seccion()
Seccion
-> duracion()
-> pulsos()
-> ritmo()
-> rotacion()
-> notas()
"""
def __init__(self):
pass
class _DXFConsola:

    # NOTE: input() returns str in Python 3, so the integer 0 can never match.
    salir_codes = [0, "0", "salir", "exit"]

    def __init__(self):
        pass
def loop(self):
salir = False
while not salir:
user_input = input("> ")
salir = user_input in self.salir_codes
class _DXFGrafico:

    def __init__(self, ancho=1200, alto=800):
        self.alto = alto
        self.ancho = ancho
        pygame.init()  # must run before creating the display surface
        self.screen = pygame.display.set_mode([self.ancho, self.alto])
        pygame.display.set_caption("Drum Ex Machina")

    def loop(self):
        salir = False
        while not salir:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    salir = True
            pygame.draw.rect(self.screen, [255, 0, 0], [75, 10, 50, 20], 1)
            pygame.display.flip()
            pygame.time.delay(50)
class DrumExFacade:
    """
    Interface for DrumExMachina.
    It has two modes of use: console and graphical.
    """

    def __init__(self, modo='consola', ancho=1200, alto=800):
        self.modo = modo
        # Mode: 'consola' (console); anything else selects the graphical engine
        self.engine = _DXFConsola() if modo == 'consola' else _DXFGrafico(ancho, alto)
    def loop(self):
        # Run the DrumExMachina engine in a background thread while the chosen
        # front end loops in the foreground. threading.Thread has no exit();
        # a daemon thread simply dies with the process once the loop returns.
        DXM_thread = threading.Thread(target=drumExMachina.testeos, daemon=True)
        DXM_thread.start()
        self.engine.loop()
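# Example (illustrative):
#   facade = DrumExFacade(modo='grafico', ancho=800, alto=600)
#   facade.loop()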
DXF = DrumExFacade("consola")
DXF_thread = threading.Thread(target=DXF.loop)
DXF_thread.start()
| gpl-2.0 | 718,095,634,692,036,600 | 22.164063 | 86 | 0.430162 | false | 3.884666 | false | false | false |
stvstnfrd/edx-platform | lms/djangoapps/courseware/views/views.py | 1 | 89915 |
"""
Courseware views functions
"""
import json
import logging
from collections import OrderedDict, namedtuple
from datetime import datetime
import bleach
import requests
import six
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import AnonymousUser, User # lint-amnesty, pylint: disable=imported-auth-user
from django.core.exceptions import PermissionDenied
from django.db import transaction
from django.db.models import Q, prefetch_related_objects
from django.http import Http404, HttpResponse, HttpResponseBadRequest, HttpResponseForbidden
from django.shortcuts import redirect
from django.template.context_processors import csrf
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.http import urlquote_plus
from django.utils.text import slugify
from django.utils.translation import ugettext
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext_noop
from django.views.decorators.cache import cache_control
from django.views.decorators.clickjacking import xframe_options_exempt
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_GET, require_http_methods, require_POST
from django.views.generic import View
from edx_django_utils import monitoring as monitoring_utils
from edx_django_utils.monitoring import set_custom_attribute, set_custom_attributes_for_course_key
from ipware.ip import get_client_ip
from markupsafe import escape
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, UsageKey
from pytz import UTC
from requests.exceptions import ConnectionError, Timeout # pylint: disable=redefined-builtin
from rest_framework import status
from rest_framework.decorators import api_view, throttle_classes
from rest_framework.response import Response
from rest_framework.throttling import UserRateThrottle
from six import text_type
from web_fragments.fragment import Fragment
from lms.djangoapps.survey import views as survey_views
from common.djangoapps.course_modes.models import CourseMode, get_course_prices
from common.djangoapps.edxmako.shortcuts import marketing_link, render_to_response, render_to_string
from lms.djangoapps.edxnotes.helpers import is_feature_enabled
from lms.djangoapps.ccx.custom_exception import CCXLocatorValidationException
from lms.djangoapps.certificates import api as certs_api
from lms.djangoapps.certificates.models import CertificateStatuses
from lms.djangoapps.commerce.utils import EcommerceService
from lms.djangoapps.course_home_api.toggles import course_home_mfe_dates_tab_is_active
from openedx.features.course_experience.url_helpers import get_learning_mfe_home_url, is_request_from_learning_mfe
from lms.djangoapps.courseware.access import has_access, has_ccx_coach_role
from lms.djangoapps.courseware.access_utils import check_course_open_for_learner, check_public_access
from lms.djangoapps.courseware.courses import (
can_self_enroll_in_course,
course_open_for_self_enrollment,
get_course,
get_course_date_blocks,
get_course_overview_with_access,
get_course_with_access,
get_courses,
get_current_child,
get_permission_for_course_about,
get_studio_url,
sort_by_announcement,
sort_by_start_date
)
from lms.djangoapps.courseware.date_summary import verified_upgrade_deadline_link
from lms.djangoapps.courseware.exceptions import CourseAccessRedirect, Redirect
from lms.djangoapps.courseware.masquerade import setup_masquerade
from lms.djangoapps.courseware.model_data import FieldDataCache
from lms.djangoapps.courseware.models import BaseStudentModuleHistory, StudentModule
from lms.djangoapps.courseware.permissions import ( # lint-amnesty, pylint: disable=unused-import
MASQUERADE_AS_STUDENT,
VIEW_COURSE_HOME,
VIEW_COURSEWARE,
VIEW_XQA_INTERFACE
)
from lms.djangoapps.courseware.user_state_client import DjangoXBlockUserStateClient
from lms.djangoapps.experiments.utils import get_experiment_user_metadata_context
from lms.djangoapps.grades.api import CourseGradeFactory
from lms.djangoapps.instructor.enrollment import uses_shib
from lms.djangoapps.instructor.views.api import require_global_staff
from lms.djangoapps.verify_student.services import IDVerificationService
from openedx.core.djangoapps.catalog.utils import get_programs, get_programs_with_type
from openedx.core.djangoapps.certificates import api as auto_certs_api
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.credit.api import (
get_credit_requirement_status,
is_credit_course,
is_user_eligible_for_credit
)
from openedx.core.djangoapps.enrollments.api import add_enrollment, get_enrollment # lint-amnesty, pylint: disable=unused-import
from openedx.core.djangoapps.enrollments.permissions import ENROLL_IN_COURSE
from openedx.core.djangoapps.models.course_details import CourseDetails
from openedx.core.djangoapps.plugin_api.views import EdxFragmentView
from openedx.core.djangoapps.programs.utils import ProgramMarketingDataExtender
from openedx.core.djangoapps.self_paced.models import SelfPacedConfiguration
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.util.user_messages import PageLevelMessages
from openedx.core.djangoapps.zendesk_proxy.utils import create_zendesk_ticket
from openedx.core.djangolib.markup import HTML, Text
from openedx.core.lib.mobile_utils import is_request_from_mobile_app
from openedx.features.content_type_gating.models import ContentTypeGatingConfig
from openedx.features.course_duration_limits.access import generate_course_expired_fragment
from openedx.features.course_experience import DISABLE_UNIFIED_COURSE_TAB_FLAG, course_home_url_name
from openedx.features.course_experience.course_tools import CourseToolsPluginManager
from openedx.features.course_experience.url_helpers import get_legacy_courseware_url
from openedx.features.course_experience.utils import dates_banner_should_display
from openedx.features.course_experience.views.course_dates import CourseDatesFragmentView
from openedx.features.course_experience.waffle import ENABLE_COURSE_ABOUT_SIDEBAR_HTML
from openedx.features.course_experience.waffle import waffle as course_experience_waffle
from openedx.features.enterprise_support.api import data_sharing_consent_required
from common.djangoapps.student.models import CourseEnrollment, UserTestGroup
from common.djangoapps.track import segment
from common.djangoapps.util.cache import cache, cache_if_anonymous
from common.djangoapps.util.db import outer_atomic
from common.djangoapps.util.milestones_helpers import get_prerequisite_courses_display
from common.djangoapps.util.views import ensure_valid_course_key, ensure_valid_usage_key
from xmodule.course_module import COURSE_VISIBILITY_PUBLIC, COURSE_VISIBILITY_PUBLIC_OUTLINE
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError, NoPathToItem
from xmodule.tabs import CourseTabList
from xmodule.x_module import STUDENT_VIEW
from ..context_processor import user_timezone_locale_prefs
from ..entrance_exams import user_can_skip_entrance_exam
from ..module_render import get_module, get_module_by_usage_id, get_module_for_descriptor
from ..tabs import _get_dynamic_tabs
from ..toggles import COURSEWARE_OPTIMIZED_RENDER_XBLOCK
log = logging.getLogger("edx.courseware")
# Only display the requirements on learner dashboard for
# credit and verified modes.
REQUIREMENTS_DISPLAY_MODES = CourseMode.CREDIT_MODES + [CourseMode.VERIFIED]
CertData = namedtuple(
"CertData", ["cert_status", "title", "msg", "download_url", "cert_web_view_url"]
)
EARNED_BUT_NOT_AVAILABLE_CERT_STATUS = 'earned_but_not_available'
AUDIT_PASSING_CERT_DATA = CertData(
CertificateStatuses.audit_passing,
_('Your enrollment: Audit track'),
_('You are enrolled in the audit track for this course. The audit track does not include a certificate.'),
download_url=None,
cert_web_view_url=None
)
HONOR_PASSING_CERT_DATA = CertData(
CertificateStatuses.honor_passing,
_('Your enrollment: Honor track'),
_('You are enrolled in the honor track for this course. The honor track does not include a certificate.'),
download_url=None,
cert_web_view_url=None
)
INELIGIBLE_PASSING_CERT_DATA = {
CourseMode.AUDIT: AUDIT_PASSING_CERT_DATA,
CourseMode.HONOR: HONOR_PASSING_CERT_DATA
}
GENERATING_CERT_DATA = CertData(
CertificateStatuses.generating,
_("We're working on it..."),
_(
"We're creating your certificate. You can keep working in your courses and a link "
"to it will appear here and on your Dashboard when it is ready."
),
download_url=None,
cert_web_view_url=None
)
INVALID_CERT_DATA = CertData(
CertificateStatuses.invalidated,
_('Your certificate has been invalidated'),
_('Please contact your course team if you have any questions.'),
download_url=None,
cert_web_view_url=None
)
REQUESTING_CERT_DATA = CertData(
CertificateStatuses.requesting,
_('Congratulations, you qualified for a certificate!'),
_("You've earned a certificate for this course."),
download_url=None,
cert_web_view_url=None
)
UNVERIFIED_CERT_DATA = CertData(
CertificateStatuses.unverified,
_('Certificate unavailable'),
_(
u'You have not received a certificate because you do not have a current {platform_name} '
'verified identity.'
).format(platform_name=configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME)),
download_url=None,
cert_web_view_url=None
)
EARNED_BUT_NOT_AVAILABLE_CERT_DATA = CertData(
EARNED_BUT_NOT_AVAILABLE_CERT_STATUS,
_('Your certificate will be available soon!'),
_('After this course officially ends, you will receive an email notification with your certificate.'),
download_url=None,
cert_web_view_url=None
)
def _downloadable_cert_data(download_url=None, cert_web_view_url=None):
return CertData(
CertificateStatuses.downloadable,
_('Your certificate is available'),
_("You've earned a certificate for this course."),
download_url=download_url,
cert_web_view_url=cert_web_view_url
)
def user_groups(user):
"""
TODO (vshnayder): This is not used. When we have a new plan for groups, adjust appropriately.
"""
if not user.is_authenticated:
return []
# TODO: Rewrite in Django
key = 'user_group_names_{user.id}'.format(user=user)
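    # e.g. key == 'user_group_names_42' for the user with id 42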
cache_expiration = 60 * 60 # one hour
# Kill caching on dev machines -- we switch groups a lot
group_names = cache.get(key)
if settings.DEBUG:
group_names = None
if group_names is None:
group_names = [u.name for u in UserTestGroup.objects.filter(users=user)]
cache.set(key, group_names, cache_expiration)
return group_names
@ensure_csrf_cookie
@cache_if_anonymous()
def courses(request):
"""
Render "find courses" page. The course selection work is done in courseware.courses.
"""
courses_list = []
course_discovery_meanings = getattr(settings, 'COURSE_DISCOVERY_MEANINGS', {})
if not settings.FEATURES.get('ENABLE_COURSE_DISCOVERY'):
courses_list = get_courses(request.user)
if configuration_helpers.get_value("ENABLE_COURSE_SORTING_BY_START_DATE",
settings.FEATURES["ENABLE_COURSE_SORTING_BY_START_DATE"]):
courses_list = sort_by_start_date(courses_list)
else:
courses_list = sort_by_announcement(courses_list)
# Add marketable programs to the context.
programs_list = get_programs_with_type(request.site, include_hidden=False)
return render_to_response(
"courseware/courses.html",
{
'courses': courses_list,
'course_discovery_meanings': course_discovery_meanings,
'programs_list': programs_list,
}
)
class PerUserVideoMetadataThrottle(UserRateThrottle):
"""
setting rate limit for yt_video_metadata API
"""
rate = settings.RATE_LIMIT_FOR_VIDEO_METADATA_API
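    # DRF throttle rate string, e.g. '60/minute'; the actual value is
    # deployment-specific and comes from Django settings.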
@ensure_csrf_cookie
@login_required
@api_view(['GET'])
@throttle_classes([PerUserVideoMetadataThrottle])
def yt_video_metadata(request):
"""
Will hit the youtube API if the key is available in settings
:return: youtube video metadata
"""
video_id = request.GET.get('id', None)
metadata, status_code = load_metadata_from_youtube(video_id, request)
return Response(metadata, status=status_code, content_type='application/json')
def load_metadata_from_youtube(video_id, request):
"""
Get metadata about a YouTube video.
This method is used via the standalone /courses/yt_video_metadata REST API
endpoint, or via the video XBlock as a its 'yt_video_metadata' handler.
"""
metadata = {}
status_code = 500
if video_id and settings.YOUTUBE_API_KEY and settings.YOUTUBE_API_KEY != 'PUT_YOUR_API_KEY_HERE':
yt_api_key = settings.YOUTUBE_API_KEY
yt_metadata_url = settings.YOUTUBE['METADATA_URL']
        yt_timeout = settings.YOUTUBE.get('TEST_TIMEOUT', 1500) / 1000  # convert milliseconds to seconds
headers = {}
http_referer = None
try:
# This raises an attribute error if called from the xblock yt_video_metadata handler, which passes
# a webob request instead of a django request.
http_referer = request.META.get('HTTP_REFERER')
except AttributeError:
# So here, let's assume it's a webob request and access the referer the webob way.
http_referer = request.referer
if http_referer:
headers['Referer'] = http_referer
payload = {'id': video_id, 'part': 'contentDetails', 'key': yt_api_key}
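        # Illustrative request this produces (URL taken from settings.YOUTUBE):
        #   GET <METADATA_URL>?id=<video_id>&part=contentDetails&key=<API key>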
try:
res = requests.get(yt_metadata_url, params=payload, timeout=yt_timeout, headers=headers)
status_code = res.status_code
if res.status_code == 200:
try:
res_json = res.json()
if res_json.get('items', []):
metadata = res_json
else:
logging.warning(u'Unable to find the items in response. Following response '
u'was received: {res}'.format(res=res.text))
except ValueError:
logging.warning(u'Unable to decode response to json. Following response '
u'was received: {res}'.format(res=res.text))
else:
logging.warning(u'YouTube API request failed with status code={status} - '
u'Error message is={message}'.format(status=status_code, message=res.text))
except (Timeout, ConnectionError):
logging.warning(u'YouTube API request failed because of connection time out or connection error')
else:
logging.warning(u'YouTube API key or video id is None. Please make sure API key and video id is not None')
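    # On success, `metadata` is the raw YouTube payload, roughly (illustrative):
    #   {'items': [{'id': '<video_id>', 'contentDetails': {...}}], ...}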
return metadata, status_code
@ensure_csrf_cookie
@ensure_valid_course_key
def jump_to_id(request, course_id, module_id):
"""
This entry point allows for a shorter version of a jump to where just the id of the element is
passed in. This assumes that id is unique within the course_id namespace
"""
course_key = CourseKey.from_string(course_id)
items = modulestore().get_items(course_key, qualifiers={'name': module_id})
if len(items) == 0:
raise Http404(
u"Could not find id: {0} in course_id: {1}. Referer: {2}".format(
module_id, course_id, request.META.get("HTTP_REFERER", "")
))
if len(items) > 1:
log.warning(
u"Multiple items found with id: %s in course_id: %s. Referer: %s. Using first: %s",
module_id,
course_id,
request.META.get("HTTP_REFERER", ""),
text_type(items[0].location)
)
return jump_to(request, course_id, text_type(items[0].location))
@ensure_csrf_cookie
def jump_to(_request, course_id, location):
"""
Show the page that contains a specific location.
If the location is invalid or not in any class, return a 404.
Otherwise, delegates to the index view to figure out whether this user
has access, and what they should see.
"""
try:
course_key = CourseKey.from_string(course_id)
usage_key = UsageKey.from_string(location).replace(course_key=course_key)
except InvalidKeyError:
raise Http404(u"Invalid course_key or usage_key") # lint-amnesty, pylint: disable=raise-missing-from
try:
redirect_url = get_legacy_courseware_url(course_key, usage_key, _request)
except ItemNotFoundError:
raise Http404(u"No data at this location: {0}".format(usage_key)) # lint-amnesty, pylint: disable=raise-missing-from
except NoPathToItem:
raise Http404(u"This location is not in any class: {0}".format(usage_key)) # lint-amnesty, pylint: disable=raise-missing-from
return redirect(redirect_url)
@ensure_csrf_cookie
@ensure_valid_course_key
@data_sharing_consent_required
def course_info(request, course_id):
"""
Display the course's info.html, or 404 if there is no such course.
Assumes the course_id is in a valid format.
"""
# TODO: LEARNER-611: This can be deleted with Course Info removal. The new
# Course Home is using its own processing of last accessed.
def get_last_accessed_courseware(course, request, user):
"""
Returns the courseware module URL that the user last accessed, or None if it cannot be found.
"""
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
course.id, request.user, course, depth=2
)
course_module = get_module_for_descriptor(
user,
request,
course,
field_data_cache,
course.id,
course=course,
will_recheck_access=True,
)
chapter_module = get_current_child(course_module)
if chapter_module is not None:
section_module = get_current_child(chapter_module)
if section_module is not None:
url = reverse('courseware_section', kwargs={
'course_id': text_type(course.id),
'chapter': chapter_module.url_name,
'section': section_module.url_name
})
return url
return None
course_key = CourseKey.from_string(course_id)
# If the unified course experience is enabled, redirect to the "Course" tab
if not DISABLE_UNIFIED_COURSE_TAB_FLAG.is_enabled(course_key):
return redirect(reverse(course_home_url_name(course_key), args=[course_id]))
with modulestore().bulk_operations(course_key):
course = get_course_with_access(request.user, 'load', course_key)
can_masquerade = request.user.has_perm(MASQUERADE_AS_STUDENT, course)
masquerade, user = setup_masquerade(request, course_key, can_masquerade, reset_masquerade_data=True)
# LEARNER-612: CCX redirect handled by new Course Home (DONE)
# LEARNER-1697: Transition banner messages to new Course Home (DONE)
# if user is not enrolled in a course then app will show enroll/get register link inside course info page.
user_is_enrolled = CourseEnrollment.is_enrolled(user, course.id)
show_enroll_banner = request.user.is_authenticated and not user_is_enrolled
# If the user is not enrolled but this is a course that does not support
# direct enrollment then redirect them to the dashboard.
if not user_is_enrolled and not can_self_enroll_in_course(course_key):
return redirect(reverse('dashboard'))
# LEARNER-170: Entrance exam is handled by new Course Outline. (DONE)
# If the user needs to take an entrance exam to access this course, then we'll need
# to send them to that specific course module before allowing them into other areas
if not user_can_skip_entrance_exam(user, course):
return redirect(reverse('courseware', args=[text_type(course.id)]))
# Construct the dates fragment
dates_fragment = None
if request.user.is_authenticated:
# TODO: LEARNER-611: Remove enable_course_home_improvements
if SelfPacedConfiguration.current().enable_course_home_improvements:
# Shared code with the new Course Home (DONE)
dates_fragment = CourseDatesFragmentView().render_to_fragment(request, course_id=course_id)
# Shared code with the new Course Home (DONE)
# Get the course tools enabled for this user and course
course_tools = CourseToolsPluginManager.get_enabled_course_tools(request, course_key)
        course_homepage_invert_title = configuration_helpers.get_value(
            'COURSE_HOMEPAGE_INVERT_TITLE',
            False
        )
        course_homepage_show_subtitle = configuration_helpers.get_value(
            'COURSE_HOMEPAGE_SHOW_SUBTITLE',
            True
        )
        course_homepage_show_org = configuration_helpers.get_value('COURSE_HOMEPAGE_SHOW_ORG', True)
course_title = course.display_number_with_default
course_subtitle = course.display_name_with_default
if course_homepage_invert_title:
course_title = course.display_name_with_default
course_subtitle = course.display_number_with_default
context = {
'request': request,
'masquerade_user': user,
'course_id': text_type(course_key),
'url_to_enroll': CourseTabView.url_to_enroll(course_key),
'cache': None,
'course': course,
'course_title': course_title,
'course_subtitle': course_subtitle,
'show_subtitle': course_homepage_show_subtitle,
'show_org': course_homepage_show_org,
'can_masquerade': can_masquerade,
'masquerade': masquerade,
'supports_preview_menu': True,
'studio_url': get_studio_url(course, 'course_info'),
'show_enroll_banner': show_enroll_banner,
'user_is_enrolled': user_is_enrolled,
'dates_fragment': dates_fragment,
'course_tools': course_tools,
}
context.update(
get_experiment_user_metadata_context(
course,
user,
)
)
# Get the URL of the user's last position in order to display the 'where you were last' message
context['resume_course_url'] = None
# TODO: LEARNER-611: Remove enable_course_home_improvements
if SelfPacedConfiguration.current().enable_course_home_improvements:
context['resume_course_url'] = get_last_accessed_courseware(course, request, user)
if not check_course_open_for_learner(user, course):
# Disable student view button if user is staff and
# course is not yet visible to students.
context['disable_student_access'] = True
context['supports_preview_menu'] = False
return render_to_response('courseware/info.html', context)
class StaticCourseTabView(EdxFragmentView):
"""
View that displays a static course tab with a given name.
"""
@method_decorator(ensure_csrf_cookie)
@method_decorator(ensure_valid_course_key)
def get(self, request, course_id, tab_slug, **kwargs): # lint-amnesty, pylint: disable=arguments-differ
"""
Displays a static course tab page with a given name
"""
course_key = CourseKey.from_string(course_id)
course = get_course_with_access(request.user, 'load', course_key)
tab = CourseTabList.get_tab_by_slug(course.tabs, tab_slug)
if tab is None:
raise Http404
# Show warnings if the user has limited access
CourseTabView.register_user_access_warning_messages(request, course)
return super(StaticCourseTabView, self).get(request, course=course, tab=tab, **kwargs) # lint-amnesty, pylint: disable=super-with-arguments
def render_to_fragment(self, request, course=None, tab=None, **kwargs): # lint-amnesty, pylint: disable=arguments-differ
"""
Renders the static tab to a fragment.
"""
return get_static_tab_fragment(request, course, tab)
def render_standalone_response(self, request, fragment, course=None, tab=None, **kwargs): # lint-amnesty, pylint: disable=arguments-differ
"""
Renders this static tab's fragment to HTML for a standalone page.
"""
return render_to_response('courseware/static_tab.html', {
'course': course,
'active_page': 'static_tab_{0}'.format(tab['url_slug']),
'tab': tab,
'fragment': fragment,
'disable_courseware_js': True,
})
class CourseTabView(EdxFragmentView):
"""
View that displays a course tab page.
"""
@method_decorator(ensure_csrf_cookie)
@method_decorator(ensure_valid_course_key)
@method_decorator(data_sharing_consent_required)
def get(self, request, course_id, tab_type, **kwargs): # lint-amnesty, pylint: disable=arguments-differ
"""
Displays a course tab page that contains a web fragment.
"""
course_key = CourseKey.from_string(course_id)
with modulestore().bulk_operations(course_key):
course = get_course_with_access(request.user, 'load', course_key)
try:
# Render the page
course_tabs = course.tabs + _get_dynamic_tabs(course, request.user)
tab = CourseTabList.get_tab_by_type(course_tabs, tab_type)
page_context = self.create_page_context(request, course=course, tab=tab, **kwargs)
# Show warnings if the user has limited access
# Must come after masquerading on creation of page context
self.register_user_access_warning_messages(request, course)
set_custom_attributes_for_course_key(course_key)
return super(CourseTabView, self).get(request, course=course, page_context=page_context, **kwargs) # lint-amnesty, pylint: disable=super-with-arguments
except Exception as exception: # pylint: disable=broad-except
return CourseTabView.handle_exceptions(request, course_key, course, exception)
@staticmethod
def url_to_enroll(course_key):
"""
Returns the URL to use to enroll in the specified course.
"""
url_to_enroll = reverse('about_course', args=[text_type(course_key)])
if settings.FEATURES.get('ENABLE_MKTG_SITE'):
url_to_enroll = marketing_link('COURSES')
return url_to_enroll
@staticmethod
def register_user_access_warning_messages(request, course):
"""
Register messages to be shown to the user if they have limited access.
"""
allow_anonymous = check_public_access(course, [COURSE_VISIBILITY_PUBLIC])
if request.user.is_anonymous and not allow_anonymous:
if CourseTabView.course_open_for_learner_enrollment(course):
PageLevelMessages.register_warning_message(
request,
Text(_(u"To see course content, {sign_in_link} or {register_link}.")).format(
sign_in_link=HTML(u'<a href="/login?next={current_url}">{sign_in_label}</a>').format(
sign_in_label=_("sign in"),
current_url=urlquote_plus(request.path),
),
register_link=HTML(u'<a href="/register?next={current_url}">{register_label}</a>').format(
register_label=_("register"),
current_url=urlquote_plus(request.path),
),
),
once_only=True
)
else:
PageLevelMessages.register_warning_message(
request,
Text(_(u"{sign_in_link} or {register_link}.")).format(
sign_in_link=HTML(u'<a href="/login?next={current_url}">{sign_in_label}</a>').format(
sign_in_label=_("Sign in"),
current_url=urlquote_plus(request.path),
),
register_link=HTML(u'<a href="/register?next={current_url}">{register_label}</a>').format(
register_label=_("register"),
current_url=urlquote_plus(request.path),
),
)
)
else:
if not CourseEnrollment.is_enrolled(request.user, course.id) and not allow_anonymous:
# Only show enroll button if course is open for enrollment.
if CourseTabView.course_open_for_learner_enrollment(course):
                    enroll_message = _(
                        u'You must be enrolled in the course to see course content. '
                        u'{enroll_link_start}Enroll now{enroll_link_end}.'
                    )
PageLevelMessages.register_warning_message(
request,
Text(enroll_message).format(
enroll_link_start=HTML('<button class="enroll-btn btn-link">'),
enroll_link_end=HTML('</button>')
)
)
else:
PageLevelMessages.register_warning_message(
request,
Text(_('You must be enrolled in the course to see course content.'))
)
@staticmethod
def course_open_for_learner_enrollment(course):
return (course_open_for_self_enrollment(course.id)
and not course.invitation_only
and not CourseMode.is_masters_only(course.id))
@staticmethod
def handle_exceptions(request, course_key, course, exception):
u"""
Handle exceptions raised when rendering a view.
"""
        if isinstance(exception, (Redirect, Http404)):
            raise  # lint-amnesty, pylint: disable=misplaced-bare-raise
if settings.DEBUG:
raise # lint-amnesty, pylint: disable=misplaced-bare-raise
user = request.user
log.exception(
u"Error in %s: user=%s, effective_user=%s, course=%s",
request.path,
getattr(user, 'real_user', user),
user,
text_type(course_key),
)
try:
return render_to_response(
'courseware/courseware-error.html',
{
'staff_access': has_access(user, 'staff', course),
'course': course,
},
status=500,
)
except:
# Let the exception propagate, relying on global config to
# at least return a nice error message
log.exception("Error while rendering courseware-error page")
raise
def create_page_context(self, request, course=None, tab=None, **kwargs):
"""
Creates the context for the fragment's template.
"""
can_masquerade = request.user.has_perm(MASQUERADE_AS_STUDENT, course)
supports_preview_menu = tab.get('supports_preview_menu', False)
if supports_preview_menu:
masquerade, masquerade_user = setup_masquerade(
request,
course.id,
can_masquerade,
reset_masquerade_data=True,
)
request.user = masquerade_user
else:
masquerade = None
context = {
'course': course,
'tab': tab,
'active_page': tab.get('type', None),
'can_masquerade': can_masquerade,
'masquerade': masquerade,
'supports_preview_menu': supports_preview_menu,
'uses_bootstrap': True,
'disable_courseware_js': True,
}
# Avoid Multiple Mathjax loading on the 'user_profile'
if 'profile_page_context' in kwargs:
context['load_mathjax'] = kwargs['profile_page_context'].get('load_mathjax', True)
context.update(
get_experiment_user_metadata_context(
course,
request.user,
)
)
return context
def render_to_fragment(self, request, course=None, page_context=None, **kwargs): # lint-amnesty, pylint: disable=arguments-differ
"""
Renders the course tab to a fragment.
"""
tab = page_context['tab']
return tab.render_to_fragment(request, course, **kwargs)
def render_standalone_response(self, request, fragment, course=None, tab=None, page_context=None, **kwargs): # lint-amnesty, pylint: disable=arguments-differ
"""
Renders this course tab's fragment to HTML for a standalone page.
"""
if not page_context:
page_context = self.create_page_context(request, course=course, tab=tab, **kwargs)
tab = page_context['tab']
page_context['fragment'] = fragment
return render_to_response('courseware/tab-view.html', page_context)
@ensure_csrf_cookie
@ensure_valid_course_key
def syllabus(request, course_id):
"""
Display the course's syllabus.html, or 404 if there is no such course.
Assumes the course_id is in a valid format.
"""
course_key = CourseKey.from_string(course_id)
course = get_course_with_access(request.user, 'load', course_key)
staff_access = bool(has_access(request.user, 'staff', course))
return render_to_response('courseware/syllabus.html', {
'course': course,
'staff_access': staff_access,
})
def registered_for_course(course, user):
"""
Return True if user is registered for course, else False
"""
if user is None:
return False
if user.is_authenticated:
return CourseEnrollment.is_enrolled(user, course.id)
else:
return False
class EnrollStaffView(View):
"""
Displays view for registering in the course to a global staff user.
User can either choose to 'Enroll' or 'Don't Enroll' in the course.
Enroll: Enrolls user in course and redirects to the courseware.
Don't Enroll: Redirects user to course about page.
Arguments:
- request : HTTP request
- course_id : course id
Returns:
- RedirectResponse
"""
template_name = 'enroll_staff.html'
@method_decorator(require_global_staff)
@method_decorator(ensure_valid_course_key)
def get(self, request, course_id):
"""
Display enroll staff view to global staff user with `Enroll` and `Don't Enroll` options.
"""
user = request.user
course_key = CourseKey.from_string(course_id)
with modulestore().bulk_operations(course_key):
course = get_course_with_access(user, 'load', course_key)
if not registered_for_course(course, user):
context = {
'course': course,
'csrftoken': csrf(request)["csrf_token"]
}
return render_to_response(self.template_name, context)
@method_decorator(require_global_staff)
@method_decorator(ensure_valid_course_key)
def post(self, request, course_id):
"""
Either enrolls the user in course or redirects user to course about page
depending upon the option (Enroll, Don't Enroll) chosen by the user.
"""
_next = six.moves.urllib.parse.quote_plus(request.GET.get('next', 'info'), safe='/:?=')
course_key = CourseKey.from_string(course_id)
enroll = 'enroll' in request.POST
if enroll:
add_enrollment(request.user.username, course_id)
log.info(
u"User %s enrolled in %s via `enroll_staff` view",
request.user.username,
course_id
)
return redirect(_next)
# In any other case redirect to the course about page.
return redirect(reverse('about_course', args=[text_type(course_key)]))
@ensure_csrf_cookie
@ensure_valid_course_key
@cache_if_anonymous()
def course_about(request, course_id):
"""
Display the course's about page.
"""
course_key = CourseKey.from_string(course_id)
# If a user is not able to enroll in a course then redirect
# them away from the about page to the dashboard.
if not can_self_enroll_in_course(course_key):
return redirect(reverse('dashboard'))
# If user needs to be redirected to course home then redirect
if _course_home_redirect_enabled():
return redirect(reverse(course_home_url_name(course_key), args=[text_type(course_key)]))
with modulestore().bulk_operations(course_key):
permission = get_permission_for_course_about()
course = get_course_with_access(request.user, permission, course_key)
course_details = CourseDetails.populate(course)
modes = CourseMode.modes_for_course_dict(course_key)
registered = registered_for_course(course, request.user)
staff_access = bool(has_access(request.user, 'staff', course))
studio_url = get_studio_url(course, 'settings/details')
if request.user.has_perm(VIEW_COURSE_HOME, course):
course_target = reverse(course_home_url_name(course.id), args=[text_type(course.id)])
else:
course_target = reverse('about_course', args=[text_type(course.id)])
show_courseware_link = bool(
(
request.user.has_perm(VIEW_COURSEWARE, course)
) or settings.FEATURES.get('ENABLE_LMS_MIGRATION')
)
# If the ecommerce checkout flow is enabled and the mode of the course is
# professional or no id professional, we construct links for the enrollment
# button to add the course to the ecommerce basket.
ecomm_service = EcommerceService()
ecommerce_checkout = ecomm_service.is_enabled(request.user)
ecommerce_checkout_link = ''
ecommerce_bulk_checkout_link = ''
single_paid_mode = None
if ecommerce_checkout:
if len(modes) == 1 and list(modes.values())[0].min_price:
single_paid_mode = list(modes.values())[0]
else:
# have professional ignore other modes for historical reasons
single_paid_mode = modes.get(CourseMode.PROFESSIONAL)
if single_paid_mode and single_paid_mode.sku:
ecommerce_checkout_link = ecomm_service.get_checkout_page_url(single_paid_mode.sku)
if single_paid_mode and single_paid_mode.bulk_sku:
ecommerce_bulk_checkout_link = ecomm_service.get_checkout_page_url(single_paid_mode.bulk_sku)
registration_price, course_price = get_course_prices(course) # lint-amnesty, pylint: disable=unused-variable
# Used to provide context to message to student if enrollment not allowed
can_enroll = bool(request.user.has_perm(ENROLL_IN_COURSE, course))
invitation_only = course.invitation_only
is_course_full = CourseEnrollment.objects.is_course_full(course)
# Register button should be disabled if one of the following is true:
# - Student is already registered for course
# - Course is already full
# - Student cannot enroll in course
active_reg_button = not (registered or is_course_full or not can_enroll)
is_shib_course = uses_shib(course)
# get prerequisite courses display names
pre_requisite_courses = get_prerequisite_courses_display(course)
# Overview
overview = CourseOverview.get_from_id(course.id)
sidebar_html_enabled = course_experience_waffle().is_enabled(ENABLE_COURSE_ABOUT_SIDEBAR_HTML)
allow_anonymous = check_public_access(course, [COURSE_VISIBILITY_PUBLIC, COURSE_VISIBILITY_PUBLIC_OUTLINE])
context = {
'course': course,
'course_details': course_details,
'staff_access': staff_access,
'studio_url': studio_url,
'registered': registered,
'course_target': course_target,
'is_cosmetic_price_enabled': settings.FEATURES.get('ENABLE_COSMETIC_DISPLAY_PRICE'),
'course_price': course_price,
'ecommerce_checkout': ecommerce_checkout,
'ecommerce_checkout_link': ecommerce_checkout_link,
'ecommerce_bulk_checkout_link': ecommerce_bulk_checkout_link,
'single_paid_mode': single_paid_mode,
'show_courseware_link': show_courseware_link,
'is_course_full': is_course_full,
'can_enroll': can_enroll,
'invitation_only': invitation_only,
'active_reg_button': active_reg_button,
'is_shib_course': is_shib_course,
# We do not want to display the internal courseware header, which is used when the course is found in the
# context. This value is therefore explicitly set to render the appropriate header.
'disable_courseware_header': True,
'pre_requisite_courses': pre_requisite_courses,
'course_image_urls': overview.image_urls,
'sidebar_html_enabled': sidebar_html_enabled,
'allow_anonymous': allow_anonymous,
}
return render_to_response('courseware/course_about.html', context)
@ensure_csrf_cookie
@cache_if_anonymous()
def program_marketing(request, program_uuid):
"""
Display the program marketing page.
"""
program_data = get_programs(uuid=program_uuid)
if not program_data:
raise Http404
program = ProgramMarketingDataExtender(program_data, request.user).extend()
program['type_slug'] = slugify(program['type'])
skus = program.get('skus')
ecommerce_service = EcommerceService()
context = {'program': program}
if program.get('is_learner_eligible_for_one_click_purchase') and skus:
context['buy_button_href'] = ecommerce_service.get_checkout_page_url(*skus, program_uuid=program_uuid)
context['uses_bootstrap'] = True
return render_to_response('courseware/program_marketing.html', context)
@login_required
@ensure_csrf_cookie
@ensure_valid_course_key
def dates(request, course_id):
"""
Display the course's dates.html, or 404 if there is no such course.
Assumes the course_id is in a valid format.
"""
from lms.urls import COURSE_DATES_NAME, RESET_COURSE_DEADLINES_NAME
course_key = CourseKey.from_string(course_id)
if course_home_mfe_dates_tab_is_active(course_key) and not request.user.is_staff:
microfrontend_url = get_learning_mfe_home_url(course_key=course_key, view_name=COURSE_DATES_NAME)
raise Redirect(microfrontend_url)
# Enable NR tracing for this view based on course
monitoring_utils.set_custom_attribute('course_id', text_type(course_key))
monitoring_utils.set_custom_attribute('user_id', request.user.id)
monitoring_utils.set_custom_attribute('is_staff', request.user.is_staff)
course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=False)
masquerade = None
can_masquerade = request.user.has_perm(MASQUERADE_AS_STUDENT, course)
if can_masquerade:
masquerade, masquerade_user = setup_masquerade(
request,
course.id,
can_masquerade,
reset_masquerade_data=True,
)
request.user = masquerade_user
user_is_enrolled = CourseEnrollment.is_enrolled(request.user, course_key)
user_is_staff = bool(has_access(request.user, 'staff', course_key))
# Render the full content to enrolled users, as well as to course and global staff.
# Unenrolled users who are not course or global staff are redirected to the Outline Tab.
if not user_is_enrolled and not user_is_staff:
raise CourseAccessRedirect(reverse('openedx.course_experience.course_home', args=[course_id]))
course_date_blocks = get_course_date_blocks(course, request.user, request,
include_access=True, include_past_dates=True)
learner_is_full_access = not ContentTypeGatingConfig.enabled_for_enrollment(request.user, course_key)
# User locale settings
user_timezone_locale = user_timezone_locale_prefs(request)
user_timezone = user_timezone_locale['user_timezone']
user_language = user_timezone_locale['user_language']
missed_deadlines, missed_gated_content = dates_banner_should_display(course_key, request.user)
context = {
'course': course,
'course_date_blocks': course_date_blocks,
'verified_upgrade_link': verified_upgrade_deadline_link(request.user, course=course),
'learner_is_full_access': learner_is_full_access,
'user_timezone': user_timezone,
'user_language': user_language,
'supports_preview_menu': True,
'can_masquerade': can_masquerade,
'masquerade': masquerade,
'on_dates_tab': True,
'content_type_gating_enabled': ContentTypeGatingConfig.enabled_for_enrollment(
user=request.user,
course_key=course_key,
),
'missed_deadlines': missed_deadlines,
'missed_gated_content': missed_gated_content,
'reset_deadlines_url': reverse(RESET_COURSE_DEADLINES_NAME),
'has_ended': course.has_ended(),
}
return render_to_response('courseware/dates.html', context)
@transaction.non_atomic_requests
@login_required
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@ensure_valid_course_key
@data_sharing_consent_required
def progress(request, course_id, student_id=None):
""" Display the progress page. """
course_key = CourseKey.from_string(course_id)
with modulestore().bulk_operations(course_key):
return _progress(request, course_key, student_id)
def _progress(request, course_key, student_id):
"""
Unwrapped version of "progress".
User progress. We show the grade bar and every problem score.
Course staff are allowed to see the progress of students in their class.
"""
if student_id is not None:
try:
student_id = int(student_id)
# Check for ValueError if 'student_id' cannot be converted to integer.
except ValueError:
raise Http404 # lint-amnesty, pylint: disable=raise-missing-from
course = get_course_with_access(request.user, 'load', course_key)
staff_access = bool(has_access(request.user, 'staff', course))
can_masquerade = request.user.has_perm(MASQUERADE_AS_STUDENT, course)
masquerade = None
if student_id is None or student_id == request.user.id:
# This will be a no-op for non-staff users, returning request.user
masquerade, student = setup_masquerade(request, course_key, can_masquerade, reset_masquerade_data=True)
else:
try:
coach_access = has_ccx_coach_role(request.user, course_key)
except CCXLocatorValidationException:
coach_access = False
has_access_on_students_profiles = staff_access or coach_access
# Requesting access to a different student's profile
if not has_access_on_students_profiles:
raise Http404
try:
student = User.objects.get(id=student_id)
except User.DoesNotExist:
raise Http404 # lint-amnesty, pylint: disable=raise-missing-from
# NOTE: To make sure impersonation by instructor works, use
# student instead of request.user in the rest of the function.
# The pre-fetching of groups is done to make auth checks not require an
# additional DB lookup (this kills the Progress page in particular).
prefetch_related_objects([student], 'groups')
if request.user.id != student.id:
# refetch the course as the assumed student
course = get_course_with_access(student, 'load', course_key, check_if_enrolled=True)
# NOTE: To make sure impersonation by instructor works, use
# student instead of request.user in the rest of the function.
course_grade = CourseGradeFactory().read(student, course)
courseware_summary = list(course_grade.chapter_grades.values())
studio_url = get_studio_url(course, 'settings/grading')
# checking certificate generation configuration
enrollment_mode, _ = CourseEnrollment.enrollment_mode_for_user(student, course_key)
course_expiration_fragment = generate_course_expired_fragment(student, course)
context = {
'course': course,
'courseware_summary': courseware_summary,
'studio_url': studio_url,
'grade_summary': course_grade.summary,
'can_masquerade': can_masquerade,
'staff_access': staff_access,
'masquerade': masquerade,
'supports_preview_menu': True,
'student': student,
'credit_course_requirements': credit_course_requirements(course_key, student),
'course_expiration_fragment': course_expiration_fragment,
'certificate_data': get_cert_data(student, course, enrollment_mode, course_grade)
}
context.update(
get_experiment_user_metadata_context(
course,
student,
)
)
with outer_atomic():
response = render_to_response('courseware/progress.html', context)
return response
def _downloadable_certificate_message(course, cert_downloadable_status):
    """Return CertData describing how a learner can view or download their certificate."""
if certs_api.has_html_certificates_enabled(course):
if certs_api.get_active_web_certificate(course) is not None:
return _downloadable_cert_data(
download_url=None,
cert_web_view_url=certs_api.get_certificate_url(
course_id=course.id, uuid=cert_downloadable_status['uuid']
)
)
elif not cert_downloadable_status['is_pdf_certificate']:
return GENERATING_CERT_DATA
return _downloadable_cert_data(download_url=cert_downloadable_status['download_url'])
def _missing_required_verification(student, enrollment_mode):
return (
enrollment_mode in CourseMode.VERIFIED_MODES and not IDVerificationService.user_is_verified(student)
)
def _certificate_message(student, course, enrollment_mode):
    """Return the CertData matching the student's current certificate state."""
if certs_api.is_certificate_invalid(student, course.id):
return INVALID_CERT_DATA
cert_downloadable_status = certs_api.certificate_downloadable_status(student, course.id)
if cert_downloadable_status.get('earned_but_not_available'):
return EARNED_BUT_NOT_AVAILABLE_CERT_DATA
if cert_downloadable_status['is_generating']:
return GENERATING_CERT_DATA
if cert_downloadable_status['is_unverified']:
return UNVERIFIED_CERT_DATA
if cert_downloadable_status['is_downloadable']:
return _downloadable_certificate_message(course, cert_downloadable_status)
if _missing_required_verification(student, enrollment_mode):
return UNVERIFIED_CERT_DATA
return REQUESTING_CERT_DATA
def get_cert_data(student, course, enrollment_mode, course_grade=None):
"""Returns students course certificate related data.
Arguments:
student (User): Student for whom certificate to retrieve.
course (Course): Course object for which certificate data to retrieve.
enrollment_mode (String): Course mode in which student is enrolled.
course_grade (CourseGrade): Student's course grade record.
Returns:
returns dict if course certificate is available else None.
"""
cert_data = _certificate_message(student, course, enrollment_mode)
if not CourseMode.is_eligible_for_certificate(enrollment_mode, status=cert_data.cert_status):
return INELIGIBLE_PASSING_CERT_DATA.get(enrollment_mode)
if cert_data.cert_status == EARNED_BUT_NOT_AVAILABLE_CERT_STATUS:
return cert_data
certificates_enabled_for_course = certs_api.cert_generation_enabled(course.id)
if course_grade is None:
course_grade = CourseGradeFactory().read(student, course)
if not auto_certs_api.can_show_certificate_message(course, student, course_grade, certificates_enabled_for_course):
return
if not certs_api.get_active_web_certificate(course) and not auto_certs_api.is_valid_pdf_certificate(cert_data):
return
return cert_data
def credit_course_requirements(course_key, student):
"""Return information about which credit requirements a user has satisfied.
Arguments:
course_key (CourseKey): Identifier for the course.
student (User): Currently logged in user.
Returns: dict if the credit eligibility enabled and it is a credit course
and the user is enrolled in either verified or credit mode, and None otherwise.
"""
# If credit eligibility is not enabled or this is not a credit course,
# short-circuit and return `None`. This indicates that credit requirements
# should NOT be displayed on the progress page.
if not (settings.FEATURES.get("ENABLE_CREDIT_ELIGIBILITY", False) and is_credit_course(course_key)):
return None
# This indicates that credit requirements should NOT be displayed on the progress page.
enrollment = CourseEnrollment.get_enrollment(student, course_key)
if enrollment and enrollment.mode not in REQUIREMENTS_DISPLAY_MODES:
return None
# Credit requirement statuses for which user does not remain eligible to get credit.
non_eligible_statuses = ['failed', 'declined']
# Retrieve the status of the user for each eligibility requirement in the course.
# For each requirement, the user's status is either "satisfied", "failed", or None.
# In this context, `None` means that we don't know the user's status, either because
# the user hasn't done something (for example, submitting photos for verification)
# or we're waiting on more information (for example, a response from the photo
# verification service).
requirement_statuses = get_credit_requirement_status(course_key, student.username)
# If the user has been marked as "eligible", then they are *always* eligible
# unless someone manually intervenes. This could lead to some strange behavior
# if the requirements change post-launch. For example, if the user was marked as eligible
# for credit, then a new requirement was added, the user will see that they're eligible
# AND that one of the requirements is still pending.
# We're assuming here that (a) we can mitigate this by properly training course teams,
# and (b) it's a better user experience to allow students who were at one time
# marked as eligible to continue to be eligible.
# If we need to, we can always manually move students back to ineligible by
# deleting CreditEligibility records in the database.
if is_user_eligible_for_credit(student.username, course_key):
eligibility_status = "eligible"
# If the user has *failed* any requirements (for example, if a photo verification is denied),
# then the user is NOT eligible for credit.
elif any(requirement['status'] in non_eligible_statuses for requirement in requirement_statuses):
eligibility_status = "not_eligible"
# Otherwise, the user may be eligible for credit, but the user has not
# yet completed all the requirements.
else:
eligibility_status = "partial_eligible"
return {
'eligibility_status': eligibility_status,
'requirements': requirement_statuses,
}
def _course_home_redirect_enabled():
"""
Return True value if user needs to be redirected to course home based on value of
`ENABLE_MKTG_SITE` and `ENABLE_COURSE_HOME_REDIRECT feature` flags
Returns: boolean True or False
"""
if configuration_helpers.get_value(
'ENABLE_MKTG_SITE', settings.FEATURES.get('ENABLE_MKTG_SITE', False)
) and configuration_helpers.get_value(
'ENABLE_COURSE_HOME_REDIRECT', settings.FEATURES.get('ENABLE_COURSE_HOME_REDIRECT', True)
):
return True
@login_required
@ensure_valid_course_key
def submission_history(request, course_id, student_username, location):
"""Render an HTML fragment (meant for inclusion elsewhere) that renders a
history of all state changes made by this user for this problem location.
Right now this only works for problems because that's all
StudentModuleHistory records.
"""
course_key = CourseKey.from_string(course_id)
try:
usage_key = UsageKey.from_string(location).map_into_course(course_key)
except (InvalidKeyError, AssertionError):
return HttpResponse(escape(_(u'Invalid location.')))
course = get_course_overview_with_access(request.user, 'load', course_key)
staff_access = bool(has_access(request.user, 'staff', course))
# Permission Denied if they don't have staff access and are trying to see
# somebody else's submission history.
if (student_username != request.user.username) and (not staff_access):
raise PermissionDenied
user_state_client = DjangoXBlockUserStateClient()
try:
history_entries = list(user_state_client.get_history(student_username, usage_key))
except DjangoXBlockUserStateClient.DoesNotExist:
return HttpResponse(escape(_(u'User {username} has never accessed problem {location}').format(
username=student_username,
location=location
)))
# This is ugly, but until we have a proper submissions API that we can use to provide
# the scores instead, it will have to do.
csm = StudentModule.objects.filter(
module_state_key=usage_key,
student__username=student_username,
course_id=course_key)
scores = BaseStudentModuleHistory.get_history(csm)
if len(scores) != len(history_entries):
log.warning(
u"Mismatch when fetching scores for student "
u"history for course %s, user %s, xblock %s. "
u"%d scores were found, and %d history entries were found. "
u"Matching scores to history entries by date for display.",
course_id,
student_username,
location,
len(scores),
len(history_entries),
)
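        # Pair each history entry with the score created at the same timestamp;
        # this assumes the score row and its history row were written together.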
scores_by_date = {
score.created: score
for score in scores
}
scores = [
scores_by_date[history.updated]
for history in history_entries
]
context = {
'history_entries': history_entries,
'scores': scores,
'username': student_username,
'location': location,
'course_id': text_type(course_key)
}
return render_to_response('courseware/submission_history.html', context)
def get_static_tab_fragment(request, course, tab):
"""
Returns the fragment for the given static tab
"""
loc = course.id.make_usage_key(
tab.type,
tab.url_slug,
)
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
course.id, request.user, modulestore().get_item(loc), depth=0
)
tab_module = get_module(
request.user, request, loc, field_data_cache, static_asset_path=course.static_asset_path, course=course
)
logging.debug(u'course_module = %s', tab_module)
fragment = Fragment()
if tab_module is not None:
try:
fragment = tab_module.render(STUDENT_VIEW, {})
except Exception: # pylint: disable=broad-except
fragment.content = render_to_string('courseware/error-message.html', None)
log.exception(
u"Error rendering course=%s, tab=%s", course, tab['url_slug']
)
return fragment
@require_GET
@ensure_valid_course_key
def get_course_lti_endpoints(request, course_id):
"""
    View that, given a course_id, returns a JSON array that enumerates all of the LTI endpoints for that course.
The LTI 2.0 result service spec at
http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html
says "This specification document does not prescribe a method for discovering the endpoint URLs." This view
function implements one way of discovering these endpoints, returning a JSON array when accessed.
Arguments:
request (django request object): the HTTP request object that triggered this view function
course_id (unicode): id associated with the course
Returns:
(django response object): HTTP response. 404 if course is not found, otherwise 200 with JSON body.
"""
course_key = CourseKey.from_string(course_id)
try:
course = get_course(course_key, depth=2)
except ValueError:
return HttpResponse(status=404)
anonymous_user = AnonymousUser()
anonymous_user.known = False # make these "noauth" requests like module_render.handle_xblock_callback_noauth
lti_descriptors = modulestore().get_items(course.id, qualifiers={'category': 'lti'})
lti_descriptors.extend(modulestore().get_items(course.id, qualifiers={'category': 'lti_consumer'}))
lti_noauth_modules = [
get_module_for_descriptor(
anonymous_user,
request,
descriptor,
FieldDataCache.cache_for_descriptor_descendents(
course_key,
anonymous_user,
descriptor
),
course_key,
course=course
)
for descriptor in lti_descriptors
]
endpoints = [
{
'display_name': module.display_name,
'lti_2_0_result_service_json_endpoint': module.get_outcome_service_url(
service_name='lti_2_0_result_rest_handler') + "/user/{anon_user_id}",
'lti_1_1_result_service_xml_endpoint': module.get_outcome_service_url(
service_name='grade_handler'),
}
for module in lti_noauth_modules
]
return HttpResponse(json.dumps(endpoints), content_type='application/json') # lint-amnesty, pylint: disable=http-response-with-content-type-json, http-response-with-json-dumps
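# Illustrative shape of one element in the returned JSON array (the host and
# handler paths below are made up for the example; the keys come from the
# `endpoints` list comprehension above):
#
#     {
#         "display_name": "LTI Quiz 1",
#         "lti_2_0_result_service_json_endpoint":
#             "https://lms.example.com/courses/.../lti_2_0_result_rest_handler/user/{anon_user_id}",
#         "lti_1_1_result_service_xml_endpoint":
#             "https://lms.example.com/courses/.../grade_handler",
#     }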
@login_required
def course_survey(request, course_id):
"""
URL endpoint to present a survey that is associated with a course_id
Note that the actual implementation of course survey is handled in the
views.py file in the Survey Djangoapp
"""
course_key = CourseKey.from_string(course_id)
course = get_course_with_access(request.user, 'load', course_key, check_survey_complete=False)
redirect_url = reverse(course_home_url_name(course.id), args=[course_id])
# if there is no Survey associated with this course,
# then redirect to the course instead
if not course.course_survey_name:
return redirect(redirect_url)
return survey_views.view_student_survey(
request.user,
course.course_survey_name,
course=course,
redirect_url=redirect_url,
is_required=course.course_survey_required,
)
def is_course_passed(student, course, course_grade=None):
"""
    Check the user's course passing status. Return True if passed.
Arguments:
student : user object
course : course object
course_grade (CourseGrade) : contains student grade details.
Returns:
returns bool value
"""
if course_grade is None:
course_grade = CourseGradeFactory().read(student, course)
return course_grade.passed
# Grades can potentially be written - if so, let grading manage the transaction.
@transaction.non_atomic_requests
@require_POST
def generate_user_cert(request, course_id):
"""Start generating a new certificate for the user.
Certificate generation is allowed if:
* The user has passed the course, and
* The user does not already have a pending/completed certificate.
Note that if an error occurs during certificate generation
(for example, if the queue is down), then we simply mark the
certificate generation task status as "error" and re-run
the task with a management command. To students, the certificate
will appear to be "generating" until it is re-run.
Args:
request (HttpRequest): The POST request to this view.
course_id (unicode): The identifier for the course.
Returns:
HttpResponse: 200 on success, 400 if a new certificate cannot be generated.
"""
if not request.user.is_authenticated:
log.info(u"Anon user trying to generate certificate for %s", course_id)
return HttpResponseBadRequest(
_(u'You must be signed in to {platform_name} to create a certificate.').format(
platform_name=configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME)
)
)
student = request.user
course_key = CourseKey.from_string(course_id)
course = modulestore().get_course(course_key, depth=2)
if not course:
return HttpResponseBadRequest(_("Course is not valid"))
if certs_api.is_using_certificate_allowlist_and_is_on_allowlist(student, course_key):
log.info(f'{course_key} is using allowlist certificates, and the user {student.id} is on its allowlist. '
f'Attempt will be made to generate an allowlist certificate.')
certs_api.generate_allowlist_certificate_task(student, course_key)
return HttpResponse()
if not is_course_passed(student, course):
log.info(u"User %s has not passed the course: %s", student.username, course_id)
return HttpResponseBadRequest(_("Your certificate will be available when you pass the course."))
certificate_status = certs_api.certificate_downloadable_status(student, course.id)
log.info(
u"User %s has requested for certificate in %s, current status: is_downloadable: %s, is_generating: %s",
student.username,
course_id,
certificate_status["is_downloadable"],
certificate_status["is_generating"],
)
if certificate_status["is_downloadable"]:
return HttpResponseBadRequest(_("Certificate has already been created."))
elif certificate_status["is_generating"]:
return HttpResponseBadRequest(_("Certificate is being created."))
else:
# If the certificate is not already in-process or completed,
# then create a new certificate generation task.
# If the certificate cannot be added to the queue, this will
# mark the certificate with "error" status, so it can be re-run
# with a management command. From the user's perspective,
# it will appear that the certificate task was submitted successfully.
certs_api.generate_user_certificates(student, course.id, course=course, generation_mode='self')
_track_successful_certificate_generation(student.id, course.id)
return HttpResponse()
def _track_successful_certificate_generation(user_id, course_id):
"""
Track a successful certificate generation event.
Arguments:
user_id (str): The ID of the user generating the certificate.
course_id (CourseKey): Identifier for the course.
Returns:
None
"""
event_name = 'edx.bi.user.certificate.generate'
segment.track(user_id, event_name, {
'category': 'certificates',
'label': text_type(course_id)
})
def enclosing_sequence_for_gating_checks(block):
"""
Return the first ancestor of this block that is a SequenceDescriptor.
Returns None if there is no such ancestor. Returns None if you call it on a
SequenceDescriptor directly.
We explicitly test against the three known tag types that map to sequences
(even though two of them have been long since deprecated and are never
used). We _don't_ test against SequentialDescriptor directly because:
1. A direct comparison on the type fails because we magically mix it into a
SequenceDescriptorWithMixins object.
2. An isinstance check doesn't give us the right behavior because Courses
and Sections both subclass SequenceDescriptor. >_<
Also important to note that some content isn't contained in Sequences at
all. LabXchange uses learning pathways, but even content inside courses like
    `static_tab`, `book`, and `about` lives outside the sequence hierarchy.
"""
seq_tags = ['sequential', 'problemset', 'videosequence']
# If it's being called on a Sequence itself, then don't bother crawling the
# ancestor tree, because all the sequence metadata we need for gating checks
# will happen automatically when rendering the render_xblock view anyway,
# and we don't want weird, weird edge cases where you have nested Sequences
# (which would probably "work" in terms of OLX import).
if block.location.block_type in seq_tags:
return None
ancestor = block
while ancestor and ancestor.location.block_type not in seq_tags:
ancestor = ancestor.get_parent() # Note: CourseDescriptor's parent is None
return ancestor
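# Illustrative behaviour (block types as used above): calling this on a unit
# nested inside a 'sequential' returns that sequential; calling it on the
# sequential itself, or on content outside any sequence such as a
# 'static_tab', returns None.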
@require_http_methods(["GET", "POST"])
@ensure_valid_usage_key
@xframe_options_exempt
@transaction.non_atomic_requests
@ensure_csrf_cookie
def render_xblock(request, usage_key_string, check_if_enrolled=True):
"""
Returns an HttpResponse with HTML content for the xBlock with the given usage_key.
The returned HTML is a chromeless rendering of the xBlock (excluding content of the containing courseware).
"""
from lms.urls import RESET_COURSE_DEADLINES_NAME
from openedx.features.course_experience.urls import COURSE_HOME_VIEW_NAME
usage_key = UsageKey.from_string(usage_key_string)
usage_key = usage_key.replace(course_key=modulestore().fill_in_run(usage_key.course_key))
course_key = usage_key.course_key
# Gathering metrics to make performance measurements easier.
set_custom_attributes_for_course_key(course_key)
set_custom_attribute('usage_key', usage_key_string)
set_custom_attribute('block_type', usage_key.block_type)
requested_view = request.GET.get('view', 'student_view')
    if requested_view not in ('student_view', 'public_view'):
return HttpResponseBadRequest(
u"Rendering of the xblock view '{}' is not supported.".format(bleach.clean(requested_view, strip=True))
)
staff_access = has_access(request.user, 'staff', course_key)
_course_masquerade, request.user = setup_masquerade(request, course_key, staff_access)
with modulestore().bulk_operations(course_key):
# verify the user has access to the course, including enrollment check
try:
course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=check_if_enrolled)
except CourseAccessRedirect:
raise Http404("Course not found.") # lint-amnesty, pylint: disable=raise-missing-from
# get the block, which verifies whether the user has access to the block.
recheck_access = request.GET.get('recheck_access') == '1'
block, _ = get_module_by_usage_id(
request, str(course_key), str(usage_key), disable_staff_debug_info=True, course=course,
will_recheck_access=recheck_access
)
student_view_context = request.GET.dict()
student_view_context['show_bookmark_button'] = request.GET.get('show_bookmark_button', '0') == '1'
student_view_context['show_title'] = request.GET.get('show_title', '1') == '1'
is_learning_mfe = is_request_from_learning_mfe(request)
# Right now, we only care about this in regards to the Learning MFE because it results
# in a bad UX if we display blocks with access errors (repeated upgrade messaging).
# If other use cases appear, consider removing the is_learning_mfe check or switching this
# to be its own query parameter that can toggle the behavior.
student_view_context['hide_access_error_blocks'] = is_learning_mfe and recheck_access
enable_completion_on_view_service = False
completion_service = block.runtime.service(block, 'completion')
if completion_service and completion_service.completion_tracking_enabled():
if completion_service.blocks_to_mark_complete_on_view({block}):
enable_completion_on_view_service = True
student_view_context['wrap_xblock_data'] = {
'mark-completed-on-view-after-delay': completion_service.get_complete_on_view_delay_ms()
}
missed_deadlines, missed_gated_content = dates_banner_should_display(course_key, request.user)
# Some content gating happens only at the Sequence level (e.g. "has this
# timed exam started?").
ancestor_seq = enclosing_sequence_for_gating_checks(block)
if ancestor_seq:
seq_usage_key = ancestor_seq.location
# We have a Descriptor, but I had trouble getting a SequenceModule
# from it (even using ._xmodule to force the conversion) because the
# runtime wasn't properly initialized. This view uses multiple
# runtimes (including Blockstore), so I'm pulling it from scratch
# based on the usage_key. We'll have to watch the performance impact
# of this. :(
seq_module_descriptor, _ = get_module_by_usage_id(
request, str(course_key), str(seq_usage_key), disable_staff_debug_info=True, course=course
)
# I'm not at all clear why get_module_by_usage_id returns the
# descriptor or why I need to manually force it to load the module
# like this manually instead of the proxying working, but trial and
# error has led me here. Hopefully all this weirdness goes away when
# SequenceModule gets converted to an XBlock in:
# https://github.com/edx/edx-platform/pull/25965
seq_module = seq_module_descriptor._xmodule # pylint: disable=protected-access
# If the SequenceModule feels that gating is necessary, redirect
# there so we can have some kind of error message at any rate.
if seq_module.descendants_are_gated():
return redirect(
reverse(
'render_xblock',
kwargs={'usage_key_string': str(seq_module.location)}
)
)
fragment = block.render(requested_view, context=student_view_context)
optimization_flags = get_optimization_flags_for_content(block, fragment)
context = {
'fragment': fragment,
'course': course,
'disable_accordion': True,
'allow_iframing': True,
'disable_header': True,
'disable_footer': True,
'disable_window_wrap': True,
'enable_completion_on_view_service': enable_completion_on_view_service,
'edx_notes_enabled': is_feature_enabled(course, request.user),
'staff_access': staff_access,
'xqa_server': settings.FEATURES.get('XQA_SERVER', 'http://your_xqa_server.com'),
'missed_deadlines': missed_deadlines,
'missed_gated_content': missed_gated_content,
'has_ended': course.has_ended(),
'web_app_course_url': reverse(COURSE_HOME_VIEW_NAME, args=[course.id]),
'on_courseware_page': True,
'verified_upgrade_link': verified_upgrade_deadline_link(request.user, course=course),
'is_learning_mfe': is_learning_mfe,
'is_mobile_app': is_request_from_mobile_app(request),
'reset_deadlines_url': reverse(RESET_COURSE_DEADLINES_NAME),
**optimization_flags,
}
return render_to_response('courseware/courseware-chromeless.html', context)
def get_optimization_flags_for_content(block, fragment):
"""
Return a dict with a set of display options appropriate for the block.
This is going to start in a very limited way.
"""
safe_defaults = {
'enable_mathjax': True
}
# Only run our optimizations on the leaf HTML and ProblemBlock nodes. The
# mobile apps access these directly, and we don't have to worry about
# XBlocks that dynamically load content, like inline discussions.
usage_key = block.location
# For now, confine ourselves to optimizing just the HTMLBlock
if usage_key.block_type != 'html':
return safe_defaults
if not COURSEWARE_OPTIMIZED_RENDER_XBLOCK.is_enabled(usage_key.course_key):
return safe_defaults
inspector = XBlockContentInspector(block, fragment)
flags = dict(safe_defaults)
flags['enable_mathjax'] = inspector.has_mathjax_content()
return flags
class XBlockContentInspector:
"""
Class to inspect rendered XBlock content to determine dependencies.
A lot of content has been written with the assumption that certain
JavaScript and assets are available. This has caused us to continue to
include these assets in the render_xblock view, despite the fact that they
are not used by the vast majority of content.
In order to try to provide faster load times for most users on most content,
this class has the job of detecting certain patterns in XBlock content that
would imply these dependencies, so we know when to include them or not.
"""
def __init__(self, block, fragment):
self.block = block
self.fragment = fragment
def has_mathjax_content(self):
"""
Returns whether we detect any MathJax in the fragment.
Note that this only works for things that are rendered up front. If an
XBlock is capable of modifying the DOM afterwards to inject math content
into the page, this will not catch it.
"""
# The following pairs are used to mark Mathjax syntax in XBlocks. There
# are other options for the wiki, but we don't worry about those here.
MATHJAX_TAG_PAIRS = [
(r"\(", r"\)"),
(r"\[", r"\]"),
("[mathjaxinline]", "[/mathjaxinline]"),
("[mathjax]", "[/mathjax]"),
]
content = self.fragment.body_html()
for (start_tag, end_tag) in MATHJAX_TAG_PAIRS:
if start_tag in content and end_tag in content:
return True
return False
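# Illustrative check (content made up): a fragment whose body HTML contains
# both r"\(" and r"\)" (e.g. "\(x^2\)") makes has_mathjax_content() return
# True; an unmatched "\(x^2" alone does not.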
# Translators: "percent_sign" is the symbol "%". "platform_name" is a
# string identifying the name of this installation, such as "edX".
FINANCIAL_ASSISTANCE_HEADER = _(
u'{platform_name} now offers financial assistance for learners who want to earn Verified Certificates but'
u' who may not be able to pay the Verified Certificate fee. Eligible learners may receive up to 90{percent_sign} off' # lint-amnesty, pylint: disable=line-too-long
' the Verified Certificate fee for a course.\nTo apply for financial assistance, enroll in the'
' audit track for a course that offers Verified Certificates, and then complete this application.'
' Note that you must complete a separate application for each course you take.\n We plan to use this'
' information to evaluate your application for financial assistance and to further develop our'
' financial assistance program.'
)
def _get_fa_header(header):
    return header.format(
        percent_sign="%",
        platform_name=configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME),
    ).split('\n')
FA_INCOME_LABEL = ugettext_noop('Annual Household Income')
FA_REASON_FOR_APPLYING_LABEL = ugettext_noop('Tell us about your current financial situation. Why do you need assistance?') # lint-amnesty, pylint: disable=line-too-long
FA_GOALS_LABEL = ugettext_noop('Tell us about your learning or professional goals. How will a Verified Certificate in this course help you achieve these goals?') # lint-amnesty, pylint: disable=line-too-long
FA_EFFORT_LABEL = ugettext_noop('Tell us about your plans for this course. What steps will you take to help you complete the course work and receive a certificate?') # lint-amnesty, pylint: disable=line-too-long
FA_SHORT_ANSWER_INSTRUCTIONS = _('Use between 1250 and 2500 characters or so in your response.')
@login_required
def financial_assistance(_request):
"""Render the initial financial assistance page."""
return render_to_response('financial-assistance/financial-assistance.html', {
'header_text': _get_fa_header(FINANCIAL_ASSISTANCE_HEADER)
})
@login_required
@require_POST
def financial_assistance_request(request):
"""Submit a request for financial assistance to Zendesk."""
try:
data = json.loads(request.body.decode('utf8'))
# Simple sanity check that the session belongs to the user
# submitting an FA request
username = data['username']
if request.user.username != username:
return HttpResponseForbidden()
course_id = data['course']
course = modulestore().get_course(CourseKey.from_string(course_id))
legal_name = data['name']
email = data['email']
country = data['country']
income = data['income']
reason_for_applying = data['reason_for_applying']
goals = data['goals']
effort = data['effort']
marketing_permission = data['mktg-permission']
ip_address = get_client_ip(request)[0]
except ValueError:
# Thrown if JSON parsing fails
return HttpResponseBadRequest(u'Could not parse request JSON.')
except InvalidKeyError:
# Thrown if course key parsing fails
return HttpResponseBadRequest(u'Could not parse request course key.')
except KeyError as err:
# Thrown if fields are missing
return HttpResponseBadRequest(u'The field {} is required.'.format(text_type(err)))
zendesk_submitted = create_zendesk_ticket(
legal_name,
email,
u'Financial assistance request for learner {username} in course {course_name}'.format(
username=username,
course_name=course.display_name
),
u'Financial Assistance Request',
tags={'course_id': course_id},
# Send the application as additional info on the ticket so
# that it is not shown when support replies. This uses
# OrderedDict so that information is presented in the right
# order.
additional_info=OrderedDict((
('Username', username),
('Full Name', legal_name),
('Course ID', course_id),
(FA_INCOME_LABEL, income),
('Country', country),
('Allowed for marketing purposes', 'Yes' if marketing_permission else 'No'),
(FA_REASON_FOR_APPLYING_LABEL, '\n' + reason_for_applying + '\n\n'),
(FA_GOALS_LABEL, '\n' + goals + '\n\n'),
(FA_EFFORT_LABEL, '\n' + effort + '\n\n'),
('Client IP', ip_address),
)),
group='Financial Assistance',
)
    if zendesk_submitted not in (200, 201):
# The call to Zendesk failed. The frontend will display a
# message to the user.
return HttpResponse(status=status.HTTP_500_INTERNAL_SERVER_ERROR)
return HttpResponse(status=status.HTTP_204_NO_CONTENT)
@login_required
def financial_assistance_form(request):
"""Render the financial assistance application form page."""
user = request.user
enrolled_courses = get_financial_aid_courses(user)
incomes = ['Less than $5,000', '$5,000 - $10,000', '$10,000 - $15,000', '$15,000 - $20,000', '$20,000 - $25,000',
'$25,000 - $40,000', '$40,000 - $55,000', '$55,000 - $70,000', '$70,000 - $85,000',
'$85,000 - $100,000', 'More than $100,000']
annual_incomes = [
{'name': _(income), 'value': income} for income in incomes # lint-amnesty, pylint: disable=translation-of-non-string
]
return render_to_response('financial-assistance/apply.html', {
'header_text': _get_fa_header(FINANCIAL_ASSISTANCE_HEADER),
'student_faq_url': marketing_link('FAQ'),
'dashboard_url': reverse('dashboard'),
'account_settings_url': reverse('account_settings'),
'platform_name': configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME),
'user_details': {
'email': user.email,
'username': user.username,
'name': user.profile.name,
'country': text_type(user.profile.country.name),
},
'submit_url': reverse('submit_financial_assistance_request'),
'fields': [
{
'name': 'course',
'type': 'select',
'label': _('Course'),
'placeholder': '',
'defaultValue': '',
'required': True,
'options': enrolled_courses,
'instructions': ugettext(
'Select the course for which you want to earn a verified certificate. If'
' the course does not appear in the list, make sure that you have enrolled'
' in the audit track for the course.'
)
},
{
'name': 'income',
'type': 'select',
'label': _(FA_INCOME_LABEL), # lint-amnesty, pylint: disable=translation-of-non-string
'placeholder': '',
'defaultValue': '',
'required': True,
'options': annual_incomes,
'instructions': _('Specify your annual household income in US Dollars.')
},
{
'name': 'reason_for_applying',
'type': 'textarea',
'label': _(FA_REASON_FOR_APPLYING_LABEL), # lint-amnesty, pylint: disable=translation-of-non-string
'placeholder': '',
'defaultValue': '',
'required': True,
'restrictions': {
'min_length': settings.FINANCIAL_ASSISTANCE_MIN_LENGTH,
'max_length': settings.FINANCIAL_ASSISTANCE_MAX_LENGTH
},
'instructions': FA_SHORT_ANSWER_INSTRUCTIONS
},
{
'name': 'goals',
'type': 'textarea',
'label': _(FA_GOALS_LABEL), # lint-amnesty, pylint: disable=translation-of-non-string
'placeholder': '',
'defaultValue': '',
'required': True,
'restrictions': {
'min_length': settings.FINANCIAL_ASSISTANCE_MIN_LENGTH,
'max_length': settings.FINANCIAL_ASSISTANCE_MAX_LENGTH
},
'instructions': FA_SHORT_ANSWER_INSTRUCTIONS
},
{
'name': 'effort',
'type': 'textarea',
'label': _(FA_EFFORT_LABEL), # lint-amnesty, pylint: disable=translation-of-non-string
'placeholder': '',
'defaultValue': '',
'required': True,
'restrictions': {
'min_length': settings.FINANCIAL_ASSISTANCE_MIN_LENGTH,
'max_length': settings.FINANCIAL_ASSISTANCE_MAX_LENGTH
},
'instructions': FA_SHORT_ANSWER_INSTRUCTIONS
},
{
'placeholder': '',
'name': 'mktg-permission',
'label': _(
'I allow edX to use the information provided in this application '
'(except for financial information) for edX marketing purposes.'
),
'defaultValue': '',
'type': 'checkbox',
'required': False,
'instructions': '',
'restrictions': {}
}
],
})
def get_financial_aid_courses(user):
""" Retrieve the courses eligible for financial assistance. """
financial_aid_courses = []
for enrollment in CourseEnrollment.enrollments_for_user(user).order_by('-created'):
        if (enrollment.mode != CourseMode.VERIFIED and
                enrollment.course_overview and
                enrollment.course_overview.eligible_for_financial_aid and
                CourseMode.objects.filter(
                    Q(_expiration_datetime__isnull=True) | Q(_expiration_datetime__gt=datetime.now(UTC)),
                    course_id=enrollment.course_id,
                    mode_slug=CourseMode.VERIFIED).exists()):
financial_aid_courses.append(
{
'name': enrollment.course_overview.display_name,
'value': text_type(enrollment.course_id)
}
)
return financial_aid_courses
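# Illustrative return value of get_financial_aid_courses (course id made up):
#
#     [{'name': 'Introduction to Statistics',
#       'value': 'course-v1:ExampleX+Stat101+2020'}]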
| agpl-3.0 | 646,148,226,935,186,800 | 41.980402 | 212 | 0.656131 | false | 4.068552 | false | false | false |
CasataliaLabs/biscuit_drishtiman | v4l_capture_example.py | 1 | 1279 | import Image
import select
import v4l2capture
import numpy
import pylab
import time
# Open the video device. Note: v4l2capture.Video_device() expects a local
# V4L2 device node such as /dev/video0; an HTTP MJPEG URL (as below) is not a
# V4L2 device and will not open through this API.
#~ video = v4l2capture.Video_device("/dev/video0")
video = v4l2capture.Video_device("http://admin:@192.168.1.105/snapshot.cgi?.mjpeg")
# Suggest an image size to the device. The device may choose and
# return another size if it doesn't support the suggested one.
size_x, size_y = video.set_format(800, 448)
# Create a buffer to store image data in. This must be done before
# calling 'start' if v4l2capture is compiled with libv4l2. Otherwise
# raises IOError.
video.create_buffers(1)
# Send the buffer to the device. Some devices require this to be done
# before calling 'start'.
video.queue_all_buffers()
# Start the device. This lights the LED if it's a camera that has one.
video.start()
# Wait for the device to fill the buffer.
select.select((video,), (), ())
# The rest is easy :-)
image_data = video.read()
video.close()
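# NOTE: Image.fromstring() is the old PIL spelling; Pillow renamed it to
# Image.frombytes(), so the calls below may need to be swapped depending on
# which imaging library is installed.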
#~ image = Image.fromstring("L", (size_x, size_y), image_data)
image = Image.fromstring("RGB", (size_x, size_y), image_data)
imageNumpy = numpy.asarray(image)
pylab.imshow(imageNumpy)
pylab.show()
#~ a = input('test')
#time.sleep(4)
#image.save("image.jpg")
#print "Saved image.jpg (Size: " + str(size_x) + " x " + str(size_y) + ")"
| gpl-3.0 | 5,290,132,691,312,513,000 | 28.744186 | 83 | 0.713839 | false | 3.067146 | false | false | false |
vipod/pyzimbra | pyzimbra/soap_auth.py | 2 | 6473 | # -*- coding: utf-8 -*-
"""
################################################################################
# Copyright (c) 2010, Ilgar Mashayev
#
# E-mail: [email protected]
# Website: http://github.com/ilgarm/pyzimbra
################################################################################
# This file is part of pyzimbra.
#
# Pyzimbra is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyzimbra is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyzimbra. If not, see <http://www.gnu.org/licenses/>.
################################################################################
Soap related methods and classes.
@author: ilgar
"""
from pyzimbra import zconstant, sconstant, util
from pyzimbra.auth import AuthException, AuthToken, Authenticator
from pyzimbra.soap import SoapException
from time import time
import SOAPpy
import hashlib
import hmac
import logging
class SoapAuthenticator(Authenticator):
"""
Soap authenticator.
"""
# --------------------------------------------------------------- properties
# -------------------------------------------------------------------- bound
def __init__(self):
Authenticator.__init__(self)
self.log = logging.getLogger(__name__)
# ------------------------------------------------------------------ unbound
def authenticate_admin(self, transport, account_name, password):
"""
Authenticates administrator using username and password.
"""
Authenticator.authenticate_admin(self, transport, account_name, password)
auth_token = AuthToken()
auth_token.account_name = account_name
params = {sconstant.E_NAME: account_name,
sconstant.E_PASSWORD: password}
self.log.debug('Authenticating admin %s' % account_name)
try:
res = transport.invoke(zconstant.NS_ZIMBRA_ADMIN_URL,
sconstant.AuthRequest,
params,
auth_token)
except SoapException as exc:
raise AuthException(unicode(exc), exc)
auth_token.token = res.authToken
auth_token.session_id = res.sessionId
self.log.info('Authenticated admin %s, session id %s'
% (account_name, auth_token.session_id))
return auth_token
def authenticate(self, transport, account_name, password=None):
"""
Authenticates account using soap method.
"""
Authenticator.authenticate(self, transport, account_name, password)
        if password is None:
return self.pre_auth(transport, account_name)
else:
return self.auth(transport, account_name, password)
def auth(self, transport, account_name, password):
"""
Authenticates using username and password.
"""
auth_token = AuthToken()
auth_token.account_name = account_name
attrs = {sconstant.A_BY: sconstant.V_NAME}
account = SOAPpy.Types.stringType(data=account_name, attrs=attrs)
params = {sconstant.E_ACCOUNT: account,
sconstant.E_PASSWORD: password}
self.log.debug('Authenticating account %s' % account_name)
try:
res = transport.invoke(zconstant.NS_ZIMBRA_ACC_URL,
sconstant.AuthRequest,
params,
auth_token)
except SoapException as exc:
raise AuthException(unicode(exc), exc)
        if isinstance(res, tuple):
auth_token.token = res[0].authToken
else:
auth_token.token = res.authToken
if hasattr(res, 'sessionId'):
auth_token.session_id = res.sessionId
self.log.info('Authenticated account %s, session id %s'
% (account_name, auth_token.session_id))
return auth_token
def pre_auth(self, transport, account_name):
"""
Authenticates using username and domain key.
"""
auth_token = AuthToken()
auth_token.account_name = account_name
domain = util.get_domain(account_name)
        if domain is None:
raise AuthException('Invalid auth token account')
        domain_key = self.domains.get(domain)
        if domain_key is None:
            raise AuthException('Invalid domain key for domain %s' % domain)
self.log.debug('Initialized domain key for account %s'
% account_name)
expires = 0
timestamp = int(time() * 1000)
pak = hmac.new(domain_key, '%s|%s|%s|%s' %
(account_name, sconstant.E_NAME, expires, timestamp),
hashlib.sha1).hexdigest()
attrs = {sconstant.A_BY: sconstant.V_NAME}
account = SOAPpy.Types.stringType(data=account_name, attrs=attrs)
attrs = {sconstant.A_TIMESTAMP: timestamp, sconstant.A_EXPIRES: expires}
preauth = SOAPpy.Types.stringType(data=pak,
name=sconstant.E_PREAUTH,
attrs=attrs)
params = {sconstant.E_ACCOUNT: account,
sconstant.E_PREAUTH: preauth}
self.log.debug('Authenticating account %s using domain key'
% account_name)
try:
res = transport.invoke(zconstant.NS_ZIMBRA_ACC_URL,
sconstant.AuthRequest,
params,
auth_token)
except SoapException as exc:
raise AuthException(unicode(exc), exc)
auth_token.token = res.authToken
auth_token.session_id = res.sessionId
self.log.info('Authenticated account %s, session id %s'
% (account_name, auth_token.session_id))
return auth_token
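# Illustrative sketch of the preauth computation used in pre_auth() above,
# done standalone (Python 2, matching this module; the domain key, account
# name and timestamp are made-up values):
#
#     import hashlib, hmac
#     from time import time
#
#     domain_key = '0dc5888a1b...'          # hypothetical domain preauth key
#     account_name = 'user@example.com'     # hypothetical account
#     expires = 0
#     timestamp = int(time() * 1000)
#     pak = hmac.new(domain_key, '%s|name|%s|%s'
#                    % (account_name, expires, timestamp),
#                    hashlib.sha1).hexdigest()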
| lgpl-3.0 | -5,685,805,696,362,786,000 | 34.371585 | 81 | 0.549359 | false | 4.470304 | false | false | false |
beeftornado/sentry | src/sentry/api/endpoints/group_events.py | 1 | 5127 | from __future__ import absolute_import
import six
from datetime import timedelta
from django.utils import timezone
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from functools import partial
from sentry import eventstore
from sentry.api.base import EnvironmentMixin
from sentry.api.bases import GroupEndpoint
from sentry.api.event_search import get_filter, InvalidSearchQuery
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.helpers.environments import get_environments
from sentry.api.helpers.events import get_direct_hit_response
from sentry.api.serializers import EventSerializer, serialize, SimpleEventSerializer
from sentry.api.paginator import GenericOffsetPaginator
from sentry.api.utils import get_date_range_from_params, InvalidParams
from sentry.search.utils import InvalidQuery, parse_query
class NoResults(Exception):
pass
class GroupEventsError(Exception):
pass
class GroupEventsEndpoint(GroupEndpoint, EnvironmentMixin):
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:qparam bool full: if this is set to true then the event payload will
include the full event body, including the stacktrace.
Set to 1 to enable.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
try:
environments = get_environments(request, group.project.organization)
query, tags = self._get_search_query_and_tags(request, group, environments)
except InvalidQuery as exc:
return Response({"detail": six.text_type(exc)}, status=400)
except (NoResults, ResourceDoesNotExist):
return Response([])
try:
start, end = get_date_range_from_params(request.GET, optional=True)
except InvalidParams as e:
raise ParseError(detail=six.text_type(e))
try:
return self._get_events_snuba(request, group, environments, query, tags, start, end)
except GroupEventsError as exc:
raise ParseError(detail=six.text_type(exc))
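    # Illustrative request (issue id and query are made up):
    #
    #     GET /api/0/issues/12345/events/?full=1&query=environment:production
    #
    # The response is a JSON list of the issue's events; with full=1 each
    # event is serialized with its complete body, including the stacktrace.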
def _get_events_snuba(self, request, group, environments, query, tags, start, end):
default_end = timezone.now()
default_start = default_end - timedelta(days=90)
params = {
"group_ids": [group.id],
"project_id": [group.project_id],
"organization_id": group.project.organization_id,
"start": start if start else default_start,
"end": end if end else default_end,
}
direct_hit_resp = get_direct_hit_response(request, query, params, "api.group-events")
if direct_hit_resp:
return direct_hit_resp
if environments:
params["environment"] = [env.name for env in environments]
full = request.GET.get("full", False)
try:
snuba_filter = get_filter(request.GET.get("query", None), params)
except InvalidSearchQuery as e:
raise ParseError(detail=six.text_type(e))
snuba_filter.conditions.append(["event.type", "!=", "transaction"])
data_fn = partial(eventstore.get_events, referrer="api.group-events", filter=snuba_filter)
serializer = EventSerializer() if full else SimpleEventSerializer()
return self.paginate(
request=request,
on_results=lambda results: serialize(results, request.user, serializer),
paginator=GenericOffsetPaginator(data_fn=data_fn),
)
def _get_search_query_and_tags(self, request, group, environments=None):
raw_query = request.GET.get("query")
if raw_query:
query_kwargs = parse_query([group.project], raw_query, request.user, environments)
query = query_kwargs.pop("query", None)
tags = query_kwargs.pop("tags", {})
else:
query = None
tags = {}
if environments:
env_names = set(env.name for env in environments)
if "environment" in tags:
# If a single environment was passed as part of the query, then
# we'll just search for that individual environment in this
# query, even if more are selected.
if tags["environment"] not in env_names:
# An event can only be associated with a single
# environment, so if the environments associated with
# the request don't contain the environment provided as a
# tag lookup, the query cannot contain any valid results.
raise NoResults
else:
# XXX: Handle legacy backends here. Just store environment as a
# single tag if we only have one so that we don't break existing
# usage.
tags["environment"] = list(env_names) if len(env_names) > 1 else env_names.pop()
return query, tags
| bsd-3-clause | 4,531,243,865,721,410,000 | 38.744186 | 98 | 0.634484 | false | 4.469922 | false | false | false |
vygr/Python-PCB | pcb.py | 1 | 3869 | #!/opt/local/bin/pypy -tt
# -*- coding: utf-8 -*-
#Copyright (C) 2014 Chris Hinsley All Rights Reserved
import sys, argparse, router
from copy import deepcopy
from ast import literal_eval
from mymath import *
def main():
parser = argparse.ArgumentParser(description = 'Pcb layout optimizer.', formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument('infile', nargs = '?', type = argparse.FileType('r'), default = sys.stdin, help = 'filename, default stdin')
parser.add_argument('--t', nargs = 1, type = int, default = [600], help = 'timeout in seconds, default 600')
parser.add_argument('--v', nargs = 1, type = int, default = [0], choices = range(0, 2), help = 'verbosity level 0..1, default 0')
parser.add_argument('--s', nargs = 1, type = int, default = [1], help = 'number of samples, default 1')
parser.add_argument('--r', nargs = 1, type = int, default = [1], choices = range(1, 5), help = 'grid resolution 1..4, default 1')
parser.add_argument('--z', nargs = 1, type = int, default = [0], choices = range(0, 2), help = 'minimize vias 0..1, default 0')
parser.add_argument('--d', nargs = 1, type = int, default = [0], choices = range(0, 6), \
help = 'distance metric 0..5, default 0.\n' \
'0 -> manhattan\n1 -> squared_euclidean\n2 -> euclidean\n3 -> chebyshev\n4 -> reciprocal\n5 -> random')
parser.add_argument('--fr', nargs = 1, type = int, default = [2], choices = range(1, 6), help = 'flood range 1..5, default 2')
parser.add_argument('--xr', nargs = 1, type = int, default = [1], choices = range(0, 6), help = 'even layer x range 0..5, default 1')
parser.add_argument('--yr', nargs = 1, type = int, default = [1], choices = range(0, 6), help = 'odd layer y range 0..5, default 1')
args = parser.parse_args()
flood_range = args.fr[0]
flood_range_x_even_layer = args.xr[0]
flood_range_y_odd_layer = args.yr[0]
path_range = flood_range + 0
path_range_x_even_layer = flood_range_x_even_layer + 0
path_range_y_odd_layer = flood_range_y_odd_layer + 0
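    # Routing move sets, one per layer parity: --xr limits the x span on even
    # layers and --yr limits the y span on odd layers, biasing each layer's
    # routing direction, while the (0, 0, +/-1) entries are via moves to the
    # layers below and above.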
routing_flood_vectors = [[(x, y, 0) for x in xrange(-flood_range_x_even_layer, flood_range_x_even_layer + 1) for y in xrange(-flood_range, flood_range + 1) \
if length_2d((x, y)) > 0.1 and length_2d((x, y)) <= flood_range] + [(0, 0, -1), (0, 0, 1)], \
[(x, y, 0) for x in xrange(-flood_range, flood_range + 1) for y in xrange(-flood_range_y_odd_layer, flood_range_y_odd_layer + 1) \
if length_2d((x, y)) > 0.1 and length_2d((x, y)) <= flood_range] + [(0, 0, -1), (0, 0, 1)]]
routing_path_vectors = [[(x, y, 0) for x in xrange(-path_range_x_even_layer, path_range_x_even_layer + 1) for y in xrange(-path_range, path_range + 1) \
if length_2d((x, y)) > 0.1 and length_2d((x, y)) <= path_range] + [(0, 0, -1), (0, 0, 1)], \
[(x, y, 0) for x in xrange(-path_range, path_range + 1) for y in xrange(-path_range_y_odd_layer, path_range_y_odd_layer + 1) \
if length_2d((x, y)) > 0.1 and length_2d((x, y)) <= path_range] + [(0, 0, -1), (0, 0, 1)]]
dfunc = [manhattan_distance, squared_euclidean_distance, euclidean_distance, \
chebyshev_distance, reciprical_distance, random_distance][args.d[0]]
dimensions = literal_eval(args.infile.readline().strip())
pcb = router.Pcb(dimensions, routing_flood_vectors, routing_path_vectors, dfunc, args.r[0], args.v[0], args.z[0])
for line in args.infile:
track = literal_eval(line.strip())
if not track:
break
pcb.add_track(track)
args.infile.close()
pcb.print_pcb()
best_cost = None
best_pcb = None
for i in xrange(args.s[0]):
if not pcb.route(args.t[0]):
pcb.shuffle_netlist()
continue
cost = pcb.cost()
if best_cost == None or cost < best_cost:
best_cost = cost
best_pcb = deepcopy(pcb)
pcb.shuffle_netlist()
if best_pcb != None:
best_pcb.print_netlist()
best_pcb.print_stats()
else:
print []
if __name__ == '__main__':
main()
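# Example invocation (illustrative file name; the input's first line holds
# the board dimensions, followed by one track per line):
#
#     pypy pcb.py --t 300 --s 4 --d 2 netlist.pcb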
| gpl-2.0 | -8,110,803,568,652,330,000 | 51.283784 | 158 | 0.632722 | false | 2.711282 | false | false | false |
AntonelliLab/seqcap_processor | bin/aTRAM-master/atram.py | 1 | 8421 | #!/usr/bin/env python3
"""
Start atram.
This wrapper module parses the input arguments and passes them to the module
that does the actual processing (core_atram.py).
"""
import os
import argparse
import textwrap
import lib.db as db
import lib.log as log
import lib.bio as bio
import lib.util as util
import lib.blast as blast
import lib.assembler as assembly
from lib.core_atram import assemble
def parse_command_line():
"""Process command-line arguments."""
description = """
This takes a query sequence and a blast database built with the
atram_preprocessor.py script and builds assemblies.
If you specify more than one query sequence and/or more than one blast
database then aTRAM will build one assembly for each query/blast
DB pair.
NOTE: You may use a text file to hold the command-line arguments
like: @/path/to/args.txt. This is particularly useful when specifying
multiple blast databases or multiple query sequences.
"""
parser = argparse.ArgumentParser(
fromfile_prefix_chars='@',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent(description))
parser.add_argument('--version', action='version',
version='%(prog)s {}'.format(db.ATRAM_VERSION))
group = parser.add_argument_group('required arguments')
group.add_argument(
'-b', '--blast-db', '--sra', '--db', '--database',
required=True, metavar='DB', nargs='+',
help="""This needs to match the DB prefix you entered for
atram_preprocessor.py. You may repeat this argument to run the
--query sequence(s) against multiple blast databases.""")
group.add_argument(
'-q', '--query', '--target', '--probe', required=False, nargs='+',
help="""The path to the fasta file with sequences of interest. You may
repeat this argument. If you do then Each --query sequence file
will be run against every --blast-db.""")
group.add_argument(
'-Q', '--query-split', '--target-split', required=False, nargs='+',
help="""The path to the fasta file with multiple sequences of interest.
This will take every sequence in the fasta file and treat it as if
it were its own --query argument. So every sequence in
--query-split will be run against every --blast-db.""")
group.add_argument(
'-o', '--output-prefix', required=True,
help="""This is the prefix of all of the output files. So you can
identify different blast output file sets. You may include a
directory as part of the prefix. aTRAM will add suffixes to
differentiate output files.""")
group.add_argument(
'-a', '--assembler', default='none',
choices=['abyss', 'trinity', 'velvet', 'spades', 'none'],
help="""Which assembler to use. Choosing "none" (the default) will do
a single blast run and stop before any assembly.""")
group.add_argument(
'-i', '--iterations', type=int, default=5, metavar='N',
help="""The number of pipeline iterations. The default is "5".""")
group.add_argument(
'-p', '--protein', action='store_true',
help="""Are the query sequences protein? aTRAM will guess if you skip
this argument.""")
group.add_argument(
'--fraction', type=float, default=1.0,
help="""Use only the specified fraction of the aTRAM database. The
default is 1.0.""")
cpus = min(10, os.cpu_count() - 4 if os.cpu_count() > 4 else 1)
group.add_argument(
'--cpus', '--processes', '--max-processes', type=int, default=cpus,
help="""Number of CPU processors to use. This will also be used for
the assemblers when possible. We will use {} out of {} CPUs.
""".format(cpus, os.cpu_count()))
group.add_argument('--log-file', help="""Log file (full path)".""")
group.add_argument(
'--log-level', choices=['debug', 'info', 'error'], default='info',
help="""Log messages of the given level (or above). 'debug' shows the
most messages and 'error' shows the least. The default is
'info'""")
group.add_argument(
'--path',
help="""If the assembler or blast you want to use is not in your $PATH\
then use this to prepend directories to your path.""")
group.add_argument(
'-t', '--temp-dir', metavar='DIR',
help="""Place temporary files in this directory. All files will be
deleted after aTRAM completes. The directory must exist.""")
group.add_argument(
'--keep-temp-dir', action='store_true',
help="""This flag will keep the temporary files in the --temp-dir
around for debugging.""")
group.add_argument(
'-T', '--timeout', metavar='SECONDS', default=600, type=int,
help="""How many seconds to wait for an assembler or BLAST before
stopping the run. To wait forever set this to 0. The default
is "600" (10 minutes).""")
group = parser.add_argument_group(
'optional values for blast-filtering contigs')
group.add_argument(
'--no-filter', action='store_true',
help="""Do not filter the assembled contigs. This will: set both the
--bit-score and --contig-length to 0""")
group.add_argument(
'--bit-score', type=float, default=70.0, metavar='SCORE',
help="""Remove contigs that have a value less than this. The default
is "70.0". This is turned off by the --no-filter argument.""")
group.add_argument(
'--contig-length', '--length', type=int, default=100,
help="""Remove blast hits that are shorter than this length. The
default is "100". This is turned off by the --no-filter argument.
""")
blast.command_line_args(parser)
assembly.command_line_args(parser)
args = vars(parser.parse_args())
check_query_args(args)
blast.check_args(args)
# Set defaults and adjust arguments based on other arguments
args['cov_cutoff'] = assembly.default_cov_cutoff(args['cov_cutoff'])
args['blast_db'] = blast.touchup_blast_db_names(args['blast_db'])
args['kmer'] = assembly.default_kmer(args['kmer'], args['assembler'])
args['max_target_seqs'] = blast.default_max_target_seqs(
args['max_target_seqs'], args['blast_db'], args['max_memory'])
# Timeout: As always, None != 0
args['timeout'] = max(0, args['timeout'])
    if not args['timeout']:
args['timeout'] = None
setup_blast_args(args)
set_protein_arg(args)
setup_path_arg(args)
find_programs(args)
util.temp_dir_exists(args['temp_dir'], args.get('debug_dir'))
util.set_blast_batch_size(args['batch_size'])
return args
def setup_path_arg(args):
"""Prepend to PATH environment variable if requested."""
if args['path']:
os.environ['PATH'] = '{}:{}'.format(args['path'], os.environ['PATH'])
def setup_blast_args(args):
"""Set up the blast args."""
if args['no_filter']:
args['bit_score'] = 0
args['contig_length'] = 0
def check_query_args(args):
"""Validate the query arguments."""
if not args.get('query') and not args.get('query_split'):
err = 'You must have at least one --query or --query-split argument.'
log.fatal(err)
def set_protein_arg(args):
"""Set up the protein argument."""
if not args['protein'] and args['query']:
args['protein'] = bio.fasta_file_has_protein(args['query'])
def find_programs(args):
"""Make sure we can find the programs needed by the assembler and blast."""
blast.find_program('makeblastdb')
blast.find_program('tblastn')
blast.find_program('blastn')
assembly.find_program(
'abyss', 'bwa', args['assembler'], not args['no_long_reads'])
assembly.find_program('trinity', 'Trinity', args['assembler'])
assembly.find_program(
'trinity', 'Trinity', args['assembler'], args['bowtie2'])
assembly.find_program('velvet', 'velveth', args['assembler'])
assembly.find_program('velvet', 'velvetg', args['assembler'])
assembly.find_program('spades', 'spades.py', args['assembler'])
if __name__ == '__main__':
ARGS = parse_command_line()
assemble(ARGS)
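# Example invocation (illustrative paths and prefix; see parse_command_line()
# above for the full argument list):
#
#     atram.py -b /path/to/atram_db -q query.fasta \
#         -o output/my_run -a velvet -i 5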
| mit | -5,189,248,304,416,858,000 | 36.762332 | 79 | 0.624273 | false | 3.895005 | false | false | false |
AmericanResearchInstitute/poweru-server | pr_services/user_system/organization_email_domain_manager.py | 1 | 1902 | """
OrgEmailDomain manager class
@author Chris Church <[email protected]>
@copyright Copyright 2011 American Research Institute, Inc.
"""
from pr_services.object_manager import ObjectManager
from pr_services.rpc.service import service_method
import facade
class OrgEmailDomainManager(ObjectManager):
"""
Manage mappings between email domain and automatic organization and role
assignment.
"""
def __init__(self):
""" constructor """
ObjectManager.__init__(self)
self.getters.update({
'email_domain' : 'get_general',
'organization' : 'get_foreign_key',
'role' : 'get_foreign_key',
'effective_role' : 'get_foreign_key',
'effective_role_name' : 'get_general',
})
self.setters.update({
'email_domain' : 'set_general',
'organization' : 'set_foreign_key',
'role' : 'set_foreign_key',
})
self.my_django_model = facade.models.OrgEmailDomain
@service_method
def create(self, auth_token, email_domain, organization, role=None):
"""
Create a new OrgEmailDomain mapping
@param email_domain domain name to look for in user's email address
@param organization organization to be assigned
@param role role to be assigned within organization
@return a reference to the newly created OrgEmailDomain
"""
organization_object = self._find_by_id(organization, facade.models.Organization)
role_object = self._find_by_id(role, facade.models.OrgRole) if role else None
obj = self.my_django_model.objects.create(email_domain=email_domain, organization=organization_object, role=role_object)
self.authorizer.check_create_permissions(auth_token, obj)
return obj
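# Illustrative use (ids made up): mapping '@example.com' addresses to an
# organization, optionally with a role inside it:
#
#     mgr = OrgEmailDomainManager()
#     mapping = mgr.create(auth_token, 'example.com', organization_id, role_id)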
# vim:tabstop=4 shiftwidth=4 expandtab
| bsd-3-clause | 8,118,541,161,619,871,000 | 33.581818 | 128 | 0.640904 | false | 4.099138 | false | false | false |
ros2/rclpy | rclpy/rclpy/action/client.py | 1 | 22966 | # Copyright 2019 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
import time
import uuid
import weakref
from action_msgs.msg import GoalStatus
from action_msgs.srv import CancelGoal
from rclpy.executors import await_or_execute
from rclpy.impl.implementation_singleton import rclpy_implementation as _rclpy
from rclpy.qos import qos_profile_action_status_default
from rclpy.qos import qos_profile_services_default
from rclpy.qos import QoSProfile
from rclpy.task import Future
from rclpy.type_support import check_for_type_support
from rclpy.waitable import NumberOfEntities, Waitable
from unique_identifier_msgs.msg import UUID
class ClientGoalHandle():
"""Goal handle for working with Action Clients."""
def __init__(self, action_client, goal_id, goal_response):
self._action_client = action_client
self._goal_id = goal_id
self._goal_response = goal_response
self._status = GoalStatus.STATUS_UNKNOWN
def __eq__(self, other):
return self._goal_id == other.goal_id
def __ne__(self, other):
return self._goal_id != other.goal_id
def __repr__(self):
return 'ClientGoalHandle <id={0}, accepted={1}, status={2}>'.format(
self.goal_id.uuid,
self.accepted,
self.status)
@property
def goal_id(self):
return self._goal_id
@property
def stamp(self):
return self._goal_response.stamp
@property
def accepted(self):
return self._goal_response.accepted
@property
def status(self):
return self._status
def cancel_goal(self):
"""
Send a cancel request for the goal and wait for the response.
Do not call this method in a callback or a deadlock may occur.
:return: The cancel response.
"""
return self._action_client._cancel_goal(self)
def cancel_goal_async(self):
"""
Asynchronous request for the goal be canceled.
:return: a Future instance that completes when the server responds.
:rtype: :class:`rclpy.task.Future` instance
"""
return self._action_client._cancel_goal_async(self)
def get_result(self):
"""
Request the result for the goal and wait for the response.
Do not call this method in a callback or a deadlock may occur.
:return: The result response.
"""
return self._action_client._get_result(self)
def get_result_async(self):
"""
Asynchronously request the goal result.
:return: a Future instance that completes when the result is ready.
:rtype: :class:`rclpy.task.Future` instance
"""
return self._action_client._get_result_async(self)
class ActionClient(Waitable):
"""ROS Action client."""
def __init__(
self,
node,
action_type,
action_name,
*,
callback_group=None,
goal_service_qos_profile=qos_profile_services_default,
result_service_qos_profile=qos_profile_services_default,
cancel_service_qos_profile=qos_profile_services_default,
feedback_sub_qos_profile=QoSProfile(depth=10),
status_sub_qos_profile=qos_profile_action_status_default
):
"""
Create an ActionClient.
:param node: The ROS node to add the action client to.
:param action_type: Type of the action.
:param action_name: Name of the action.
Used as part of the underlying topic and service names.
:param callback_group: Callback group to add the action client to.
If None, then the node's default callback group is used.
:param goal_service_qos_profile: QoS profile for the goal service.
:param result_service_qos_profile: QoS profile for the result service.
:param cancel_service_qos_profile: QoS profile for the cancel service.
:param feedback_sub_qos_profile: QoS profile for the feedback subscriber.
:param status_sub_qos_profile: QoS profile for the status subscriber.
"""
if callback_group is None:
callback_group = node.default_callback_group
super().__init__(callback_group)
# Import the typesupport for the action module if not already done
check_for_type_support(action_type)
self._node = node
self._action_type = action_type
self._action_name = action_name
with node.handle:
self._client_handle = _rclpy.ActionClient(
node.handle,
action_type,
action_name,
goal_service_qos_profile.get_c_qos_profile(),
result_service_qos_profile.get_c_qos_profile(),
cancel_service_qos_profile.get_c_qos_profile(),
feedback_sub_qos_profile.get_c_qos_profile(),
status_sub_qos_profile.get_c_qos_profile()
)
self._is_ready = False
# key: UUID in bytes, value: weak reference to ClientGoalHandle
self._goal_handles = {}
# key: goal request sequence_number, value: Future for goal response
self._pending_goal_requests = {}
# key: goal request sequence_number, value: UUID
self._sequence_number_to_goal_id = {}
# key: cancel request sequence number, value: Future for cancel response
self._pending_cancel_requests = {}
# key: result request sequence number, value: Future for result response
self._pending_result_requests = {}
# key: UUID in bytes, value: callback function
self._feedback_callbacks = {}
callback_group.add_entity(self)
self._node.add_waitable(self)
def _generate_random_uuid(self):
return UUID(uuid=list(uuid.uuid4().bytes))
def _remove_pending_request(self, future, pending_requests):
"""
Remove a future from the list of pending requests.
This prevents a future from receiving a request and executing its done callbacks.
:param future: a future returned from one of :meth:`send_goal_async`,
:meth:`_cancel_goal_async`, or :meth:`_get_result_async`.
:type future: rclpy.task.Future
:param pending_requests: The list of pending requests.
:type pending_requests: dict
:return: The sequence number associated with the removed future, or
None if the future was not found in the list.
"""
for seq, req_future in list(pending_requests.items()):
if future == req_future:
try:
del pending_requests[seq]
except KeyError:
pass
else:
self.remove_future(future)
return seq
return None
def _remove_pending_goal_request(self, future):
seq = self._remove_pending_request(future, self._pending_goal_requests)
if seq in self._sequence_number_to_goal_id:
del self._sequence_number_to_goal_id[seq]
def _remove_pending_cancel_request(self, future):
self._remove_pending_request(future, self._pending_cancel_requests)
def _remove_pending_result_request(self, future):
self._remove_pending_request(future, self._pending_result_requests)
# Start Waitable API
def is_ready(self, wait_set):
"""Return True if one or more entities are ready in the wait set."""
ready_entities = self._client_handle.is_ready(wait_set)
self._is_feedback_ready = ready_entities[0]
self._is_status_ready = ready_entities[1]
self._is_goal_response_ready = ready_entities[2]
self._is_cancel_response_ready = ready_entities[3]
self._is_result_response_ready = ready_entities[4]
return any(ready_entities)
def take_data(self):
"""Take stuff from lower level so the wait set doesn't immediately wake again."""
data = {}
if self._is_goal_response_ready:
taken_data = self._client_handle.take_goal_response(
self._action_type.Impl.SendGoalService.Response)
# If take fails, then we get (None, None)
if all(taken_data):
data['goal'] = taken_data
if self._is_cancel_response_ready:
taken_data = self._client_handle.take_cancel_response(
self._action_type.Impl.CancelGoalService.Response)
# If take fails, then we get (None, None)
if all(taken_data):
data['cancel'] = taken_data
if self._is_result_response_ready:
taken_data = self._client_handle.take_result_response(
self._action_type.Impl.GetResultService.Response)
# If take fails, then we get (None, None)
if all(taken_data):
data['result'] = taken_data
if self._is_feedback_ready:
taken_data = self._client_handle.take_feedback(
self._action_type.Impl.FeedbackMessage)
# If take fails, then we get None
if taken_data is not None:
data['feedback'] = taken_data
if self._is_status_ready:
taken_data = self._client_handle.take_status(
self._action_type.Impl.GoalStatusMessage)
# If take fails, then we get None
if taken_data is not None:
data['status'] = taken_data
return data
async def execute(self, taken_data):
"""
Execute work after data has been taken from a ready wait set.
This will set results for Future objects for any received service responses and
call any user-defined callbacks (e.g. feedback).
"""
if 'goal' in taken_data:
sequence_number, goal_response = taken_data['goal']
if sequence_number in self._sequence_number_to_goal_id:
goal_handle = ClientGoalHandle(
self,
self._sequence_number_to_goal_id[sequence_number],
goal_response)
if goal_handle.accepted:
goal_uuid = bytes(goal_handle.goal_id.uuid)
if goal_uuid in self._goal_handles:
raise RuntimeError(
'Two goals were accepted with the same ID ({})'.format(goal_handle))
self._goal_handles[goal_uuid] = weakref.ref(goal_handle)
self._pending_goal_requests[sequence_number].set_result(goal_handle)
else:
self._node.get_logger().warning(
'Ignoring unexpected goal response. There may be more than '
f"one action server for the action '{self._action_name}'"
)
if 'cancel' in taken_data:
sequence_number, cancel_response = taken_data['cancel']
if sequence_number in self._pending_cancel_requests:
self._pending_cancel_requests[sequence_number].set_result(cancel_response)
else:
self._node.get_logger().warning(
'Ignoring unexpected cancel response. There may be more than '
f"one action server for the action '{self._action_name}'"
)
if 'result' in taken_data:
sequence_number, result_response = taken_data['result']
if sequence_number in self._pending_result_requests:
self._pending_result_requests[sequence_number].set_result(result_response)
else:
self._node.get_logger().warning(
'Ignoring unexpected result response. There may be more than '
f"one action server for the action '{self._action_name}'"
)
if 'feedback' in taken_data:
feedback_msg = taken_data['feedback']
goal_uuid = bytes(feedback_msg.goal_id.uuid)
# Call a registered callback if there is one
if goal_uuid in self._feedback_callbacks:
await await_or_execute(self._feedback_callbacks[goal_uuid], feedback_msg)
if 'status' in taken_data:
# Update the status of all goal handles maintained by this Action Client
for status_msg in taken_data['status'].status_list:
goal_uuid = bytes(status_msg.goal_info.goal_id.uuid)
status = status_msg.status
if goal_uuid in self._goal_handles:
goal_handle = self._goal_handles[goal_uuid]()
if goal_handle is not None:
goal_handle._status = status
# Remove "done" goals from the list
if (GoalStatus.STATUS_SUCCEEDED == status or
GoalStatus.STATUS_CANCELED == status or
GoalStatus.STATUS_ABORTED == status):
del self._goal_handles[goal_uuid]
else:
# Weak reference is None
del self._goal_handles[goal_uuid]
def get_num_entities(self):
"""Return number of each type of entity used in the wait set."""
num_entities = self._client_handle.get_num_entities()
return NumberOfEntities(*num_entities)
def add_to_wait_set(self, wait_set):
"""Add entities to wait set."""
self._client_handle.add_to_waitset(wait_set)
# End Waitable API
def send_goal(self, goal, **kwargs):
"""
Send a goal and wait for the result.
Do not call this method in a callback or a deadlock may occur.
See :meth:`send_goal_async` for more info about keyword arguments.
Unlike :meth:`send_goal_async`, this method returns the final result of the
action (not a goal handle).
:param goal: The goal request.
:type goal: action_type.Goal
:return: The result response.
:rtype: action_type.Result
:raises: TypeError if the type of the passed goal isn't an instance of
the Goal type of the provided action when the service was
constructed.
"""
if not isinstance(goal, self._action_type.Goal):
raise TypeError()
event = threading.Event()
def unblock(future):
nonlocal event
event.set()
send_goal_future = self.send_goal_async(goal, **kwargs)
send_goal_future.add_done_callback(unblock)
event.wait()
if send_goal_future.exception() is not None:
raise send_goal_future.exception()
goal_handle = send_goal_future.result()
result = self._get_result(goal_handle)
return result
def send_goal_async(self, goal, feedback_callback=None, goal_uuid=None):
"""
Send a goal and asynchronously get the result.
The result of the returned Future is set to a ClientGoalHandle when receipt of the goal
is acknowledged by an action server.
:param goal: The goal request.
:type goal: action_type.Goal
:param feedback_callback: Callback function for feedback associated with the goal.
:type feedback_callback: function
:param goal_uuid: Universally unique identifier for the goal.
If None, then a random UUID is generated.
:type: unique_identifier_msgs.UUID
:return: a Future instance to a goal handle that completes when the goal request
has been accepted or rejected.
:rtype: :class:`rclpy.task.Future` instance
:raises: TypeError if the type of the passed goal isn't an instance of
the Goal type of the provided action when the service was
constructed.
"""
if not isinstance(goal, self._action_type.Goal):
raise TypeError()
request = self._action_type.Impl.SendGoalService.Request()
request.goal_id = self._generate_random_uuid() if goal_uuid is None else goal_uuid
request.goal = goal
sequence_number = self._client_handle.send_goal_request(request)
if sequence_number in self._pending_goal_requests:
raise RuntimeError(
'Sequence ({}) conflicts with pending goal request'.format(sequence_number))
if feedback_callback is not None:
# TODO(jacobperron): Move conversion function to a general-use package
goal_uuid = bytes(request.goal_id.uuid)
self._feedback_callbacks[goal_uuid] = feedback_callback
future = Future()
self._pending_goal_requests[sequence_number] = future
self._sequence_number_to_goal_id[sequence_number] = request.goal_id
future.add_done_callback(self._remove_pending_goal_request)
# Add future so executor is aware
self.add_future(future)
return future
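    # Illustrative usage sketch (added; not part of the original source).
    # Assuming `node` is an rclpy Node and `Fibonacci` is a generated action
    # type, a caller might do:
    #
    #   client = ActionClient(node, Fibonacci, 'fibonacci')
    #   client.wait_for_server()
    #   send_future = client.send_goal_async(Fibonacci.Goal(order=10))
    #   rclpy.spin_until_future_complete(node, send_future)
    #   goal_handle = send_future.result()
    #   result_future = goal_handle.get_result_async()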
def _cancel_goal(self, goal_handle):
"""
Send a cancel request for an active goal and wait for the response.
Do not call this method in a callback or a deadlock may occur.
:param goal_handle: Handle to the goal to cancel.
:type goal_handle: :class:`ClientGoalHandle`
:return: The cancel response.
"""
event = threading.Event()
def unblock(future):
nonlocal event
event.set()
future = self._cancel_goal_async(goal_handle)
future.add_done_callback(unblock)
event.wait()
if future.exception() is not None:
raise future.exception()
return future.result()
def _cancel_goal_async(self, goal_handle):
"""
Send a cancel request for an active goal and asynchronously get the result.
:param goal_handle: Handle to the goal to cancel.
:type goal_handle: :class:`ClientGoalHandle`
:return: a Future instance that completes when the cancel request has been processed.
:rtype: :class:`rclpy.task.Future` instance
"""
if not isinstance(goal_handle, ClientGoalHandle):
raise TypeError(
'Expected type ClientGoalHandle but received {}'.format(type(goal_handle)))
cancel_request = CancelGoal.Request()
cancel_request.goal_info.goal_id = goal_handle.goal_id
sequence_number = self._client_handle.send_cancel_request(cancel_request)
if sequence_number in self._pending_cancel_requests:
raise RuntimeError(
'Sequence ({}) conflicts with pending cancel request'.format(sequence_number))
future = Future()
self._pending_cancel_requests[sequence_number] = future
future.add_done_callback(self._remove_pending_cancel_request)
# Add future so executor is aware
self.add_future(future)
return future
def _get_result(self, goal_handle):
"""
Request the result for an active goal and wait for the response.
Do not call this method in a callback or a deadlock may occur.
:param goal_handle: Handle to the goal to get the result for.
:type goal_handle: :class:`ClientGoalHandle`
:return: The result response.
"""
event = threading.Event()
def unblock(future):
nonlocal event
event.set()
future = self._get_result_async(goal_handle)
future.add_done_callback(unblock)
event.wait()
if future.exception() is not None:
raise future.exception()
return future.result()
def _get_result_async(self, goal_handle):
"""
Request the result for an active goal asynchronously.
:param goal_handle: Handle to the goal to cancel.
:type goal_handle: :class:`ClientGoalHandle`
:return: a Future instance that completes when the get result request has been processed.
:rtype: :class:`rclpy.task.Future` instance
"""
if not isinstance(goal_handle, ClientGoalHandle):
raise TypeError(
'Expected type ClientGoalHandle but received {}'.format(type(goal_handle)))
result_request = self._action_type.Impl.GetResultService.Request()
result_request.goal_id = goal_handle.goal_id
sequence_number = self._client_handle.send_result_request(result_request)
if sequence_number in self._pending_result_requests:
raise RuntimeError(
'Sequence ({}) conflicts with pending result request'.format(sequence_number))
future = Future()
self._pending_result_requests[sequence_number] = future
future.add_done_callback(self._remove_pending_result_request)
# Add future so executor is aware
self.add_future(future)
return future
def server_is_ready(self):
"""
Check if there is an action server ready to process requests from this client.
:return: True if an action server is ready, False otherwise.
"""
with self._node.handle:
return self._client_handle.is_action_server_available()
def wait_for_server(self, timeout_sec=None):
"""
        Wait for an action server to be ready.
Returns as soon as an action server is ready for this client.
:param timeout_sec: Number of seconds to wait until an action server is available.
If None, then wait indefinitely.
:return: True if an action server is available, False if the timeout is exceeded.
"""
# TODO(jacobperron): Remove arbitrary sleep time and return as soon as server is ready
# See https://github.com/ros2/rclpy/issues/58
sleep_time = 0.25
if timeout_sec is None:
timeout_sec = float('inf')
while self._node.context.ok() and not self.server_is_ready() and timeout_sec > 0.0:
time.sleep(sleep_time)
timeout_sec -= sleep_time
return self.server_is_ready()
def destroy(self):
"""Destroy the underlying action client handle."""
if self._client_handle is None:
return
with self._node.handle:
self._client_handle.destroy_when_not_in_use()
self._node.remove_waitable(self)
self._client_handle = None
def __del__(self):
"""Destroy the underlying action client handle."""
self.destroy()
| apache-2.0 | -5,743,371,639,841,132,000 | 38.057823 | 97 | 0.613254 | false | 4.313674 | false | false | false |
mmalyska/eve-wspace | evewspace/Map/views.py | 1 | 36404 | # Eve W-Space
# Copyright (C) 2013 Andrew Austin and other contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. An additional term under section
# 7 of the GPL is included in the LICENSE file.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from datetime import datetime, timedelta
import json
import csv
import pytz
from django.core.exceptions import PermissionDenied, ObjectDoesNotExist
from django.http import Http404, HttpResponseRedirect, HttpResponse
from django.template.response import TemplateResponse
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.template.loader import render_to_string
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.models import Group, Permission
from django.shortcuts import render, get_object_or_404
from django.db.models import Q
from Map.models import *
from Map import utils, signals
from core.utils import get_config
# Decorator to check map permissions. Takes request and map_id
# Permissions are 0 = None, 1 = View, 2 = Change
# When used without a permission=x specification, requires Change access
def require_map_permission(permission=2):
def _dec(view_func):
def _view(request, map_id, *args, **kwargs):
current_map = get_object_or_404(Map, pk=map_id)
if current_map.get_permission(request.user) < permission:
raise PermissionDenied
else:
return view_func(request, map_id, *args, **kwargs)
_view.__name__ = view_func.__name__
_view.__doc__ = view_func.__doc__
_view.__dict__ = view_func.__dict__
return _view
return _dec
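# Illustrative usage (added note, not in the original): the decorator wraps a
# view that takes (request, map_id, ...), e.g.
#
#   @login_required
#   @require_map_permission(permission=1)
#   def my_view(request, map_id):
#       ...
#
# so view access (1) or change access (2) is enforced before the body runs.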
@login_required
@require_map_permission(permission=1)
def get_map(request, map_id):
"""Get the map and determine if we have permissions to see it.
If we do, then return a TemplateResponse for the map. If map does not
exist, return 404. If we don't have permission, return PermissionDenied.
"""
current_map = get_object_or_404(Map, pk=map_id)
context = {
'map': current_map,
'access': current_map.get_permission(request.user),
}
return TemplateResponse(request, 'map.html', context)
@login_required
@require_map_permission(permission=1)
def map_checkin(request, map_id):
# Initialize json return dict
json_values = {}
current_map = get_object_or_404(Map, pk=map_id)
# AJAX requests should post a JSON datetime called loadtime
    # back that we use to get recent logs.
if 'loadtime' not in request.POST:
return HttpResponse(json.dumps({'error': "No loadtime"}),
mimetype="application/json")
time_string = request.POST['loadtime']
load_time = datetime.strptime(time_string, "%Y-%m-%d %H:%M:%S.%f")
load_time = load_time.replace(tzinfo=pytz.utc)
if request.is_igb_trusted:
dialog_html = _checkin_igb_trusted(request, current_map)
if dialog_html is not None:
json_values.update({'dialogHTML': dialog_html})
log_list = MapLog.objects.filter(timestamp__gt=load_time,
visible=True,
map=current_map)
log_string = render_to_string('log_div.html', {'logs': log_list})
json_values.update({'logs': log_string})
return HttpResponse(json.dumps(json_values), mimetype="application/json")
@login_required
@require_map_permission(permission=1)
def map_refresh(request, map_id):
"""
Returns an HttpResponse with the updated systemJSON for an asynchronous
map refresh.
"""
if not request.is_ajax():
raise PermissionDenied
current_map = get_object_or_404(Map, pk=map_id)
result = [
datetime.strftime(datetime.now(pytz.utc),
"%Y-%m-%d %H:%M:%S.%f"),
utils.MapJSONGenerator(current_map,
request.user).get_systems_json()
]
return HttpResponse(json.dumps(result))
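# Added note (derived from the code above): the AJAX payload is a two-element
# JSON array -- a timestamp string the client stores as its next `loadtime`
# checkpoint, followed by the systemJSON produced by MapJSONGenerator.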
def _checkin_igb_trusted(request, current_map):
"""
Runs the specific code for the case that the request came from an igb that
trusts us, returns None if no further action is required, returns a string
containing the html for a system add dialog if we detect that a new system
needs to be added
"""
current_system = System.objects.get(name=request.eve_systemname)
old_system = None
result = None
threshold = datetime.now(pytz.utc) - timedelta(minutes=5)
recently_active = request.user.locations.filter(
timestamp__gt=threshold,
charactername=request.eve_charname
).all()
if recently_active.count():
old_system = request.user.locations.get(
charactername=request.eve_charname
).system
    # Conditions for the system to be automagically added to the map.
if (
old_system in current_map
and current_system not in current_map
and not _is_moving_from_kspace_to_kspace(old_system, current_system)
and recently_active.count()
):
context = {
'oldsystem': current_map.systems.filter(
system=old_system).all()[0],
'newsystem': current_system,
'wormholes': utils.get_possible_wh_types(old_system,
current_system),
}
result = render_to_string('igb_system_add_dialog.html', context,
context_instance=RequestContext(request))
current_system.add_active_pilot(request.user, request.eve_charname,
request.eve_shipname,
request.eve_shiptypename)
return result
def _is_moving_from_kspace_to_kspace(old_system, current_system):
"""
returns whether we are moving through kspace
:param old_system:
:param current_system:
:return:
"""
return old_system.is_kspace() and current_system.is_kspace()
def get_system_context(ms_id):
map_system = get_object_or_404(MapSystem, pk=ms_id)
#If map_system represents a k-space system get the relevant KSystem object
if map_system.system.is_kspace():
system = map_system.system.ksystem
else:
system = map_system.system.wsystem
scan_threshold = datetime.now(pytz.utc) - timedelta(
hours=int(get_config("MAP_SCAN_WARNING", None).value)
)
interest_offset = int(get_config("MAP_INTEREST_TIME", None).value)
interest_threshold = (datetime.now(pytz.utc)
- timedelta(minutes=interest_offset))
scan_warning = system.lastscanned < scan_threshold
if interest_offset > 0:
interest = (map_system.interesttime and
map_system.interesttime > interest_threshold)
else:
interest = map_system.interesttime
# Include any SiteTracker fleets that are active
st_fleets = map_system.system.stfleets.filter(ended=None).all()
return {'system': system, 'mapsys': map_system,
'scanwarning': scan_warning, 'isinterest': interest,
'stfleets': st_fleets}
@login_required
@require_map_permission(permission=2)
def add_system(request, map_id):
"""
AJAX view to add a system to a current_map. Requires POST containing:
topMsID: map_system ID of the parent map_system
bottomSystem: Name of the new system
topType: WormholeType name of the parent side
bottomType: WormholeType name of the new side
timeStatus: Wormhole time status integer value
massStatus: Wormhole mass status integer value
topBubbled: 1 if Parent side bubbled
bottomBubbled: 1 if new side bubbled
friendlyName: Friendly name for the new map_system
"""
if not request.is_ajax():
raise PermissionDenied
try:
# Prepare data
current_map = Map.objects.get(pk=map_id)
top_ms = MapSystem.objects.get(pk=request.POST.get('topMsID'))
bottom_sys = System.objects.get(
name=request.POST.get('bottomSystem')
)
top_type = WormholeType.objects.get(
name=request.POST.get('topType')
)
bottom_type = WormholeType.objects.get(
name=request.POST.get('bottomType')
)
time_status = int(request.POST.get('timeStatus'))
mass_status = int(request.POST.get('massStatus'))
top_bubbled = "1" == request.POST.get('topBubbled')
bottom_bubbled = "1" == request.POST.get('bottomBubbled')
# Add System
bottom_ms = current_map.add_system(
request.user, bottom_sys,
request.POST.get('friendlyName'), top_ms
)
# Add Wormhole
bottom_ms.connect_to(top_ms, top_type, bottom_type, top_bubbled,
bottom_bubbled, time_status, mass_status)
return HttpResponse()
except ObjectDoesNotExist:
return HttpResponse(status=400)
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=2)
def remove_system(request, map_id, ms_id):
"""
Removes the supplied map_system from a map.
"""
system = get_object_or_404(MapSystem, pk=ms_id)
system.remove_system(request.user)
return HttpResponse()
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=1)
def system_details(request, map_id, ms_id):
"""
Returns a html div representing details of the System given by ms_id in
map map_id
"""
if not request.is_ajax():
raise PermissionDenied
return render(request, 'system_details.html', get_system_context(ms_id))
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=1)
def system_menu(request, map_id, ms_id):
"""
Returns the html for system menu
"""
if not request.is_ajax():
raise PermissionDenied
return render(request, 'system_menu.html', get_system_context(ms_id))
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=1)
def system_tooltips(request, map_id):
"""
Returns the system tooltips for map_id
"""
if not request.is_ajax():
raise PermissionDenied
ms_list = MapSystem.objects.filter(map_id=map_id)\
.select_related('parent_wormhole', 'system__region')\
.iterator()
return render(request, 'system_tooltip.html', {'map_systems': ms_list})
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=1)
def wormhole_tooltips(request, map_id):
"""Takes a POST request from AJAX with a Wormhole ID and renders the
wormhole tooltip for that ID to response.
"""
if not request.is_ajax():
raise PermissionDenied
cur_map = get_object_or_404(Map, pk=map_id)
ms_list = MapSystem.objects.filter(map=cur_map).all()
whs = Wormhole.objects.filter(top__in=ms_list).all()
return render(request, 'wormhole_tooltip.html', {'wormholes': whs})
# noinspection PyUnusedLocal
@login_required()
@require_map_permission(permission=2)
def collapse_system(request, map_id, ms_id):
"""
Mark the system as collapsed.
"""
if not request.is_ajax():
raise PermissionDenied
map_sys = get_object_or_404(MapSystem, pk=ms_id)
parent_wh = map_sys.parent_wormhole
parent_wh.collapsed = True
parent_wh.save()
return HttpResponse()
# noinspection PyUnusedLocal
@login_required()
@require_map_permission(permission=2)
def resurrect_system(request, map_id, ms_id):
"""
Unmark the system as collapsed.
"""
if not request.is_ajax():
raise PermissionDenied
map_sys = get_object_or_404(MapSystem, pk=ms_id)
parent_wh = map_sys.parent_wormhole
parent_wh.collapsed = False
parent_wh.save()
return HttpResponse()
# noinspection PyUnusedLocal
@login_required()
@require_map_permission(permission=2)
def mark_scanned(request, map_id, ms_id):
"""Takes a POST request from AJAX with a system ID and marks that system
as scanned.
"""
if request.is_ajax():
map_system = get_object_or_404(MapSystem, pk=ms_id)
map_system.system.lastscanned = datetime.now(pytz.utc)
map_system.system.save()
return HttpResponse()
else:
raise PermissionDenied
# noinspection PyUnusedLocal
@login_required()
def manual_location(request, map_id, ms_id):
"""Takes a POST request form AJAX with a System ID and marks the user as
being active in that system.
"""
if request.is_ajax():
map_system = get_object_or_404(MapSystem, pk=ms_id)
map_system.system.add_active_pilot(request.user, "OOG Browser",
"Unknown", "Uknown")
return HttpResponse()
else:
raise PermissionDenied
# noinspection PyUnusedLocal
@login_required()
@require_map_permission(permission=2)
def set_interest(request, map_id, ms_id):
"""Takes a POST request from AJAX with an action and marks that system
as having either utcnow or None as interesttime. The action can be either
"set" or "remove".
"""
if request.is_ajax():
action = request.POST.get("action", "none")
if action == "none":
raise Http404
system = get_object_or_404(MapSystem, pk=ms_id)
if action == "set":
system.interesttime = datetime.now(pytz.utc)
system.save()
return HttpResponse()
if action == "remove":
system.interesttime = None
system.save()
return HttpResponse()
return HttpResponse(status=418)
else:
raise PermissionDenied
def _update_sig_from_tsv(signature, row):
COL_SIG = 0
COL_SIG_TYPE = 3
COL_SIG_GROUP = 2
COL_SIG_SCAN_GROUP = 1
COL_SIG_STRENGTH = 4
COL_DISTANCE = 5
info = row[COL_SIG_TYPE]
updated = False
sig_type = None
if (row[COL_SIG_SCAN_GROUP] == "Cosmic Signature"
or row[COL_SIG_SCAN_GROUP] == "Cosmic Anomaly"
):
try:
sig_type = SignatureType.objects.get(
longname=row[COL_SIG_GROUP])
except:
sig_type = None
else:
sig_type = None
if info and sig_type:
updated = True
if sig_type:
signature.sigtype = sig_type
signature.updated = updated or signature.updated
if info:
signature.info = info
return signature
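# Hedged example (added) of the tab-separated scanner paste this parser
# assumes, with columns (sig id, scan group, group, type, strength, distance);
# the values below are illustrative only:
#
#   ABC-123 <TAB> Cosmic Signature <TAB> Wormhole <TAB> Unstable Wormhole
#       <TAB> 12.5% <TAB> 4.2 AU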
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=2)
def bulk_sig_import(request, map_id, ms_id):
"""
GET gets a bulk signature import form. POST processes it, creating sigs
with blank info and type for each sig ID detected.
"""
if not request.is_ajax():
raise PermissionDenied
map_system = get_object_or_404(MapSystem, pk=ms_id)
k = 0
if request.method == 'POST':
reader = csv.reader(request.POST.get('paste', '').decode(
'utf-8').splitlines(), delimiter="\t")
COL_SIG = 0
COL_STRENGTH = 4
for row in reader:
# To prevent pasting of POSes into the sig importer, make sure
# the strength column is present
try:
test_var = row[COL_STRENGTH]
except IndexError:
return HttpResponse('A valid signature paste was not found',
status=400)
if k < 75:
sig_id = utils.convert_signature_id(row[COL_SIG])
sig = Signature.objects.get_or_create(sigid=sig_id,
modified_by=request.user,
system=map_system.system)[0]
sig = _update_sig_from_tsv(sig, row)
sig.modified_by = request.user
sig.save()
signals.signature_update.send_robust(sig, user=request.user,
map=map_system.map,
signal_strength=row[COL_STRENGTH])
k += 1
map_system.map.add_log(request.user,
"Imported %s signatures for %s(%s)."
% (k, map_system.system.name,
map_system.friendlyname), True)
map_system.system.lastscanned = datetime.now(pytz.utc)
map_system.system.save()
return HttpResponse()
else:
return TemplateResponse(request, "bulk_sig_form.html",
{'mapsys': map_system})
@login_required
@require_map_permission(permission=2)
def toggle_sig_owner(request, map_id, ms_id, sig_id=None):
if not request.is_ajax():
raise PermissionDenied
sig = get_object_or_404(Signature, pk=sig_id)
sig.toggle_ownership(request.user)
return HttpResponse()
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=2)
def edit_signature(request, map_id, ms_id, sig_id=None):
"""
GET gets a pre-filled edit signature form.
POST updates the signature with the new information and returns a
blank add form.
"""
if not request.is_ajax():
raise PermissionDenied
map_system = get_object_or_404(MapSystem, pk=ms_id)
action = None
    if sig_id is not None:
signature = get_object_or_404(Signature, pk=sig_id)
created = False
if not signature.owned_by:
signature.toggle_ownership(request.user)
if request.method == 'POST':
form = SignatureForm(request.POST)
if form.is_valid():
ingame_id = utils.convert_signature_id(form.cleaned_data['sigid'])
            if sig_id is None:
signature, created = Signature.objects.get_or_create(
system=map_system.system, sigid=ingame_id)
signature.sigid = ingame_id
signature.updated = True
signature.info = form.cleaned_data['info']
if request.POST['sigtype'] != '':
sigtype = form.cleaned_data['sigtype']
else:
sigtype = None
signature.sigtype = sigtype
signature.modified_by = request.user
signature.save()
map_system.system.lastscanned = datetime.now(pytz.utc)
map_system.system.save()
if created:
action = 'Created'
else:
action = 'Updated'
if signature.owned_by:
signature.toggle_ownership(request.user)
map_system.map.add_log(request.user,
"%s signature %s in %s (%s)" %
(action, signature.sigid, map_system.system.name,
map_system.friendlyname))
signals.signature_update.send_robust(signature, user=request.user,
map=map_system.map)
else:
return TemplateResponse(request, "edit_sig_form.html",
{'form': form,
'system': map_system, 'sig': signature})
form = SignatureForm()
    if sig_id is None or action == 'Updated':
return TemplateResponse(request, "add_sig_form.html",
{'form': form, 'system': map_system})
else:
return TemplateResponse(request, "edit_sig_form.html",
{'form': SignatureForm(instance=signature),
'system': map_system, 'sig': signature})
# noinspection PyUnusedLocal
@login_required()
@require_map_permission(permission=1)
def get_signature_list(request, map_id, ms_id):
"""
Determines the proper escalationThreshold time and renders
system_signatures.html
"""
if not request.is_ajax():
raise PermissionDenied
system = get_object_or_404(MapSystem, pk=ms_id)
escalation_downtimes = int(get_config("MAP_ESCALATION_BURN",
request.user).value)
return TemplateResponse(request, "system_signatures.html",
{'system': system,
'downtimes': escalation_downtimes})
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=2)
def mark_signature_cleared(request, map_id, ms_id, sig_id):
"""
Marks a signature as having its NPCs cleared.
"""
if not request.is_ajax():
raise PermissionDenied
sig = get_object_or_404(Signature, pk=sig_id)
sig.clear_rats()
return HttpResponse()
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=2)
def escalate_site(request, map_id, ms_id, sig_id):
"""
Marks a site as having been escalated.
"""
if not request.is_ajax():
raise PermissionDenied
sig = get_object_or_404(Signature, pk=sig_id)
sig.escalate()
return HttpResponse()
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=2)
def activate_signature(request, map_id, ms_id, sig_id):
"""
Marks a site activated.
"""
if not request.is_ajax():
raise PermissionDenied
sig = get_object_or_404(Signature, pk=sig_id)
sig.activate()
return HttpResponse()
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=2)
def delete_signature(request, map_id, ms_id, sig_id):
"""
Deletes a signature.
"""
if not request.is_ajax():
raise PermissionDenied
map_system = get_object_or_404(MapSystem, pk=ms_id)
sig = get_object_or_404(Signature, pk=sig_id)
sig.delete()
map_system.map.add_log(request.user, "Deleted signature %s in %s (%s)."
% (sig.sigid, map_system.system.name,
map_system.friendlyname))
return HttpResponse()
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=2)
def manual_add_system(request, map_id, ms_id):
"""
A GET request gets a blank add system form with the provided MapSystem
as top system. The form is then POSTed to the add_system view.
"""
top_map_system = get_object_or_404(MapSystem, pk=ms_id)
systems = System.objects.all()
wormholes = WormholeType.objects.all()
return render(request, 'add_system_box.html',
{'topMs': top_map_system, 'sysList': systems,
'whList': wormholes})
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=2)
def edit_system(request, map_id, ms_id):
"""
A GET request gets the edit system dialog pre-filled with current
information.
A POST request saves the posted data as the new information.
POST values are friendlyName, info, and occupied.
"""
if not request.is_ajax():
raise PermissionDenied
map_system = get_object_or_404(MapSystem, pk=ms_id)
if request.method == 'GET':
occupied = map_system.system.occupied.replace("<br />", "\n")
info = map_system.system.info.replace("<br />", "\n")
return TemplateResponse(request, 'edit_system.html',
{'mapsys': map_system,
'occupied': occupied, 'info': info}
)
if request.method == 'POST':
map_system.friendlyname = request.POST.get('friendlyName', '')
if (
(map_system.system.info != request.POST.get('info', '')) or
(map_system.system.occupied !=
request.POST.get('occupied', ''))
):
map_system.system.info = request.POST.get('info', '')
map_system.system.occupied = request.POST.get('occupied', '')
map_system.system.save()
map_system.save()
map_system.map.add_log(request.user, "Edited System: %s (%s)"
% (map_system.system.name,
map_system.friendlyname))
return HttpResponse()
raise PermissionDenied
# noinspection PyUnusedLocal
@login_required
@require_map_permission(permission=2)
def edit_wormhole(request, map_id, wh_id):
"""
A GET request gets the edit wormhole dialog pre-filled with current info.
A POST request saves the posted data as the new info.
POST values are topType, bottomType, massStatus, timeStatus, topBubbled,
and bottomBubbled.
"""
if not request.is_ajax():
raise PermissionDenied
wormhole = get_object_or_404(Wormhole, pk=wh_id)
if request.method == 'GET':
return TemplateResponse(request, 'edit_wormhole.html',
{'wormhole': wormhole}
)
if request.method == 'POST':
wormhole.mass_status = int(request.POST.get('massStatus', 0))
wormhole.time_status = int(request.POST.get('timeStatus', 0))
wormhole.top_type = get_object_or_404(
WormholeType,
name=request.POST.get('topType', 'K162')
)
wormhole.bottom_type = get_object_or_404(
WormholeType,
name=request.POST.get('bottomType', 'K162')
)
wormhole.top_bubbled = request.POST.get('topBubbled', '1') == '1'
wormhole.bottom_bubbled = request.POST.get('bottomBubbled', '1') == '1'
wormhole.save()
wormhole.map.add_log(request.user,
("Updated the wormhole between %s(%s) and %s(%s)."
% (wormhole.top.system.name,
wormhole.top.friendlyname,
wormhole.bottom.system.name,
wormhole.bottom.friendlyname)))
return HttpResponse()
    raise PermissionDenied
@permission_required('Map.add_map')
def create_map(request):
"""
This function creates a map and then redirects to the new map.
"""
if request.method == 'POST':
form = MapForm(request.POST)
if form.is_valid():
new_map = form.save()
new_map.add_log(request.user, "Created the %s map." % new_map.name)
new_map.add_system(request.user, new_map.root, "Root", None)
return HttpResponseRedirect(reverse('Map.views.get_map',
kwargs={'map_id': new_map.pk}))
else:
return TemplateResponse(request, 'new_map.html', {'form': form})
else:
form = MapForm
return TemplateResponse(request, 'new_map.html', {'form': form, })
def _sort_destinations(destinations):
"""
Takes a list of destination tuples and returns the same list, sorted in order of the jumps.
"""
results = []
onVal = 0
for dest in destinations:
if len(results) == 0:
results.append(dest)
else:
while onVal <= len(results):
if onVal == len(results):
results.append(dest)
onVal = 0
break
else:
if dest[1] > results[onVal][1]:
onVal += 1
else:
results.insert(onVal, dest)
onVal = 0
break
return results
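# Added note: the loop above is a hand-rolled insertion sort on each tuple's
# second element (jump count); under the same assumptions it is equivalent to
#
#   sorted(destinations, key=lambda dest: dest[1])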
# noinspection PyUnusedLocal
@require_map_permission(permission=1)
def destination_list(request, map_id, ms_id):
"""
Returns the destinations of interest tuple for K-space systems and
a blank response for w-space systems.
"""
if not request.is_ajax():
raise PermissionDenied
destinations = Destination.objects.filter(Q(user=None) |
Q(user=request.user))
map_system = get_object_or_404(MapSystem, pk=ms_id)
try:
system = KSystem.objects.get(pk=map_system.system.pk)
rf = utils.RouteFinder()
result = []
for destination in destinations:
result.append((destination.system,
rf.route_length(system,
destination.system) - 1,
round(rf.ly_distance(system,
destination.system), 3)
))
except ObjectDoesNotExist:
return HttpResponse()
return render(request, 'system_destinations.html',
{'system': system, 'destinations': _sort_destinations(result)})
# noinspection PyUnusedLocal
def site_spawns(request, map_id, ms_id, sig_id):
"""
Returns the spawns for a given signature and system.
"""
sig = get_object_or_404(Signature, pk=sig_id)
spawns = SiteSpawn.objects.filter(sigtype=sig.sigtype).all()
    if spawns and spawns[0].sysclass != 0:  # guard against an empty queryset
spawns = SiteSpawn.objects.filter(sigtype=sig.sigtype,
sysclass=sig.system.sysclass).all()
return render(request, 'site_spawns.html', {'spawns': spawns})
#########################
#Settings Views #
#########################
@permission_required('Map.map_admin')
def general_settings(request):
"""
Returns and processes the general settings section.
"""
npc_threshold = get_config("MAP_NPC_THRESHOLD", None)
pvp_threshold = get_config("MAP_PVP_THRESHOLD", None)
scan_threshold = get_config("MAP_SCAN_WARNING", None)
interest_time = get_config("MAP_INTEREST_TIME", None)
escalation_burn = get_config("MAP_ESCALATION_BURN", None)
if request.method == "POST":
scan_threshold.value = int(request.POST['scanwarn'])
interest_time.value = int(request.POST['interesttimeout'])
pvp_threshold.value = int(request.POST['pvpthreshold'])
npc_threshold.value = int(request.POST['npcthreshold'])
escalation_burn.value = int(request.POST['escdowntimes'])
scan_threshold.save()
interest_time.save()
pvp_threshold.save()
npc_threshold.save()
escalation_burn.save()
return HttpResponse()
return TemplateResponse(
request, 'general_settings.html',
{'npcthreshold': npc_threshold.value,
'pvpthreshold': pvp_threshold.value,
'scanwarn': scan_threshold.value,
'interesttimeout': interest_time.value,
'escdowntimes': escalation_burn.value}
)
@permission_required('Map.map_admin')
def sites_settings(request):
"""
Returns the site spawns section.
"""
return TemplateResponse(request, 'spawns_settings.html',
{'spawns': SiteSpawn.objects.all()})
@permission_required('Map.map_admin')
def add_spawns(request):
"""
Adds a site spawn.
"""
return HttpResponse()
# noinspection PyUnusedLocal
@permission_required('Map.map_admin')
def delete_spawns(request, spawn_id):
"""
Deletes a site spawn.
"""
return HttpResponse()
# noinspection PyUnusedLocal
@permission_required('Map.map_admin')
def edit_spawns(request, spawn_id):
"""
Alters a site spawn.
"""
return HttpResponse()
def destination_settings(request, user=None):
"""
Returns the destinations section.
"""
if not user:
dest_list = Destination.objects.filter(user=None)
else:
dest_list = Destination.objects.filter(Q(user=None) |
Q(user=request.user))
return TemplateResponse(request, 'dest_settings.html',
{'destinations': dest_list,
'user_context': user})
def add_destination(request, dest_user=None):
"""
Add a destination.
"""
if not dest_user and not request.user.has_perm('Map.map_admin'):
raise PermissionDenied
system = get_object_or_404(KSystem, name=request.POST['systemName'])
Destination(system=system, user=dest_user).save()
return HttpResponse()
def add_personal_destination(request):
"""
Add a personal destination.
"""
return add_destination(request, dest_user=request.user)
def delete_destination(request, dest_id):
"""
Deletes a destination.
"""
destination = get_object_or_404(Destination, pk=dest_id)
if not request.user.has_perm('Map.map_admin') and not destination.user:
raise PermissionDenied
if destination.user and not request.user == destination.user:
raise PermissionDenied
destination.delete()
return HttpResponse()
@permission_required('Map.map_admin')
def sigtype_settings(request):
"""
Returns the signature types section.
"""
return TemplateResponse(request, 'sigtype_settings.html',
{'sigtypes': SignatureType.objects.all()})
# noinspection PyUnusedLocal
@permission_required('Map.map_admin')
def edit_sigtype(request, sigtype_id):
"""
Alters a signature type.
"""
return HttpResponse()
@permission_required('Map.map_admin')
def add_sigtype(request):
"""
Adds a signature type.
"""
return HttpResponse()
# noinspection PyUnusedLocal
@permission_required('Map.map_admin')
def delete_sigtype(request, sigtype_id):
"""
Deletes a signature type.
"""
return HttpResponse()
@permission_required('Map.map_admin')
def map_settings(request, map_id):
"""
Returns and processes the settings section for a map.
"""
subject = get_object_or_404(Map, pk=map_id)
return TemplateResponse(request, 'map_settings_single.html',
{'map': subject})
@permission_required('Map.map_admin')
def delete_map(request, map_id):
"""
Deletes a map.
"""
subject = get_object_or_404(Map, pk=map_id)
subject.delete()
return HttpResponse()
# noinspection PyUnusedLocal
@permission_required('Map.map_admin')
def edit_map(request, map_id):
"""
Alters a map.
"""
return HttpResponse('[]')
@permission_required('Map.map_admin')
def global_permissions(request):
"""
Returns and processes the global permissions section.
"""
if not request.is_ajax():
raise PermissionDenied
group_list = []
admin_perm = Permission.objects.get(codename="map_admin")
unrestricted_perm = Permission.objects.get(codename="map_unrestricted")
add_map_perm = Permission.objects.get(codename="add_map")
if request.method == "POST":
for group in Group.objects.all():
if request.POST.get('%s_unrestricted' % group.pk, None):
if unrestricted_perm not in group.permissions.all():
group.permissions.add(unrestricted_perm)
else:
if unrestricted_perm in group.permissions.all():
group.permissions.remove(unrestricted_perm)
if request.POST.get('%s_add' % group.pk, None):
if add_map_perm not in group.permissions.all():
group.permissions.add(add_map_perm)
else:
if add_map_perm in group.permissions.all():
group.permissions.remove(add_map_perm)
if request.POST.get('%s_admin' % group.pk, None):
if admin_perm not in group.permissions.all():
group.permissions.add(admin_perm)
else:
if admin_perm in group.permissions.all():
group.permissions.remove(admin_perm)
return HttpResponse()
for group in Group.objects.all():
entry = {
'group': group, 'admin': admin_perm in group.permissions.all(),
'unrestricted': unrestricted_perm in group.permissions.all(),
'add_map': add_map_perm in group.permissions.all()
}
group_list.append(entry)
return TemplateResponse(request, 'global_perms.html',
{'groups': group_list})
| gpl-3.0 | -173,141,384,186,333,800 | 33.150094 | 95 | 0.608779 | false | 3.938974 | false | false | false |
tedlaz/pyted | tedutil/db_context_manager.py | 1 | 5076 | '''
Module db_context_manager.py
Connect to sqlite database and perform crud functions
'''
import sqlite3
import os
PATH = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
print(PATH)
def grup(txtv):
'''
Trasforms a string to uppercase special for Greek comparison
'''
ar1 = u"αάΆΑβγδεέΈζηήΉθιίϊΊκλμνξοόΌπρσςτυύΎφχψωώΏ"
ar2 = u"ΑΑΑΑΒΓΔΕΕΕΖΗΗΗΘΙΙΙΙΚΛΜΝΞΟΟΟΠΡΣΣΤΥΥΥΦΧΨΩΩΩ"
ftxt = u''
for letter in txtv:
if letter in ar1:
ftxt += ar2[ar1.index(letter)]
else:
ftxt += letter.upper()
return ftxt
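# Example (illustrative): grup(u'αθήνα') returns u'ΑΘΗΝΑ'; accented Greek
# vowels are mapped to their unaccented uppercase forms so that comparisons
# are accent-insensitive.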
class OpenSqlite:
'''
Context manager class
Use it as:
    with OpenSqlite(dbfilename) as db:
your code here ...
'''
def __init__(self, dbfile):
self.dbf = dbfile
self.active = False
self.con = None
self.cur = None
def __enter__(self):
self.con = sqlite3.connect(self.dbf)
self.con.create_function("grup", 1, grup)
self.cur = self.con.cursor()
self.active = True
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self.active:
self.cur.close()
self.con.close()
def script(self, sqlscript):
"""Execute an sql script against self.dbf"""
self.con.executescript(sqlscript)
return True
def application_id(self):
'''Get application_id from database file'''
sql = 'PRAGMA application_id;'
try:
rws = self.select(sql)
return rws[0][0]
        except Exception:
return -9
def set_application_id(self, idv):
'''Set application_id to database file'''
self.script('PRAGMA application_id = %s;' % idv)
def user_version(self):
'''Get user_version from database file'''
sql = 'PRAGMA user_version;'
try:
rws = self.select(sql)
return rws[0][0]
        except Exception:
return -9
def set_user_version(self, version):
'''Set user_version to database file'''
self.script('PRAGMA user_version = %s;' % version)
def select(self, sql):
'''Get a list of tuples with data'''
self.cur.execute(sql)
rows = self.cur.fetchall()
return rows
def select_with_names(self, sql):
'''Get a tuple with column names and a list of tuples with data'''
self.cur.execute(sql)
column_names = tuple([t[0] for t in self.cur.description])
rows = self.cur.fetchall()
return column_names, rows
def select_as_dict(self, sql):
'''Get a list of dictionaries [{}, {}, ...]'''
self.cur.execute(sql)
column_names = [t[0] for t in self.cur.description]
rows = self.cur.fetchall()
diclist = []
for row in rows:
dic = {}
for i, col in enumerate(row):
dic[column_names[i]] = col
diclist.append(dic)
diclen = len(diclist)
if diclen > 0:
return diclist
return [{}]
def select_master_detail_as_dic(self,
idv,
tablemaster,
tabledetail=None,
id_at_end=True):
'''
Get a specific record from table tablemaster with id = idv
If we pass a tabledetail value it gets detail records too
idv : id value of record
tablemaster : Master table name
tabledetail : Detail table name
id_at_end : If True Foreign key is like <masterTable>_id
else is like id_<masterTable>
'''
if id_at_end:
fkeytemplate = '%s_id'
else:
fkeytemplate = 'id_%s'
id_field = fkeytemplate % tablemaster
sql1 = "SELECT * FROM %s WHERE id='%s'" % (tablemaster, idv)
sql2 = "SELECT * FROM %s WHERE %s='%s'" % (tabledetail, id_field, idv)
dic = self.select_as_dict(sql1)[0]
ldic = len(dic)
if ldic == 0:
return dic
if tabledetail:
dic['zlines'] = self.select_as_dict(sql2)
# Remove id_field key
for elm in dic['zlines']:
del elm[id_field]
return dic
if __name__ == '__main__':
DBPATH = '/home/tedlaz/tedfiles/prj/2017/2017a.sql3'
with OpenSqlite(DBPATH) as db:
print(db.select('select * from lmo limit 2;'))
print(db.select_as_dict('select * from vtr_trd limit 10;'))
print(db.select_with_names('select * from lmo limit 2;'))
# print(db.script('PRAGMA application_id = 20170313;'))
print(db.application_id())
print(db.user_version())
print(db.select_master_detail_as_dic(1, 'tr', 'trd', False))
print(db.select_master_detail_as_dic(20, 'tr'))
print(db.select_master_detail_as_dic(200000, 'tr'))
print(db.select_master_detail_as_dic(200000, 'tr', 'trd', False))
| gpl-3.0 | 2,498,087,319,140,315,600 | 30.408805 | 78 | 0.547257 | false | 3.383469 | false | false | false |
Erotemic/local | build_scripts/custom_fletch.py | 1 | 5960 | # -*- coding: utf-8 -*-
#!/usr/bin/env python
import os
from os.path import dirname # NOQA
import sys
def disable_packages(pkgname=None, cmake_build_dir=None):
    # NOTE: this helper was left unfinished in the original source; the shell
    # recipe below documents how a fletch package's cached build is purged by
    # hand. The parameters are placeholders so the function at least parses.
    if pkgname == 'OpenBLAS':
        """
        PKGNAME=OpenBLAS
        PKGNAME=Zlib
        find build/src/ -iname CMakeCache.txt -delete
        rm -rf build/src/$PKGNAME*
        rm -rf build/tmp/$PKGNAME*
        rm -rf ${CMAKE_BUILD_DIR}/build/src/${PKGNAME}*
        rm -rf ${CMAKE_BUILD_DIR}/build/tmp/${PKGNAME}*
        REMOVE CMAKE VARS ${PKGNAME}_*
        """
        pass
    pass
def kwiver():
import utool as ut
ut.codeblock(
r'''
# STARTBLOCK bash
git checkout master
cd ~/code/kwiver
rm -rf ~/code/kwiver/build-py2-nocuda
mkdir -p build-py2-nocuda
cd ~/code/kwiver/build-py2-nocuda
cmake -G "Unix Makefiles" \
-D KWIVER_ENABLE_ARROWS:BOOL=True \
-D KWIVER_ENABLE_C_BINDINGS:BOOL=True \
-D KWIVER_ENABLE_PYTHON:BOOL=True \
-D KWIVER_ENABLE_TESTS:BOOL=True \
-D PYTHON_VERSION=$(python -c "import sys; print(sys.version[0:3])") \
-D fletch_DIR:PATH=~/code/fletch/build-py2-nocuda/ \
~/code/kwiver
''')
def rebase_python3_support():
import utool as ut
ut.codeblock(
r'''
# STARTBLOCK bash
cd ~/code/fletch
git checkout master
# blow away old branch
git branch -D tmp/pre-python3-support
# Recreate the branch
git checkout -b tmp/pre-python3-support
# Merge all prereqs into this branch
git merge dev/find_numpy dev/update-openblas-0.2.20 dev/update-opencv dev/update-vtk dev/update-caffe --no-edit
# or could do it one at a time, but w/e
# git merge dev/find_numpy
# git merge dev/update-openblas-0.2.20 --no-edit
# git merge dev/update-opencv --no-edit
# git merge dev/update-vtk --no-edit
git checkout dev/python3-support
# Find the oldest merge branch after master
# This should be the old tmp/pre-python3-support
OLD_MERGE_POINT=$(python -c "import sys; print(sys.argv[-1])" $(git rev-list --min-parents=2 HEAD ^master))
# Check to make sure its the merge point
git log -n 1 $OLD_MERGE_POINT
echo "$OLD_MERGE_POINT"
# Find the most recent merge
# echo $(python -c "import sys; print(sys.argv[-1])" $(git rev-list --min-parents=1 HEAD ^master))
git checkout tmp/pre-python3-support
git checkout -b tmp/rebased-python3-support
# These should be the relevant python3 commits
git log $OLD_MERGE_POINT..dev/python3-support
# Move all the relevant python3-support commits onto the new pre-python3-support
git cherry-pick $OLD_MERGE_POINT..dev/python3-support
git rebase --onto tmp/rebased-python3-support $OLD_MERGE_POINT
git checkout dev/python3-support
git reset --hard tmp/rebased-python3-support
git push --force
git checkout tmp/pre-python3-support
git push --force
cd ~/code/fletch-expt
git checkout master
git branch -D dev/python3-support
git branch -D tmp/pre-python3-support
git checkout dev/python3-support
# git checkout dev/python3-support
# git checkout -b backup-py3-support
# git checkout dev/python3-support
# git merge --strategy-option=theirs tmp/pre-python3-support
# git rebase -i --strategy-option=theirs tmp/pre-python3-support
# ENDBLOCK bash
''')
pass
def cuda_fletch():
"""
# Find cuda version
nvcc --version
8.0
# Find cudnn version
    cat /usr/include/cudnn.h | grep CUDNN_MAJOR -A 2
6.0
ldconfig -p | grep libcuda
ldconfig -p | grep libcudnn
"""
def generate_and_make(repo_dpath, **kwargs):
import utool as ut
cmake_vars = {
# build with
'fletch_BUILD_WITH_PYTHON': True,
'fletch_BUILD_WITH_MATLAB': False,
'fletch_BUILD_WITH_CUDA': False,
'fletch_BUILD_WITH_CUDNN': False,
# select version
'OpenCV_SELECT_VERSION': '3.1.0',
'VTK_SELECT_VERSION': '6.2.0',
'fletch_PYTHON_VERSION': sys.version[0:3],
'PYTHON_EXECUTABLE': sys.executable,
}
ut.update_existing(cmake_vars, kwargs)
DISABLED_LIBS = [ # NOQA
'ITK',
]
VTK_LIBS = [
'VTK',
'TinyXML',
'libxml2',
'Qt',
]
ENABLED_LIBS = [
'Boost', 'Caffe', 'Ceres', 'Eigen', 'FFmpeg', 'GeographicLib',
'GFlags', 'GLog', 'HDF5', 'jom', 'LevelDB', 'libjpeg-turbo', 'libjson',
'libkml', 'libtiff', 'LMDB', 'log4cplus', 'OpenBLAS', 'OpenCV',
'OpenCV_contrib', 'PNG', 'PROJ4', 'Protobuf', 'shapelib', 'Snappy',
'SuiteSparse', 'VXL', 'yasm', 'ZLib',
] + VTK_LIBS
lines = ['cmake -G "Unix Makefiles" -D CMAKE_BUILD_TYPE=RELEASE']
lines += ['-D fletch_ENABLE_{}=True'.format(lib) for lib in ENABLED_LIBS]
lines += ['-D {}={}'.format(key, val) for key, val in cmake_vars.items()]
lines += [repo_dpath]
command = ' '.join(lines)
print(command)
if False:
# import utool as ut
# cmake_retcode = ut.cmd2(command, verbose=True)['ret']
cmake_retcode = os.system(command)
if cmake_retcode == 0:
os.system('make -j9')
if __name__ == '__main__':
r"""
CommandLine:
python ~/local/build_scripts/custom_fletch.py
"""
# repo_dpath = '~/code/fletch'
# repo_dpath = dirname(__file__)
repo_dpath = os.getcwd()
if repo_dpath.endswith('fletch-expt'):
kwargs = dict(
OpenCV_SELECT_VERSION='3.2.0',
VTK_SELECT_VERSION='8.0',
)
generate_and_make(repo_dpath, **kwargs)
elif repo_dpath.endswith('fletch'):
generate_and_make(repo_dpath)
| gpl-3.0 | -2,039,726,112,540,294,700 | 26.850467 | 119 | 0.58104 | false | 3.344557 | false | false | false |
vidyar/testing-yml | setup.py | 1 | 1647 | # dockerpty.
#
# Copyright 2014 Chris Corbyn <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
import os
def fopen(filename):
return open(os.path.join(os.path.dirname(__file__), filename))
def read(filename):
return fopen(filename).read()
setup(
name='dockerpty',
version='0.1.1',
description='Python library to use the pseudo-tty of a docker container',
long_description=read('README.md'),
url='https://github.com/d11wtq/dockerpty',
author='Chris Corbyn',
author_email='[email protected]',
license='Apache 2.0',
keywords='docker, tty, pty, terminal',
packages=['dockerpty'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Environment :: Console',
'Intended Audience :: Developers',
'Topic :: Terminals',
'Topic :: Terminals :: Terminal Emulators/X Terminals',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| apache-2.0 | -6,114,690,253,438,141,000 | 33.3125 | 77 | 0.681239 | false | 3.94964 | false | false | false |
cournape/Bento | bento/commands/build.py | 1 | 2331 | import os
import os.path as op
from bento.utils.utils \
import \
subst_vars
from bento.installed_package_description \
import \
BuildManifest, build_manifest_meta_from_pkg
from bento._config \
import \
BUILD_MANIFEST_PATH
from bento.commands.core \
import \
Option
from bento.commands.core \
import \
Command
from bento.utils \
import \
cpu_count
class SectionWriter(object):
def __init__(self):
self.sections = {}
def store(self, filename, pkg):
meta = build_manifest_meta_from_pkg(pkg)
p = BuildManifest(self.sections, meta, pkg.executables)
if not op.exists(op.dirname(filename)):
os.makedirs(op.dirname(filename))
p.write(filename)
def jobs_callback(option, opt, value, parser):
setattr(parser.values, option.dest, cpu_count())
class BuildCommand(Command):
long_descr = """\
Purpose: build the project
Usage: bentomaker build [OPTIONS]."""
short_descr = "build the project."
common_options = Command.common_options \
+ [Option("-i", "--inplace",
help="Build extensions in place", action="store_true"),
Option("-j", "--jobs",
help="Parallel builds (yaku build only - EXPERIMENTAL)",
dest="jobs", action="callback", callback=jobs_callback),
Option("-v", "--verbose",
help="Verbose output (yaku build only)",
action="store_true")]
def run(self, ctx):
p = ctx.options_context.parser
o, a = p.parse_args(ctx.command_argv)
if o.help:
p.print_help()
return
ctx.compile()
ctx.post_compile()
def finish(self, ctx):
super(BuildCommand, self).finish(ctx)
n = ctx.build_node.make_node(BUILD_MANIFEST_PATH)
ctx.section_writer.store(n.abspath(), ctx.pkg)
def _config_content(paths):
keys = sorted(paths.keys())
n = max([len(k) for k in keys]) + 2
content = []
for name, value in sorted(paths.items()):
content.append('%s = %r' % (name.upper().ljust(n), subst_vars(value, paths)))
return "\n".join(content)
| bsd-3-clause | 4,304,503,386,611,436,500 | 29.272727 | 90 | 0.557701 | false | 3.911074 | false | false | false |
google-research/simclr | tf2/data_util.py | 1 | 18220 | # coding=utf-8
# Copyright 2020 The SimCLR Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Data preprocessing and augmentation."""
import functools
from absl import flags
import tensorflow.compat.v2 as tf
FLAGS = flags.FLAGS
CROP_PROPORTION = 0.875 # Standard for ImageNet.
def random_apply(func, p, x):
"""Randomly apply function func to x with probability p."""
return tf.cond(
tf.less(
tf.random.uniform([], minval=0, maxval=1, dtype=tf.float32),
tf.cast(p, tf.float32)), lambda: func(x), lambda: x)
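# Illustrative sketch (added; not in the original): stochastic augmentations
# are composed with random_apply, e.g. applying color jitter with probability
# 0.8:
#
#   image = random_apply(
#       functools.partial(color_jitter, strength=0.5), p=0.8, x=image)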
def random_brightness(image, max_delta, impl='simclrv2'):
"""A multiplicative vs additive change of brightness."""
if impl == 'simclrv2':
factor = tf.random.uniform([], tf.maximum(1.0 - max_delta, 0),
1.0 + max_delta)
image = image * factor
elif impl == 'simclrv1':
image = tf.image.random_brightness(image, max_delta=max_delta)
else:
raise ValueError('Unknown impl {} for random brightness.'.format(impl))
return image
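# Added clarification: 'simclrv2' scales pixel values by a random factor drawn
# from [max(1 - max_delta, 0), 1 + max_delta], while 'simclrv1' adds a random
# offset in [-max_delta, max_delta] via tf.image.random_brightness.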
def to_grayscale(image, keep_channels=True):
image = tf.image.rgb_to_grayscale(image)
if keep_channels:
image = tf.tile(image, [1, 1, 3])
return image
def color_jitter(image, strength, random_order=True, impl='simclrv2'):
"""Distorts the color of the image.
Args:
image: The input image tensor.
strength: the floating number for the strength of the color augmentation.
random_order: A bool, specifying whether to randomize the jittering order.
impl: 'simclrv1' or 'simclrv2'. Whether to use simclrv1 or simclrv2's
version of random brightness.
Returns:
The distorted image tensor.
"""
brightness = 0.8 * strength
contrast = 0.8 * strength
saturation = 0.8 * strength
hue = 0.2 * strength
if random_order:
return color_jitter_rand(
image, brightness, contrast, saturation, hue, impl=impl)
else:
return color_jitter_nonrand(
image, brightness, contrast, saturation, hue, impl=impl)
def color_jitter_nonrand(image,
brightness=0,
contrast=0,
saturation=0,
hue=0,
impl='simclrv2'):
"""Distorts the color of the image (jittering order is fixed).
Args:
image: The input image tensor.
brightness: A float, specifying the brightness for color jitter.
contrast: A float, specifying the contrast for color jitter.
saturation: A float, specifying the saturation for color jitter.
hue: A float, specifying the hue for color jitter.
impl: 'simclrv1' or 'simclrv2'. Whether to use simclrv1 or simclrv2's
version of random brightness.
Returns:
The distorted image tensor.
"""
with tf.name_scope('distort_color'):
def apply_transform(i, x, brightness, contrast, saturation, hue):
"""Apply the i-th transformation."""
if brightness != 0 and i == 0:
x = random_brightness(x, max_delta=brightness, impl=impl)
elif contrast != 0 and i == 1:
x = tf.image.random_contrast(
x, lower=1-contrast, upper=1+contrast)
elif saturation != 0 and i == 2:
x = tf.image.random_saturation(
x, lower=1-saturation, upper=1+saturation)
elif hue != 0:
x = tf.image.random_hue(x, max_delta=hue)
return x
for i in range(4):
image = apply_transform(i, image, brightness, contrast, saturation, hue)
image = tf.clip_by_value(image, 0., 1.)
return image
def color_jitter_rand(image,
brightness=0,
contrast=0,
saturation=0,
hue=0,
impl='simclrv2'):
"""Distorts the color of the image (jittering order is random).
Args:
image: The input image tensor.
brightness: A float, specifying the brightness for color jitter.
contrast: A float, specifying the contrast for color jitter.
saturation: A float, specifying the saturation for color jitter.
hue: A float, specifying the hue for color jitter.
impl: 'simclrv1' or 'simclrv2'. Whether to use simclrv1 or simclrv2's
version of random brightness.
Returns:
The distorted image tensor.
"""
with tf.name_scope('distort_color'):
def apply_transform(i, x):
"""Apply the i-th transformation."""
def brightness_foo():
if brightness == 0:
return x
else:
return random_brightness(x, max_delta=brightness, impl=impl)
def contrast_foo():
if contrast == 0:
return x
else:
return tf.image.random_contrast(x, lower=1-contrast, upper=1+contrast)
def saturation_foo():
if saturation == 0:
return x
else:
return tf.image.random_saturation(
x, lower=1-saturation, upper=1+saturation)
def hue_foo():
if hue == 0:
return x
else:
return tf.image.random_hue(x, max_delta=hue)
x = tf.cond(tf.less(i, 2),
lambda: tf.cond(tf.less(i, 1), brightness_foo, contrast_foo),
lambda: tf.cond(tf.less(i, 3), saturation_foo, hue_foo))
return x
perm = tf.random.shuffle(tf.range(4))
for i in range(4):
image = apply_transform(perm[i], image)
image = tf.clip_by_value(image, 0., 1.)
return image
def _compute_crop_shape(
image_height, image_width, aspect_ratio, crop_proportion):
"""Compute aspect ratio-preserving shape for central crop.
The resulting shape retains `crop_proportion` along one side and a proportion
less than or equal to `crop_proportion` along the other side.
Args:
image_height: Height of image to be cropped.
image_width: Width of image to be cropped.
aspect_ratio: Desired aspect ratio (width / height) of output.
crop_proportion: Proportion of image to retain along the less-cropped side.
Returns:
crop_height: Height of image after cropping.
crop_width: Width of image after cropping.
"""
image_width_float = tf.cast(image_width, tf.float32)
image_height_float = tf.cast(image_height, tf.float32)
def _requested_aspect_ratio_wider_than_image():
crop_height = tf.cast(
tf.math.rint(crop_proportion / aspect_ratio * image_width_float),
tf.int32)
crop_width = tf.cast(
tf.math.rint(crop_proportion * image_width_float), tf.int32)
return crop_height, crop_width
def _image_wider_than_requested_aspect_ratio():
crop_height = tf.cast(
tf.math.rint(crop_proportion * image_height_float), tf.int32)
crop_width = tf.cast(
tf.math.rint(crop_proportion * aspect_ratio * image_height_float),
tf.int32)
return crop_height, crop_width
return tf.cond(
aspect_ratio > image_width_float / image_height_float,
_requested_aspect_ratio_wider_than_image,
_image_wider_than_requested_aspect_ratio)
def center_crop(image, height, width, crop_proportion):
"""Crops to center of image and rescales to desired size.
Args:
image: Image Tensor to crop.
height: Height of image to be cropped.
width: Width of image to be cropped.
crop_proportion: Proportion of image to retain along the less-cropped side.
Returns:
A `height` x `width` x channels Tensor holding a central crop of `image`.
"""
shape = tf.shape(image)
image_height = shape[0]
image_width = shape[1]
crop_height, crop_width = _compute_crop_shape(
image_height, image_width, height / width, crop_proportion)
offset_height = ((image_height - crop_height) + 1) // 2
offset_width = ((image_width - crop_width) + 1) // 2
image = tf.image.crop_to_bounding_box(
image, offset_height, offset_width, crop_height, crop_width)
image = tf.image.resize([image], [height, width],
method=tf.image.ResizeMethod.BICUBIC)[0]
return image
def distorted_bounding_box_crop(image,
bbox,
min_object_covered=0.1,
aspect_ratio_range=(0.75, 1.33),
area_range=(0.05, 1.0),
max_attempts=100,
scope=None):
"""Generates cropped_image using one of the bboxes randomly distorted.
See `tf.image.sample_distorted_bounding_box` for more documentation.
Args:
image: `Tensor` of image data.
bbox: `Tensor` of bounding boxes arranged `[1, num_boxes, coords]`
where each coordinate is [0, 1) and the coordinates are arranged
as `[ymin, xmin, ymax, xmax]`. If num_boxes is 0 then use the whole
image.
min_object_covered: An optional `float`. Defaults to `0.1`. The cropped
area of the image must contain at least this fraction of any bounding
box supplied.
aspect_ratio_range: An optional list of `float`s. The cropped area of the
image must have an aspect ratio = width / height within this range.
area_range: An optional list of `float`s. The cropped area of the image
must contain a fraction of the supplied image within in this range.
max_attempts: An optional `int`. Number of attempts at generating a cropped
region of the image of the specified constraints. After `max_attempts`
failures, return the entire image.
scope: Optional `str` for name scope.
Returns:
(cropped image `Tensor`, distorted bbox `Tensor`).
"""
with tf.name_scope(scope or 'distorted_bounding_box_crop'):
shape = tf.shape(image)
sample_distorted_bounding_box = tf.image.sample_distorted_bounding_box(
shape,
bounding_boxes=bbox,
min_object_covered=min_object_covered,
aspect_ratio_range=aspect_ratio_range,
area_range=area_range,
max_attempts=max_attempts,
use_image_if_no_bounding_boxes=True)
bbox_begin, bbox_size, _ = sample_distorted_bounding_box
# Crop the image to the specified bounding box.
offset_y, offset_x, _ = tf.unstack(bbox_begin)
target_height, target_width, _ = tf.unstack(bbox_size)
image = tf.image.crop_to_bounding_box(
image, offset_y, offset_x, target_height, target_width)
return image
def crop_and_resize(image, height, width):
"""Make a random crop and resize it to height `height` and width `width`.
Args:
image: Tensor representing the image.
height: Desired image height.
width: Desired image width.
Returns:
A `height` x `width` x channels Tensor holding a random crop of `image`.
"""
bbox = tf.constant([0.0, 0.0, 1.0, 1.0], dtype=tf.float32, shape=[1, 1, 4])
aspect_ratio = width / height
image = distorted_bounding_box_crop(
image,
bbox,
min_object_covered=0.1,
aspect_ratio_range=(3. / 4 * aspect_ratio, 4. / 3. * aspect_ratio),
area_range=(0.08, 1.0),
max_attempts=100,
scope=None)
return tf.image.resize([image], [height, width],
method=tf.image.ResizeMethod.BICUBIC)[0]
def gaussian_blur(image, kernel_size, sigma, padding='SAME'):
"""Blurs the given image with separable convolution.
Args:
image: Tensor of shape [height, width, channels] and dtype float to blur.
kernel_size: Integer Tensor for the size of the blur kernel. This is should
be an odd number. If it is an even number, the actual kernel size will be
size + 1.
sigma: Sigma value for gaussian operator.
padding: Padding to use for the convolution. Typically 'SAME' or 'VALID'.
Returns:
A Tensor representing the blurred image.
"""
radius = tf.cast(kernel_size / 2, dtype=tf.int32)
kernel_size = radius * 2 + 1
x = tf.cast(tf.range(-radius, radius + 1), dtype=tf.float32)
blur_filter = tf.exp(-tf.pow(x, 2.0) /
(2.0 * tf.pow(tf.cast(sigma, dtype=tf.float32), 2.0)))
blur_filter /= tf.reduce_sum(blur_filter)
# One vertical and one horizontal filter.
blur_v = tf.reshape(blur_filter, [kernel_size, 1, 1, 1])
blur_h = tf.reshape(blur_filter, [1, kernel_size, 1, 1])
num_channels = tf.shape(image)[-1]
blur_h = tf.tile(blur_h, [1, 1, num_channels, 1])
blur_v = tf.tile(blur_v, [1, 1, num_channels, 1])
expand_batch_dim = image.shape.ndims == 3
if expand_batch_dim:
# Tensorflow requires batched input to convolutions, which we can fake with
# an extra dimension.
image = tf.expand_dims(image, axis=0)
blurred = tf.nn.depthwise_conv2d(
image, blur_h, strides=[1, 1, 1, 1], padding=padding)
blurred = tf.nn.depthwise_conv2d(
blurred, blur_v, strides=[1, 1, 1, 1], padding=padding)
if expand_batch_dim:
blurred = tf.squeeze(blurred, axis=0)
return blurred
def random_crop_with_resize(image, height, width, p=1.0):
"""Randomly crop and resize an image.
Args:
image: `Tensor` representing an image of arbitrary size.
height: Height of output image.
width: Width of output image.
p: Probability of applying this transformation.
Returns:
A preprocessed image `Tensor`.
"""
def _transform(image): # pylint: disable=missing-docstring
image = crop_and_resize(image, height, width)
return image
return random_apply(_transform, p=p, x=image)
def random_color_jitter(image, p=1.0, strength=1.0,
impl='simclrv2'):
def _transform(image):
color_jitter_t = functools.partial(
color_jitter, strength=strength, impl=impl)
image = random_apply(color_jitter_t, p=0.8, x=image)
return random_apply(to_grayscale, p=0.2, x=image)
return random_apply(_transform, p=p, x=image)
def random_blur(image, height, width, p=1.0):
"""Randomly blur an image.
Args:
image: `Tensor` representing an image of arbitrary size.
height: Height of output image.
width: Width of output image.
p: probability of applying this transformation.
Returns:
A preprocessed image `Tensor`.
"""
del width
def _transform(image):
sigma = tf.random.uniform([], 0.1, 2.0, dtype=tf.float32)
return gaussian_blur(
image, kernel_size=height//10, sigma=sigma, padding='SAME')
return random_apply(_transform, p=p, x=image)
def batch_random_blur(images_list, height, width, blur_probability=0.5):
"""Apply efficient batch data transformations.
Args:
images_list: a list of image tensors.
height: the height of image.
width: the width of image.
blur_probability: the probaility to apply the blur operator.
Returns:
Preprocessed feature list.
"""
def generate_selector(p, bsz):
shape = [bsz, 1, 1, 1]
selector = tf.cast(
tf.less(tf.random.uniform(shape, 0, 1, dtype=tf.float32), p),
tf.float32)
return selector
new_images_list = []
for images in images_list:
images_new = random_blur(images, height, width, p=1.)
selector = generate_selector(blur_probability, tf.shape(images)[0])
images = images_new * selector + images * (1 - selector)
images = tf.clip_by_value(images, 0., 1.)
new_images_list.append(images)
return new_images_list
def preprocess_for_train(image,
height,
width,
color_distort=True,
crop=True,
flip=True,
impl='simclrv2'):
"""Preprocesses the given image for training.
Args:
image: `Tensor` representing an image of arbitrary size.
height: Height of output image.
width: Width of output image.
color_distort: Whether to apply the color distortion.
crop: Whether to crop the image.
flip: Whether or not to flip left and right of an image.
impl: 'simclrv1' or 'simclrv2'. Whether to use simclrv1 or simclrv2's
version of random brightness.
Returns:
A preprocessed image `Tensor`.
"""
if crop:
image = random_crop_with_resize(image, height, width)
if flip:
image = tf.image.random_flip_left_right(image)
if color_distort:
image = random_color_jitter(image, strength=FLAGS.color_jitter_strength,
impl=impl)
image = tf.reshape(image, [height, width, 3])
image = tf.clip_by_value(image, 0., 1.)
return image
def preprocess_for_eval(image, height, width, crop=True):
"""Preprocesses the given image for evaluation.
Args:
image: `Tensor` representing an image of arbitrary size.
height: Height of output image.
width: Width of output image.
crop: Whether or not to (center) crop the test images.
Returns:
A preprocessed image `Tensor`.
"""
if crop:
image = center_crop(image, height, width, crop_proportion=CROP_PROPORTION)
image = tf.reshape(image, [height, width, 3])
image = tf.clip_by_value(image, 0., 1.)
return image
def preprocess_image(image, height, width, is_training=False,
color_distort=True, test_crop=True):
"""Preprocesses the given image.
Args:
image: `Tensor` representing an image of arbitrary size.
height: Height of output image.
width: Width of output image.
is_training: `bool` for whether the preprocessing is for training.
color_distort: whether to apply the color distortion.
test_crop: whether or not to extract a central crop of the images
(as for standard ImageNet evaluation) during the evaluation.
Returns:
A preprocessed image `Tensor` of range [0, 1].
"""
image = tf.image.convert_image_dtype(image, dtype=tf.float32)
if is_training:
return preprocess_for_train(image, height, width, color_distort)
else:
return preprocess_for_eval(image, height, width, test_crop)
| apache-2.0 | -7,356,768,281,607,679,000 | 34.105973 | 80 | 0.644237 | false | 3.574652 | false | false | false |
Daniel-CA/odoo-addons | stock_quant_expiry/models/stock_quant.py | 1 | 1792 | # -*- coding: utf-8 -*-
# Copyright 2017 Ainara Galdona - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, fields, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
@api.depends('lot_id.life_date', 'lot_id.mrp_date')
def _compute_lifespan(self):
for record in self.filtered(lambda x: x.lot_id and
x.lot_id.life_date and x.lot_id.mrp_date):
life_date = fields.Date.from_string(record.lot_id.life_date)
mrp_date = fields.Date.from_string(record.lot_id.mrp_date)
record.lifespan = (life_date - mrp_date).days
def _compute_lifespan_progress(self):
for record in self.filtered(lambda x: x.lot_id and
x.lot_id.life_date and x.lot_id.mrp_date):
life_date = fields.Date.from_string(record.lot_id.life_date)
mrp_date = fields.Date.from_string(record.lot_id.mrp_date)
today = fields.Date.from_string(fields.Date.today())
lifespan = (life_date - mrp_date).days
todayspan = (today - mrp_date).days
if not lifespan:
continue
record.lifespan_progress = float(todayspan) / float(lifespan) * 100
mrp_date = fields.Date(string='Mrp Date', store=True,
related='lot_id.mrp_date')
life_date = fields.Datetime(string='Expiry Date',
related='lot_id.life_date')
lifespan = fields.Integer(string='Lifespan', store=True,
compute='_compute_lifespan')
lifespan_progress = fields.Float(string='Lifespan Progress',
compute='_compute_lifespan_progress')
| agpl-3.0 | -6,778,287,636,719,805,000 | 43.8 | 79 | 0.580915 | false | 3.43295 | false | false | false |
ScanOC/trunk-player | radio/receivers.py | 1 | 1882 | # receivers.py
import json
import logging
import datetime
from django.dispatch import receiver
from django.contrib.auth.models import User
from pinax.stripe.signals import WEBHOOK_SIGNALS
from radio.models import Plan, StripePlanMatrix, Profile
from pinax.stripe.models import Plan as pinax_Plan
# Get an instance of a logger
logger = logging.getLogger(__name__)
@receiver(WEBHOOK_SIGNALS["invoice.payment_succeeded"])
def handle_payment_succeeded(sender, event, **kwargs):
logger.error('----------------------------------------')
logger.error('Stripe Payment Posted')
logger.error(event.customer)
#logger.error(event.webhook_message)
@receiver(WEBHOOK_SIGNALS["customer.subscription.created"])
def handle_subscription_created(sender, event, **kwargs):
hook_message = event.webhook_message
customer = event.customer
stripe_subscription_end = hook_message['data']['object']['current_period_end']
stripe_subscription_plan_id = hook_message['data']['object']['items']['data'][0]['plan']['id']
user = User.objects.get(username=customer)
user_profile = Profile.objects.get(user=user)
stripe_plan = pinax_Plan.objects.get(stripe_id=stripe_subscription_plan_id)
plan_matrix = StripePlanMatrix.objects.get(stripe_plan=stripe_plan)
user_profile.plan = plan_matrix.radio_plan
user_profile.save()
logger.error('Moving Customer {} to plan {}'.format(user, plan_matrix.radio_plan))
logger.error('Stripe customer.subscription.created {}'.format(event.customer))
end_date = datetime.datetime.fromtimestamp(hook_message['data']['object']['current_period_end']).strftime('%c')
logger.error('END TS {}'.format(end_date))
#logger.error('TESTING {}'.format(hook_message['data']['object']['data'][0]))
logger.error('TESTING ID {}'.format(hook_message['data']['object']['items']['data'][0]['plan']['id']))
| mit | -176,158,808,518,551,600 | 37.408163 | 115 | 0.70457 | false | 3.749004 | false | false | false |
Grumbel/rfactorlcd | rfactorlcd/state.py | 1 | 13232 | # rFactor Remote LCD
# Copyright (C) 2014 Ingo Ruhnke <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import rfactorlcd
class LapTime:
def __init__(self):
self.sector1 = 0
self.sector2 = 0
self.sector3 = 0
@property
def total(self):
return self.sector1 + self.sector2 + self.sector3
class LapTimes(object):
"""Lap time history for a vehicle in a single session"""
def __init__(self):
self.laps = {}
self.current_sector = None
@property
def best_time(self):
if self.laps == []:
return 0
else:
return min([lap.total for lap in self.laps])
def last_lap(self):
last_lap = -1
last_times = None
for lap, times in self.laps.items():
if lap > last_lap:
last_lap = lap
last_times = times
return last_times
def update(self, state):
"""Update current LapTime history with info from VehicleState"""
if state.sector == 0 and state.total_laps == 0:
pass
elif self.current_sector != state.sector:
self.current_sector = state.sector
if state.sector == 0:
lap = state.total_laps - 1
else:
lap = state.total_laps
if lap in self.laps:
lap_time = self.laps[lap]
else:
lap_time = LapTime()
self.laps[lap] = lap_time
# set the sector time in the LapTime object
if state.sector == 1:
lap_time.sector1 = state.cur_sector1
elif state.sector == 2:
lap_time.sector2 = state.cur_sector2 - state.cur_sector1
elif state.sector == 0:
lap_time.sector3 = state.last_lap_time - state.cur_sector2
else:
logging.error("unknown sector: %d" % state.sector)
class WheelState(object):
def __init__(self):
self.rotation = 0.0
self.suspension_deflection = 0.0
self.ride_height = 0.0
self.tire_load = 0.0
self.lateral_force = 0.0
self.grip_fract = 0.0
self.brake_temp = 0.0
self.pressure = 0.0
self.temperature = [0.0, 0.0, 0.0]
self.wear = 0.0
self.surface_type = 0
self.flat = 0
self.detached = 0
class VehicleState(object):
def __init__(self):
self.is_player = 0
self.control = 0
self.driver_name = ""
self.vehicle_name = ""
self.vehicle_class = ""
self.total_laps = 0
self.sector = 0
self.finish_status = 0
self.lap_dist = 0
self.path_lateral = 0.0
self.track_edge = 0.0
self.in_pits = 0
self.place = 0
self.time_behind_next = 0.0
self.laps_behind_next = 0
self.time_behind_leader = 0.0
self.laps_behind_leader = 0
self.best_sector1 = 0.0
self.best_sector2 = 0.0
self.best_lap_time = 0.0
self.last_sector1 = 0.0
self.last_sector2 = 0.0
self.last_lap_time = 0.0
self.cur_sector1 = 0.0
self.cur_sector2 = 0.0
self.num_pitstops = 0
self.num_penalties = 0
self.lap_start_et = 0.0
self.lap_times = LapTimes()
class rFactorState(object):
def __init__(self):
self.session_id = 0
# telemetry defaults
self.lap_number = 0
self.lap_start_et = 0.0
self.pos = (0.0, 0.0, 0.0)
self.local_vel = (0.0, 0.0, 0.0)
self.local_accel = (0.0, 0.0, 0.0)
self.ori_x = (0.0, 0.0, 0.0)
self.ori_y = (0.0, 0.0, 0.0)
self.ori_z = (0.0, 0.0, 0.0)
self.local_rot = (0.0, 0.0, 0.0)
self.local_rot_accel = (0.0, 0.0, 0.0)
self.gear = 0
self.rpm = 0.0
self.max_rpm = 0.0
self.clutch_rpm = 0.0
self.fuel = 0.0
self.water_temp = 0.0
self.oil_temp = 0.0
self.throttle = 0.0
self.brake = 0.0
self.steering = 0.0
self.clutch = 0.0
self.steering_arm_force = 0.0
self.scheduled_stops = 0
self.overheating = 0
self.detached = 0
self.dent_severity = [0, 0, 0, 0, 0, 0, 0, 0]
self.wheels = [WheelState(), WheelState(), WheelState(), WheelState()]
self.num_vehicles = 0
self.player = None
self.vehicles = []
# info
self.track_name = ""
self.player_name = ""
self.plr_file_name = ""
self.end_e_t = 0.0
self.max_laps = 0
self.lap_dist = 1.0
# score
self.game_phase = 0
self.yellow_flag_state = 0
self.sector_flag = [0, 0, 0]
self.start_light = 0
self.num_red_lights = 0
self.session = 0
self.current_e_t = 0.0
self.ambient_temp = 0.0
self.track_temp = 0.0
# Backward compatibility hacks:
self.speed = 0
self.laptime = "1:23:45"
self.best_lap_driver = ""
@property
def best_lap_time(self):
if self.vehicles != []:
best = self.vehicles[0].best_lap_time
for veh in self.vehicles[1:]:
if veh.best_lap_time < best:
best = veh.best_lap_time
self.best_lap_driver = veh.driver_name # FIXME: hack
return best
else:
return 0
def on_telemetry(self, msg):
self.delta_time = msg.read_float()
self.lap_number = msg.read_int()
self.lap_start_et = msg.read_float()
# missing: mVehicleName[64]
# missing: mTrackName[64]
self.pos = msg.read_vect()
self.local_vel = msg.read_vect()
self.local_accel = msg.read_vect()
self.ori_x = msg.read_vect()
self.ori_y = msg.read_vect()
self.ori_z = msg.read_vect()
self.local_rot = msg.read_vect()
self.local_rot_accel = msg.read_vect()
self.gear = msg.read_int()
self.rpm = msg.read_float()
self.max_rpm = msg.read_float()
self.clutch_rpm = msg.read_float()
self.fuel = msg.read_float()
self.water_temp = msg.read_float()
self.oil_temp = msg.read_float()
self.throttle = msg.read_float()
self.brake = msg.read_float()
self.steering = msg.read_float()
self.clutch = msg.read_float()
self.steering_arm_force = msg.read_float()
self.scheduled_stops = msg.read_char()
self.overheating = msg.read_char()
self.detached = msg.read_char()
self.dent_severity = msg.read_multi_char(8)
self.last_impact_e_t = msg.read_float()
self.last_impact_magnitude = msg.read_float()
self.last_impact_pos = msg.read_vect()
# give speed in km/h
self.speed = -self.local_vel[2] * 3.6
for i in range(0, 4):
self.wheels[i].rotation = msg.read_float()
self.wheels[i].suspension_deflection = msg.read_float()
self.wheels[i].ride_height = msg.read_float()
self.wheels[i].tire_load = msg.read_float()
self.wheels[i].lateral_force = msg.read_float()
self.wheels[i].grip_fract = msg.read_float()
self.wheels[i].brake_temp = msg.read_float()
self.wheels[i].pressure = msg.read_float()
self.wheels[i].temperature = [msg.read_float(),
msg.read_float(),
msg.read_float()]
self.wheels[i].wear = msg.read_float()
# missing: mTerrainName[16]
self.wheels[i].surface_type = msg.read_char()
self.wheels[i].flat = msg.read_char()
self.wheels[i].detached = msg.read_char()
def on_vehicle(self, msg):
self.num_vehicles = msg.read_int()
if self.num_vehicles != len(self.vehicles):
self.vehicles = []
for i in range(self.num_vehicles):
self.vehicles.append(VehicleState())
for i in range(0, self.num_vehicles):
self.vehicles[i].is_player = msg.read_char()
self.vehicles[i].control = msg.read_char()
self.vehicles[i].driver_name = msg.read_string()
self.vehicles[i].vehicle_name = msg.read_string()
self.vehicles[i].vehicle_class = msg.read_string()
self.vehicles[i].total_laps = msg.read_short()
# rFactor numbers sectors 1, 2, 0, convert them to 0, 1, 2
self.vehicles[i].sector = (msg.read_char() + 2) % 3
self.vehicles[i].finish_status = msg.read_char()
self.vehicles[i].lap_dist = msg.read_float()
self.vehicles[i].path_lateral = msg.read_float()
self.vehicles[i].track_edge = msg.read_float()
self.vehicles[i].in_pits = msg.read_char()
self.vehicles[i].place = msg.read_char()
self.vehicles[i].time_behind_next = msg.read_float()
self.vehicles[i].laps_behind_next = msg.read_int()
self.vehicles[i].time_behind_leader = msg.read_float()
self.vehicles[i].laps_behind_leader = msg.read_int()
self.vehicles[i].best_sector1 = msg.read_float()
self.vehicles[i].best_sector2 = msg.read_float()
self.vehicles[i].best_lap_time = msg.read_float()
# these times are only updated going into a new lap
self.vehicles[i].last_sector1 = msg.read_float()
self.vehicles[i].last_sector2 = msg.read_float()
self.vehicles[i].last_lap_time = msg.read_float()
self.vehicles[i].cur_sector1 = msg.read_float()
self.vehicles[i].cur_sector2 = msg.read_float()
self.vehicles[i].num_pitstops = msg.read_short()
self.vehicles[i].num_penalties = msg.read_short()
self.vehicles[i].lap_start_et = msg.read_float()
self.vehicles[i].pos = msg.read_vect()
self.vehicles[i].local_vel = msg.read_vect()
self.vehicles[i].local_accel = msg.read_vect()
self.vehicles[i].ori_x = msg.read_vect()
self.vehicles[i].ori_y = msg.read_vect()
self.vehicles[i].ori_z = msg.read_vect()
self.vehicles[i].local_rot = msg.read_vect()
self.vehicles[i].local_rot_accel = msg.read_vect()
if self.vehicles[i].is_player:
self.player = self.vehicles[i]
self.vehicles[i].lap_times.update(self.vehicles[i])
def on_score(self, msg):
self.game_phase = msg.read_char()
self.yellow_flag_state = msg.read_char()
self.sector_flag = msg.read_multi_char(3)
self.start_light = msg.read_char()
self.num_red_lights = msg.read_char()
self.in_realtime = msg.read_char()
self.session = msg.read_int()
self.current_e_t = msg.read_float()
self.ambient_temp = msg.read_float()
self.track_temp = msg.read_float()
self.dark_cloud = msg.read_float()
self.raining = msg.read_float()
self.wind = msg.read_vect()
self.on_path_wetness = msg.read_float()
self.off_path_wetness = msg.read_float()
def on_info(self, msg):
self.track_name = msg.read_string()
self.player_name = msg.read_string()
self.plr_file_name = msg.read_string()
self.end_e_t = msg.read_float()
self.max_laps = msg.read_int()
self.lap_dist = msg.read_float()
# missing mResultsStream
def on_start_realtime(self, msg):
pass
def on_end_realtime(self, msg):
pass
def on_start_session(self, msg):
self.session_id += 1
self.vehicles = []
logging.info("on_start_session")
def on_end_session(self, msg):
logging.info("on_end_session")
def dispatch_message(self, tag, payload):
msg = rfactorlcd.BinaryDecoder(payload)
if tag == "STSS":
self.on_start_session(msg)
elif tag == "EDSS":
self.on_end_session(msg)
elif tag == "STRT":
self.on_start_realtime(msg)
elif tag == "EDRT":
self.on_end_realtime(msg)
elif tag == "VHCL":
self.on_vehicle(msg)
elif tag == "TLMT":
self.on_telemetry(msg)
elif tag == "SCOR":
self.on_score(msg)
elif tag == "INFO":
self.on_info(msg)
else:
print "error: unhandled tag: %s" % tag
# EOF #
| gpl-3.0 | -7,897,458,366,094,102,000 | 30.504762 | 78 | 0.546025 | false | 3.291542 | false | false | false |
ellisztamas/faps | faps/pr_unsampled.py | 1 | 2716 | import numpy as np
def pr_unsampled(offspring_diploid, maternal_diploid, allele_freqs, offspring_genotype, maternal_genotype, male_genotype, mu):
"""
Calculate the transitions probability for a given set of parental and offspring
alleles.
Transitipn probabilities are then weight by the probability of drawing the allele
from the population, and the probability that this allele is the true allele, given
observed genotype data and the error rate mu.
ARGUMENTS:
offspring_diploid, maternal_diploid, male_diploid: arrays of diploid genotypes for
the offspring, mothers and fathers.
allele_freqs = vector of population allele frequencies.
offspring_genotype, maternal_genotype, male_genotype: a two-element list of zeroes
and ones indicating the diploid genotype of males, mothers and offspring to be
considered.
mu: point estimate of the genotyping error rate.
RETURNS:
A 3-dimensional array of probabilities indexing offspring, candidate males, and loci.
These are given in linear, rather than log space.
"""
# an array of all possible transition probabilities indexed as [offspring, mother, father].
trans_prob_array = np.array([[[1, 0.5, 0 ],
[0.5,0.25,0 ],
[0, 0, 0 ]],
[[0, 0.5, 1 ],
[0.5,0.5, 0.5],
[1, 0.5, 0 ]],
[[0, 0, 0 ],
[0, 0.25,0.5],
[0, 0.5, 1 ]]])
# the transition probability for the given genotypes.
trans_prob = trans_prob_array[offspring_genotype, maternal_genotype, male_genotype]
# Probabilities that the observed offspring marker data match observed data.
pr_offs = np.zeros([offspring_diploid.shape[0], offspring_diploid.shape[1]])
pr_offs[offspring_diploid == offspring_genotype] = 1-mu
pr_offs[offspring_diploid != offspring_genotype] = mu
# Probabilities that the observed maternal marker data match observed data.
pr_mothers = np.zeros([maternal_diploid.shape[0], maternal_diploid.shape[1]])
pr_mothers[maternal_diploid == maternal_genotype] = 1-mu
pr_mothers[maternal_diploid != maternal_genotype] = mu
# Probability of the father is drawn from population allele frequencies.
if male_genotype is 0: pr_males = allele_freqs**2
if male_genotype is 1: pr_males = allele_freqs*(1-allele_freqs)
if male_genotype is 2: pr_males = (1-allele_freqs)**2
return trans_prob * pr_males * pr_mothers * pr_offs | mit | 5,498,974,815,240,566,000 | 46.666667 | 126 | 0.623343 | false | 3.495495 | false | false | false |
fbradyirl/home-assistant | homeassistant/components/mochad/light.py | 1 | 4739 | """Support for X10 dimmer over Mochad."""
import logging
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
SUPPORT_BRIGHTNESS,
Light,
PLATFORM_SCHEMA,
)
from homeassistant.components import mochad
from homeassistant.const import CONF_NAME, CONF_PLATFORM, CONF_DEVICES, CONF_ADDRESS
from homeassistant.helpers import config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_BRIGHTNESS_LEVELS = "brightness_levels"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_PLATFORM): mochad.DOMAIN,
CONF_DEVICES: [
{
vol.Optional(CONF_NAME): cv.string,
vol.Required(CONF_ADDRESS): cv.x10_address,
vol.Optional(mochad.CONF_COMM_TYPE): cv.string,
vol.Optional(CONF_BRIGHTNESS_LEVELS, default=32): vol.All(
vol.Coerce(int), vol.In([32, 64, 256])
),
}
],
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up X10 dimmers over a mochad controller."""
devs = config.get(CONF_DEVICES)
add_entities([MochadLight(hass, mochad.CONTROLLER.ctrl, dev) for dev in devs])
return True
class MochadLight(Light):
"""Representation of a X10 dimmer over Mochad."""
def __init__(self, hass, ctrl, dev):
"""Initialize a Mochad Light Device."""
from pymochad import device
self._controller = ctrl
self._address = dev[CONF_ADDRESS]
self._name = dev.get(CONF_NAME, "x10_light_dev_{}".format(self._address))
self._comm_type = dev.get(mochad.CONF_COMM_TYPE, "pl")
self.light = device.Device(ctrl, self._address, comm_type=self._comm_type)
self._brightness = 0
self._state = self._get_device_status()
self._brightness_levels = dev.get(CONF_BRIGHTNESS_LEVELS) - 1
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
def _get_device_status(self):
"""Get the status of the light from mochad."""
with mochad.REQ_LOCK:
status = self.light.get_status().rstrip()
return status == "on"
@property
def name(self):
"""Return the display name of this light."""
return self._name
@property
def is_on(self):
"""Return true if the light is on."""
return self._state
@property
def supported_features(self):
"""Return supported features."""
return SUPPORT_BRIGHTNESS
@property
def assumed_state(self):
"""X10 devices are normally 1-way so we have to assume the state."""
return True
def _calculate_brightness_value(self, value):
return int(value * (float(self._brightness_levels) / 255.0))
def _adjust_brightness(self, brightness):
if self._brightness > brightness:
bdelta = self._brightness - brightness
mochad_brightness = self._calculate_brightness_value(bdelta)
self.light.send_cmd("dim {}".format(mochad_brightness))
self._controller.read_data()
elif self._brightness < brightness:
bdelta = brightness - self._brightness
mochad_brightness = self._calculate_brightness_value(bdelta)
self.light.send_cmd("bright {}".format(mochad_brightness))
self._controller.read_data()
def turn_on(self, **kwargs):
"""Send the command to turn the light on."""
brightness = kwargs.get(ATTR_BRIGHTNESS, 255)
with mochad.REQ_LOCK:
if self._brightness_levels > 32:
out_brightness = self._calculate_brightness_value(brightness)
self.light.send_cmd("xdim {}".format(out_brightness))
self._controller.read_data()
else:
self.light.send_cmd("on")
self._controller.read_data()
# There is no persistence for X10 modules so a fresh on command
# will be full brightness
if self._brightness == 0:
self._brightness = 255
self._adjust_brightness(brightness)
self._brightness = brightness
self._state = True
def turn_off(self, **kwargs):
"""Send the command to turn the light on."""
with mochad.REQ_LOCK:
self.light.send_cmd("off")
self._controller.read_data()
# There is no persistence for X10 modules so we need to prepare
# to track a fresh on command will full brightness
if self._brightness_levels == 31:
self._brightness = 0
self._state = False
| apache-2.0 | 1,578,962,292,659,869,400 | 34.365672 | 84 | 0.603292 | false | 4.071306 | false | false | false |
JoErNanO/brianmodel | brianmodel/neuron/neuron.py | 1 | 5023 | #!/usr/bin/python
# coding: utf-8
# #################################################################################
# Copyright (C) 2014 Francesco Giovannini, Neurosys - INRIA CR Nancy - Grand Est
# Authors: Francesco Giovannini
# email: [email protected]
# website: http://neurosys.loria.fr/
# Permission is granted to copy, distribute, and/or modify this program
# under the terms of the GNU General Public License, version 3 or any
# later version published by the Free Software Foundation.
#
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details
# #################################################################################
"""
neuron
~~~~~~~~~~~~~
This module contains an abstraction of a Brian-compatible neuron, represented as a cell with a list of :class:`IonicCurrent`'s.
:copyright 2014 Francesco Giovannini, Neurosys - INRIA CR Nancy - Grand Est
:licence GPLv3, see LICENCE for more details
"""
from utilities import utilities as utilities
import ioniccurrent.ioniccurrentfactory as icf
import yaml
## ***************************************************************************************************************** ##
class Neuron(object):
"""
The :class:`Neuron` represents a biological neuron with a set of properties, and a list of :class:`IonicCurrent`'s flowing through its membrane.
"""
## ************************************************************ ##
def __init__(self, parameters):
"""
Default constructor.
:param area: the area of the soma of the neural cell
:type area: float
:param conductance: the conductance of the neural cell
:type conductance: float
"""
# Initialise attributes
self.parameters = parameters.values()[0]
self.name = parameters.keys()[0]
self.area = self.parameters['area']
self.conductance = self.parameters['conductance']
# Initialise list of defined currents - FG: fixes bug due to having an empty list of defined currents when including all the needed ones
if 'defined' not in self.parameters['currents']: # Keyword 'defined' doesn't exists
self.parameters['currents']['defined'] = []
elif self.parameters['currents']['defined'] is None: # List of 'defined' currents is undefined/empty
self.parameters['currents']['defined'] = []
# Check for parameters specified as include files
for f in self.parameters['currents'].get('included', []):
try:
with open(f) as curr:
# Load included currents from file
includes = yaml.load(curr)
# Add them the list of defined currents
self.parameters['currents'].get('defined', []).extend(includes)
except IOError:
raise IOError('Cannot load current parameter file named ' + f)
# Remove list of includesd currents from dict of currents
self.parameters['currents'].pop('included', [])
# Initialise ionic current factory
self.factory = icf.IonicCurrentFactory()
# Build current list
self.currents = []
for currentParams in self.parameters['currents'].get('defined', []):
tmpCurrent = self.factory.makeIonicCurrent(currentParams, self.area)
self.currents.append(tmpCurrent)
# Store safe string representation of parameters
self._area = utilities.getSafeStringParam(self.area)
self._conductance = utilities.getSafeStringParam(utilities.getSafeStringParam(self.conductance) + ' * ' + self._area)
## ************************************************************ ##
## ************************************************************ ##
def getNeuronString(self):
"""
Generate the string representation of the neural cell model.
"""
res = ""
# Neuron model equation
dvdt = '''dv/dt = ('''
# Add current equations
for curr in self.currents:
dvdt += ''' - ''' + curr.name # Add current name to dvdt equation
res += curr.getIonicCurrentString() # Add current equation to neuron model
dvdt += ''' + I_stim) / ''' + self._conductance + ''' : volt \n''' # Append conductance division
# Check Voltage clamp
if self.parameters.has_key('vClamp') and self.parameters['vClamp']:
dvdt = '''v : volt \n'''
# Stimulus current
istim = '''I_stim : amp'''
# Build final neuron model equation
res = dvdt + res + istim
return res
## ************************************************************ ##
## ***************************************************************************************************************** ##
| gpl-3.0 | 8,160,894,770,479,721,000 | 39.184 | 148 | 0.550468 | false | 4.570519 | false | false | false |
ronnyandersson/zignal | examples/ex_chunks.py | 1 | 2576 | '''
Created on 12 Apr 2020
@author: Ronny Andersson ([email protected])
@copyright: (c) 2020 Ronny Andersson
@license: MIT
Demo of how to iterate over an instance of the Audio class, for chunk-based
processing. Typically the chunks have a size that is a power of two, for
example 256, 1024 or 4096. In this example the chunk size is set to 1000
for simplicity in the plots. The sample rate in this example is also set to
a value that enhances the effect of the example, since hera a chunk equals
to one second of data.
'''
# Standard library
import logging
# Third party
import matplotlib.pyplot as plt
import numpy as np
# Internal
import zignal
if __name__ == '__main__':
logging.basicConfig(
format='%(levelname)-7s: %(module)s.%(funcName)-15s %(message)s',
level='DEBUG',
)
logging.getLogger("matplotlib").setLevel(logging.INFO)
logging.getLogger("zignal").setLevel(logging.DEBUG)
fs = 1000
# Create various ramp signals, to visualise the chunks better. Not real
# audio, but shows in a plot what the chunks look like
a1 = zignal.Audio(fs=fs, initialdata=np.linspace(0, 1, num=(1000/2)))
a2 = zignal.Audio(fs=fs, initialdata=np.linspace(0, -1, num=(1000*1)+500))
a3 = zignal.Audio(fs=fs, initialdata=np.linspace(0, 1, num=(1000*2)+200))
a = zignal.Audio(fs=fs)
a.append(a1, a2, a3)
print(a)
# We now have 2.2 seconds of audio in three channels. This does not add up
# to even chunk sizes, so padding will have to be done in order to iterate.
#
# Three (3) chunks are expected.
for val in a.iter_chunks(chunksize=1000):
print("------------------------------------------------")
print("shape of data in chunk: %s" % str(val.shape))
print(val)
plt.figure(1)
plt.plot(val[:, 0], ls="-", label="a1")
plt.plot(val[:, 1], ls="--", label="a2")
plt.plot(val[:, 2], ls="-.", label="a3")
plt.grid()
plt.ylim(-1.1, 1.1)
plt.xlabel("samples in chunk")
plt.ylabel("magnitude [lin]")
plt.legend(loc="upper right")
plt.show()
# We can pad beforehand if we know how many samples are missing, then no
# padding will occur inside the iterator
b = a.copy()
b.gain(-20) # just to get a debug logging entry
b.pad(nofsamples=800)
print(b)
for val in b.iter_chunks(chunksize=1000):
print("------------------------------------------------")
print("shape of data in chunk: %s" % str(val.shape))
print(val)
print('-- Done --')
| mit | -1,448,987,454,992,654,300 | 32.025641 | 79 | 0.612189 | false | 3.476383 | false | false | false |
modesttree/Projeny | Source/mtm/log/LogStreamConsole.py | 1 | 4086 |
import os
import re
import sys
from mtm.ioc.Inject import Inject
import mtm.util.Util as Util
from mtm.log.Logger import LogType
import shutil
from mtm.util.Assert import *
import mtm.log.ColorConsole as ColorConsole
class AnsiiCodes:
BLACK = "\033[1;30m"
DARKBLACK = "\033[0;30m"
RED = "\033[1;31m"
DARKRED = "\033[0;31m"
GREEN = "\033[1;32m"
DARKGREEN = "\033[0;32m"
YELLOW = "\033[1;33m"
DARKYELLOW = "\033[0;33m"
BLUE = "\033[1;34m"
DARKBLUE = "\033[0;34m"
MAGENTA = "\033[1;35m"
DARKMAGENTA = "\033[0;35m"
CYAN = "\033[1;36m"
DARKCYAN = "\033[0;36m"
WHITE = "\033[1;37m"
DARKWHITE = "\033[0;37m"
END = "\033[0;0m"
class LogStreamConsole:
_log = Inject('Logger')
_sys = Inject('SystemHelper')
_varManager = Inject('VarManager')
_config = Inject('Config')
def __init__(self, verbose, veryVerbose):
self._verbose = verbose or veryVerbose
self._veryVerbose = veryVerbose
self._useColors = self._config.tryGetBool(False, 'LogStreamConsole', 'UseColors')
self._fileStream = None
if self._config.tryGetBool(False, 'LogStreamConsole', 'OutputToFilteredLog'):
self._fileStream = self._getFileStream()
if self._useColors:
self._initColors()
def _initColors(self):
self._defaultColors = ColorConsole.get_text_attr()
self._defaultBg = self._defaultColors & 0x0070
self._defaultFg = self._defaultColors & 0x0007
def log(self, logType, message):
assertIsNotNone(logType)
if logType == LogType.Noise and not self._veryVerbose:
return
if logType == LogType.Debug and not self._verbose:
return
if logType == LogType.Error:
self._output(logType, message, sys.stderr, self._useColors)
else:
self._output(logType, message, sys.stdout, self._useColors)
if self._fileStream:
self._output(logType, message, self._fileStream, False)
def _getFileStream(self):
primaryPath = self._varManager.expand('[LogFilteredPath]')
if not primaryPath:
raise Exception("Could not find path for log file")
previousPath = None
if self._varManager.hasKey('LogFilteredPreviousPath'):
previousPath = self._varManager.expand('[LogFilteredPreviousPath]')
# Keep one old build log
if os.path.isfile(primaryPath) and previousPath:
shutil.copy2(primaryPath, previousPath)
return open(primaryPath, 'w', encoding='utf-8', errors='ignore')
def _getHeadingIndent(self):
return self._log.getCurrentNumHeadings() * " "
def _output(self, logType, message, stream, useColors):
stream.write('\n')
stream.write(self._getHeadingIndent())
if not useColors or logType == LogType.Info:
stream.write(message)
stream.flush()
else:
ColorConsole.set_text_attr(self._getColorAttrs(logType))
stream.write(message)
stream.flush()
ColorConsole.set_text_attr(self._defaultColors)
def _getColorAttrs(self, logType):
if logType == LogType.HeadingStart:
return ColorConsole.FOREGROUND_CYAN | self._defaultBg | ColorConsole.FOREGROUND_INTENSITY
if logType == LogType.HeadingEnd:
return ColorConsole.FOREGROUND_BLACK | self._defaultBg | ColorConsole.FOREGROUND_INTENSITY
if logType == LogType.Good:
return ColorConsole.FOREGROUND_GREEN | self._defaultBg | ColorConsole.FOREGROUND_INTENSITY
if logType == LogType.Warn:
return ColorConsole.FOREGROUND_YELLOW | self._defaultBg | ColorConsole.FOREGROUND_INTENSITY
if logType == LogType.Error:
return ColorConsole.FOREGROUND_RED | self._defaultBg | ColorConsole.FOREGROUND_INTENSITY
assertThat(logType == LogType.Debug or logType == LogType.Noise)
return ColorConsole.FOREGROUND_BLACK | self._defaultBg | ColorConsole.FOREGROUND_INTENSITY
| mit | -1,695,768,849,465,963,800 | 30.430769 | 103 | 0.638767 | false | 3.584211 | false | false | false |
51reboot/actual_09_homework | 10/jinderui/cmdb/user/dbutils.py | 1 | 1788 | # encoding: utf-8
import os,sys
reload(sys)
sys.setdefaultencoding( "utf-8" )
import gconf
import MySQLdb
# encoding: utf-8
import os,sys
reload(sys)
sys.setdefaultencoding( "utf-8" )
import gconf
import MySQLdb
class MySQLConnection(object):
"""docstring for MySQLConnection"""
def __init__(self, host,port,user,passwd,db,charset='utf8'):
self.__host = host
self.__port = port
self.__user = user
self.__passwd = passwd
self.__db = db
self.__charset = charset
self.__conn = None
self.__cur = None
self.__connect()
def __connect(self):
try:
self.__conn = MySQLdb.connect(host=self.__host,port=self.__port, user=self.__user, \
passwd = self.__passwd,db = self.__db,charset=self.__charset)
self.__cur = self.__conn.cursor()
except BaseException as e:
print e
def commit(self):
if self.__conn:
self.__conn.commit()
def execute(self,sql,args=()):
_cnt = 0
if self.__cur:
_cnt = self.__cur.execute(sql,args)
return _cnt
def fetch(self,sql,args=()):
_cnt = 0
_rt_list = []
_cnt = self.execute(sql,args)
if self.__cur:
_rt_list = self.__cur.fetchall()
return _cnt, _rt_list
def close(self):
self.commit()
if self.__cur:
self.__cur.close()
self.__cur = None
if self.__conn:
self.__conn.close()
self.__conn =None
@classmethod
def execute_sql(self,sql,args=(),fetch=True):
_count =0
_rt_list =[]
_conn = MySQLConnection(host=gconf.MYSQL_HOST,port=gconf.MYSQL_PORT, \
db=gconf.MYSQL_DB,user=gconf.MYSQL_USER, passwd=gconf.MYSQL_PASSWORD,charset=gconf.MYSQL_CHARSET)
if fetch:
_count,_rt_list = _conn.fetch(sql,args)
else:
_count = _conn.execute(sql,args)
_conn.close()
return _count,_rt_list
if __name__ == '__main__':
print MySQLConnection.execute_sql('select * from user') | mit | 2,713,146,756,037,389,300 | 21.64557 | 101 | 0.645973 | false | 2.798122 | false | false | false |
Itxaka/st2 | st2api/st2api/controllers/v1/executionviews.py | 1 | 3355 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import chain
from pecan.rest import RestController
import six
from st2common import log as logging
from st2common.models.api.base import jsexpose
from st2common.persistence.execution import ActionExecution
LOG = logging.getLogger(__name__)
# List of supported filters and relation between filter name and execution property it represents.
# The same list is used both in ActionExecutionController to map filter names to properties and
# in FiltersController below to generate a list of unique values for each filter for UI so user
# could pick a filter from a drop down.
# If filter is unique for every execution or repeats very rarely (ex. execution id or parent
# reference) it should be also added to IGNORE_FILTERS to avoid bloating FiltersController
# response. Failure to do so will eventually result in Chrome hanging out while opening History
# tab of st2web.
SUPPORTED_FILTERS = {
'action': 'action.ref',
'status': 'status',
'liveaction': 'liveaction.id',
'parent': 'parent',
'rule': 'rule.name',
'runner': 'runner.name',
'timestamp': 'start_timestamp',
'trigger': 'trigger.name',
'trigger_type': 'trigger_type.name',
'trigger_instance': 'trigger_instance.id',
'user': 'liveaction.context.user'
}
# List of filters that are too broad to distinct by them and are very likely to represent 1 to 1
# relation between filter and particular history record.
IGNORE_FILTERS = ['parent', 'timestamp', 'liveaction', 'trigger_instance']
class FiltersController(RestController):
@jsexpose()
def get_all(self):
"""
List all distinct filters.
Handles requests:
GET /executions/views/filters
"""
filters = {}
for name, field in six.iteritems(SUPPORTED_FILTERS):
if name not in IGNORE_FILTERS:
if isinstance(field, six.string_types):
query = '$' + field
else:
dot_notation = list(chain.from_iterable(
('$' + item, '.') for item in field
))
dot_notation.pop(-1)
query = {'$concat': dot_notation}
aggregate = ActionExecution.aggregate([
{'$match': {'parent': None}},
{'$group': {'_id': query}}
])
filters[name] = [res['_id'] for res in aggregate['result'] if res['_id']]
return filters
class ExecutionViewsController(RestController):
filters = FiltersController()
| apache-2.0 | -4,095,253,901,279,048,700 | 38.011628 | 98 | 0.66617 | false | 4.374185 | false | false | false |
JDRomano2/VenomKB | venomkb/archive/scripts/add_go_data.py | 1 | 1105 | import json
from tqdm import tqdm
from venomkb_builder import VenomKB
VKB = VenomKB()
VKB.load_database()
go_annotations_out = {}
for x in tqdm(VKB.proteins):
try:
toxprot = VKB.get_record_from_toxprot(x.venomkb_id, 'dbReference', json=False)
except:
continue
go_annotations = [y for y in toxprot if ('type', 'GO') in y.items()]
this_protein = []
for go in go_annotations:
current = {}
go_id = [z[1] for z in go.items() if z[0] == 'id'][0]
for prop in go:
dict_form = dict(prop.items())
current[dict_form['type']] = dict_form['value']
current['id'] = go_id
# append to temporary list of structured go_annotations
this_protein.append(current)
# push to global list of go_annotations
go_annotations_out[x.venomkb_id] = this_protein
'''
for vkbid, annot_list in tqdm(go_annotations_out.iteritems()):
VKB.add_to_existing(vkbid=vkbid,
new_key='go_annotations',
new_value=annot_list,
replace_if_exist=True)
'''
| gpl-2.0 | 178,348,791,076,406,720 | 27.333333 | 86 | 0.586425 | false | 3.184438 | false | false | false |
minhnd/youtube-subtitle-downloader | youtubesub.py | 1 | 5521 | # -*- coding: utf-8 -*-
"""
Youtube Subtitle Downloader downloads subtitles from Youtube videos
(if those are present) and convert them to SRT format.
Usage: youtubesub.py [-h] [-l] [--language LANGUAGE] [--filename FILENAME]
[--filetype {srt,xml}]
url
positional arguments:
url URL of the Youtube video
optional arguments:
-h, --help show this help message and exit
-l, --list list all available languages
--language LANGUAGE the ISO language code
--filename FILENAME specify the name of subtitle
--filetype {srt,xml} specify the output type of subtitle
Example:
python youtubesub.py --filename subtitle --language en http://www.youtube.com/watch?v=5MgBikgcWnY
:copyright: (c) 2014 by Nguyen Dang Minh (www.minhnd.com)
:license: BSD, see LICENSE for more details.
"""
import urllib2
import urlparse
import argparse
import sys
import xml.etree.ElementTree as ET
class YoutubeSubDownloader():
video_id = None
subtitle = None
languages = {}
def __init__(self, url=None):
self.video_id = self.extractVideoID(url)
self.languages = self.getAvailableLanguages()
if self.languages == {}:
print "There's no subtitle"
sys.exit()
def extractVideoID(self, url=None):
"""
Examples:
- http://youtu.be/5MgBikgcWnY
- http://www.youtube.com/watch?v=5MgBikgcWnY&feature=feed
- http://www.youtube.com/embed/5MgBikgcWnY
- http://www.youtube.com/v/5MgBikgcWnY?version=3&hl=en_US
"""
url_data = urlparse.urlparse(url)
if url_data.hostname == 'youtu.be':
return url_data.path[1:]
if url_data.hostname in ('www.youtube.com', 'youtube.com'):
if url_data.path == '/watch':
query = urlparse.parse_qs(url_data.query)
return query['v'][0]
if url_data.path[:7] == '/embed/':
return url_data.path.split('/')[2]
if url_data.path[:3] == '/v/':
return url_data.path.split('/')[2]
return None
def download(self, language, filename, filetype):
"""Download subtitle of the selected language"""
if language not in self.languages.keys():
print "Theres's no subtitle in this language"
sys.exit()
url = "http://www.youtube.com/api/timedtext?v={0}&lang={1}".format(self.video_id, language)
self.subtitle = urllib2.urlopen(url)
if filetype == "srt":
self.writeSRTFile(filename)
else:
self.writeXMLFile(filename)
def getAvailableLanguages(self):
"""Get all available languages of subtitle"""
url = "http://www.youtube.com/api/timedtext?v=%s&type=list" % self.video_id
xml = urllib2.urlopen(url)
tree = ET.parse(xml)
root = tree.getroot()
languages = {}
for child in root:
languages[child.attrib["lang_code"]] = child.attrib["lang_translated"]
return languages
def list(self):
"""List all available languages of subtitle"""
for key, value in self.languages.iteritems():
print key, value
def writeXMLFile(self, filename=None):
with open(filename + ".xml", 'w') as f:
for line in self.subtitle:
f.write(line)
def writeSRTFile(self, filename=None):
tree = ET.parse(self.subtitle)
root = tree.getroot()
with open(filename + ".srt", 'w') as f:
line = 1
for child in root:
f.write(self.printSRTLine(line, child.attrib["start"], child.attrib["dur"], child.text.encode('utf-8')))
line += 1
def formatSRTTime(self, secTime):
"""Convert a time in seconds (in Google's subtitle) to SRT time format"""
sec, micro = str(secTime).split('.')
m, s = divmod(int(sec), 60)
h, m = divmod(m, 60)
return "{:02}:{:02}:{:02},{}".format(h,m,s,micro)
def printSRTLine(self, line, start, duration, text):
"""Print a subtitle in SRT format"""
end = self.formatSRTTime(float(start) + float(duration))
start = self.formatSRTTime(start)
text = self.convertHTML(text)
return "{}\n{} --> {}\n{}\n\n".format(line, start, end, text)
def convertHTML(self, text):
"""A few HTML encodings replacements.
' to '
"""
return text.replace(''', "'")
def main():
try:
parser = argparse.ArgumentParser(description="Youtube Subtitle Downloader")
parser.add_argument("url", help="URL of the Youtube video")
parser.add_argument("-l", "--list", action="store_true", help="list all available languages")
parser.add_argument("--language", default="en", help="the ISO language code")
parser.add_argument("--filename", default="subtitle", help="specify the name of subtitle")
parser.add_argument("--filetype", default="srt", choices=["srt", "xml"], help="specify the output type of subtitle")
args = parser.parse_args()
downloader = YoutubeSubDownloader(args.url)
if args.list:
print "Available languages:"
f = downloader.list()
downloader.download(args.language, args.filename, args.filetype)
except Exception as e:
print e
if __name__ == '__main__':
main()
| bsd-2-clause | 5,863,344,055,062,149,000 | 35.806667 | 124 | 0.58522 | false | 3.831367 | false | false | false |
thomasorb/orb | orb/utils/io.py | 1 | 33933 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: Thomas Martin <[email protected]>
# File: io.py
## Copyright (c) 2010-2020 Thomas Martin <[email protected]>
##
## This file is part of ORB
##
## ORB is free software: you can redistribute it and/or modify it
## under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## ORB is distributed in the hope that it will be useful, but WITHOUT
## ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
## or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
## License for more details.
##
## You should have received a copy of the GNU General Public License
## along with ORB. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import numpy as np
import time
import warnings
import astropy.io.fits as pyfits
from astropy.io.fits.verify import VerifyWarning, VerifyError, AstropyUserWarning
from astropy.wcs import FITSFixedWarning
import astropy.io.votable
import pandas as pd
import orb.cutils
import h5py
import datetime
import orb.utils.validate
def open_file(file_name, mode='r'):
"""Open a file in write mode (by default) and return a file
object.
    Create the file if it doesn't exist (only in write or append mode).
:param file_name: Path to the file, can be either
relative or absolute.
:param mode: (Optional) Can be 'w' for write mode, 'r' for
read mode and 'a' for append mode.
"""
if mode not in ['w','r','a']:
raise Exception("mode option must be 'w', 'r' or 'a'")
if mode in ['w','a']:
# create folder if it does not exist
dirname = os.path.dirname(file_name)
if dirname != '':
if not os.path.exists(dirname):
os.makedirs(dirname)
return open(file_name, mode)
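# Usage sketch (illustrative only; the path below is an assumed example,
# not part of the ORB API). In write mode the parent folder is created,
# so this works even if 'out/' does not exist yet:
#
#     with open_file('out/example.txt', mode='w') as f:
#         f.write('hello\n')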
def write_fits(fits_path, fits_data, fits_header=None,
silent=False, overwrite=True, mask=None,
replace=False, record_stats=False, mask_path=None):
"""Write data in FITS format. If the file doesn't exist create
it with its directories.
If the file already exists add a number to its name before the
extension (unless 'overwrite' option is set to True).
:param fits_path: Path to the file, can be either
      relative or absolute.
:param fits_data: Data to be written in the file.
:param fits_header: (Optional) Optional keywords to update or
create. It can be a pyfits.Header() instance or a list of
tuples [(KEYWORD_1, VALUE_1, COMMENT_1), (KEYWORD_2,
VALUE_2, COMMENT_2), ...]. Standard keywords like SIMPLE,
BITPIX, NAXIS, EXTEND does not have to be passed.
    :param silent: (Optional) If True this function won't
      display any message (default False).
:param overwrite: (Optional) If True overwrite the output file
if it exists (default True).
    :param mask: (Optional) If not None, must be an array with the
same size as the given data but filled with ones and
zeros. Bad values (NaN or Inf) are converted to 1 and the
array is converted to 8 bit unsigned integers (uint8). This
array will be written to the disk with the same path
terminated by '_mask'. The header of the mask FITS file will
be the same as the original data (default None).
:param replace: (Optional) If True and if the file already
      exists, new data replaces old data in the existing file. NaN
values do not replace old values. Other values replace old
values. New array MUST have the same size as the existing
array. Note that if replace is True, overwrite is
automatically set to True.
:param record_stats: (Optional) If True, record mean and
median of data. Useful if they often have to be computed
(default False).
:param mask_path: (Optional) Path to the corresponding mask image.
.. note:: float64 data is converted to float32 data to avoid
      overly large files with unnecessary precision
.. note:: Please refer to
http://www.stsci.edu/institute/software_hardware/pyfits/ for
more information on PyFITS module and
http://fits.gsfc.nasa.gov/ for more information on FITS
files.
"""
SECURED_KEYS = ['SIMPLE', 'BITPIX', 'NAXIS', 'NAXIS1',
'NAXIS2', 'NAXIS3', 'EXTEND', 'INHERIT',
'BZERO', 'BSCALE']
if not isinstance(fits_data, np.ndarray):
raise TypeError('Data type must be numpy.ndarray')
start_time = time.time()
# change extension if nescessary
if os.path.splitext(fits_path)[1] != '.fits':
fits_path = os.path.splitext(fits_path)[0] + '.fits'
if mask is not None:
if np.shape(mask) != np.shape(fits_data):
raise ValueError('Mask must have the same shape as data')
if replace: overwrite=True
if overwrite:
warnings.filterwarnings(
'ignore', message='Overwriting existing file.*',
module='astropy.io.*')
if replace and os.path.exists(fits_path):
old_data = read_fits(fits_path)
if old_data.shape == fits_data.shape:
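            # keep old values where the new array is NaN, so that NaNs
            # never overwrite existing data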
fits_data[np.isnan(fits_data)] = old_data[np.isnan(fits_data)]
else:
raise Exception("New data shape %s and old data shape %s are not the same. Do not set the option 'replace' to True in this case"%(str(fits_data.shape), str(old_data.shape)))
# float64/128 data conversion to float32 to avoid too big files
# with unnecessary precision
if fits_data.dtype == np.float64 or fits_data.dtype == np.float128:
fits_data = fits_data.astype(np.float32)
# complex data cannot be written in fits
if np.iscomplexobj(fits_data):
fits_data = fits_data.real.astype(np.float32)
logging.warning('Complex data cast to float32 (FITS format do not support complex data)')
base_fits_path = fits_path
dirname = os.path.dirname(fits_path)
if (dirname != []) and (dirname != ''):
if not os.path.exists(dirname):
os.makedirs(dirname)
index=0
file_written = False
while not file_written:
if ((not (os.path.exists(fits_path))) or overwrite):
if len(fits_data.shape) > 1:
hdu = pyfits.PrimaryHDU(fits_data.transpose())
elif len(fits_data.shape) == 1:
hdu = pyfits.PrimaryHDU(fits_data[np.newaxis, :])
else: # 1 number only
hdu = pyfits.PrimaryHDU(np.array([fits_data]))
if mask is not None:
# mask conversion to only zeros or ones
mask = mask.astype(float)
mask[np.nonzero(np.isnan(mask))] = 1.
mask[np.nonzero(np.isinf(mask))] = 1.
mask[np.nonzero(mask)] = 1.
mask = mask.astype(np.uint8) # UINT8 is the
# smallest allowed
# type
hdu_mask = pyfits.PrimaryHDU(mask.transpose())
# add header optional keywords
if fits_header is not None:
## remove keys of the passed header which corresponds
## to the description of the data set
for ikey in SECURED_KEYS:
if ikey in fits_header: fits_header.pop(ikey)
hdu.header.extend(fits_header, strip=False,
update=True, end=True)
# Remove 3rd axis related keywords if there is no
# 3rd axis
if len(fits_data.shape) <= 2:
for ikey in range(len(hdu.header)):
if isinstance(hdu.header[ikey], str):
if ('Wavelength axis' in hdu.header[ikey]):
del hdu.header[ikey]
del hdu.header[ikey]
break
if 'CTYPE3' in hdu.header:
del hdu.header['CTYPE3']
if 'CRVAL3' in hdu.header:
del hdu.header['CRVAL3']
if 'CRPIX3' in hdu.header:
del hdu.header['CRPIX3']
if 'CDELT3' in hdu.header:
del hdu.header['CDELT3']
if 'CROTA3' in hdu.header:
del hdu.header['CROTA3']
if 'CUNIT3' in hdu.header:
del hdu.header['CUNIT3']
# add median and mean of the image in the header
# data is nan filtered before
if record_stats:
fdata = fits_data[np.nonzero(~np.isnan(fits_data))]
if np.size(fdata) > 0:
data_mean = np.nanmean(fdata)
data_median = np.nanmedian(fdata)
else:
data_mean = np.nan
data_median = np.nan
hdu.header.set('MEAN', str(data_mean),
'Mean of data (NaNs filtered)',
after=5)
hdu.header.set('MEDIAN', str(data_median),
'Median of data (NaNs filtered)',
after=5)
# add some basic keywords in the header
date = time.strftime("%Y-%m-%d", time.localtime(time.time()))
hdu.header.set('MASK', 'False', '', after=5)
hdu.header.set('DATE', date, 'Creation date', after=5)
hdu.header.set('PROGRAM', "ORB",
'Thomas Martin: [email protected]',
after=5)
# write FITS file
hdu.writeto(fits_path, overwrite=overwrite)
if mask is not None:
hdu_mask.header = hdu.header
hdu_mask.header.set('MASK', 'True', '', after=6)
if mask_path is None:
mask_path = os.path.splitext(fits_path)[0] + '_mask.fits'
hdu_mask.writeto(mask_path, overwrite=overwrite)
if not (silent):
logging.info("Data written as {} in {:.2f} s ".format(
fits_path, time.time() - start_time))
return fits_path
else :
fits_path = (os.path.splitext(base_fits_path)[0] +
"_" + str(index) +
os.path.splitext(base_fits_path)[1])
index += 1
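# Usage sketch (illustrative file name): write a small float image with a
# header keyword; MEAN/MEDIAN stats keywords are recorded as well.
def _example_write_fits():
    import numpy as np
    frame = np.ones((16, 16), dtype=np.float32)
    write_fits('tmp_demo/frame.fits', frame,
               fits_header=[('EXPTIME', 1.5, 'Exposure time [s]')],
               overwrite=True, record_stats=True)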
def read_fits(fits_path, no_error=False, nan_filter=False,
return_header=False, return_hdu_only=False,
return_mask=False, silent=False, delete_after=False,
data_index=None, image_mode='classic', chip_index=None,
binning=None, fix_header=True, dtype=float,
mask_path=None):
"""Read a FITS data file and returns its data.
:param fits_path: Path to the file, can be either
relative or absolute.
:param no_error: (Optional) If True this function will only
display a warning message if the file does not exist (so it
does not raise an exception) (default False)
:param nan_filter: (Optional) If True replace NaN by zeros
(default False)
:param return_header: (Optional) If True return a tuple (data,
header) (default False).
:param return_hdu_only: (Optional) If True return FITS header
data unit only. No data will be returned (default False).
:param return_mask: (Optional) If True return only the mask
corresponding to the data file (default False).
:param silent: (Optional) If True no message is displayed
except if an error is raised (default False).
:param delete_after: (Optional) If True delete file after
reading (default False).
:param data_index: (Optional) Index of data in the header data
unit (Default None).
:param image_mode: (Optional) Can be 'sitelle', 'spiomm' or
'classic'. In 'sitelle' mode, the parameter
chip_index must also be set to 0 or 1. In this mode only
one of both SITELLE quadrants is returned. In 'classic' mode
the whole frame is returned (default 'classic').
:param chip_index: (Optional) Index of the chip of the
SITELLE image. Used only if image_mode is set to 'sitelle'
In this case, must be 1 or 2. Else must be None (default
None).
:param binning: (Optional) If not None, returned data is
binned by this amount (must be an integer >= 1)
:param fix_header: (Optional) If True, fits header is
fixed to avoid errors due to header inconsistencies
(e.g. WCS errors) (default True).
:param dtype: (Optional) Data is converted to
the given dtype (e.g. np.float32, default float).
:param mask_path: (Optional) Path to the corresponding mask image.
.. note:: Please refer to
http://www.stsci.edu/institute/software_hardware/pyfits/ for
more information on PyFITS module. And
http://fits.gsfc.nasa.gov/ for more information on FITS
files.
"""
# avoid bugs with FITS files that have no data in the first HDU
fits_path = ((fits_path.splitlines())[0]).strip()
if return_mask:
if mask_path is None:
mask_path = os.path.splitext(fits_path)[0] + '_mask.fits'
fits_path = mask_path
try:
warnings.filterwarnings('ignore', module='astropy')
warnings.filterwarnings('ignore', category=ResourceWarning)
hdulist = pyfits.open(fits_path)
if data_index is None:
data_index = get_hdu_data_index(hdulist)
fits_header = hdulist[data_index].header
except Exception as e:
if not no_error:
raise IOError(
"File '{}' could not be opened: {}".format(fits_path, e))
else:
if not silent:
logging.warning(
"File '{}' could not be opened: {}".format(fits_path, e))
return None
# Correct header
if fix_header:
if fits_header['NAXIS'] == 2:
if 'CTYPE3' in fits_header: del fits_header['CTYPE3']
if 'CRVAL3' in fits_header: del fits_header['CRVAL3']
if 'CUNIT3' in fits_header: del fits_header['CUNIT3']
if 'CRPIX3' in fits_header: del fits_header['CRPIX3']
if 'CROTA3' in fits_header: del fits_header['CROTA3']
if return_hdu_only:
return hdulist[data_index]
else:
if image_mode == 'classic':
fits_data = np.array(
hdulist[data_index].data.transpose()).astype(dtype)
elif image_mode == 'sitelle':
fits_data = read_sitelle_chip(hdulist[data_index], chip_index)
elif image_mode == 'spiomm':
fits_data, fits_header = read_spiomm_data(
hdulist, fits_path)
else:
raise ValueError("Image_mode must be set to 'sitelle', 'spiomm' or 'classic'")
hdulist.close()
if binning is not None:
fits_data = utils.image.bin_image(fits_data, binning)
if (nan_filter):
fits_data = np.nan_to_num(fits_data)
if delete_after:
try:
os.remove(fits_path)
except OSError:
logging.warning("The file '%s' could not be deleted"%fits_path)
if return_header:
return np.squeeze(fits_data), fits_header
else:
return np.squeeze(fits_data)
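# Usage sketch (illustrative): read the data back together with its header.
def _example_read_fits():
    data, hdr = read_fits('tmp_demo/frame.fits', return_header=True)
    return data.shape, hdr.get('EXPTIME')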
def get_hdu_data_index(hdul):
"""Return the index of the first header data unit (HDU) containing data.
:param hdul: A pyfits.HDU instance
"""
hdu_data_index = 0
while (hdul[hdu_data_index].data is None):
hdu_data_index += 1
if hdu_data_index >= len(hdul):
raise Exception('No data recorded in FITS file')
return hdu_data_index
def read_sitelle_chip(hdu, chip_index, substract_bias=True):
"""Return chip data of a SITELLE FITS image.
:param hdu: pyfits.HDU instance of the SITELLE image
:param chip_index: Index of the chip to read. Must be 1 or 2.
:param substract_bias: If True bias is automatically
subtracted by using the overscan area (default True).
"""
def get_slice(key, index):
key = '{}{}'.format(key, index)
if key not in hdu.header: raise Exception(
'Bad SITELLE image header')
chip_section = hdu.header[key]
return get_sitelle_slice(chip_section)
def get_data(key, index, frame):
xslice, yslice = get_slice(key, index)
return np.copy(frame[yslice, xslice]).transpose()
if int(chip_index) not in (1,2): raise Exception(
'Chip index must be 1 or 2')
frame = hdu.data.astype(np.float)
# get data without bias subtraction
if not substract_bias:
return get_data('DSEC', chip_index, frame)
if chip_index == 1:
amps = ['A', 'B', 'C', 'D']
elif chip_index == 2:
amps = ['E', 'F', 'G', 'H']
xchip, ychip = get_slice('DSEC', chip_index)
data = np.empty((xchip.stop - xchip.start, ychip.stop - ychip.start),
dtype=float)
# removing bias
for iamp in amps:
xamp, yamp = get_slice('DSEC', iamp)
amp_data = get_data('DSEC', iamp, frame)
bias_data = get_data('BSEC', iamp, frame)
overscan_size = int(bias_data.shape[0]/2)
if iamp in ['A', 'C', 'E', 'G']:
bias_data = bias_data[-overscan_size:,:]
else:
bias_data = bias_data[:overscan_size,:]
bias_data = np.mean(bias_data, axis=0)
amp_data = amp_data - bias_data
data[xamp.start - xchip.start: xamp.stop - xchip.start,
yamp.start - ychip.start: yamp.stop - ychip.start] = amp_data
return data
def get_sitelle_slice(slice_str):
"""
Parse a string containing SITELLE-like slice coordinates.
:param slice_str: Slice string.
"""
if "'" in slice_str:
slice_str = slice_str[1:-1]
section = slice_str[1:-1].split(',')
x_min = int(section[0].split(':')[0]) - 1
x_max = int(section[0].split(':')[1])
y_min = int(section[1].split(':')[0]) - 1
y_max = int(section[1].split(':')[1])
return slice(x_min,x_max,1), slice(y_min,y_max,1)
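# Usage sketch: a SITELLE section string maps to zero-based, end-exclusive
# Python slices (FITS sections are one-based and end-inclusive).
def _example_get_sitelle_slice():
    xslice, yslice = get_sitelle_slice('[1:2048,1:4608]')
    return xslice, yslice  # slice(0, 2048, 1), slice(0, 4608, 1)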
def read_spiomm_data(hdu, image_path, substract_bias=True):
"""Return data of an SpIOMM FITS image.
:param hdu: pyfits.HDU Instance of the SpIOMM image
:param image_path: Image path
:param substract_bias: If True bias is automatically
subtracted by using the associated bias frame as an
overscan frame. Mean bias level is thus computed along the y
axis of the bias frame (default True).
"""
CENTER_SIZE_COEFF = 0.1
data_index = get_hdu_data_index(hdu)
frame = np.array(hdu[data_index].data.transpose()).astype(np.float)
hdr = hdu[data_index].header
# check presence of a bias
bias_path = os.path.splitext(image_path)[0] + '_bias.fits'
if os.path.exists(bias_path):
bias_frame = read_fits(bias_path)
if substract_bias:
## create overscan line
overscan = orb.cutils.meansigcut2d(bias_frame, axis=1)
frame = (frame.T - overscan.T).T
x_min = int(bias_frame.shape[0]/2.
- CENTER_SIZE_COEFF * bias_frame.shape[0])
x_max = int(bias_frame.shape[0]/2.
+ CENTER_SIZE_COEFF * bias_frame.shape[0] + 1)
y_min = int(bias_frame.shape[1]/2.
- CENTER_SIZE_COEFF * bias_frame.shape[1])
y_max = int(bias_frame.shape[1]/2.
+ CENTER_SIZE_COEFF * bias_frame.shape[1] + 1)
bias_level = np.nanmedian(bias_frame[x_min:x_max, y_min:y_max])
if not np.isnan(bias_level):
hdr['BIAS-LVL'] = (
bias_level,
'Bias level (moment, at the center of the frame)')
return frame, hdr
def open_hdf5(file_path, mode):
"""Return a :py:class:`h5py.File` instance with some
informations.
:param file_path: Path to the hdf5 file.
:param mode: Opening mode. Can be 'r', 'r+', 'w', 'w-', 'x',
'a'.
.. note:: Please refer to http://www.h5py.org/.
"""
if mode in ['w', 'a', 'w-', 'x']:
# create folder if it does not exist
dirname = os.path.dirname(file_path)
if dirname != '':
if not os.path.exists(dirname):
os.makedirs(dirname)
f = h5py.File(file_path, mode)
if mode in ['w', 'a', 'w-', 'x', 'r+']:
f.attrs['program'] = 'Created/modified with ORB'
f.attrs['date'] = str(datetime.datetime.now())
return f
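# Usage sketch (illustrative path): files opened for writing are tagged
# with 'program' and 'date' bookkeeping attributes.
def _example_open_hdf5():
    with open_hdf5('tmp_demo/store.hdf5', 'w') as f:
        f['x'] = [1, 2, 3]
        return dict(f.attrs)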
def write_hdf5(file_path, data, header=None,
silent=False, overwrite=True, max_hdu_check=True,
compress=False):
"""
Write data in HDF5 format.
A header can be added to the data. This method is useful to
handle an HDF5 data file like a FITS file. It implements most
of the functionality of the method
:py:meth:`core.Tools.write_fits`.
.. note:: The output HDF5 file can contain multiple data header
units (HDU). Each HDU is in a specific group named 'hdu*', *
being the index of the HDU. The first HDU is named
HDU0. Each HDU contains one data group (HDU*/data) which
contains a numpy.ndarray and one header group
(HDU*/header). Each subgroup of a header group is a keyword
and its associated value, comment and type.
:param file_path: Path to the HDF5 file to create
:param data: A numpy array (numpy.ndarray instance) of numeric
values. If a list of arrays is given, each array will be
placed in a specific HDU. The header keyword must also be
set to a list of headers of the same length.
:param header: (Optional) Optional keywords to update or
create. It can be a pyfits.Header() instance or a list of
tuples [(KEYWORD_1, VALUE_1, COMMENT_1), (KEYWORD_2,
VALUE_2, COMMENT_2), ...]. Standard keywords like SIMPLE,
BITPIX, NAXIS, EXTEND does not have to be passed (default
None). It can also be a list of headers if a list of arrays
has been passed to the option 'data'.
:param max_hdu_check: (Optional) When True, if the input data
is a list (interpreted as a list of data units), check that
its length is not too long, to make sure that the input list
is not a single data array that has not been converted to a
numpy.ndarray. If the number of HDUs to create is
indeed very large this can be set to False (default True).
:param silent: (Optional) If True this function won't
display any message (default False)
:param overwrite: (Optional) If True overwrite the output file
if it exists (default True).
:param compress: (Optional) If True data is compressed using
the SZIP library (see
https://www.hdfgroup.org/doc_resource/SZIP/). SZIP library
must be installed (default False).
.. note:: Please refer to http://www.h5py.org/.
"""
MAX_HDUS = 3
start_time = time.time()
# change extension if necessary
if os.path.splitext(file_path)[1] != '.hdf5':
file_path = os.path.splitext(file_path)[0] + '.hdf5'
# Check if data is a list of arrays.
if not isinstance(data, list):
data = [data]
if max_hdu_check and len(data) > MAX_HDUS:
raise Exception('Data list length is > {}. As a list is interpreted as a list of data units, make sure to pass a numpy.ndarray instance instead of a list.'.format(MAX_HDUS))
# Check header format
if header is not None:
if isinstance(header, pyfits.Header):
header = [header]
elif isinstance(header, list):
if (isinstance(header[0], list)
or isinstance(header[0], tuple)):
header_seems_ok = False
if (isinstance(header[0][0], list)
or isinstance(header[0][0], tuple)):
# we have a list of headers
if len(header) == len(data):
header_seems_ok = True
elif isinstance(header[0][0], str):
# we only have one header
if len(header[0]) > 2:
header = [header]
header_seems_ok = True
if not header_seems_ok:
raise Exception('Badly formatted header')
elif not isinstance(header[0], pyfits.Header):
raise Exception('Header must be a pyfits.Header instance or a list')
else:
raise Exception('Header must be a pyfits.Header instance or a list')
if len(header) != len(data):
raise Exception('The number of headers must be the same as the number of data units.')
# change path if file exists and must not be overwritten
new_file_path = str(file_path)
if not overwrite and os.path.exists(new_file_path):
index = 0
while os.path.exists(new_file_path):
new_file_path = (os.path.splitext(file_path)[0] +
"_" + str(index) +
os.path.splitext(file_path)[1])
index += 1
# open file
with open_hdf5(new_file_path, 'w') as f:
## add data + header
for i in range(len(data)):
idata = data[i]
# Check if data has a valid format.
if not isinstance(idata, np.ndarray):
try:
idata = np.array(idata, dtype=float)
except Exception as e:
raise Exception('Data to write must be convertible to a numpy array of numeric values: {}'.format(e))
# convert data to float32
if idata.dtype == np.float64:
idata = idata.astype(np.float32)
# hdu name
hdu_group_name = 'hdu{}'.format(i)
if compress:
f.create_dataset(
hdu_group_name + '/data', data=idata,
compression='lzf', compression_opts=None)
#compression='szip', compression_opts=('nn', 32))
#compression='gzip', compression_opts=9)
else:
f.create_dataset(
hdu_group_name + '/data', data=idata)
# add header
if header is not None:
iheader = header[i]
if not isinstance(iheader, pyfits.Header):
iheader = pyfits.Header(iheader)
f[hdu_group_name + '/header'] = header_fits2hdf5(
iheader)
logging.info('Data written as {} in {:.2f} s'.format(
new_file_path, time.time() - start_time))
return new_file_path
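# Usage sketch (illustrative): two data units, each with its own header,
# end up in the hdu0 and hdu1 groups of the output file.
def _example_write_hdf5():
    import numpy as np
    cube = np.zeros((4, 4), dtype=np.float32)
    spectrum = np.arange(10, dtype=np.float32)
    return write_hdf5('tmp_demo/cube.hdf5', [cube, spectrum],
                      header=[[('UNIT', 'counts', '')],
                              [('UNIT', 'erg/s', '')]],
                      overwrite=True)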
castables = [int, float, bool, str,
np.int64, np.float64, int, np.float128, np.bool_]
def cast(a, t_str):
if isinstance(t_str, bytes):
t_str = t_str.decode()
if 'type' in t_str: t_str = t_str.replace('type', 'class')
if 'long' in t_str: t_str = t_str.replace('long', 'int')
for _t in castables:
if t_str == repr(_t):
return _t(a)
raise Exception('Bad type string {} should be in {}'.format(t_str, [repr(_t) for _t in castables]))
def dict2array(data):
"""Convert a dictionary to an array that can be written in an hdf5 file
:param data: Must be a dict instance
"""
if not isinstance(data, dict): raise TypeError('data must be a dict')
arr = list()
for key in data:
if type(data[key]) in castables:
_tstr = str(type(data[key]))
arr.append(np.array(
(key, data[key], _tstr)))
else:
logging.debug('{} of type {} not passed to array'.format(key, type(data[key])))
return np.array(arr)
def array2dict(data):
"""Convert an array read from an hdf5 file to a dict.
:param data: array of params returned by dict2array
"""
_dict = dict()
for i in range(len(data)):
_dict[data[i][0]] = cast(data[i][1], data[i][2])
return _dict
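# Usage sketch: dict2array()/array2dict() round-trip a flat dict of castable
# scalars (the form used to store parameter sets in HDF5 files).
def _example_params_roundtrip():
    params = {'step': 2.5, 'order': 8, 'apod': 'learner95'}
    return array2dict(dict2array(params)) == params  # True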
def dict2header(params):
"""convert a dict to a pyfits.Header() instance
.. warning:: this is a destructive process, illegal values are
removed from the header.
:param params: a dict instance
"""
# filter illegal header values
cards = list()
for iparam in params:
val = params[iparam]
val_ok = False
for itype in castables:
if isinstance(val, itype):
val_ok = True
if val_ok:
if isinstance(val, bool):
val = int(val)
card = pyfits.Card(
keyword=iparam,
value=val,
comment=None)
try:
card.verify(option='exception')
cards.append(card)
except (VerifyError, ValueError, TypeError):
pass
warnings.simplefilter('ignore', category=VerifyWarning)
warnings.simplefilter('ignore', category=AstropyUserWarning)
warnings.simplefilter('ignore', category=FITSFixedWarning)
header = pyfits.Header(cards)
return header
def header_fits2hdf5(fits_header):
"""convert a pyfits.Header() instance to a header for an hdf5 file
:param fits_header: Header of the FITS file
"""
hdf5_header = list()
for ikey in range(len(fits_header)):
_tstr = str(type(fits_header[ikey]))
ival = np.array(
(list(fits_header.keys())[ikey], str(fits_header[ikey]),
fits_header.comments[ikey], _tstr))
hdf5_header.append(ival)
return np.array(hdf5_header, dtype='S300')
def header_hdf52fits(hdf5_header):
"""convert an hdf5 header to a pyfits.Header() instance.
:param hdf5_header: Header of the HDF5 file
"""
fits_header = pyfits.Header()
for i in range(hdf5_header.shape[0]):
ival = hdf5_header[i,:]
ival = [iival.decode() for iival in ival]
if ival[3] != 'comment':
fits_header[ival[0]] = cast(ival[1], ival[3]), str(ival[2])
else:
fits_header['comment'] = ival[1]
return fits_header
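# Usage sketch: a FITS <-> HDF5 header round trip keeps keywords, values
# (re-cast to their original types) and comments.
def _example_header_roundtrip():
    hdr = pyfits.Header([('OBJECT', 'M31', 'target'), ('EXPTIME', 1.5, '')])
    return header_hdf52fits(header_fits2hdf5(hdr))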
def read_hdf5(file_path, return_header=False, dtype=float):
"""Read an HDF5 data file created with
:py:meth:`core.Tools.write_hdf5`.
:param file_path: Path to the file, can be either
relative or absolute.
:param return_header: (Optional) If True return a tuple (data,
header) (default False).
:param dtype: (Optional) Data is converted to the given type
(e.g. np.float32, default float).
.. note:: Please refer to http://www.h5py.org/."""
with open_hdf5(file_path, 'r') as f:
data = list()
header = list()
for hdu_name in f:
data.append(f[hdu_name + '/data'][:].astype(dtype))
if return_header:
if hdu_name + '/header' in f:
# extract header
header.append(
header_hdf52fits(f[hdu_name + '/header'][:]))
else: header.append(None)
if len(data) == 1:
if return_header:
return data[0], header[0]
else:
return data[0]
else:
if return_header:
return data, header
else:
return data
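# Usage sketch (illustrative): a single-HDU file yields a bare array, a
# multi-HDU file yields a list of arrays (plus headers if requested).
def _example_read_hdf5():
    data, headers = read_hdf5('tmp_demo/cube.hdf5', return_header=True)
    return [d.shape for d in data], headers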
def cast2hdf5(val):
if val is None:
return 'None'
elif isinstance(val, np.float128):
return val.astype(np.float64)
#elif isinstance(val, int):
# return str(val)
elif isinstance(val, np.ndarray):
if val.dtype == np.float128:
return val.astype(np.float64)
return val
def get_storing_dtype(arr):
if not isinstance(arr, np.ndarray):
raise TypeError('arr must be a numpy.ndarray instance')
if arr.dtype == np.float64:
return np.float32
if arr.dtype == np.complex128:
return np.complex64
else: return arr.dtype
def cast_storing_dtype(arr):
if not isinstance(arr, np.ndarray):
raise TypeError('arr must be a numpy.ndarray instance')
return arr.astype(get_storing_dtype(arr))
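# Usage sketch: float64/complex128 arrays are downcast before storage to
# halve the file size; other dtypes are left untouched.
def _example_storing_dtype():
    import numpy as np
    return cast_storing_dtype(np.ones(4, dtype=np.float64)).dtype  # float32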
def save_dflist(dflist, path):
"""Save a list of dataframes
:param dflist: list of pandas dataframes
:param path: path to the output file
"""
if os.path.exists(path):
os.remove(path)
with open_hdf5(path, 'w') as f:
f.attrs['len'] = len(dflist)
for idf in range(len(dflist)):
if dflist[idf] is not None:
dflist[idf].to_hdf(path, 'df{:06d}'.format(idf),
format='table', mode='a')
def load_dflist(path):
"""Save a list of dataframes
:param path: path to the output file
"""
with open_hdf5(path, 'r') as f:
_len = f.attrs['len']
dflist = list()
for i in range(_len):
try:
idf = pd.read_hdf(path, key='df{:06d}'.format(i))
dflist.append(idf)
except KeyError:
dflist.append(None)
return dflist
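# Usage sketch (illustrative): None placeholders survive the save/load
# round trip, keeping list indexes aligned.
def _example_dflist_roundtrip():
    import pandas as pd
    dfs = [pd.DataFrame({'a': [1, 2]}), None]
    save_dflist(dfs, 'tmp_demo/dflist.hdf5')
    return load_dflist('tmp_demo/dflist.hdf5')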
def read_votable(votable_file):
"""read a votable and transfer it as as pandas dataframe.
taken from https://gist.github.com/icshih/52ca49eb218a2d5b660ee4a653301b2b
"""
votable = astropy.io.votable.parse(votable_file)
table = votable.get_first_table().to_table(use_names_over_ids=True)
return table.to_pandas()
def save_starlist(path, starlist):
"""Save a star list as a two columnfile X, Y readable by ds9
"""
orb.utils.validate.is_2darray(starlist, object_name='starlist')
if starlist.shape[1] != 2:
raise TypeError('starlist must be of shape (n,2)')
with open_file(path, 'w') as f:
for i in range(starlist.shape[0]):
f.write('{} {}\n'.format(starlist[i,0], starlist[i,1]))
f.flush()
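# Usage sketch (illustrative): a (n, 2) array of X/Y positions becomes a
# plain two-column text file readable by ds9.
def _example_save_starlist():
    import numpy as np
    save_starlist('tmp_demo/stars.txt',
                  np.array([[512.3, 401.8], [128.0, 96.5]]))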
| gpl-3.0 | 8,377,938,406,832,901,000 | 33.946447 | 185 | 0.581646 | false | 3.718685 | false | false | false |
quarckster/cfme_tests | cfme/test_framework/appliance_police.py | 1 | 2826 | import attr
import pytest
import requests
from cfme.utils import ports
from cfme.utils.net import net_check
from cfme.utils.wait import TimedOutError
from cfme.utils.conf import rdb
from fixtures.pytest_store import store
from cfme.fixtures.rdb import Rdb
@attr.s
class AppliancePoliceException(Exception):
message = attr.ib()
port = attr.ib()
def __str__(self):
return "{} (port {})".format(self.message, self.port)
@pytest.fixture(autouse=True, scope="function")
def appliance_police():
if not store.slave_manager:
return
try:
port_numbers = {
'ssh': ports.SSH,
'https': store.current_appliance.ui_port,
'postgres': ports.DB}
port_results = {pn: net_check(pp, force=True) for pn, pp in port_numbers.items()}
for port, result in port_results.items():
if port == 'ssh' and store.current_appliance.is_pod:
# ssh is not available for podified appliance
continue
if not result:
raise AppliancePoliceException('Unable to connect', port_numbers[port])
try:
status_code = requests.get(store.current_appliance.url, verify=False,
timeout=120).status_code
except Exception:
raise AppliancePoliceException('Getting status code failed', port_numbers['https'])
if status_code != 200:
raise AppliancePoliceException('Status code was {}, should be 200'.format(
status_code), port_numbers['https'])
return
except AppliancePoliceException as e:
# special handling for known failure conditions
if e.port == 443:
# Lots of rdbs lately where evm seems to have entirely crashed
# and (sadly) the only fix is a rude restart
store.current_appliance.restart_evm_service(rude=True)
try:
store.current_appliance.wait_for_web_ui(900)
store.write_line('EVM was frozen and had to be restarted.', purple=True)
return
except TimedOutError:
pass
e_message = str(e)
except Exception as e:
e_message = str(e)
# Regardless of the exception raised, we didn't return anywhere above
# time to call a human
msg = 'Help! My appliance {} crashed with: {}'.format(store.current_appliance.url, e_message)
store.slave_manager.message(msg)
if 'appliance_police_recipients' in rdb:
rdb_kwargs = {
'subject': 'RDB Breakpoint: Appliance failure',
'recipients': rdb.appliance_police_recipients,
}
else:
rdb_kwargs = {}
Rdb(msg).set_trace(**rdb_kwargs)
store.slave_manager.message('Resuming testing following remote debugging')
| gpl-2.0 | -7,508,210,430,631,497,000 | 33.888889 | 97 | 0.617127 | false | 4.042918 | false | false | false |
ErasRasmuson/LA | LogAna/Taxi_LongRides_old.py | 1 | 2578 | # -*- coding: cp1252 -*-
"""
###############################################################################
HEADER: Taxi_LongRides.py
AUTHOR: Esa Heikkinen
DATE: 26.06.2018
DOCUMENT: -
VERSION: "$Id$"
REFERENCES: -
PURPOSE:
CHANGES: "$Log$"
###############################################################################
"""
from logdig_analyze_template import *
# ----------------------------- DATA-DRIVEN PART -----------------------------
VARIABLES = {
"STARTTIME-DATE": "2013-01-01",
"STARTTIME-TIME": "00:00:00",
"STOPTIME-DATE": "2013-01-01",
"STOPTIME-TIME": "01:40:00"
}
START = {
"state": "BEGIN",
"func": "start"
}
ESU["BEGIN"] = {
"esu_mode": "SEARCH_EVENT:First:NextRow",
"log_filename_expr": "TaxiRides_small.csv",
"log_varnames": "isStart=START",
"log_timecol_name": "startTime",
"log_start_time_expr": "<STARTTIME-BEGIN>,0",
"log_stop_time_expr": "<STOPTIME>,0",
"TF_state": "END",
"TF_func": "found_begin",
"TN_state": "STOP",
"TN_func": "exit_normal",
"TE_state": "STOP",
"TE_func": "exit_error",
"GUI_line_num": "0"
}
ESU["END"] = {
"esu_mode": "SEARCH_EVENT:First",
"log_filename_expr": "TaxiRides_small.csv_<SET-RIDEID>",
"log_varnames": "isStart=END",
"log_timecol_name": "startTime",
"log_start_time_expr": "<startTime>,0",
"log_stop_time_expr": "<startTime>,7200",
"TF_state": "BEGIN",
"TF_func": "found_end",
"TN_state": "BEGIN",
"TN_func": "not_found_end",
"TE_state": "STOP",
"TE_func": "exit_error",
"GUI_line_num": "1"
}
STOP = {
"func": ""
}
# ----------------------------- FUNCTION PART -----------------------------
def start():
set_datetime_variable("STARTTIME","STARTTIME-DATE","STARTTIME-TIME")
set_datetime_variable("STOPTIME","STOPTIME-DATE","STOPTIME-TIME")
set_sbk_file("Taxi_LongRides","SET-RIDEID","startTime","endTime")
copy_variable("STARTTIME-BEGIN","STARTTIME")
logfiles_data.read("/home/esa/projects/LA/LogFile/PreProsessed/TaxiRides/TaxiRides_small.csv","startTime")
logfiles_data.transform_operation_keyby("/home/esa/projects/LA/LogFile/PreProsessed/TaxiRides/TaxiRides_small.csv","rideId")
def found_begin():
print("found_begin")
copy_variable("SET-RIDEID","rideId")
copy_variable("STARTTIME-BEGIN","startTime")
def found_end():
print("found_end")
def not_found_end():
print("not_found_end")
copy_variable("STARTTIME-BEGIN","startTime")
print_sbk_file()
def exit_normal():
print("exit_normal")
def exit_error():
print("exit_error")
| gpl-3.0 | 2,315,905,248,688,705,500 | 27.32967 | 125 | 0.556633 | false | 2.710831 | false | false | false |
FEniCS/ufl | test/test_apply_function_pullbacks.py | 1 | 12156 | #!/usr/bin/env py.test
# -*- coding: utf-8 -*-
from pytest import raises
from ufl import *
from ufl.algorithms.apply_function_pullbacks import apply_function_pullbacks, apply_single_function_pullbacks
from ufl.algorithms.renumbering import renumber_indices
from ufl.classes import Jacobian, JacobianInverse, JacobianDeterminant, ReferenceValue, CellOrientation
def check_single_function_pullback(g, mappings):
expected = mappings[g]
actual = apply_single_function_pullbacks(g)
rexp = renumber_indices(expected)
ract = renumber_indices(actual)
if not rexp == ract:
print()
print("In check_single_function_pullback:")
print("input:")
print(repr(g))
print("expected:")
print(str(rexp))
print("actual:")
print(str(ract))
print("signatures:")
print((expected**2*dx).signature())
print((actual**2*dx).signature())
print()
assert ract == rexp
def test_apply_single_function_pullbacks_triangle3d():
triangle3d = Cell("triangle", geometric_dimension=3)
cell = triangle3d
domain = as_domain(cell)
UL2 = FiniteElement("DG L2", cell, 1)
U0 = FiniteElement("DG", cell, 0)
U = FiniteElement("CG", cell, 1)
V = VectorElement("CG", cell, 1)
Vd = FiniteElement("RT", cell, 1)
Vc = FiniteElement("N1curl", cell, 1)
T = TensorElement("CG", cell, 1)
S = TensorElement("CG", cell, 1, symmetry=True)
COV2T = FiniteElement("Regge", cell, 0) # (0, 2)-symmetric tensors
CONTRA2T = FiniteElement("HHJ", cell, 0) # (2, 0)-symmetric tensors
Uml2 = UL2*UL2
Um = U*U
Vm = U*V
Vdm = V*Vd
Vcm = Vd*Vc
Tm = Vc*T
Sm = T*S
Vd0 = Vd*U0 # case from failing ffc demo
W = S*T*Vc*Vd*V*U
ul2 = Coefficient(UL2)
u = Coefficient(U)
v = Coefficient(V)
vd = Coefficient(Vd)
vc = Coefficient(Vc)
t = Coefficient(T)
s = Coefficient(S)
cov2t = Coefficient(COV2T)
contra2t = Coefficient(CONTRA2T)
uml2 = Coefficient(Uml2)
um = Coefficient(Um)
vm = Coefficient(Vm)
vdm = Coefficient(Vdm)
vcm = Coefficient(Vcm)
tm = Coefficient(Tm)
sm = Coefficient(Sm)
vd0m = Coefficient(Vd0) # case from failing ffc demo
w = Coefficient(W)
rul2 = ReferenceValue(ul2)
ru = ReferenceValue(u)
rv = ReferenceValue(v)
rvd = ReferenceValue(vd)
rvc = ReferenceValue(vc)
rt = ReferenceValue(t)
rs = ReferenceValue(s)
rcov2t = ReferenceValue(cov2t)
rcontra2t = ReferenceValue(contra2t)
ruml2 = ReferenceValue(uml2)
rum = ReferenceValue(um)
rvm = ReferenceValue(vm)
rvdm = ReferenceValue(vdm)
rvcm = ReferenceValue(vcm)
rtm = ReferenceValue(tm)
rsm = ReferenceValue(sm)
rvd0m = ReferenceValue(vd0m)
rw = ReferenceValue(w)
assert len(w) == 9 + 9 + 3 + 3 + 3 + 1
assert len(rw) == 6 + 9 + 2 + 2 + 3 + 1
assert len(w) == 28
assert len(rw) == 23
assert len(vd0m) == 4
assert len(rvd0m) == 3
# Geometric quantities we need:
J = Jacobian(domain)
detJ = JacobianDeterminant(domain)
Jinv = JacobianInverse(domain)
# o = CellOrientation(domain)
i, j, k, l = indices(4)
# Contravariant H(div) Piola mapping:
M_hdiv = ((1.0/detJ) * J) # Not applying cell orientation here
# Covariant H(curl) Piola mapping: Jinv.T
mappings = {
# Simple elements should get a simple representation
ul2: rul2 / detJ,
u: ru,
v: rv,
vd: as_vector(M_hdiv[i, j]*rvd[j], i),
vc: as_vector(Jinv[j, i]*rvc[j], i),
t: rt,
s: as_tensor([[rs[0], rs[1], rs[2]],
[rs[1], rs[3], rs[4]],
[rs[2], rs[4], rs[5]]]),
cov2t: as_tensor(Jinv[k, i] * rcov2t[k, l] * Jinv[l, j], (i, j)),
contra2t: as_tensor((1.0 / detJ) * (1.0 / detJ)
* J[i, k] * rcontra2t[k, l] * J[j, l], (i, j)),
# Mixed elements become a bit more complicated
uml2: as_vector([ruml2[0] / detJ, ruml2[1] / detJ]),
um: rum,
vm: rvm,
vdm: as_vector([
# V
rvdm[0],
rvdm[1],
rvdm[2],
# Vd
M_hdiv[0, j]*as_vector([rvdm[3], rvdm[4]])[j],
M_hdiv[1, j]*as_vector([rvdm[3], rvdm[4]])[j],
M_hdiv[2, j]*as_vector([rvdm[3], rvdm[4]])[j],
]),
vcm: as_vector([
# Vd
M_hdiv[0, j]*as_vector([rvcm[0], rvcm[1]])[j],
M_hdiv[1, j]*as_vector([rvcm[0], rvcm[1]])[j],
M_hdiv[2, j]*as_vector([rvcm[0], rvcm[1]])[j],
# Vc
Jinv[i, 0]*as_vector([rvcm[2], rvcm[3]])[i],
Jinv[i, 1]*as_vector([rvcm[2], rvcm[3]])[i],
Jinv[i, 2]*as_vector([rvcm[2], rvcm[3]])[i],
]),
tm: as_vector([
# Vc
Jinv[i, 0]*as_vector([rtm[0], rtm[1]])[i],
Jinv[i, 1]*as_vector([rtm[0], rtm[1]])[i],
Jinv[i, 2]*as_vector([rtm[0], rtm[1]])[i],
# T
rtm[2], rtm[3], rtm[4],
rtm[5], rtm[6], rtm[7],
rtm[8], rtm[9], rtm[10],
]),
sm: as_vector([
# T
rsm[0], rsm[1], rsm[2],
rsm[3], rsm[4], rsm[5],
rsm[6], rsm[7], rsm[8],
# S
rsm[9], rsm[10], rsm[11],
rsm[10], rsm[12], rsm[13],
rsm[11], rsm[13], rsm[14],
]),
# Case from failing ffc demo:
vd0m: as_vector([
M_hdiv[0, j]*as_vector([rvd0m[0], rvd0m[1]])[j],
M_hdiv[1, j]*as_vector([rvd0m[0], rvd0m[1]])[j],
M_hdiv[2, j]*as_vector([rvd0m[0], rvd0m[1]])[j],
rvd0m[2]
]),
# This combines it all:
w: as_vector([
# S
rw[0], rw[1], rw[2],
rw[1], rw[3], rw[4],
rw[2], rw[4], rw[5],
# T
rw[6], rw[7], rw[8],
rw[9], rw[10], rw[11],
rw[12], rw[13], rw[14],
# Vc
Jinv[i, 0]*as_vector([rw[15], rw[16]])[i],
Jinv[i, 1]*as_vector([rw[15], rw[16]])[i],
Jinv[i, 2]*as_vector([rw[15], rw[16]])[i],
# Vd
M_hdiv[0, j]*as_vector([rw[17], rw[18]])[j],
M_hdiv[1, j]*as_vector([rw[17], rw[18]])[j],
M_hdiv[2, j]*as_vector([rw[17], rw[18]])[j],
# V
rw[19],
rw[20],
rw[21],
# U
rw[22],
]),
}
# Check functions of various elements outside a mixed context
check_single_function_pullback(ul2, mappings)
check_single_function_pullback(u, mappings)
check_single_function_pullback(v, mappings)
check_single_function_pullback(vd, mappings)
check_single_function_pullback(vc, mappings)
check_single_function_pullback(t, mappings)
check_single_function_pullback(s, mappings)
check_single_function_pullback(cov2t, mappings)
check_single_function_pullback(contra2t, mappings)
# Check functions of various elements inside a mixed context
check_single_function_pullback(uml2, mappings)
check_single_function_pullback(um, mappings)
check_single_function_pullback(vm, mappings)
check_single_function_pullback(vdm, mappings)
check_single_function_pullback(vcm, mappings)
check_single_function_pullback(tm, mappings)
check_single_function_pullback(sm, mappings)
# Check the ridiculous mixed element W combining it all
check_single_function_pullback(w, mappings)
def test_apply_single_function_pullbacks_triangle():
cell = triangle
domain = as_domain(cell)
Ul2 = FiniteElement("DG L2", cell, 1)
U = FiniteElement("CG", cell, 1)
V = VectorElement("CG", cell, 1)
Vd = FiniteElement("RT", cell, 1)
Vc = FiniteElement("N1curl", cell, 1)
T = TensorElement("CG", cell, 1)
S = TensorElement("CG", cell, 1, symmetry=True)
Uml2 = Ul2*Ul2
Um = U*U
Vm = U*V
Vdm = V*Vd
Vcm = Vd*Vc
Tm = Vc*T
Sm = T*S
W = S*T*Vc*Vd*V*U
ul2 = Coefficient(Ul2)
u = Coefficient(U)
v = Coefficient(V)
vd = Coefficient(Vd)
vc = Coefficient(Vc)
t = Coefficient(T)
s = Coefficient(S)
uml2 = Coefficient(Uml2)
um = Coefficient(Um)
vm = Coefficient(Vm)
vdm = Coefficient(Vdm)
vcm = Coefficient(Vcm)
tm = Coefficient(Tm)
sm = Coefficient(Sm)
w = Coefficient(W)
rul2 = ReferenceValue(ul2)
ru = ReferenceValue(u)
rv = ReferenceValue(v)
rvd = ReferenceValue(vd)
rvc = ReferenceValue(vc)
rt = ReferenceValue(t)
rs = ReferenceValue(s)
ruml2 = ReferenceValue(uml2)
rum = ReferenceValue(um)
rvm = ReferenceValue(vm)
rvdm = ReferenceValue(vdm)
rvcm = ReferenceValue(vcm)
rtm = ReferenceValue(tm)
rsm = ReferenceValue(sm)
rw = ReferenceValue(w)
assert len(w) == 4 + 4 + 2 + 2 + 2 + 1
assert len(rw) == 3 + 4 + 2 + 2 + 2 + 1
assert len(w) == 15
assert len(rw) == 14
# Geometric quantities we need:
J = Jacobian(domain)
detJ = JacobianDeterminant(domain)
Jinv = JacobianInverse(domain)
i, j, k, l = indices(4)
# Contravariant H(div) Piola mapping:
M_hdiv = (1.0/detJ) * J
# Covariant H(curl) Piola mapping: Jinv.T
mappings = {
# Simple elements should get a simple representation
ul2: rul2 / detJ,
u: ru,
v: rv,
vd: as_vector(M_hdiv[i, j]*rvd[j], i),
vc: as_vector(Jinv[j, i]*rvc[j], i),
t: rt,
s: as_tensor([[rs[0], rs[1]], [rs[1], rs[2]]]),
# Mixed elements become a bit more complicated
uml2: as_vector([ruml2[0] / detJ, ruml2[1] / detJ]),
um: rum,
vm: rvm,
vdm: as_vector([
# V
rvdm[0],
rvdm[1],
# Vd
M_hdiv[0, j]*as_vector([rvdm[2], rvdm[3]])[j],
M_hdiv[1, j]*as_vector([rvdm[2], rvdm[3]])[j],
]),
vcm: as_vector([
# Vd
M_hdiv[0, j]*as_vector([rvcm[0], rvcm[1]])[j],
M_hdiv[1, j]*as_vector([rvcm[0], rvcm[1]])[j],
# Vc
Jinv[i, 0]*as_vector([rvcm[2], rvcm[3]])[i],
Jinv[i, 1]*as_vector([rvcm[2], rvcm[3]])[i],
]),
tm: as_vector([
# Vc
Jinv[i, 0]*as_vector([rtm[0], rtm[1]])[i],
Jinv[i, 1]*as_vector([rtm[0], rtm[1]])[i],
# T
rtm[2], rtm[3],
rtm[4], rtm[5],
]),
sm: as_vector([
# T
rsm[0], rsm[1],
rsm[2], rsm[3],
# S
rsm[4], rsm[5],
rsm[5], rsm[6],
]),
# This combines it all:
w: as_vector([
# S
rw[0], rw[1],
rw[1], rw[2],
# T
rw[3], rw[4],
rw[5], rw[6],
# Vc
Jinv[i, 0]*as_vector([rw[7], rw[8]])[i],
Jinv[i, 1]*as_vector([rw[7], rw[8]])[i],
# Vd
M_hdiv[0, j]*as_vector([rw[9], rw[10]])[j],
M_hdiv[1, j]*as_vector([rw[9], rw[10]])[j],
# V
rw[11],
rw[12],
# U
rw[13],
]),
}
# Check functions of various elements outside a mixed context
check_single_function_pullback(ul2, mappings)
check_single_function_pullback(u, mappings)
check_single_function_pullback(v, mappings)
check_single_function_pullback(vd, mappings)
check_single_function_pullback(vc, mappings)
check_single_function_pullback(t, mappings)
check_single_function_pullback(s, mappings)
# Check functions of various elements inside a mixed context
check_single_function_pullback(uml2, mappings)
check_single_function_pullback(um, mappings)
check_single_function_pullback(vm, mappings)
check_single_function_pullback(vdm, mappings)
check_single_function_pullback(vcm, mappings)
check_single_function_pullback(tm, mappings)
check_single_function_pullback(sm, mappings)
# Check the ridiculous mixed element W combining it all
check_single_function_pullback(w, mappings)
| lgpl-3.0 | 2,708,946,957,977,316,000 | 29.619647 | 109 | 0.534304 | false | 2.912314 | false | false | false |
1orwell/yrs2013 | fake.py | 1 | 3440 | '''Generate necessary dump files'''
#options
size = 100
regenerate_graph = False
days = 1
force_layout = False
default = str(size)+'.dat'
###
import igraph, pickle, random, os
import math
from collections import OrderedDict
def process(fout):
output = os.path.join('data',fout)
try:
#load graph if previously generated.
g = pickle.load(open('dump.dat'))
print 'Graph loaded from dump.dat'
except IOError:
#generate graph if it does not exist in the directory
print 'Generating graph to dump.dat'
g = igraph.Graph()
g.add_vertices(791)
g.es["weight"] = 1.0
g.delete_vertices([0])
with open('./flu-data/edgeLists/durationCondition/addThenChop/dropoff=0/minimumDuration=1/deltaT=1620/staticWeightedEdgeList_at=1350_min=540_max=2159.txt') as edges:
for edge in edges:
u, v, w = map(int, edge.split())
g[u, v] = 1.0/w
g.delete_vertices(g.vs(_degree_eq = 0))
pickle.dump(g,open('dump.dat','wb'))
print 'Finished'
#take sample of n points
sample = random.sample(range(1,788),790-size)
g.delete_vertices(sample)
print g.summary()
#Fiddle layout
print 'Working out layout'
if force_layout:
#starting everyone at their own location
#coords definition stolen from sim_group_move.py
coords = []
wrap = 10 #positions per row
col_length = int(math.ceil(size/wrap))
for y in range(col_length):
for x in range(wrap):
coords.append((x,y))
print coords
centre = (wrap/2, col_length/2)
else:
l = g.layout_kamada_kawai()
centre = l.centroid()
coords = l.coords
def distance(x, y): return math.sqrt((x[0] - y[0])**2 + (x[1] - y[1])**2)
#sort the coords by their position from the centre
order = sorted(enumerate(coords), key = lambda x: distance(x[1], centre))
order = [x[0] for x in order]
#work out mininum global time
mintime = 1000 #must be less than this
for x in order:
if x == 0: continue
with open('./flu-data/moteFiles/node-'+str(x)) as fin:
line = fin.readline()
if line:
t = int(line.split()[-1])
if t < mintime:
mintime = t
completed = []
times = {}
print 'Generating movement file'
for node in order:
if node == 0: continue
times[node] = OrderedDict({0 : node})
node_name = 'node-'+str(node)
f = open('./flu-data/moteFiles/'+node_name, 'r')
for contact in f:
line = map(int, contact.split())
contact_id = line[0]
time = (line[-1] - mintime + 1)
if contact_id in completed:
current_max = 0
current_time = -1
for t, pos in times[contact_id].items():
if current_time < t <= time:
current_max = pos
current_time = t
position = current_max
times[node][time] = position
completed.append(node)
f.close()
print 'Writing movement file'
out = {'coords': coords, 'movement': times}
pickle.dump(out, open(output, 'wb'))
if __name__ == '__main__':
process(default)
| mit | 7,964,937,547,895,710,000 | 26.96748 | 173 | 0.54157 | false | 3.706897 | false | false | false |
antiface/audiolazy | audiolazy/lazy_io.py | 1 | 14038 | # -*- coding: utf-8 -*-
# This file is part of AudioLazy, the signal processing Python package.
# Copyright (C) 2012-2014 Danilo de Jesus da Silva Bellini
#
# AudioLazy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Created on Fri Jul 20 2012
# danilo [dot] bellini [at] gmail [dot] com
"""
Audio recording input and playing output module
"""
import threading
import struct
import array
# Audiolazy internal imports
from ._internals import deprecate
from .lazy_stream import Stream
from .lazy_misc import DEFAULT_SAMPLE_RATE, blocks
from .lazy_compat import xrange, xmap
from .lazy_math import inf
from .lazy_core import StrategyDict
__all__ = ["chunks", "RecStream", "AudioIO", "AudioThread"]
# Conversion dict from structs.Struct() format symbols to PyAudio constants
_STRUCT2PYAUDIO = {"f": 1, #pyaudio.paFloat32
"i": 2, #pyaudio.paInt32
"h": 8, #pyaudio.paInt16
"b": 16, #pyaudio.paInt8
"B": 32, #pyaudio.paUInt8
}
chunks = StrategyDict("chunks")
chunks.__class__.size = 2048 # Samples
@chunks.strategy("struct")
def chunks(seq, size=None, dfmt="f", byte_order=None, padval=0.):
"""
Chunk generator based on the struct module (Python standard library).
Low-level data blockenizer for homogeneous data as a generator, to help
writing an iterable into a file.
The dfmt should be one char, chosen from the ones in link:
`<http://docs.python.org/library/struct.html#format-characters>`_
Useful examples (integer are signed, use upper case for unsigned ones):
- "b" for 8 bits (1 byte) integer
- "h" for 16 bits (2 bytes) integer
- "i" for 32 bits (4 bytes) integer
- "f" for 32 bits (4 bytes) float (default)
- "d" for 64 bits (8 bytes) float (double)
Byte order follows native system defaults. Other options are in the site:
`<http://docs.python.org/library/struct.html#struct-alignment>`_
They are:
- "<" means little-endian
- ">" means big-endian
Note
----
Default chunk size can be accessed (and changed) via chunks.size.
"""
if size is None:
size = chunks.size
dfmt = str(size) + dfmt
if byte_order is None:
struct_string = dfmt
else:
struct_string = byte_order + dfmt
s = struct.Struct(struct_string)
for block in blocks(seq, size, padval=padval):
yield s.pack(*block)
@chunks.strategy("array")
def chunks(seq, size=None, dfmt="f", byte_order=None, padval=0.):
"""
Chunk generator based on the array module (Python standard library).
See chunk.struct for more help. This strategy uses array.array (random access
by indexing management) instead of struct.Struct and blocks/deque (circular
queue appending) from the chunks.struct strategy.
Hint
----
Try each one to find the faster one for your machine, and choose
the default one by assigning ``chunks.default = chunks.strategy_name``.
It'll be the one used by the AudioIO/AudioThread playing mechanism.
Note
----
The ``dfmt`` symbols for arrays might differ from structs' defaults.
"""
if size is None:
size = chunks.size
chunk = array.array(dfmt, xrange(size))
idx = 0
for el in seq:
chunk[idx] = el
idx += 1
if idx == size:
yield chunk.tostring()
idx = 0
if idx != 0:
for idx in xrange(idx, size):
chunk[idx] = padval
yield chunk.tostring()
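# Usage sketch: both strategies produce identical byte blocks for the same
# inputs; benchmark them and set a default as the docstring above suggests.
def _example_chunks():
    data = list(range(5))
    via_struct = list(chunks.struct(data, size=4, dfmt="f"))
    via_array = list(chunks.array(data, size=4, dfmt="f"))
    return via_struct == via_array  # True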
class RecStream(Stream):
"""
Recording Stream
A common Stream class with a ``stop`` method for input data recording
and a ``recording`` read-only property for status.
"""
def __init__(self, device_manager, file_obj, chunk_size, dfmt):
if chunk_size is None:
chunk_size = chunks.size
s = struct.Struct("{0}{1}".format(chunk_size, dfmt))
def rec():
try:
while self._recording:
for k in s.unpack(file_obj.read(chunk_size)):
yield k
finally:
file_obj.close()
self._recording = False # Loop can be broken by StopIteration
self.device_manager.recording_finished(self)
super(RecStream, self).__init__(rec())
self._recording = True
self.device_manager = device_manager
def stop(self):
""" Finishes the recording stream, so it can raise StopIteration """
self._recording = False
@property
def recording(self):
return self._recording
class AudioIO(object):
"""
Multi-thread stream manager wrapper for PyAudio.
"""
def __init__(self, wait=False, api=None):
"""
Constructor to PyAudio Multi-thread manager audio IO interface.
The "wait" input is a boolean about the behaviour on closing the
instance, if it should or not wait for the streaming audio to finish.
Defaults to False. Only works if the close method is explicitly
called.
"""
import pyaudio
self._pa = pa = pyaudio.PyAudio()
self._threads = []
self.wait = wait # Wait threads to finish at end (constructor parameter)
self._recordings = []
# Lockers
self.halting = threading.Lock() # Only for "close" method
self.lock = threading.Lock() # "_threads" access locking
self.finished = False
# Choosing the PortAudio API (needed to use Jack)
if not (api is None):
api_count = pa.get_host_api_count()
apis_gen = xmap(pa.get_host_api_info_by_index, xrange(api_count))
try:
self.api = next(el for el in apis_gen
if el["name"].lower().startswith(api))
except StopIteration:
raise RuntimeError("API '{}' not found".format(api))
def __del__(self):
"""
Default destructor. Use close method instead, or use the class
instance as the expression of a with block.
"""
self.close()
def __exit__(self, etype, evalue, etraceback):
"""
Closing destructor for use internally in a with-expression.
"""
self.close()
def __enter__(self):
"""
To be used only internally, in the with-expression protocol.
"""
return self
def close(self):
"""
Destructor for this audio interface. Waits the threads to finish their
streams, if desired.
"""
with self.halting: # Avoid simultaneous "close" threads
if not self.finished: # Ignore all "close" calls, but the first,
self.finished = True # and any call to play would raise ThreadError
# Closes all playing AudioThread instances
while True:
with self.lock: # Ensure there's no other thread messing around
try:
thread = self._threads[0] # Needless to say: pop = deadlock
except IndexError: # Empty list
break # No more threads
if not self.wait:
thread.stop()
thread.join()
# Closes all recording RecStream instances
while self._recordings:
recst = self._recordings[-1]
recst.stop()
recst.take(inf) # Ensure it'll be closed
# Finishes
assert not self._pa._streams # No stream should survive
self._pa.terminate()
def terminate(self):
"""
Same as "close".
"""
self.close() # Avoids direct calls to inherited "terminate"
def play(self, audio, **kwargs):
"""
Start another thread that plays the given audio sample iterable
(e.g. a list, a generator, a NumPy np.ndarray with samples).
The arguments are used to customize behaviour of the new thread, as
parameters directly sent to PyAudio's new stream opening method, see
AudioThread.__init__ for more.
"""
with self.lock:
if self.finished:
raise threading.ThreadError("Trying to play an audio stream while "
"halting the AudioIO manager object")
new_thread = AudioThread(self, audio, **kwargs)
self._threads.append(new_thread)
new_thread.start()
return new_thread
def thread_finished(self, thread):
"""
Updates internal status about open threads. Should be called only by
the internal closing mechanism of AudioThread instances.
"""
with self.lock:
self._threads.remove(thread)
def recording_finished(self, recst):
"""
Updates internal status about open recording streams. Should be called
only by the internal closing mechanism of children RecStream instances.
"""
self._recordings.remove(recst)
def record(self, chunk_size = None,
dfmt = "f",
channels = 1,
rate = DEFAULT_SAMPLE_RATE,
**kwargs
):
"""
Records audio from device into a Stream.
Parameters
----------
chunk_size :
Number of samples per chunk (block sent to device).
dfmt :
Format, as in chunks(). Default is "f" (Float32).
channels :
Channels in audio stream (serialized).
rate :
Sample rate (same input used in sHz).
Returns
-------
Endless Stream instance that gather data from the audio input device.
"""
if chunk_size is None:
chunk_size = chunks.size
if hasattr(self, "api"):
kwargs.setdefault("input_device_index", self.api["defaultInputDevice"])
channels = kwargs.pop("nchannels", channels) # Backwards compatibility
input_stream = RecStream(self,
self._pa.open(format=_STRUCT2PYAUDIO[dfmt],
channels=channels,
rate=rate,
frames_per_buffer=chunk_size,
input=True,
**kwargs),
chunk_size,
dfmt
)
self._recordings.append(input_stream)
return input_stream
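# Usage sketch (needs a working input device; parameters are illustrative):
# take a short snippet from the microphone, then stop the recording stream.
def _example_record():
    with AudioIO() as audio_io:
        mic = audio_io.record(rate=DEFAULT_SAMPLE_RATE)
        snippet = mic.take(2048)
        mic.stop()
    return snippet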
class AudioThread(threading.Thread):
"""
Audio output thread.
This class is a wrapper to ease the use of PyAudio using iterables of
numbers (Stream instances, lists, tuples, NumPy 1D arrays, generators) as
audio data streams.
"""
def __init__(self, device_manager, audio,
chunk_size = None,
dfmt = "f",
channels = 1,
rate = DEFAULT_SAMPLE_RATE,
daemon = True, # This shouldn't survive after crashes
**kwargs
):
"""
Sets a new thread to play the given audio.
Parameters
----------
chunk_size :
Number of samples per chunk (block sent to device).
dfmt :
Format, as in chunks(). Default is "f" (Float32).
channels :
Channels in audio stream (serialized).
rate :
Sample rate (same input used in sHz).
daemon :
Boolean telling if thread should be daemon. Default is True.
"""
super(AudioThread, self).__init__()
self.daemon = daemon # threading.Thread property, couldn't be assigned
# before the superclass constructor
# Stores data needed by the run method
self.audio = audio
self.device_manager = device_manager
self.dfmt = dfmt
self.channels = kwargs.pop("nchannels", channels)
self.chunk_size = chunks.size if chunk_size is None else chunk_size
# Lockers
self.lock = threading.Lock() # Avoid control methods simultaneous call
self.go = threading.Event() # Communication between the 2 threads
self.go.set()
self.halting = False # The stop message
# Get the streaming function
import _portaudio # Just to be slightly faster (per chunk!)
self.write_stream = _portaudio.write_stream
if hasattr(device_manager, "api"):
kwargs.setdefault("output_device_index",
device_manager.api["defaultOutputDevice"])
# Open a new audio output stream
self.stream = device_manager._pa.open(format=_STRUCT2PYAUDIO[dfmt],
channels=self.channels, # honor a deprecated "nchannels" kwarg as well
rate=rate,
frames_per_buffer=self.chunk_size,
output=True,
**kwargs)
# Backwards compatibility
nchannels = property(deprecate(lambda self: self.channels))
def run(self):
"""
Plays the audio. This method plays the audio, and shouldn't be called
explicitly, let the constructor do so.
"""
# From now on, it's multi-thread. Let the force be with them.
st = self.stream._stream
for chunk in chunks(self.audio,
size=self.chunk_size*self.channels,
dfmt=self.dfmt):
#Below is a faster way to call:
# self.stream.write(chunk, self.chunk_size)
self.write_stream(st, chunk, self.chunk_size, False)
if not self.go.is_set():
self.stream.stop_stream()
if self.halting:
break
self.go.wait()
self.stream.start_stream()
# Finished playing! Destructor-like step: let's close the thread
with self.lock:
if self in self.device_manager._threads: # If not already closed
self.stream.close()
self.device_manager.thread_finished(self)
def stop(self):
""" Stops the playing thread and close """
with self.lock:
self.halting = True
self.go.clear()
def pause(self):
""" Pauses the audio. """
with self.lock:
self.go.clear()
def play(self):
""" Resume playing the audio. """
with self.lock:
self.go.set()
| gpl-3.0 | -8,054,913,346,017,574,000 | 30.195556 | 79 | 0.616826 | false | 4.131254 | false | false | false |
Answeror/aip | aip/imfs/cascade.py | 1 | 1705 | from .base import NameMixin
def load_ext(name, bases):
return need_raw(
name,
bases,
lambda base: base.load(name)
)
def thumbnail_ext(name, width, height, bases):
return need_raw(
name,
bases,
lambda base: base.thumbnail(name, width, height)
)
def mtime_ext(name, bases):
return need_raw(
name,
bases,
lambda base: base.mtime(name)
)
def need_raw(name, bases, f):
assert bases
if len(bases) == 1:
return f(bases[0])
try:
data = f(bases[0])
if data is not None:
return data
except:
pass
data = load_ext(name, bases[1:])
if data is not None:
try:
bases[0].save(name, data)
except:
pass
return f(bases[0])
class Cascade(NameMixin):
def __init__(self, *args):
self.bases = args
assert self.bases
def _load(self, name):
return load_ext(name, self.bases)
def _save(self, name, data):
for base in self.bases:
base.save(name, data)
def _thumbnail(self, name, width, height):
return thumbnail_ext(name, width, height, self.bases)
def _has(self, name):
for base in self.bases:
if base.has(name):
return True
return False
def _remove(self, name):
for base in self.bases:
base.remove(name)
def _mtime(self, name):
return mtime_ext(name, self.bases)
def _cache_timeout(self, name):
for base in self.bases:
ret = base.cache_timeout(name)
if ret is not None:
return ret
return None
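# Usage sketch (the backend names below are hypothetical NameMixin
# implementations, not part of this module): reads fall through the chain,
# and a hit in a slower layer is written back to the faster ones.
#
#     cache = Cascade(MemoryStorage(), DiskStorage(), HttpStorage())
#     data = cache.load('image.jpg') # memory, then disk, then HTTP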
| mit | -1,043,821,107,190,842,800 | 20.049383 | 61 | 0.537243 | false | 3.747253 | false | false | false |
JustRamon/SpeechController | SC.py | 1 | 1113 | #!/usr/bin/env python3
import speech_recognition as sr
import ksr10
import time
arm = ksr10.ksr10_class()
while 1:
r = sr.Recognizer()
with sr.Microphone() as source:
print("Say something!")
audio = r.listen(source)
try:
rn = r.recognize_google(audio)
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
continue # rn is undefined here; skip this loop iteration
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
continue # same: avoid a NameError on the rn checks below
if rn == "up":
arm.move("elbow","up")
time.sleep(1.5)
arm.stop()
if rn == "down":
arm.move("elbow","down")
time.sleep(1.5)
arm.stop()
if rn == "light":
arm.lights()
if rn == "grip":
with open ("Save.txt", "r") as file_:
oc = file_.read()
if oc == "1":
arm.move("grip","close")
time.sleep(1.6)
arm.stop()
with open ("Save.txt", "w") as file_:
file_.write("0")
elif oc == "0":
arm.move("grip","open")
time.sleep(1.4)
arm.stop()
with open ("Save.txt", "w") as file_:
file_.write("1")
else:
print "Error, file contains: " + oc
if rn == "stop":
break
| gpl-2.0 | -2,197,478,390,076,488,000 | 22.1875 | 97 | 0.607367 | false | 2.625 | false | false | false |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.