Dataset columns (one row per source file; the `content` column holds the full file text):

| column | type | range / values |
|---|---|---|
| repo_name | string | lengths 5 to 92 |
| path | string | lengths 4 to 221 |
| copies | string | 19 classes |
| size | string | lengths 4 to 6 |
| content | string | lengths 766 to 896k |
| license | string | 15 classes |
| hash | int64 | -9,223,277,421,539,062,000 to 9,223,102,107B |
| line_mean | float64 | 6.51 to 99.9 |
| line_max | int64 | 32 to 997 |
| alpha_frac | float64 | 0.25 to 0.96 |
| autogenerated | bool | 1 class |
| ratio | float64 | 1.5 to 13.6 |
| config_test | bool | 2 classes |
| has_no_keywords | bool | 2 classes |
| few_assignments | bool | 1 class |
repo_name: torchingloom/edx-platform | path: common/lib/xmodule/xmodule/master_class_module.py | copies: 1 | size: 15766

# -*- coding: utf-8 -*-
"""Word cloud is ungraded xblock used by students to
generate and view word cloud.
On the client side we show:
If student does not yet answered - `num_inputs` numbers of text inputs.
If student have answered - words he entered and cloud.
"""
import json
import logging
import datetime
import csv
import StringIO
from pkg_resources import resource_string
from xmodule.raw_module import EmptyDataRawDescriptor
from xmodule.editing_module import MetadataOnlyEditingDescriptor
from xmodule.x_module import XModule
from django.contrib.auth.models import User
from django.utils.timezone import UTC
from xblock.fields import Scope, Dict, Boolean, List, Integer, String
from xmodule.modulestore import Location
log = logging.getLogger(__name__)
from django.utils.translation import ugettext as _
from django.conf import settings
def pretty_bool(value):
"""Check value for possible `True` value.
Using this function we can manage different type of Boolean value
in xml files.
"""
bool_dict = [True, "True", "true", "T", "t", "1"]
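# Illustrative behaviour: pretty_bool("true") -> True, pretty_bool("no") -> False.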
return value in bool_dict
class MasterClassFields(object):
"""XFields for word cloud."""
display_name = String(
display_name=_("Display Name"),
help=_("Display name for this module"),
scope=Scope.settings,
default=_("Master Class")
)
total_places = Integer(
display_name=_("Max places"),
help=_("Number of places available for students to register for masterclass."),
scope=Scope.settings,
default=30,
values={"min": 1}
)
autopass_score = Integer(
display_name=_("Autopass score"),
help=_("Autopass score to automaticly pass registration for masterclass."),
scope=Scope.settings,
default=250,
values={"min": 1}
)
problem_id = String(
display_name=_("Masterclass problem id"),
help=_("Full id of the problem which is to be acomplished to pass registration for masterclass."),
scope=Scope.settings,
#default=_("Master Class") # no default
)
auto_register_if_passed = Boolean(
display_name=_("Auto registration"),
help=_("Auto registration for masterclass if a user passed the test"),
scope=Scope.settings,
default=False,
)
# Fields for descriptor.
submitted = Boolean(
help=_("Whether this student has been register for this master class."),
scope=Scope.user_state,
default=False
)
all_registrations = List(
help=_("All registrations from all students."),
scope=Scope.user_state_summary
)
passed_registrations = List(
help=_("Passed registrations."),
scope=Scope.user_state_summary
)
passed_masterclass_test = Boolean(
help=_("Whether this student has passed the task to register for the masterclass."),
scope=Scope.user_state,
default=False
)
class MasterClassModule(MasterClassFields, XModule):
"""MasterClass Xmodule"""
js = {
'coffee': [resource_string(__name__, 'js/src/javascript_loader.coffee')],
'js': [resource_string(__name__, 'js/src/word_cloud/d3.min.js'),
resource_string(__name__, 'js/src/word_cloud/d3.layout.cloud.js'),
resource_string(__name__, 'js/src/master_class/master_class.js'),
resource_string(__name__, 'js/src/master_class/master_class_main.js')]
}
css = {'scss': [resource_string(__name__, 'css/master_class/display.scss')]}
js_module_name = "MasterClass"
def get_state(self):
"""Return success json answer for client."""
total_register = len(self.passed_registrations)
message = ""
message2 = ""
if self.runtime.user.email in self.passed_registrations:
message = _("You have been registered for this master class. We will provide addition information soon.")
elif self.runtime.user.email in self.all_registrations:
message = _("You are pending for registration for this master class. Please visit this page later for result.")
else:
message2 = _("You have not been registered for this master class. Probably you have to pass a test first or there is not enough places.")
if (total_register is None):
total_register = 0
additional_data = {}
allreg = []
passreg = []
for email in self.all_registrations:
try:
user = User.objects.get(email=email)
allreg += [{'email': email, 'name': user.profile.lastname + ' ' + user.profile.firstname + ' ' + user.profile.middlename}]
except:
pass
for email in self.passed_registrations:
try:
user = User.objects.get(email=email)
passreg += [{'email': email, 'name': user.profile.lastname + ' ' + user.profile.firstname + ' ' + user.profile.middlename}]
except:
pass
if self.runtime.user_is_staff:
additional_data['all_registrations'] = allreg
additional_data['passed_registrations'] = passreg
additional_data['is_staff'] = self.runtime.user_is_staff
additional_data['csv_name'] = self.runtime.course_id + " " + self.display_name
if self.submitted and self.runtime.user.email not in self.all_registrations and self.runtime.user.email not in self.passed_registrations:
self.submitted = False
if self.submitted:
data = {
'status': 'success',
'submitted': True,
'is_closed': self.is_past_due(),
'total_places': self.total_places,
'total_register': total_register,
'message': message,
'problem_id': self.problem_id,
'auto_register_if_passed': self.auto_register_if_passed,
}
data.update(additional_data)
return json.dumps(data)
else:
data = {
'status': 'success',
'submitted': False,
'is_closed': self.is_past_due(),
'total_places': self.total_places,
'total_register': total_register,
'problem_id': self.problem_id,
'message': message2,
'auto_register_if_passed': self.auto_register_if_passed,
}
data.update(additional_data)
return json.dumps(data)
def handle_ajax(self, dispatch, data):
"""Ajax handler.
Args:
dispatch: string request slug
data: dict request get parameters
Returns:
json string
"""
if dispatch == 'submit':
if self.is_past_due():
return json.dumps({
'status': 'fail',
'error': 'Registration is closed because the due date has passed.'
})
if self.submitted:
return json.dumps({
'status': 'fail',
'error': 'You have already posted your data.'
})
# Student words from client.
# FIXME: we should use raw JSON here, not POST form data (multipart/form-data)
master_class = data.getall('master_class[]')
if self.problem_id is None:
self.all_registrations.append(self.runtime.user.email)
self.submitted = True
return self.get_state()
problem_location = Location(self.problem_id)
problem_descriptor = self.runtime.descriptor_runtime.modulestore.get_item(problem_location)
problem_score = self.runtime.get_score(self.runtime.course_id, self.runtime.user, problem_descriptor, self.runtime.get_module)
self.passed_masterclass_test = problem_score is not None and len(problem_score) >= 2 and problem_score[0] >= self.autopass_score
if self.passed_masterclass_test:
if self.auto_register_if_passed:
if len(self.passed_registrations) < self.total_places:
self.passed_registrations.append(self.runtime.user.email)
self.submitted = True
else:
self.all_registrations.append(self.runtime.user.email)
self.submitted = True
return self.get_state()
elif dispatch == 'get_state':
return self.get_state()
elif dispatch == 'register':
logging.error(data)
if self.runtime.user_is_staff:
for email in data.getall('emails[]'):
if (len(self.passed_registrations) < self.total_places):
if (self.all_registrations.count(email) > 0):
self.passed_registrations.append(email)
self.all_registrations.remove(email)
subject = u"Подтверждение регистрации на {masterclass}".format(masterclass=self.display_name)
body = u"Уважаемый(ая) {fullname}!\nВаша заявка на {masterclass} была одобрена. Подробности Вы можете узнать по ссылке: {url}.\nС уважением, Команда ГБОУ ЦПМ.".format(
fullname=User.objects.get(email=email).profile.name,
masterclass=self.display_name,
url='https://' + settings.SITE_NAME + '/courses/' + self.course_id + '/jump_to/{}'.format(Location(self.location))
)
mail = self.runtime.bulkmail.create(self.course_id,
self.runtime.user,
'list',
subject,
body,
location=self.id,
to_list=[email]
)
try:
mail.send()
return self.get_state()
except:
return json.dumps({
'status': 'fail',
'msg': _('Your email can not be sent.')
})
else:
return json.dumps({
'status': 'fail',
'error': _("Not enough places for this master class.")
})
return self.get_state()
else:
return json.dumps({
'status': 'fail',
'error': 'Unknown Command!'
})
elif dispatch == 'unregister':
logging.error(data)
if self.runtime.user_is_staff:
for email in data.getall('emails[]'):
if (self.passed_registrations.count(email) > 0):
self.passed_registrations.remove(email)
self.all_registrations.append(email)
return self.get_state()
else:
return json.dumps({
'status': 'fail',
'error': 'Unknown Command!'
})
elif dispatch == 'remove':
logging.error(data)
if self.runtime.user_is_staff:
for email in data.getall('emails[]'):
if (self.passed_registrations.count(email) > 0):
self.passed_registrations.remove(email)
if (self.all_registrations.count(email) > 0):
self.all_registrations.remove(email)
return self.get_state()
else:
return json.dumps({
'status': 'fail',
'error': 'Unknown Command!'
})
elif dispatch == 'csv':
if self.runtime.user_is_staff:
header = [u'Email', u'Фамилия', u'Имя', u'Отчество',]
datatable = {'header': header, 'students': []}
data = []
for email in self.passed_registrations:
datarow = []
user = User.objects.get(email=email)
datarow += [user.email, user.profile.lastname, user.profile.firstname, user.profile.middlename]
data += [datarow]
datatable['data'] = data
return self.return_csv(" ", datatable, encoding="cp1251", dialect="excel-tab")
else:
return json.dumps({
'status': 'fail',
'error': 'Unknown Command!'
})
elif dispatch == 'email':
subject = data.get('subject')
body = data.get('body')
mail = self.runtime.bulkmail.create(self.course_id, self.runtime.user, 'list', subject, body, location=self.id, to_list=self.passed_registrations)
mail.send()
return json.dumps({
'status': 'success',
'msg': _('Your email was successfully queued for sending.')
})
else:
return json.dumps({
'status': 'fail',
'error': 'Unknown Command!'
})
def is_past_due(self):
"""
Is it now past this problem's due date, including grace period?
"""
return (self.due is not None and
datetime.datetime.now(UTC()) > self.due)
def get_html(self):
"""Template rendering."""
logging.info(type(self.location))
logging.info(self.get_progress())
logging.info(self.runtime.seed)
logging.info(self.runtime.anonymous_student_id)
logging.info(self.runtime)
context = {
'display_name': self.display_name,
'due': self.due,
'element_id': self.location.html_id(),
'element_class': self.location.category,
'ajax_url': self.system.ajax_url,
'submitted': self.submitted,
'is_staff': self.runtime.user_is_staff,
'all_registrations': self.all_registrations,
'passed_registrations': self.passed_registrations
}
self.content = self.system.render_template('master_class.html', context)
return self.content
def return_csv(self, func, datatable, file_pointer=None, encoding="utf-8", dialect="excel"):
"""Outputs a CSV file from the contents of a datatable."""
if file_pointer is None:
response = StringIO.StringIO()
else:
response = file_pointer
writer = csv.writer(response, dialect=dialect, quotechar='"', quoting=csv.QUOTE_ALL)
encoded_row = [unicode(s).encode(encoding) for s in datatable['header']]
writer.writerow(encoded_row)
for datarow in datatable['data']:
encoded_row = [unicode(s).encode(encoding) for s in datarow]
writer.writerow(encoded_row)
if file_pointer is None:
return response.getvalue()
else:
return response
class MasterClassDescriptor(MasterClassFields, MetadataOnlyEditingDescriptor, EmptyDataRawDescriptor):
"""Descriptor for MasterClass Xmodule."""
module_class = MasterClassModule
template_dir_name = 'master_class'
license: agpl-3.0 | hash: -7,321,570,871,203,777,000 | line_mean: 39.703125 | line_max: 195 | alpha_frac: 0.540179 | autogenerated: false | ratio: 4.418999 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: googleapis/googleapis-gen | path: google/cloud/aiplatform/v1/aiplatform-v1-py/google/cloud/aiplatform_v1/services/pipeline_service/transports/grpc_asyncio.py | copies: 1 | size: 18872

# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.aiplatform_v1.types import pipeline_service
from google.cloud.aiplatform_v1.types import training_pipeline
from google.cloud.aiplatform_v1.types import training_pipeline as gca_training_pipeline
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from .base import PipelineServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import PipelineServiceGrpcTransport
class PipelineServiceGrpcAsyncIOTransport(PipelineServiceTransport):
"""gRPC AsyncIO backend transport for PipelineService.
A service for creating and managing Vertex AI's pipelines. This
includes both ``TrainingPipeline`` resources (used for AutoML and
custom training) and ``PipelineJob`` resources (used for Vertex
Pipelines).
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(cls,
host: str = 'aiplatform.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
def __init__(self, *,
host: str = 'aiplatform.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
self._operations_client = None
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Sanity check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
return self._operations_client
@property
def create_training_pipeline(self) -> Callable[
[pipeline_service.CreateTrainingPipelineRequest],
Awaitable[gca_training_pipeline.TrainingPipeline]]:
r"""Return a callable for the create training pipeline method over gRPC.
Creates a TrainingPipeline. A created
TrainingPipeline right away will be attempted to be run.
Returns:
Callable[[~.CreateTrainingPipelineRequest],
Awaitable[~.TrainingPipeline]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_training_pipeline' not in self._stubs:
self._stubs['create_training_pipeline'] = self.grpc_channel.unary_unary(
'/google.cloud.aiplatform.v1.PipelineService/CreateTrainingPipeline',
request_serializer=pipeline_service.CreateTrainingPipelineRequest.serialize,
response_deserializer=gca_training_pipeline.TrainingPipeline.deserialize,
)
return self._stubs['create_training_pipeline']
@property
def get_training_pipeline(self) -> Callable[
[pipeline_service.GetTrainingPipelineRequest],
Awaitable[training_pipeline.TrainingPipeline]]:
r"""Return a callable for the get training pipeline method over gRPC.
Gets a TrainingPipeline.
Returns:
Callable[[~.GetTrainingPipelineRequest],
Awaitable[~.TrainingPipeline]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_training_pipeline' not in self._stubs:
self._stubs['get_training_pipeline'] = self.grpc_channel.unary_unary(
'/google.cloud.aiplatform.v1.PipelineService/GetTrainingPipeline',
request_serializer=pipeline_service.GetTrainingPipelineRequest.serialize,
response_deserializer=training_pipeline.TrainingPipeline.deserialize,
)
return self._stubs['get_training_pipeline']
@property
def list_training_pipelines(self) -> Callable[
[pipeline_service.ListTrainingPipelinesRequest],
Awaitable[pipeline_service.ListTrainingPipelinesResponse]]:
r"""Return a callable for the list training pipelines method over gRPC.
Lists TrainingPipelines in a Location.
Returns:
Callable[[~.ListTrainingPipelinesRequest],
Awaitable[~.ListTrainingPipelinesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_training_pipelines' not in self._stubs:
self._stubs['list_training_pipelines'] = self.grpc_channel.unary_unary(
'/google.cloud.aiplatform.v1.PipelineService/ListTrainingPipelines',
request_serializer=pipeline_service.ListTrainingPipelinesRequest.serialize,
response_deserializer=pipeline_service.ListTrainingPipelinesResponse.deserialize,
)
return self._stubs['list_training_pipelines']
@property
def delete_training_pipeline(self) -> Callable[
[pipeline_service.DeleteTrainingPipelineRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the delete training pipeline method over gRPC.
Deletes a TrainingPipeline.
Returns:
Callable[[~.DeleteTrainingPipelineRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_training_pipeline' not in self._stubs:
self._stubs['delete_training_pipeline'] = self.grpc_channel.unary_unary(
'/google.cloud.aiplatform.v1.PipelineService/DeleteTrainingPipeline',
request_serializer=pipeline_service.DeleteTrainingPipelineRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['delete_training_pipeline']
@property
def cancel_training_pipeline(self) -> Callable[
[pipeline_service.CancelTrainingPipelineRequest],
Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the cancel training pipeline method over gRPC.
Cancels a TrainingPipeline. Starts asynchronous cancellation on
the TrainingPipeline. The server makes a best effort to cancel
the pipeline, but success is not guaranteed. Clients can use
[PipelineService.GetTrainingPipeline][google.cloud.aiplatform.v1.PipelineService.GetTrainingPipeline]
or other methods to check whether the cancellation succeeded or
whether the pipeline completed despite cancellation. On
successful cancellation, the TrainingPipeline is not deleted;
instead it becomes a pipeline with a
[TrainingPipeline.error][google.cloud.aiplatform.v1.TrainingPipeline.error]
value with a [google.rpc.Status.code][google.rpc.Status.code] of
1, corresponding to ``Code.CANCELLED``, and
[TrainingPipeline.state][google.cloud.aiplatform.v1.TrainingPipeline.state]
is set to ``CANCELLED``.
Returns:
Callable[[~.CancelTrainingPipelineRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'cancel_training_pipeline' not in self._stubs:
self._stubs['cancel_training_pipeline'] = self.grpc_channel.unary_unary(
'/google.cloud.aiplatform.v1.PipelineService/CancelTrainingPipeline',
request_serializer=pipeline_service.CancelTrainingPipelineRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['cancel_training_pipeline']
__all__ = (
'PipelineServiceGrpcAsyncIOTransport',
)
license: apache-2.0 | hash: 227,159,472,070,711,040 | line_mean: 46.417085 | line_max: 109 | alpha_frac: 0.632524 | autogenerated: false | ratio: 4.751259 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: puttarajubr/commcare-hq | path: custom/ilsgateway/tests/test_locations_sync.py | copies: 1 | size: 4004

from datetime import datetime
import json
import os
from django.test import TestCase
from corehq.apps.commtrack.models import CommtrackConfig
from corehq.apps.commtrack.tests.util import bootstrap_domain as initial_bootstrap
from corehq.apps.locations.models import Location, SQLLocation
from custom.ilsgateway.api import Location as Loc, ILSGatewayAPI
from custom.ilsgateway.tests.mock_endpoint import MockEndpoint
from custom.logistics.api import ApiSyncObject
from custom.logistics.commtrack import synchronization
from custom.logistics.models import MigrationCheckpoint
TEST_DOMAIN = 'ilsgateway-commtrack-locations-test'
class LocationSyncTest(TestCase):
def setUp(self):
self.endpoint = MockEndpoint('http://test-api.com/', 'dummy', 'dummy')
self.api_object = ILSGatewayAPI(TEST_DOMAIN, self.endpoint)
self.datapath = os.path.join(os.path.dirname(__file__), 'data')
domain = initial_bootstrap(TEST_DOMAIN)
CommtrackConfig(domain=domain.name).save()
self.api_object.prepare_commtrack_config()
for location in Location.by_domain(TEST_DOMAIN):
location.delete()
def test_create_facility_location(self):
with open(os.path.join(self.datapath, 'sample_locations.json')) as f:
location = Loc(**json.loads(f.read())[0])
ilsgateway_location = self.api_object.location_sync(location)
self.assertEqual(ilsgateway_location.name, location.name)
self.assertEqual(ilsgateway_location.location_type, location.type)
self.assertEqual(ilsgateway_location.longitude, float(location.longitude))
self.assertEqual(ilsgateway_location.latitude, float(location.latitude))
self.assertEqual(int(ilsgateway_location.parent.sql_location.external_id), location.parent_id)
self.assertIsNotNone(ilsgateway_location.linked_supply_point())
self.assertIsNotNone(ilsgateway_location.sql_location.supply_point_id)
def test_create_non_facility_location(self):
with open(os.path.join(self.datapath, 'sample_locations.json')) as f:
location = Loc(**json.loads(f.read())[1])
ilsgateway_location = self.api_object.location_sync(location)
self.assertEqual(ilsgateway_location.name, location.name)
self.assertEqual(ilsgateway_location.location_type, location.type)
self.assertEqual(ilsgateway_location.longitude, float(location.longitude))
self.assertEqual(ilsgateway_location.latitude, float(location.latitude))
self.assertIsNone(ilsgateway_location.parent)
self.assertIsNone(ilsgateway_location.linked_supply_point())
self.assertIsNone(ilsgateway_location.sql_location.supply_point_id)
def test_locations_migration(self):
checkpoint = MigrationCheckpoint(
domain=TEST_DOMAIN,
start_date=datetime.utcnow(),
date=datetime.utcnow(),
api='product',
limit=100,
offset=0
)
location_api = ApiSyncObject(
'location_facility',
self.endpoint.get_locations,
self.api_object.location_sync,
filters=dict(type='facility')
)
synchronization(location_api, checkpoint, None, 100, 0)
self.assertEqual('location_facility', checkpoint.api)
self.assertEqual(100, checkpoint.limit)
self.assertEqual(0, checkpoint.offset)
self.assertEqual(5, len(list(Location.by_domain(TEST_DOMAIN))))
self.assertEqual(5, SQLLocation.objects.filter(domain=TEST_DOMAIN).count())
sql_location = SQLLocation.objects.get(domain=TEST_DOMAIN, site_code='DM520053')
self.assertEqual('FACILITY', sql_location.location_type.name)
self.assertIsNotNone(sql_location.supply_point_id)
sql_location2 = SQLLocation.objects.get(domain=TEST_DOMAIN, site_code='region-dodoma')
self.assertEqual('REGION', sql_location2.location_type.name)
self.assertIsNone(sql_location2.supply_point_id)
license: bsd-3-clause | hash: 6,287,786,364,255,786,000 | line_mean: 47.829268 | line_max: 102 | alpha_frac: 0.709041 | autogenerated: false | ratio: 3.894942 | config_test: true | has_no_keywords: false | few_assignments: false

repo_name: davidvg/google_api | path: google_api/gmail_api.py | copies: 1 | size: 13120

'''
Basic Python3 implementation of some functionality of the Gmail API.
Based on the code from the Gmail API documentation.
Requires a 'secret file' to allow authentication (see [1])
Installation
-----------
In Python3, install the API using pip3:
pip3 install --upgrade google-api-python-client
Install packages:
python3 setup.py develop
[1] https://developers.google.com/gmail/api/quickstart/python
'''
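# A minimal usage sketch (assumes the client_secret.json path configured below
# is valid; the label list is only an example):
#
#     gm = Client()
#     gm.get_messages(labels=['INBOX'], format='full')
#     gm.decode_messages()
#     for msg in gm.messages:
#         print(msg['date'], msg['subject'])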
import httplib2
import os.path
import base64
import email
import time
import datetime as dt
from googleapiclient import discovery
from googleapiclient.http import BatchHttpRequest as batchRequest
from oauth2client import file, client, tools
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/gmail_api-python.json
SCOPES = 'https://www.googleapis.com/auth/gmail.readonly'
CLIENT_SECRET_FILE = '../client_secret.json'
APPLICATION_NAME = 'Gmail API downloader'
class Client(object):
def __init__(self, scopes_=SCOPES, secret_=CLIENT_SECRET_FILE):
'''
Initialize the class' variables
'''
# Internals
self.__scopes = scopes_
self.__secret = secret_
self.service = None
# Members
self.msg_ids = []
self.raw_messages = []
self.messages = []
self.__format__ = None  # format of the most recently downloaded messages
# Path for storing credentials
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'gmail_api-python.json')
store = file.Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(self.__secret, self.__scopes)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run(flow, store)
print('Storing credentials to ' + credential_path)
# Build the service
http = credentials.authorize(httplib2.Http())
self.service = discovery.build('gmail', 'v1', http=http)
def __parse_id(self, id_):
'''
Parses an id when passed to a function, to make sure it works for
every method.
Seems redundant with Client.get_id() when called on a message.
'''
if isinstance(id_, dict):
return id_['id']
elif isinstance(id_, str):
return id_
else:
# Is it a message?
try:
id_ = id_['id']
except:
print(' >>>> __parse_id(): No valid message id.')
return None
def get_msg_ids_from_labels(self, labels):
'''
Populate self.msg_ids with the ids of all messages carrying the
given label ids, following pagination via nextPageToken.
'''
# Clear previous msg_ids
self.msg_ids = []
response = self.service.users().messages().list(userId='me',
labelIds=labels
).execute()
# First page of results
if 'messages' in response:
self.msg_ids.extend(response['messages'])
# Check if there are more result pages
while 'nextPageToken' in response:
page_token = response['nextPageToken']
response = self.service.users().messages().list(
userId='me',
labelIds=labels,
pageToken = page_token
).execute()
self.msg_ids.extend(response['messages'])
def get_msg_ids_from_query(self, query):
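# Populate self.msg_ids with the ids of all messages matching the given Gmail
# search query, following pagination via nextPageToken.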
# Clear previous msg_ids
self.msg_ids = []
response = self.service.users().messages().list(userId='me',
q=query,
).execute()
# First page of results
if 'messages' in response:
self.msg_ids.extend(response['messages'])
# Check if there are more result pages
while 'nextPageToken' in response:
page_token = response['nextPageToken']
response = self.service.users().messages().list(
userId='me',
q=query,
pageToken = page_token
).execute()
self.msg_ids.extend(response['messages'])
def get_batch_messages(self, msg_ids, format='full'):
'''
Download a group of messages, given its ids.
Arguments:
- msg_ids: a list of message ids as returned by the API.
- format: the format for the downloaded message: 'full', 'raw',
'metadata', 'minimal'
Returns:
- A list with the messages.
'''
# Store current format
self.__format__ = format
messages = []
def callback_(req_id, resp, exception):
if exception:
print(' >>> CallbackException')
pass
else:
messages.append(resp)
def batch_request():
batch = self.service.new_batch_http_request(callback_)
ids_ = [elem['id'] for elem in msg_ids]
for id_ in ids_:
batch.add(self.service.users().messages().get(userId='me',
id=id_,
format=format))
batch.execute()
if len(self.msg_ids) < 1000:
batch_request()
else:
# To Do: implement the case for 1000+ messages
pass
self.raw_messages = messages
def get_message(self, msg_id, format='full'):
# Store current format
self.__format__ = format
# Check type of msg_id argument
msg_id = self.__parse_id(msg_id)
# Get messages
res = self.service.users().messages().get(userId='me',
id=msg_id,
format=format).execute()
return res
def get_messages(self, msg_ids=None, labels=None, query=None, format='full'):
# Store current format
self.__format__ = format
# Get the id for messages corresponding to labels/query
if msg_ids:
self.msg_ids = msg_ids
elif labels:
self.get_msg_ids_from_labels(labels=labels)
elif query:
self.get_msg_ids_from_query(query=query)
else:
print(' >>> get_messages(): No labels or query passed. Nothing is done.')
# Download the messages
self.get_batch_messages(self.msg_ids, format=format)
### Parsing and decoding the messages
'''
Message structure for the different formats
* Full
----
- snippet
- internalDate: ms from Epoch
- id
- payload
- filename
- headers: list of 26 dicts with keys {'name', 'value'}
- Received: date (multiple occurences ?)
- MIME-Version
- Content-Type: text/html, charset
- From
- Subject
- ...
- mimeType: text/html, ...
- parts
- body: dict
- data: base64
- size: int
- sizeEstimate
- historyId
- labelIds: list of labels
- threadId
* Raw
---
- threadId
- snippet
- historyId
- internalDate
- id
- raw: base64
- labelIds
- sizeEstimate
* Metadata: dict with 8 dicts
--------
- threadId
- snippet
- historyId
- inernalDate
- id
- labelIds
- payload: dict
- mimeType: text/html, ...
- headers
- sizeEstimate
* Minimal
-------
- historyId
- id
- labelIds
- sizeEstimate
- snippet
- threadId
'''
def get_id(self, message):
'''
Returns the message id for a single raw message.
'''
return str(message['id'])
def get_labels(self, message):
'''
Returns a list of labels for a single raw message.
'''
return message['labelIds']
def modify_labels(self, obj, add=[], remove=[]):
"""
Adds or removes labels from a message.
"""
id_ = self.__parse_id(obj)
self.service.users().messages().modify(
userId='me',
id=id_,
body={'addLabelIds': add,
'removeLabelIds': remove}).execute()
def is_unread(self, message):
# Assumes the message has already been decoded (it looks at the 'labels' key)
return 'UNREAD' in message['labels']
def mark_as_read(self, obj):
id_ = self.__parse_id(obj)
self.modify_labels(id_, remove=['UNREAD'])
def get_date(self, message):
''' Returns the reception date for a single raw message in a string
using strftime.
'''
internal = float(message['internalDate'])/1000. # seconds from Epoch
date = time.gmtime(internal)
res = dt.datetime(year=date.tm_year,
month=date.tm_mon,
day=date.tm_mday,
hour=date.tm_hour,
minute=date.tm_min,
second=date.tm_sec)
return res.strftime('%Y-%m-%dT%H:%M:%S')
def get_subject(self, message):
headers = message['payload']['headers']
for h in headers:
if h['name'] == 'Subject':
return h['value']
return None
def get_body(self, message):
if self.__format__ is 'full':
payload = message['payload']
if not 'parts' in payload:
raw = payload['body']['data']
else:
### CHECK THIS!!
raw = payload['parts'][0]['body']['data']
body = base64.urlsafe_b64decode(raw.encode('ASCII'))
elif self.__format__ is 'raw':
raw = message['raw']
raw = base64.urlsafe_b64decode(raw.encode('ASCII'))
mime = email.message_from_bytes(raw)
body = mime.get_payload(decode=True)
return body
def decode_messages(self, keys=None):
'''
For 'full' and 'raw' formats; 'minimal' and 'metadata' have no message
body.
Takes messages stored in Client.raw_messages and extracts info from them.
The result is stored in Client.messages
'''
self.messages = []
for msg in self.raw_messages:
decoded = {}
if not keys:
keys = ['id', 'date', 'snippet', 'body', 'labels', 'subject',
'headers']
for key in keys:
decoded[key] = None
decoded['id'] = self.get_id(msg)
decoded['date'] = self.get_date(msg)
decoded['labels'] = self.get_labels(msg)
decoded['snippet'] = msg['snippet']
if self.__format__ is 'full':
decoded['body'] = self.get_body(msg)
decoded['subject'] = self.get_subject(msg)
decoded['headers'] = msg['payload']['headers']
elif self.__format__ is 'raw':
decoded['body'] = self.get_body(msg)
pass
elif self.__format__ is 'metadata':
# At the moment it returns the payload dictionary
decoded['headers'] = msg['payload']['headers']
elif self.__format__ is 'minimal':
pass
self.messages.append(decoded)
def write(self, message, use='date', to='html'):
"""
Write the body of the message to a file.
- use: which key use to generate name (currently only 'date')
- to: file extension
"""
if use is 'date':
name = message[use]
else:
pass
out = '%s.%s' % (name, to)
if self.__format__ is 'full' or self.__format__ is 'raw':
body = message['body'].decode('utf-8')
with open(out, 'w') as f:
f.write(body)
else:
print(' >>> Client.write(): no body to write (format = %s)'
% self.__format__)
def main():
pass
if __name__ == '__main__':
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
gm = Client()
gm.get_msg_ids_from_labels('Label_59')
ids = gm.msg_ids[:2]
gm.get_messages(msg_ids=ids, format='full')
gm.decode_messages()
m = gm.messages[0]
gm.write(m, to='txt')
license: mit | hash: -5,331,380,682,368,593,000 | line_mean: 32.384224 | line_max: 88 | alpha_frac: 0.509604 | autogenerated: false | ratio: 4.462585 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: Lax/Packages | path: ganglia-gmond-modules-python-plugins/usr/lib64/ganglia/python_modules/sockstat.py | copies: 1 | size: 2803

# sockstat module for ganglia 3.1.x and above
# Copyright (C) Wang Jian <[email protected]>, 2009
import os, sys
import time
last_poll_time = 0
sockstats = {
'tcp_total': 0,
'tcp_established': 0,
'tcp_orphan': 0,
'tcp_timewait': 0,
'udp_total': 0 }
def metric_update():
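# Lines in /proc/net/sockstat look roughly like (numbers illustrative):
#   TCP: inuse 5 orphan 0 tw 2 alloc 7 mem 1
#   UDP: inuse 3 mem 2
# so below line[2] is the in-use count, line[4] the orphan count and
# line[6] the time-wait count on the TCP line.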
global sockstats
f = open('/proc/net/sockstat', 'r')
for l in f:
line = l.split()
if (line[0] == 'TCP:'):
sockstats['tcp_total'] = int(line[2])
sockstats['tcp_orphan'] = int(line[4])
sockstats['tcp_established'] = int(line[2]) - int(line[4])
sockstats['tcp_timewait'] = int(line[6])
continue
if (line[0] == 'UDP:'):
sockstats['udp_total'] = int(line[2])
continue
f.close()
def metric_read(name):
global last_poll_time
global sockstats
now_time = time.time()
'''time skewed'''
if now_time < last_poll_time:
last_poll_time = now_time
return 0
'''we cache statistics for 2 sec, it's enough for polling all 3 counters'''
if (now_time - last_poll_time) > 2:
metric_update()
last_poll_time = now_time
return sockstats[name]
descriptors = [{
'name': 'tcp_total',
'call_back': metric_read,
'time_max': 20,
'value_type': 'uint',
'units': 'Sockets',
'slope': 'both',
'format': '%u',
'description': 'Total TCP sockets',
'groups': 'network',
},
{
'name': 'tcp_established',
'call_back': metric_read,
'time_max': 20,
'value_type': 'uint',
'units': 'Sockets',
'slope': 'both',
'format': '%u',
'description': 'TCP established sockets',
'groups': 'network',
},
{
'name': 'tcp_timewait',
'call_back': metric_read,
'time_max': 20,
'value_type': 'uint',
'units': 'Sockets',
'slope': 'both',
'format': '%u',
'description': 'TCP timewait sockets',
'groups': 'network',
},
{
'name': 'udp_total',
'call_back': metric_read,
'time_max': 20,
'value_type': 'uint',
'units': 'Sockets',
'slope': 'both',
'format': '%u',
'description': 'Total UDP sockets',
'groups': 'network',
}]
def metric_init(params):
return descriptors
def metric_cleanup():
pass
# for unit testing
if __name__ == '__main__':
metric_init(None)
for d in descriptors:
v = d['call_back'](d['name'])
print '%s = %d' % (d['name'], v)
print "----"
while 1:
time.sleep(1)
for d in descriptors:
v = d['call_back'](d['name'])
print '%s = %d' % (d['name'], v)
print "----"
license: gpl-3.0 | hash: -1,002,256,754,032,724,500 | line_mean: 23.163793 | line_max: 79 | alpha_frac: 0.49447 | autogenerated: false | ratio: 3.393462 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: paolomonella/ursus | path: xmlToolBox/minidomToolBox.py | copies: 1 | size: 7687

#! /usr/bin/env python
# This is a toolbox I'm using to look for specific things in the XML DOM
##################
# Import modules #
##################
from __future__ import print_function
from xml.dom.minidom import parse, parseString
#import xml.dom.minidom
#################
# Parse the XML #
#################
xmldoc=parse('/home/ilbuonme/siti/paolo.monella/ursus/casanatensis.xml')
###########
# Methods #
###########
def checkIDs():
"""
This function checks whether there are duplicated or
non-sequential xml:id's for <w> elements.
"""
wordElementList = xmldoc.getElementsByTagName('ref')
prevIdN = 0
for r in wordElementList:
#print('cRef: '+r.attributes.getNamedItem('cRef').nodeValue)
for c in r.childNodes:
if c.nodeType == c.ELEMENT_NODE and c.tagName == 'w':
#print(c.attributes.getNamedItem('xml:id').nodeValue, end=', ')
myId = c.attributes.getNamedItem('xml:id').nodeValue
myIdN = int(myId[1:])
#print(myIdN, end=', ')
if not myIdN > prevIdN:
print('Trouble! Not greater...')
#print(myIdN, 'is greater than ', prevIdN)
if myIdN == prevIdN:
print('Trouble! Equal')
def searchPcChildrenOfUnclear():
"""
Print all <pc> elments that are children of <unclear>.
"""
wordElementList = xmldoc.getElementsByTagName('ref')
x = False
for r in wordElementList:
#print('cRef: '+r.attributes.getNamedItem('cRef').nodeValue)
for c in r.childNodes:
if c.nodeType == c.ELEMENT_NODE:
if c.tagName == 'w' and x:
print(c.attributes.getNamedItem('xml:id').nodeValue, end=' comes after ')
x = False
if c.tagName == 'unclear':
for w in c.childNodes:
#print(x, end=', ')
if w.nodeType == w.ELEMENT_NODE and w.tagName == 'pc':
print('Eureka!')
print(w.attributes.getNamedItem('n').nodeValue)
x = True
def searchTextNodesChildrenOfUnclear():
"""
Print all textNodes that are children of <unclear>.
"""
wordElementList = xmldoc.getElementsByTagName('ref')
for r in wordElementList:
#print('cRef: '+r.attributes.getNamedItem('cRef').nodeValue)
for c in r.childNodes:
if c.nodeType == c.ELEMENT_NODE:
if c.tagName == 'unclear':
for w in c.childNodes:
if w.nodeType == w.ELEMENT_NODE and w.tagName != 'w':
#print(w.attributes.getNamedItem('n').nodeValue)
print(w.tagName)
if w.nodeType != w.ELEMENT_NODE and w.nodeValue != '\n' and w.nodeValue != '\n\t':
print('"'+w.nodeValue+'"\n---\n')
def listChildrenOfAnElement(elemName):
"""
Return a list of elements that are direct children of the
element with tag name elemName (e.g. 'w' or 'ref').
"""
wordElementList = xmldoc.getElementsByTagName(elemName)
cs=[]
for e in wordElementList:
for c in e.childNodes:
if c.nodeType == c.ELEMENT_NODE:
cs.append(c.tagName)
return(set(cs))
def searchAttrib(elemName):
"""
Check attributes of an element
"""
L = []
wordElementList = xmldoc.getElementsByTagName(elemName)
for e in wordElementList:
if e.attributes.getNamedItem('type'):
n = e.attributes.getNamedItem('type').nodeValue
if n == 'emendation':
if not e.attributes.getNamedItem('cert'):
L.append(e.attributes.getNamedItem('subtype').nodeValue)
#L.append(e.attributes.getNamedItem('subtype').nodeValue)
for l in set(L):
print(l)
def listDescendantsOfElement(myElement):
ds=[]
elementList = xmldoc.getElementsByTagName(myElement)
for w in elementList:
d = w.getElementsByTagName('*')
for x in d:
#if x.nodeType == x.ELEMENT_NODE and x.tagName != 'note':
if x.nodeType == x.ELEMENT_NODE:
ds.append(x.tagName)
for y in set(ds):
print(y)
def graphemeLint():
"""
This function checks that all graphemes encoded directly within
<w> elements (or within those of its descendant element that are
supposed to include graphemes) are actually declared in the
Graphemic Table of Signs. If they are not declared, it prints
an 'Alas!' message.
"""
# Import the graphemes in column 'Grapheme' of GToS.csv into list 'gl'
gl = []
with open('/home/ilbuonme/siti/paolo.monella/ursus/GToS.csv') as gtosFile:
lineCount=0
for l in gtosFile:
if lineCount>0: # I'm skipping the first line (which has the column headers)
gl.append(l[0])
lineCount += 1
# Possible descendants of <w>
allowedElem=['lb', 'pc', 'am', 'choice', 'note', 'expan', 'add', 'hi', 'abbr', 'gap']
noGraphemeContent=['lb', 'pc', 'gap', 'note', 'expan', 'choice'] # <expan> has alphabemes, not graphemes, as content
graphemeContent=['am', 'hi']
# Check the descendants of <w> (elements and textNodes)
elementList = xmldoc.getElementsByTagName('w')
for w in elementList:
g = '' # This is a string including all graphemes in the <w> element
for c in w.childNodes:
if c.nodeType != c.ELEMENT_NODE: # With this we harvest all text nodes directly children of <w>
g = g + c.nodeValue
for x in w.getElementsByTagName('*'):
if x.tagName not in allowedElem:
print('<' + x.tagName + '> is not allowed as a descendant of <w>')
elif x.tagName in graphemeContent: # These elements only have one textNode child, with graphemes
g = g + x.firstChild.nodeValue
elif x.tagName == 'abbr': # Its children can be <am> or <hi> (already taken care of), or textNode
for y in x.childNodes:
if y.nodeType != y.ELEMENT_NODE: # textNode child
g = g + y.nodeValue
else: # element child: the only case as of 2017-03-16 is a <choice> child, so
# no need to worry about this, because its children <abbr>, <expan>
# and <am> are already taken care of
pass
elif x.tagName == 'add': # Its children can be <w> or textNode
for y in x.childNodes:
if y.nodeType != y.ELEMENT_NODE: # textNode child
g = g + y.nodeValue
else: # element child: the only case as of 2017-03-16 is a <choice> child, so
# no need to worry about this, because its children <abbr>, <expan>
# and <am> are already taken care of
pass
for gx in g: # For each character in the graphematic content of <w>
if (gx not in gl) and (gx not in ['\n', '\t']): # If it's not in the GToS (and it's not a tab or newline)
print('Alas! Character "'+gx+'" is not in the Graphemic Table of Signs')
##################
# Call functions #
##################
# List children of <w>
# for x in listChildrenOfAnElement('w'): print(x, end=', ')
# print()
# List descendants of <w>
#graphemeLint()
#listDescendantsOfElement('choice')
searchAttrib('note')
license: gpl-2.0 | hash: 6,549,222,712,097,434,000 | line_mean: 38.420513 | line_max: 120 | alpha_frac: 0.552751 | autogenerated: false | ratio: 3.894124 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: niallrmurphy/pyvern | path: test_tree.py | copies: 1 | size: 20890

#!/usr/bin/env python
# encoding: utf-8
# Niall Richard Murphy <[email protected]>
"""Test the tree (gap-production) object."""
import sys
import constants
import random
import tree
# Perhaps unittest2 is available. Try to import it, for
# those cases where we are running python 2.7.
try:
import unittest2 as unittest
except ImportError:
import unittest
class NodeTest(unittest.TestCase):
def setUp(self):
self.n = tree.Node(supplied_data = "Test")
self.n2 = tree.Node(supplied_data = "Test2")
self.n3 = tree.Node(supplied_data = "Test3")
self.n4 = tree.Node(supplied_data = "Test4")
self.n5 = tree.Node(supplied_data = "Test5")
def test_node_get_used(self):
self.failUnless(self.n.used == False)
def test_node_set_used(self):
self.n.used = True
self.failUnless(self.n.used == True)
def test_node_get_data(self):
self.failUnless(self.n.GetData() == "Test")
def test_node_set_data(self):
self.n.SetData("Wobble")
self.failUnless(self.n.GetData() == "Wobble")
def test_node_getset_left(self):
self.n.SetLeft(self.n2)
self.failUnless(self.n.GetLeft() == self.n2)
def test_node_getset_right(self):
self.n.SetRight(self.n2)
self.failUnless(self.n.GetRight() == self.n2)
def test_node_getset_parent(self):
self.n.SetLeft(self.n2)
self.n2.SetParent(self.n)
self.n.SetRight(self.n3)
self.n3.SetParent(self.n)
self.failUnless(self.n2.GetParent() == self.n)
self.failUnless(self.n3.GetParent() == self.n)
def test_node_getset_level(self):
self.assertEqual(self.n.GetLevel(), 0)
self.n2.SetParent(self.n)
self.n.SetLeft(self.n2)
self.assertEqual(self.n2.GetLevel(), 1)
self.n2.SetLeft(self.n3)
self.n3.SetParent(self.n2)
self.assertEqual(self.n3.GetLevel(), 2)
def test_node_getset_leftright(self):
self.n.SetLeft(self.n2)
self.n2.SetParent(self.n)
self.n.SetRight(self.n3)
self.n3.SetParent(self.n)
self.assertEqual(self.n2.AmLeft(), True)
self.assertEqual(self.n3.AmRight(), True)
def test_node_amroot(self):
self.assertEqual(self.n.AmRoot(), True)
def test_node_getbinary(self):
self.n.SetLeft(self.n2)
self.n2.SetParent(self.n)
self.n.SetRight(self.n3)
self.n3.SetParent(self.n)
self.assertEqual(self.n2.GetBinary(), 0)
self.assertEqual(self.n3.GetBinary(), 1)
def test_node_get_path(self):
self.n.SetLeft(self.n2)
self.n2.SetParent(self.n)
self.n.SetRight(self.n3)
self.n3.SetParent(self.n)
self.n4.SetParent(self.n2)
self.n5.SetParent(self.n2)
self.n2.SetLeft(self.n4)
self.n2.SetRight(self.n5)
self.assertEqual(self.n2.GetPath(), "0")
self.assertEqual(self.n3.GetPath(), "1")
self.assertEqual(self.n4.GetPath(), "00")
self.assertEqual(self.n5.GetPath(), "01")
class TreeTest(unittest.TestCase):
def setUp(self):
self.t = tree.Tree()
def structuralSetUp(self):
# Setup for structural comparisons & marking-as-used
self.n = tree.Node(supplied_data = "Root")
self.n2 = tree.Node(supplied_data = "Test2")
self.n3 = tree.Node(supplied_data = "Test3")
self.n4 = tree.Node(supplied_data = "Test4")
self.n5 = tree.Node(supplied_data = "Test5")
self.n6 = tree.Node(supplied_data = "Test6")
self.n7 = tree.Node(supplied_data = "Test7")
self.t.SetRoot(self.n)
self.t.GetRoot().SetLeft(self.n2)
self.n2.SetParent(self.n)
self.t.GetRoot().GetLeft().SetLeft(self.n3)
self.n3.SetParent(self.n2)
self.t.GetRoot().GetLeft().SetRight(self.n4)
self.n4.SetParent(self.n2)
self.t.GetRoot().SetRight(self.n5)
self.n5.SetParent(self.n)
self.t.GetRoot().GetRight().SetLeft(self.n6)
self.n6.SetParent(self.n5)
self.t.GetRoot().GetRight().SetRight(self.n7)
self.n7.SetParent(self.n5)
self.n3.used = True
self.n4.used = True
self.n6.used = True
self.n7.used = True
def test_tree_new(self):
self.failUnless('Insert' in dir(self.t))
def test_tree_path_to_dot_quad(self):
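# PathToDotQuad maps a binary path string to a CIDR prefix: the bits become the
# leading bits of the address, zero-padded to 32 bits, so "1111" -> 240.0.0.0/4
# and "10100111111" -> 167.224.0.0/16.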
binstr = "1111"
x = self.t.PathToDotQuad(binstr, 4)
self.assertEqual(x, "240.0.0.0/4")
binstr = "10100111111"
y = self.t.PathToDotQuad(binstr, 16)
self.assertEqual(y, "167.224.0.0/16")
def test_tree_get_root_properties(self):
self.failUnless(self.t.GetRoot().GetData() == 'Root')
self.failUnless(self.t.GetRoot().GetLeft() == None)
self.failUnless(self.t.GetRoot().GetRight() == None)
self.failUnless(self.t.GetRoot().GetParent() == None)
def test_tree_generate_for_prefix(self):
for x in self.t.GenerateForPrefix(2):
self.failUnless(x in ['0.0.0.0/2', '64.0.0.0/2',
'128.0.0.0/2', '192.0.0.0/2'])
def test_tree_insert_default_route(self):
obj = self.t.Insert('0.0.0.0/0', "test03point5", test_dup = False)
self.assertEqual(obj, self.t.GetRoot())
def test_tree_structural_comparison(self):
# N
# N2 N5
# N3 N4 N6 N7
self.structuralSetUp()
for x in self.t.IterateNodes():
self.failUnless(x in ['0.0.0.0/2', '64.0.0.0/2',
'128.0.0.0/2', '192.0.0.0/2'])
self.t2 = tree.Tree()
self.t2.Insert('0.0.0.0/2', 'structural')
self.t2.Insert('64.0.0.0/2', 'structural')
self.t2.Insert('128.0.0.0/2', 'structural')
self.t2.Insert('192.0.0.0/2', 'structural')
for x in self.t2.IterateNodes():
self.failUnless(x in ['0.0.0.0/2', '64.0.0.0/2',
'128.0.0.0/2', '192.0.0.0/2'])
def test_tree_follow_chain(self):
self.t.Insert('192.168.0.0/16', 'test_tree_follow_chain')
obj = self.t.Lookup('192.168.0.0/16')
current = obj
self.assertEqual(current.GetLevel(), 16)
while current != self.t.root:
old_level = current.GetLevel()
current = current.GetParent()
new_level = current.GetLevel()
self.assertEqual(old_level, new_level + 1)
# TODO(niallm): check for membership of array [n - level] -> 192.168.0.0 here
self.t.Insert('192.169.0.0/16', 'test_tree_follow_chain_2')
new_obj = self.t.Lookup('192.169.0.0/16', 'test_tree_follow_chain_3')
self.assertEqual(obj.GetParent(), new_obj.GetParent())
def test_tree_recursive_marking(self):
self.structuralSetUp()
self.assertEqual(self.n2.used, False)
self.t.CheckRecursivelyUsed(self.n3)
self.assertEqual(self.n2.used, True)
self.assertEqual(self.n.used, False)
self.n5.used = True
self.t.CheckRecursivelyUsed(self.n3)
self.assertEqual(self.n.used, True)
def test_tree_insert_one_prefix_left(self):
obj = self.t.Insert('0.0.0.0/1', "testInsertSmall")
data = obj.GetData()
used = obj.used
left = obj.GetLeft()
right = obj.GetRight()
parent = obj.GetParent()
level = obj.GetLevel()
root = self.t.GetRoot()
self.assertEqual(data, "testInsertSmall")
self.assertEqual(used, True)
self.assertEqual(parent, root)
self.assertEqual(left, None)
self.assertEqual(right, None)
self.failUnless(obj.GetParent().GetLeft() == obj)
self.assertEqual(level, 1)
def test_tree_insert_flags(self):
result = self.t.Insert('0.0.0.0/8', '4.5treeobj', mark_used = False,
test_used = True, test_none = False)
self.assertEqual(result.used, False)
def test_tree_insert_two_prefixes_getbinary(self):
obj = self.t.Insert('0.0.0.0/1', "testInsertSmall")
bin = obj.GetBinary()
self.failUnless(str(bin) == "0")
obj = self.t.Insert('128.0.0.0/1', "testInsertSmall")
bin = obj.GetBinary()
self.failUnless(str(bin) == "1")
def test_tree_insert_one_prefix_right(self):
obj = self.t.Insert('128.0.0.0/1', "testInsertSmall")
data = obj.GetData()
used = obj.used
left = obj.GetLeft()
right = obj.GetRight()
parent = obj.GetParent()
level = obj.GetLevel()
path = obj.GetPath()
self.assertEqual(data, "testInsertSmall")
self.assertEqual(used, True)
self.assertEqual(parent, self.t.GetRoot())
self.assertEqual(left, None)
self.assertEqual(right, None)
self.assertEqual(obj.GetParent().GetRight(), obj)
self.assertEqual(level, 1)
self.assertEqual(path, "1")
def test_tree_insert_one_longer_prefix(self):
obj = self.t.Insert('10.0.0.0/8', "testInsertLarge")
data = obj.GetData()
used = obj.used
left = obj.GetLeft()
right = obj.GetRight()
parent = obj.GetParent()
level = obj.GetLevel()
path = obj.GetPath()
self.failUnless(obj.GetData() == 'testInsertLarge')
self.assertEqual(right, None)
self.assertEqual(left, None)
self.assertEqual(level, 8)
self.assertEqual(path, "00001010")
def test_tree_get_path_to_real_prefix(self):
obj = self.t.Insert('10.0.0.0/8', "testGetPath")
path = obj.GetPath()
self.failUnless(path == "00001010", "unexpected path to node: [%s] " % path)
obj = self.t.Insert('137.43.0.0/16', "testInsertLarge")
path = obj.GetPath()
self.failUnless(path == "1000100100101011", "unexpected path to node: [%s] " % path)
def test_tree_lookup_succeed(self):
obj = self.t.Insert('10.0.0.0/8', "testLookup")
obj2 = self.t.Lookup('10.0.0.0/8')
self.assertEqual(obj, obj2)
def test_tree_lookup_fail(self):
obj = self.t.Insert('10.0.0.0/8', "testNegLookup")
obj2 = self.t.Lookup('127.0.0.1')
self.assertEqual(obj2, None)
self.assertNotEqual(obj, None)
def test_tree_lookup_funky(self):
for count in range(4,12):
objdict = {}
total_route_set = []
for route in self.t.GenerateForPrefix(count):
total_route_set.append(route)
picks = random.sample(total_route_set, count/2)
for item in picks:
objdict[item] = self.t.Insert(item,
"complex_find_gap", mark_used = False)
for item in total_route_set:
if item in picks:
self.assertEqual(self.t.Lookup(item), objdict[item],
"Picks lookup [%s] got [%s]" % (self.t.Lookup(item),
objdict[item]))
else:
self.assertEqual(self.t.Lookup(item), None,
"Non-picks lookup get [%s] not none" %
self.t.Lookup(item))
def test_insert_duplicate_fails(self):
#self.t.debug=30
obj1 = self.t.Insert('137.43.0.0/16', 'testInsertDup')
self.assertEqual(False, self.t.Insert('137.43.0.0/16',
'testInsertDup'))
#self.t.debug=0
def test_tree_quick_insert_multiple_prefixes(self):
obj1 = self.t.Insert('0.0.0.0/8', "testInsertMultiple")
obj2 = self.t.Insert('1.0.0.0/8', "testInsertMultiple")
data1 = obj1.GetData()
used1 = obj1.used
left1 = obj1.GetLeft()
right1 = obj1.GetRight()
parent1 = obj1.GetParent()
level1 = obj1.GetLevel()
left2 = obj2.GetLeft()
right2 = obj2.GetRight()
parent2 = obj2.GetParent()
level2 = obj2.GetLevel()
self.assertEqual(data1, 'testInsertMultiple')
self.assertEqual(left1, None)
self.assertEqual(left2, None)
self.assertEqual(level1, 8)
self.assertEqual(level2, 8)
class TreeTestGaps(unittest.TestCase):
def setUp(self):
self.t = tree.Tree()
def test_tree_quick_find_gap_vanilla(self):
# Simple insert
self.t.Insert('0.0.0.0/8', "testFindGap")
ret = self.t.FindGap(8)
self.assertEqual(ret, "1.0.0.0/8",
"Find gap returned [%s], not 1.0.0.0/8" % ret)
# Route of same length immediately beside
self.t.Insert('1.0.0.0/8', "testFindGap2")
ret2 = self.t.FindGap(8)
self.assertEqual(ret2, "2.0.0.0/8",
"Find gap returned [%s], not 2.0.0.0/8" % ret2)
# And up two levels and down again
self.t.Insert("2.0.0.0/8", "testFindGap")
ret3 = self.t.FindGap(8)
self.assertEqual(ret3, "3.0.0.0/8",
"Find gap returned [%s], not " % ret3)
# Insert covering route (0-3/8)
self.t.Insert("0.0.0.0/6", "testFindGap")
ret4 = self.t.FindGap(6)
self.assertEqual(ret4, "4.0.0.0/6")
# Find a large gap after some small routes inserted
self.t.Insert("0.0.0.0/4", "testFindGap")
ret5 = self.t.FindGap(6)
self.assertEqual(ret5, "16.0.0.0/6")
# Bang over to the other side of the tree altogether
ret6 = self.t.FindGap(1)
self.assertEqual(ret6, "128.0.0.0/1")
def test_tree_quick_find_gap_random(self):
for count in range(1,10):
self.t = None
self.t = tree.Tree()
# Looking for route with a relevant prefix size.
# Generate a list of all possible prefixes leaving out one at random.
total_route_set = []
for route in self.t.GenerateForPrefix(count):
total_route_set.append(route)
remove_me = random.choice(total_route_set)
total_route_set.remove(remove_me)
for item in total_route_set:
obj1 = self.t.Insert(item, "testFindGap2")
found = self.t.FindGap(count)
      self.assertEqual(found, remove_me, "Find gap gave [%s] not expected "
                       "[%s]" % (found, remove_me))
def test_tree_different_size_find_gap(self):
self.t.Insert('0.0.0.0/8', 'reason1')
self.t.Insert('1.0.0.0/8', 'reason2')
r1 = self.t.FindGap(8)
self.assertEqual(r1, '2.0.0.0/8')
self.t.Insert(r1, 'reason1')
r2 = self.t.FindGap(8)
self.assertEqual(r2, '3.0.0.0/8')
self.t.Insert(r2, 'reason2')
r3 = self.t.FindGap(20)
self.assertEqual(r3, '4.0.0.0/20')
self.t.Insert(r3, 'reason3')
r4 = self.t.FindGap(8)
self.assertEqual(r4, '5.0.0.0/8')
r5 = self.t.FindGap(10)
self.assertEqual(r5, '4.64.0.0/10')
self.t.Insert(r5, 'reason5')
r6 = self.t.FindGap(6)
self.assertEqual(r6, '8.0.0.0/6')
r7 = self.t.FindGap(30)
self.assertEqual(r7, '4.0.16.0/30')
def test_tree_different_size_find_gap_from(self):
#self.t.debug = 10
self.t.Insert('0.0.0.0/8', 'reason1')
self.t.Insert('1.0.0.0/8', 'reason2')
r1 = self.t.FindGapFrom('0.0.0.0/1', 8)
self.assertEqual(r1, '2.0.0.0/8')
self.t.Insert(r1, 'reason1')
r2 = self.t.FindGapFrom('0.0.0.0/1', 8)
self.assertEqual(r2, '3.0.0.0/8')
self.t.Insert(r2, 'reason2')
r3 = self.t.FindGapFrom('0.0.0.0/1', 20)
self.assertEqual(r3, '4.0.0.0/20')
self.t.Insert(r3, 'reason3')
r4 = self.t.FindGapFrom('0.0.0.0/1', 8)
self.assertEqual(r4, '5.0.0.0/8')
r5 = self.t.FindGapFrom('0.0.0.0/1', 10)
self.assertEqual(r5, '4.64.0.0/10')
self.t.Insert(r5, 'reason5')
r6 = self.t.FindGapFrom('0.0.0.0/1', 6)
self.assertEqual(r6, '8.0.0.0/6')
r7 = self.t.FindGapFrom('0.0.0.0/1', 30)
self.assertEqual(r7, '4.0.16.0/30')
def test_tree_find_gap(self):
for count in range(4,12):
total_route_set = []
for route in self.t.GenerateForPrefix(count):
total_route_set.append(route)
picks = random.sample(total_route_set, count/2)
for item in picks:
obj1 = self.t.Insert(item, "testFindGap3")
for item in picks:
gap = self.t.FindGap(count)
self.failUnless(gap in total_route_set, "Gap found [%s] not in total \
route set!" % gap)
if gap not in picks:
# Add it and try again
self.t.Insert(gap, "testFindGap3Update")
else:
          print "FindGap returned a route that was already inserted: [%s]" % gap
def test_tree_find_gap_from_simple(self):
self.t.Insert("0.0.0.0/8", 'testFindGapFrom', mark_used = False,
test_none = False)
gap = self.t.FindGapFrom("0.0.0.0/8", 24)
self.assertEqual(gap, "0.0.0.0/24",
"Should find 0.0.0.0/24, instead found [%s]" % gap)
gap = self.t.FindGapFrom("1.0.0.0/8", 24)
self.assertEqual(gap, None,
"Should find no gap, instead got [%s]" % gap)
def test_tree_find_gap_from_simple_higher(self):
self.t.Insert("0.0.0.0/8", 'testFindGapFrom', mark_used = False,
test_none = False)
gap = self.t.FindGapFrom("0.0.0.0/8", 7)
self.assertEqual(gap, None,
"Should find no gap, instead got [%s]" % gap)
def test_tree_find_gap_from_simple_samesize(self):
self.t.Insert("0.0.0.0/8", 'testFindGapFrom', mark_used = False,
test_none = False)
gap = self.t.FindGapFrom("0.0.0.0/8", 8)
self.assertEqual(gap, "0.0.0.0/8")
def test_tree_find_gap_from_middling(self):
self.t.Insert("172.16.0.0/12", "findgapmiddling", mark_used = False,
test_none = False)
gap = self.t.FindGapFrom("172.16.0.0/12", 16)
self.assertEqual(gap, "172.16.0.0/16")
self.t.Insert("172.16.0.0/16", "findgapmiddling")
gap = self.t.FindGapFrom("172.16.0.0/12", 16)
self.assertEqual(gap, "172.17.0.0/16")
self.t.Insert("172.17.0.0/16", "findgapmiddling")
gap = self.t.FindGapFrom("172.16.0.0/12", 24)
self.assertEqual(gap, "172.18.0.0/24")
self.t.Insert("172.16.0.0/13", "findgapmiddling")
self.t.Insert("172.24.0.0/13", "findgapmiddling")
gap = self.t.FindGapFrom("172.16.0.0/12", 8)
self.assertEqual(gap, None)
def test_tree_find_gap_middling_occupied(self):
node = self.t.Insert("172.16.0.0/12", "findgapmiddling", mark_used = False,
test_none = False)
gap = self.t.FindGapFrom("172.16.0.0/12", 16)
self.assertEqual(gap, "172.16.0.0/16")
node.used = True
gap = self.t.FindGapFrom("172.16.0.0/12", 16)
self.assertEqual(gap, None)
def test_tree_find_gap_from_complex(self):
for count in range(4,12):
total_route_set = []
for route in self.t.GenerateForPrefix(count):
total_route_set.append(route)
picks = random.sample(total_route_set, count/2)
for item in picks:
obj1 = self.t.Insert(item, "complex_find_gap", mark_used = False)
for item in total_route_set:
if item in picks:
gap = self.t.FindGapFrom(item, count)
          self.assertEqual(gap, item, "Find gap from gave [%s] not expected "
                           "[%s]" % (gap, item))
else:
gap = self.t.FindGapFrom(item, 24)
self.assertEqual(gap, None)
class TreeIteration(unittest.TestCase):
def setUp(self):
self.t = tree.Tree()
def test_tree_iterate_nodes(self):
compare_list = []
for item in self.t.GenerateForPrefix(3):
obj1 = self.t.Insert(item, "testIterateNodes")
compare_list.append(item)
for node in self.t.IterateNodes():
compare_list.remove(node)
self.assertEqual(compare_list, [])
def test_tree_only_supernets(self):
#self.t.debug = 1
#self.t.Insert('199.0.0.0/8', "walk prob root", mark_used = False)
#self.assertEqual(self.t.Lookup('199.0.0.0/8').GetData(), "walk prob root")
original_routes = ['199.4.32.0/19', '199.4.64.0/18', '199.4.128.0/24', '199.4.130.0/23',
'199.4.132.0/24', '199.4.134.0/23', '199.4.136.0/24', '199.4.139.0/24',
'199.4.140.0/24', '199.4.141.0/24']
for route in original_routes:
self.t.Insert(route, "walk problem", mark_used = True, propagate_used = True)
result = []
for f in self.t.IterateNodes(prefix='199.0.0.0/8', top_used=True):
result.append(f)
result2 = ['199.4.32.0/19', '199.4.64.0/18', '199.4.128.0/24', '199.4.130.0/23',
'199.4.132.0/24', '199.4.134.0/23', '199.4.136.0/24', '199.4.139.0/24',
'199.4.140.0/23']
self.assertEqual(result, result2)
class TreeSlowTests(unittest.TestCase):
def setUp(self):
self.t = tree.Tree()
def test_tree_slow_13_treeobj_find_gap_exhaust(self):
self.t.Insert('0.0.0.0/8', "find_gap_exhaust")
route = self.t.FindGap(8)
while route != None:
self.t.Insert(route, "find_gap_exhaust_extend")
route = self.t.FindGap(8)
def test_tree_slow_14_treeobj_find_gap_too_large(self):
self.t.Insert('0.0.0.0/8', "find_gap_exhaust")
route = self.t.FindGap(8)
while route != None:
self.t.Insert(route, "find_gap_exhaust_large")
route = self.t.FindGap(7)
class TreeComparisonTests(unittest.TestCase):
def setUp(self):
self.t = tree.Tree()
def test_compare_tree_1(self):
self.t2 = tree.Tree()
self.t.Insert('1.0.0.0/8', 'reason1')
self.t2.Insert('1.0.0.0/8', 'reason2')
self.assertEqual(self.t, self.t2)
def test_compare_tree_2(self):
self.t2 = tree.Tree()
self.t.Insert('192.168.0.0/23', 'reason1', mark_used=True)
self.t2.Insert('192.168.0.0/24', 'reason2', mark_used=True, propagate_used=True)
self.t2.Insert('192.168.1.0/24', 'reason3', mark_used=True, propagate_used=True)
#print "T2"
#for x in self.t2.IterateNodes():
#print "X T2", x
#print "T1"
#for y in self.t.IterateNodes():
#print "Y T", y
#print "ASSERT"
self.assertEqual(self.t, self.t2)
def test_compare_tree_3(self):
pass
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -4,469,209,220,088,208,400 | 34.709402 | 94 | 0.613787 | false | 2.750132 | true | false | false |
kmahyyg/learn_py3 | antiscanhttp.py | 1 | 2192 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# http://speedtest.tele2.net/10GB.zip
# https://docs.python.org/3/library/http.server.html
# http://blog.csdn.net/cteng/article/details/51584766
"""
Anti-HTTP-Scanner: Redirect all requests to a 10GB speedtest file
Patrick Young 2017/10/8
usage: "antiscanhttp.py" [-h] [--port] [--ip] redirect_url
positional arguments:
redirect_url (such as http://speedtest.tele2.net/10GB.zip)
optional arguments:
-h,--help Show this help message and exit
        --port,-p       Port to listen on, default 80
--ip,-i Host interface to listen on
        redirect_url    Recommended value: 'http://speedtest.tele2.net/10GB.zip'
"""
import socketserver
import http.server
import argparse
def redirect_handler(url):
class RedirectHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(301)
self.send_header('Location', url)
self.end_headers()
def do_POST(self):
self.send_response(301)
self.send_header('Location', url)
self.end_headers()
def do_HEAD(self):
self.send_response(301)
self.send_header('Location', url)
self.end_headers()
def do_PUT(self):
self.send_response(301)
self.send_header('Location', url)
self.end_headers()
return RedirectHandler
def main():
parser = argparse.ArgumentParser(description='Anti HTTP Scanner redirector')
parser.add_argument('--port', '-p', action='store', type=int, default=80, help='Server listens on this port')
parser.add_argument('--ip', '-i', action='store', default='', help='Host Interface to listen on')
parser.add_argument('redirect_url', action='store',help='(such as http://speedtest.tele2.net/10GB.zip)')
userinput = parser.parse_args()
redirect_url = userinput.redirect_url
port = userinput.port
host = userinput.ip
redirect_handle = redirect_handler(redirect_url)
handler = socketserver.TCPServer((host, port), redirect_handle)
    print('Server is now running on port %s' % port)
handler.serve_forever()
if __name__ == "__main__":
main()
| agpl-3.0 | 8,516,625,122,009,231,000 | 31.235294 | 113 | 0.649635 | false | 3.564228 | false | false | false |
brendanlong/dash-ts-tools | dash_initialization_segmenter.py | 1 | 4159 | #!/usr/bin/env python3
import argparse
import os
from ts import *
def write_ts(file_name, packets, force):
logging.info("Writing %s", file_name)
if not force and os.path.exists(file_name):
choice = input(
"Output file {} already exists. Overwrite it? "
"[y/N] ".format(file_name)).lower()
if choice != "y":
return
with open(file_name, "wb") as f:
for packet in packets:
f.write(packet.bytes)
def generate_initialization_segment(
segment_file_names, segment_template, out_file_name, force):
pat = None
pat_ts = None
pmt = None
pmt_ts = None
segment_ts = {}
pmt_pid = None
for segment_file_name in segment_file_names:
logging.info("Reading %s", segment_file_name)
current_segment_ts = []
segment_ts[segment_file_name] = current_segment_ts
for ts in read_ts(segment_file_name):
if ts.pid == ProgramAssociationTable.PID:
new_pat = ProgramAssociationTable(ts.payload)
if pat is None:
pat = new_pat
pat_ts = ts
programs = list(pat.programs.values())
if len(programs) != 1:
raise Exception(
"PAT has {} programs, but DASH only allows 1 "
"program.".format(len(pat.programs)))
if pmt_pid is not None and programs[0] != pmt_pid:
raise Exception("PAT has new PMT PID. This program has "
"not been tested to handled this case.")
pmt_pid = programs[0]
elif new_pat != pat:
raise Exception("Cannot generate initialization segment "
"for segment with multiple PAT's. {} != {"
"}".format(new_pat, pat))
elif ts.pid == pmt_pid:
new_pmt = ProgramMapTable(ts.payload)
if pmt is None:
pmt = new_pmt
pmt_ts = ts
elif new_pmt != pmt:
raise Exception("Cannot generate initialization segment "
"for segment with multiple PMT's. {} != {"
"}".format(new_pmt, pmt))
else:
current_segment_ts.append(ts)
logging.debug("Common PSI is:\nPAT: %s\nPMT: %s", pat, pmt)
write_ts(out_file_name, [pat_ts, pmt_ts], force)
for segment_file_name in segment_file_names:
path, file_name = os.path.split(segment_file_name)
name_part, _ = os.path.splitext(file_name)
segment_out_file_name = segment_template.format_map(
{"path": path, "name_part": name_part})
write_ts(segment_out_file_name, segment_ts[segment_file_name], force)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"media_segment", nargs="+",
help="The media segments to create an initialization segment for.")
parser.add_argument(
"--segment-template", "-s",
help="Template for segment index files. {name_part} will be replaced "
"with the file name of the media segment minus the suffix (.ts). "
"{path} will be replaced with the full path to the media segment.",
default="{path}/{name_part}.ts")
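    # A worked example of the template above (hypothetical file names): for a
    # media segment "/video/seg1.ts", {path} expands to "/video" and {name_part}
    # to "seg1", so the default "{path}/{name_part}.ts" resolves back to
    # "/video/seg1.ts", i.e. the PAT/PMT-stripped segment overwrites the input.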
parser.add_argument(
"--out", "-o", required=True,
help="The file to write the initialization segment to.")
parser.add_argument(
"--force", "-f", action="store_true", default=False,
help="Overwrite output files without prompting.")
parser.add_argument(
"--verbose", "-v", action="store_true", default=False,
help="Enable verbose output.")
args = parser.parse_args()
logging.basicConfig(
format='%(levelname)s: %(message)s',
level=logging.DEBUG if args.verbose else logging.INFO)
generate_initialization_segment(
args.media_segment, args.segment_template, args.out, args.force)
| bsd-2-clause | -5,032,914,013,594,060,000 | 40.178218 | 80 | 0.5434 | false | 4.19254 | false | false | false |
MarcoVogt/basil | tests/test_RegisterHardwareLayer.py | 1 | 9389 | #
# ------------------------------------------------------------
# Copyright (c) All rights reserved
# SiLab, Institute of Physics, University of Bonn
# ------------------------------------------------------------
#
import unittest
from basil.dut import Dut
from basil.HL.RegisterHardwareLayer import RegisterHardwareLayer
import os
_test_init = {
'REG_TEST_INIT': 15,
'REG1': 120,
'REG_BYTE_ARRAY': [4, 3, 2, 1]
}
class test_RegisterHardwareLayer(RegisterHardwareLayer):
'''Register Hardware Layer.
Implementation of advanced register operations.
'''
_registers = {
'REG1': {'default': 12, 'descr': {'addr': 0, 'size': 15, 'offset': 0}},
'REG2': {'default': 1, 'descr': {'addr': 1, 'size': 1, 'offset': 7}},
'REG3': {'default': 2 ** 16 - 1, 'descr': {'addr': 2, 'size': 16, 'offset': 0}},
'REG4_RO': {'default': 0, 'descr': {'addr': 4, 'size': 8, 'properties': ['readonly']}},
'REG5_WO': {'default': 0, 'descr': {'addr': 5, 'size': 8, 'properties': ['writeonly']}},
'REG_TEST_INIT': {'descr': {'addr': 6, 'size': 8}},
'REG_BYTE_ARRAY': {'default': [1, 2, 3, 4], 'descr': {'addr': 16, 'size': 4, 'properties': ['bytearray']}}
}
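    # Reading the descriptors above (the tests below exercise this): REG3 sits at
    # address 2 and is 16 bits wide, so it spans two bytes of the dummy_tl memory;
    # assigning the bit string '1010101010101010' (43690) leaves mem[2] == mem[3] == 170.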
class TestRegisterHardwareLayer(unittest.TestCase):
def setUp(self):
self.dut = Dut(os.path.join(os.path.dirname(__file__), 'test_RegisterHardwareLayer.yaml'))
self.dut.init()
def test_init_non_existing(self):
with self.assertRaises(KeyError):
self.dut.init({"test_register": {"NON_EXISTING": 1}})
def test_lazy_programming(self):
self.dut['test_register'].set_default()
self.assertDictEqual({0: 12, 1: 128, 2: 255, 3: 255, 5: 0, 16: 1, 17: 2, 18: 3, 19: 4}, self.dut['dummy_tl'].mem)
self.dut['test_register'].REG5_WO = 255
self.assertDictEqual({0: 12, 1: 128, 2: 255, 3: 255, 5: 255, 16: 1, 17: 2, 18: 3, 19: 4}, self.dut['dummy_tl'].mem)
self.dut['test_register'].REG5_WO # get value from write-only register, but this will write zero instead
self.assertDictEqual({0: 12, 1: 128, 2: 255, 3: 255, 5: 0, 16: 1, 17: 2, 18: 3, 19: 4}, self.dut['dummy_tl'].mem)
def test_get_configuration(self):
self.dut.set_configuration(os.path.join(os.path.dirname(__file__), 'test_RegisterHardwareLayer_configuration.yaml'))
conf = self.dut['test_register'].get_configuration()
self.assertDictEqual({'REG1': 257, 'REG2': 1, 'REG3': 2, 'REG_TEST_INIT': 0, 'REG_BYTE_ARRAY': [1, 2, 3, 4]}, conf)
def test_set_configuration(self):
self.dut.set_configuration(os.path.join(os.path.dirname(__file__), 'test_RegisterHardwareLayer_configuration.yaml'))
self.assertDictEqual({0: 1, 1: 129, 2: 2, 3: 0, 5: 5, 16: 1, 17: 2, 18: 3, 19: 4}, self.dut['dummy_tl'].mem)
def test_set_configuration_non_existing(self):
with self.assertRaises(KeyError):
self.dut.set_configuration({"test_register": {"NON_EXISTING": 1}})
def test_read_only(self):
self.assertRaises(IOError, self.dut['test_register']._set, 'REG4_RO', value=0)
# def test_write_only(self):
# self.assertRaises(IOError, self.dut['test_register']._get, 'REG5_WO')
def test_write_only_lazy_programming(self):
self.dut['test_register'].set_default()
self.assertDictEqual({0: 12, 1: 128, 2: 255, 3: 255, 5: 0, 16: 1, 17: 2, 18: 3, 19: 4}, self.dut['dummy_tl'].mem)
self.dut['test_register'].REG5_WO = 20
self.assertDictEqual({0: 12, 1: 128, 2: 255, 3: 255, 5: 20, 16: 1, 17: 2, 18: 3, 19: 4}, self.dut['dummy_tl'].mem)
self.dut['test_register'].REG5_WO
self.assertDictEqual({0: 12, 1: 128, 2: 255, 3: 255, 5: 0, 16: 1, 17: 2, 18: 3, 19: 4}, self.dut['dummy_tl'].mem)
self.assertIs(None, self.dut['test_register']._get('REG5_WO'))
def test_set_default(self):
self.dut['test_register'].set_default()
self.assertDictEqual({0: 12, 1: 128, 2: 255, 3: 255, 5: 0, 16: 1, 17: 2, 18: 3, 19: 4}, self.dut['dummy_tl'].mem)
def test_set_attribute_add(self):
val = self.dut['test_register']._registers['REG1']['default']
self.dut['test_register'].REG1 = val # 12
mem = self.dut['dummy_tl'].mem.copy()
self.dut['test_register'].REG1 += 1 # 13
mem[0] = 13
self.assertDictEqual(mem, self.dut['dummy_tl'].mem)
def test_write_read_reg(self):
for reg in ['REG1', 'REG2', 'REG3']:
val = self.dut['test_register']._registers[reg]['default']
self.dut['test_register']._set(reg, val)
ret_val = self.dut['test_register']._get(reg)
self.assertEqual(ret_val, val)
self.assertDictEqual({0: 12, 1: 128, 2: 255, 3: 255, 5: 0, 16: 1, 17: 2, 18: 3, 19: 4}, self.dut['dummy_tl'].mem)
def test_set_attribute_by_value(self):
self.dut['test_register'].set_default()
self.assertDictEqual({0: 12, 1: 128, 2: 255, 3: 255, 5: 0, 16: 1, 17: 2, 18: 3, 19: 4}, self.dut['dummy_tl'].mem)
self.dut['test_register'].REG2 = 0
mem = self.dut['dummy_tl'].mem.copy()
mem[1] = 0
self.assertDictEqual(mem, self.dut['dummy_tl'].mem)
def test_set_attribute_by_string(self):
mem = self.dut['dummy_tl'].mem.copy()
        self.dut['test_register'].REG3 = '1010101010101010' # 43690
mem[2] = 170
mem[3] = 170
self.assertDictEqual(mem, self.dut['dummy_tl'].mem)
def test_get_attribute_by_string(self):
self.dut['test_register'].REG3 = '1010101010101010' # 43690
self.assertEqual(43690, self.dut['test_register'].REG3)
def test_set_attribute_too_long_string(self):
val = '11010101010101010' # 17 bit
self.assertRaises(ValueError, self.dut['test_register']._set, 'REG3', value=val)
def test_set_attribute_dict_access(self):
self.dut['test_register']['REG1'] = 27306 # 27306
self.assertEqual(27306, self.dut['test_register']['REG1'])
def test_set_attribute_too_big_val(self):
val = 2 ** 16 # max 2 ** 16 - 1
self.assertRaises(ValueError, self.dut['test_register']._set, 'REG3', value=val)
def test_set_by_function(self):
self.dut['test_register'].set_REG1(27308)
self.assertEqual(27308, self.dut['test_register']['REG1'])
def test_get_by_function(self):
        self.dut['test_register']['REG1'] = 27305 # 27305
ret = self.dut['test_register'].get_REG1()
self.assertEqual(ret, self.dut['test_register']['REG1'])
def test_init_with_dict(self):
self.dut['test_register'].set_default()
self.dut.init({'test_register': _test_init})
conf = self.dut.get_configuration()
self.assertDictEqual({'test_register': {'REG1': 120, 'REG2': 1, 'REG3': 65535, 'REG_TEST_INIT': 15, 'REG_BYTE_ARRAY': [4, 3, 2, 1]}, 'dummy_tl': {}}, conf)
def test_get_dut_configuration(self):
self.dut['test_register'].set_default()
conf = self.dut.get_configuration()
self.assertDictEqual({'test_register': {'REG1': 12, 'REG2': 1, 'REG3': 65535, 'REG_TEST_INIT': 0, 'REG_BYTE_ARRAY': [1, 2, 3, 4]}, 'dummy_tl': {}}, conf)
def test_get_set_value(self):
for val in range(256):
self.dut['test_register'].set_value(val, 0, size=8, offset=0)
ret_val = self.dut['test_register'].get_value(0, size=8, offset=0)
self.assertEqual(ret_val, val)
def test_write_read_reg_with_bit_str(self):
val = '00110110' # 54
self.dut['test_register'].set_value(val, 0, size=8, offset=0)
ret_val = self.dut['test_register'].get_value(0, size=8, offset=0)
self.assertEqual(ret_val, int(val, base=2))
def test_write_read_reg_with_offset(self):
for offset in range(32):
val = 131
self.dut['test_register'].set_value(val, 0, size=8, offset=offset)
ret_val = self.dut['test_register'].get_value(0, size=8, offset=offset)
self.assertEqual(ret_val, val)
def test_write_read_reg_with_size(self):
for size in range(8, 33):
val = 131
self.dut['test_register'].set_value(val, 0, size=size, offset=7)
ret_val = self.dut['test_register'].get_value(0, size=size, offset=7)
self.assertEqual(ret_val, val)
def test_read_non_existing(self):
with self.assertRaises(KeyError):
self.dut['test_register'].NON_EXISTING
with self.assertRaises(KeyError):
self.dut['test_register']['NON_EXISTING']
with self.assertRaises(KeyError):
self.dut['test_register'].get_NON_EXISTING()
def test_write_non_existing(self):
with self.assertRaises(KeyError):
self.dut['test_register'].NON_EXISTING = 42
with self.assertRaises(KeyError):
self.dut['test_register']['NON_EXISTING'] = 42
with self.assertRaises(KeyError):
self.dut['test_register'].set_NON_EXISTING(42)
def test_wrong_size(self):
self.assertRaises(ValueError, self.dut['test_register'].set_value, 131, addr=0, size=7, offset=7)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | -6,639,450,954,130,615,000 | 45.180905 | 163 | 0.575248 | false | 3.235355 | true | false | false |
mistercrunch/panoramix | superset/reports/api.py | 2 | 14710 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Any, Optional
from flask import g, request, Response
from flask_appbuilder.api import expose, permission_name, protect, rison, safe
from flask_appbuilder.hooks import before_request
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import ngettext
from marshmallow import ValidationError
from superset import is_feature_enabled
from superset.charts.filters import ChartFilter
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod
from superset.dashboards.filters import DashboardAccessFilter
from superset.databases.filters import DatabaseFilter
from superset.models.reports import ReportSchedule
from superset.reports.commands.bulk_delete import BulkDeleteReportScheduleCommand
from superset.reports.commands.create import CreateReportScheduleCommand
from superset.reports.commands.delete import DeleteReportScheduleCommand
from superset.reports.commands.exceptions import (
ReportScheduleBulkDeleteFailedError,
ReportScheduleCreateFailedError,
ReportScheduleDeleteFailedError,
ReportScheduleForbiddenError,
ReportScheduleInvalidError,
ReportScheduleNotFoundError,
ReportScheduleUpdateFailedError,
)
from superset.reports.commands.update import UpdateReportScheduleCommand
from superset.reports.filters import ReportScheduleAllTextFilter
from superset.reports.schemas import (
get_delete_ids_schema,
openapi_spec_methods_override,
ReportSchedulePostSchema,
ReportSchedulePutSchema,
)
from superset.views.base_api import (
BaseSupersetModelRestApi,
RelatedFieldFilter,
statsd_metrics,
)
from superset.views.filters import FilterRelatedOwners
logger = logging.getLogger(__name__)
class ReportScheduleRestApi(BaseSupersetModelRestApi):
datamodel = SQLAInterface(ReportSchedule)
@before_request
def ensure_alert_reports_enabled(self) -> Optional[Response]:
if not is_feature_enabled("ALERT_REPORTS"):
return self.response_404()
return None
include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | {
RouteMethod.RELATED,
"bulk_delete", # not using RouteMethod since locally defined
}
class_permission_name = "ReportSchedule"
method_permission_name = MODEL_API_RW_METHOD_PERMISSION_MAP
resource_name = "report"
allow_browser_login = True
show_columns = [
"id",
"active",
"chart.id",
"chart.slice_name",
"context_markdown",
"crontab",
"dashboard.dashboard_title",
"dashboard.id",
"database.database_name",
"database.id",
"description",
"grace_period",
"last_eval_dttm",
"last_state",
"last_value",
"last_value_row_json",
"log_retention",
"name",
"owners.first_name",
"owners.id",
"owners.last_name",
"recipients.id",
"recipients.recipient_config_json",
"recipients.type",
"report_format",
"sql",
"type",
"validator_config_json",
"validator_type",
"working_timeout",
]
show_select_columns = show_columns + [
"chart.datasource_id",
"chart.datasource_type",
]
list_columns = [
"active",
"changed_by.first_name",
"changed_by.last_name",
"changed_on",
"changed_on_delta_humanized",
"created_by.first_name",
"created_by.last_name",
"created_on",
"crontab",
"crontab_humanized",
"id",
"last_eval_dttm",
"last_state",
"name",
"owners.first_name",
"owners.id",
"owners.last_name",
"recipients.id",
"recipients.type",
"type",
]
add_columns = [
"active",
"chart",
"context_markdown",
"crontab",
"dashboard",
"database",
"description",
"grace_period",
"log_retention",
"name",
"owners",
"recipients",
"report_format",
"sql",
"type",
"validator_config_json",
"validator_type",
"working_timeout",
]
edit_columns = add_columns
add_model_schema = ReportSchedulePostSchema()
edit_model_schema = ReportSchedulePutSchema()
order_columns = [
"active",
"created_by.first_name",
"changed_by.first_name",
"changed_on",
"changed_on_delta_humanized",
"created_on",
"crontab",
"last_eval_dttm",
"name",
"type",
"crontab_humanized",
]
search_columns = ["name", "active", "created_by", "type", "last_state"]
search_filters = {"name": [ReportScheduleAllTextFilter]}
allowed_rel_fields = {"owners", "chart", "dashboard", "database", "created_by"}
filter_rel_fields = {
"chart": [["id", ChartFilter, lambda: []]],
"dashboard": [["id", DashboardAccessFilter, lambda: []]],
"database": [["id", DatabaseFilter, lambda: []]],
}
text_field_rel_fields = {
"dashboard": "dashboard_title",
"chart": "slice_name",
"database": "database_name",
}
related_field_filters = {
"dashboard": "dashboard_title",
"chart": "slice_name",
"database": "database_name",
"owners": RelatedFieldFilter("first_name", FilterRelatedOwners),
}
apispec_parameter_schemas = {
"get_delete_ids_schema": get_delete_ids_schema,
}
openapi_spec_tag = "Report Schedules"
openapi_spec_methods = openapi_spec_methods_override
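    # Note: with Flask-AppBuilder's usual URL layout (assumed here, it is not set
    # in this file) the resource_name above places these handlers under
    # /api/v1/report/, e.g. DELETE /api/v1/report/<pk> or POST /api/v1/report/.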
@expose("/<int:pk>", methods=["DELETE"])
@protect()
@safe
@statsd_metrics
@permission_name("delete")
def delete(self, pk: int) -> Response:
"""Delete a Report Schedule
---
delete:
description: >-
Delete a Report Schedule
parameters:
- in: path
schema:
type: integer
name: pk
description: The report schedule pk
responses:
200:
description: Item deleted
content:
application/json:
schema:
type: object
properties:
message:
type: string
403:
$ref: '#/components/responses/403'
404:
$ref: '#/components/responses/404'
422:
$ref: '#/components/responses/422'
500:
$ref: '#/components/responses/500'
"""
try:
DeleteReportScheduleCommand(g.user, pk).run()
return self.response(200, message="OK")
except ReportScheduleNotFoundError:
return self.response_404()
except ReportScheduleForbiddenError:
return self.response_403()
except ReportScheduleDeleteFailedError as ex:
logger.error(
"Error deleting report schedule %s: %s",
self.__class__.__name__,
str(ex),
exc_info=True,
)
return self.response_422(message=str(ex))
@expose("/", methods=["POST"])
@protect()
@safe
@statsd_metrics
@permission_name("post")
def post(self) -> Response:
"""Creates a new Report Schedule
---
post:
description: >-
Create a new Report Schedule
requestBody:
description: Report Schedule schema
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/{{self.__class__.__name__}}.post'
responses:
201:
description: Report schedule added
content:
application/json:
schema:
type: object
properties:
id:
type: number
result:
$ref: '#/components/schemas/{{self.__class__.__name__}}.post'
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
500:
$ref: '#/components/responses/500'
"""
if not request.is_json:
return self.response_400(message="Request is not JSON")
try:
item = self.add_model_schema.load(request.json)
# This validates custom Schema with custom validations
except ValidationError as error:
return self.response_400(message=error.messages)
try:
new_model = CreateReportScheduleCommand(g.user, item).run()
return self.response(201, id=new_model.id, result=item)
except ReportScheduleNotFoundError as ex:
return self.response_400(message=str(ex))
except ReportScheduleInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except ReportScheduleCreateFailedError as ex:
logger.error(
"Error creating report schedule %s: %s",
self.__class__.__name__,
str(ex),
exc_info=True,
)
return self.response_422(message=str(ex))
@expose("/<int:pk>", methods=["PUT"])
@protect()
@safe
@statsd_metrics
@permission_name("put")
def put(self, pk: int) -> Response: # pylint: disable=too-many-return-statements
"""Updates an Report Schedule
---
put:
description: >-
Updates a Report Schedule
parameters:
- in: path
schema:
type: integer
name: pk
description: The Report Schedule pk
requestBody:
description: Report Schedule schema
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/{{self.__class__.__name__}}.put'
responses:
200:
description: Report Schedule changed
content:
application/json:
schema:
type: object
properties:
id:
type: number
result:
$ref: '#/components/schemas/{{self.__class__.__name__}}.put'
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
403:
$ref: '#/components/responses/403'
404:
$ref: '#/components/responses/404'
500:
$ref: '#/components/responses/500'
"""
if not request.is_json:
return self.response_400(message="Request is not JSON")
try:
item = self.edit_model_schema.load(request.json)
# This validates custom Schema with custom validations
except ValidationError as error:
return self.response_400(message=error.messages)
try:
new_model = UpdateReportScheduleCommand(g.user, pk, item).run()
return self.response(200, id=new_model.id, result=item)
except ReportScheduleNotFoundError:
return self.response_404()
except ReportScheduleInvalidError as ex:
return self.response_422(message=ex.normalized_messages())
except ReportScheduleForbiddenError:
return self.response_403()
except ReportScheduleUpdateFailedError as ex:
logger.error(
"Error updating report %s: %s",
self.__class__.__name__,
str(ex),
exc_info=True,
)
return self.response_422(message=str(ex))
@expose("/", methods=["DELETE"])
@protect()
@safe
@statsd_metrics
@rison(get_delete_ids_schema)
def bulk_delete(self, **kwargs: Any) -> Response:
"""Delete bulk Report Schedule layers
---
delete:
description: >-
Deletes multiple report schedules in a bulk operation.
parameters:
- in: query
name: q
content:
application/json:
schema:
$ref: '#/components/schemas/get_delete_ids_schema'
responses:
200:
description: Report Schedule bulk delete
content:
application/json:
schema:
type: object
properties:
message:
type: string
401:
$ref: '#/components/responses/401'
403:
$ref: '#/components/responses/403'
404:
$ref: '#/components/responses/404'
422:
$ref: '#/components/responses/422'
500:
$ref: '#/components/responses/500'
"""
item_ids = kwargs["rison"]
try:
BulkDeleteReportScheduleCommand(g.user, item_ids).run()
return self.response(
200,
message=ngettext(
"Deleted %(num)d report schedule",
"Deleted %(num)d report schedules",
num=len(item_ids),
),
)
except ReportScheduleNotFoundError:
return self.response_404()
except ReportScheduleForbiddenError:
return self.response_403()
except ReportScheduleBulkDeleteFailedError as ex:
return self.response_422(message=str(ex))
| apache-2.0 | -7,320,515,751,932,594,000 | 32.205418 | 85 | 0.560639 | false | 4.497096 | false | false | false |
adieu/django-invitation | invitation/models.py | 1 | 6880 | import os
import random
import datetime
from django.db import models
from django.conf import settings
from django.utils.http import int_to_base36
from django.utils.hashcompat import sha_constructor
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from django.core.mail import send_mail
from django.template.loader import render_to_string
from django.contrib.sites.models import Site
from registration.models import SHA1_RE
class InvitationKeyManager(models.Manager):
def get_key(self, invitation_key):
"""
Return InvitationKey, or None if it doesn't (or shouldn't) exist.
"""
try:
code = InvitationCode.objects.get(code=invitation_key)
if self.filter(key=invitation_key).count() < code.redeem_limit:
key = self.model(key=invitation_key, from_user=code.from_user)
return key
except InvitationCode.DoesNotExist:
pass
# Don't bother hitting database if invitation_key doesn't match pattern.
if not SHA1_RE.search(invitation_key):
return None
try:
key = self.get(key=invitation_key)
except self.model.DoesNotExist:
return None
return key
def is_key_valid(self, invitation_key):
"""
Check if an ``InvitationKey`` is valid or not, returning a boolean,
``True`` if the key is valid.
"""
invitation_key = self.get_key(invitation_key)
return invitation_key and invitation_key.is_usable()
def create_invitation(self, user):
"""
Create an ``InvitationKey`` and returns it.
The key for the ``InvitationKey`` will be a SHA1 hash, generated
from a combination of the ``User``'s username and a random salt.
"""
salt = sha_constructor(str(random.random())).hexdigest()[:5]
key = sha_constructor("%s%s%s" % (datetime.datetime.now(), salt, user.username)).hexdigest()
return self.create(from_user=user, key=key)
def remaining_invitations_for_user(self, user):
"""
Return the number of remaining invitations for a given ``User``.
"""
invitation_user, created = InvitationUser.objects.get_or_create(
inviter=user,
defaults={'invitations_remaining': settings.INVITATIONS_PER_USER})
return invitation_user.invitations_remaining
def delete_expired_keys(self):
for key in self.all():
if key.key_expired():
key.delete()
class InvitationKey(models.Model):
key = models.CharField(_('invitation key'), max_length=40)
date_invited = models.DateTimeField(_('date invited'), default=datetime.datetime.now)
from_user = models.ForeignKey(User, related_name='invitations_sent')
registrant = models.ForeignKey(User, null=True, blank=True, related_name='invitations_used')
objects = InvitationKeyManager()
def __unicode__(self):
return u"Invitation from %s on %s" % (self.from_user.username, self.date_invited)
def is_usable(self):
"""
Return whether this key is still valid for registering a new user.
"""
return self.registrant is None and not self.key_expired()
def key_expired(self):
"""
Determine whether this ``InvitationKey`` has expired, returning
a boolean -- ``True`` if the key has expired.
The date the key has been created is incremented by the number of days
specified in the setting ``ACCOUNT_INVITATION_DAYS`` (which should be
the number of days after invite during which a user is allowed to
create their account); if the result is less than or equal to the
current date, the key has expired and this method returns ``True``.
"""
expiration_date = datetime.timedelta(days=settings.ACCOUNT_INVITATION_DAYS)
return self.date_invited + expiration_date <= datetime.datetime.now()
key_expired.boolean = True
def mark_used(self, registrant):
"""
Note that this key has been used to register a new user.
"""
self.registrant = registrant
self.save()
def send_to(self, email):
"""
Send an invitation email to ``email``.
"""
current_site = Site.objects.get_current()
subject = render_to_string('invitation/invitation_email_subject.txt',
{ 'site': current_site,
'invitation_key': self })
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
message = render_to_string('invitation/invitation_email.txt',
{ 'invitation_key': self,
'expiration_days': settings.ACCOUNT_INVITATION_DAYS,
'site': current_site })
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, [email])
class InvitationCode(models.Model):
code = models.CharField(_('invitation code'), max_length=40)
date_created = models.DateTimeField(_('date created'), default=datetime.datetime.now)
from_user = models.ForeignKey(User, related_name='invitation_code_set')
redeem_limit = models.IntegerField()
def __unicode__(self):
return u"Invitation code %s from %s" % (self.code, self.from_user.username)
class InvitationRequest(models.Model):
email = models.EmailField()
invited = models.BooleanField(default=False)
def __unicode__(self):
return u"InvitationRequest from %s" % self.email
class InvitationUser(models.Model):
inviter = models.ForeignKey(User, unique=True)
invitations_remaining = models.IntegerField()
def __unicode__(self):
return u"InvitationUser for %s" % self.inviter.username
def user_post_save(sender, instance, created, **kwargs):
"""Create InvitationUser for user when User is created."""
if created:
invitation_user = InvitationUser()
invitation_user.inviter = instance
invitation_user.invitations_remaining = settings.INVITATIONS_PER_USER
invitation_user.save()
models.signals.post_save.connect(user_post_save, sender=User)
def invitation_key_post_save(sender, instance, created, **kwargs):
"""Decrement invitations_remaining when InvitationKey is created."""
if created:
invitation_user = InvitationUser.objects.get(inviter=instance.from_user)
remaining = invitation_user.invitations_remaining
invitation_user.invitations_remaining = remaining-1
invitation_user.save()
models.signals.post_save.connect(invitation_key_post_save, sender=InvitationKey)
| bsd-3-clause | -6,604,925,912,574,837,000 | 37.435754 | 100 | 0.636628 | false | 4.075829 | false | false | false |
drssoccer55/RLBot | src/main/python/rlbot/utils/structures/rigid_body_struct.py | 1 | 1072 | import ctypes
from rlbot.utils.structures.bot_input_struct import PlayerInput
from rlbot.utils.structures.game_data_struct import Vector3
from rlbot.utils.structures.start_match_structures import MAX_PLAYERS
class Quaternion(ctypes.Structure):
_fields_ = [("x", ctypes.c_float),
("y", ctypes.c_float),
("z", ctypes.c_float),
("w", ctypes.c_float)]
class RigidBodyState(ctypes.Structure):
_fields_ = [("frame", ctypes.c_int),
("location", Vector3),
("rotation", Quaternion),
("velocity", Vector3),
("angular_velocity", Vector3)]
class PlayerRigidBodyState(ctypes.Structure):
_fields_ = [("state", RigidBodyState),
("input", PlayerInput)]
class BallRigidBodyState(ctypes.Structure):
_fields_ = [("state", RigidBodyState)]
class RigidBodyTick(ctypes.Structure):
_fields_ = [("ball", BallRigidBodyState),
("players", PlayerRigidBodyState * MAX_PLAYERS),
("num_players", ctypes.c_int)]
| mit | -2,333,322,161,590,240,000 | 29.628571 | 69 | 0.609142 | false | 3.658703 | false | false | false |
jepler/linuxcnc-mirror | src/emc/usr_intf/pncconf/pncconf.py | 1 | 292409 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# This is pncconf, a graphical configuration editor for LinuxCNC
# Chris Morley copyright 2009
# This is based on stepconf, a graphical configuration editor for linuxcnc
# Copyright 2007 Jeff Epler <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
import os
# this is for importing modules from lib/python/pncconf
BIN = os.path.dirname(__file__)
BASE = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), ".."))
libdir = os.path.join(BASE, "lib", "python","pncconf")
sys.path.insert(0, libdir)
import errno
import time
import pickle
import shutil
import math
from optparse import Option, OptionParser
import textwrap
import locale
import copy
import fnmatch
import subprocess
import gobject
import gtk
import gtk.glade
import xml.dom.minidom
import xml.etree.ElementTree
import xml.etree.ElementPath
import traceback
from multifilebuilder import MultiFileBuilder
from touchy import preferences
from pncconf import pages
from pncconf import build_INI
from pncconf import build_HAL
from pncconf import tests
from pncconf import data
from pncconf import private_data
import cairo
import hal
#import mesatest
try:
LINUXCNCVERSION = os.environ['LINUXCNCVERSION']
except:
LINUXCNCVERSION = 'UNAVAILABLE'
def get_value(w):
try:
return w.get_value()
except AttributeError:
pass
oldlocale = locale.getlocale(locale.LC_NUMERIC)
try:
locale.setlocale(locale.LC_NUMERIC, "")
return locale.atof(w.get_text())
finally:
locale.setlocale(locale.LC_NUMERIC, oldlocale)
def makedirs(d):
try:
os.makedirs(d)
except os.error, detail:
if detail.errno != errno.EEXIST: raise
makedirs(os.path.expanduser("~/linuxcnc/configs"))
# otherwise, on hardy the user is shown spurious "[application] closed
# unexpectedly" messages but denied the ability to actually "report [the]
# problem"
def excepthook(exc_type, exc_obj, exc_tb):
try:
w = app.widgets.window1
except NameError:
w = None
lines = traceback.format_exception(exc_type, exc_obj, exc_tb)
m = gtk.MessageDialog(w,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_ERROR, gtk.BUTTONS_OK,
_("PNCconf encountered an error. The following "
"information may be useful in troubleshooting:\n\n")
+ "LinuxCNC Version: %s\n\n"% LINUXCNCVERSION + ''.join(lines))
m.show()
m.run()
m.destroy()
sys.excepthook = excepthook
BASE = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), ".."))
LOCALEDIR = os.path.join(BASE, "share", "locale")
import gettext;
domain = "linuxcnc"
gettext.install(domain, localedir=LOCALEDIR, unicode=True)
locale.setlocale(locale.LC_ALL, '')
locale.bindtextdomain(domain, LOCALEDIR)
gettext.bindtextdomain(domain, LOCALEDIR)
def iceil(x):
if isinstance(x, (int, long)): return x
if isinstance(x, basestring): x = float(x)
return int(math.ceil(x))
prefs = preferences.preferences()
_DEBUGSTRING = ["NONE"]
debugstate = False
# a class for holding the glade widgets rather than searching for them each time
class Widgets:
def __init__(self, xml):
self._xml = xml
def __getattr__(self, attr):
r = self._xml.get_object(attr)
if r is None: raise AttributeError, "No widget %r" % attr
return r
def __getitem__(self, attr):
r = self._xml.get_object(attr)
if r is None: raise IndexError, "No widget %r" % attr
return r
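# Usage sketch: with w = Widgets(builder), both w.window1 and w["window1"] look up
# the GTK object named "window1" from the loaded glade files; a missing name
# raises AttributeError or IndexError respectively.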
class App:
def __init__(self, dbgstate=0):
print dbgstate
global debug
global dbg
global _PD
self.debugstate = dbgstate
dbg = self.dbg
if self.debugstate:
print 'PNCconf debug',dbgstate
global _DEBUGSTRING
_DEBUGSTRING = [dbgstate]
self.recursive_block = False
self.firmware_block = False
# Private data holds the array of pages to load, signals, and messages
_PD = self._p = private_data.Private_Data(self,BIN,BASE)
self.d = data.Data(self, _PD, BASE, LINUXCNCVERSION)
self.splash_screen()
#self.pbar.set_fraction(.2)
#while gtk.events_pending():
# gtk.main_iteration()
bar_size = 0
# build the glade files
self.builder = MultiFileBuilder()
self.builder.set_translation_domain(domain)
self.builder.add_from_file(os.path.join(self._p.DATADIR,'main_page.glade'))
self.builder.add_from_file(os.path.join(self._p.DATADIR,'dialogs.glade'))
self.builder.add_from_file(os.path.join(self._p.DATADIR,'help.glade'))
window = self.builder.get_object("window1")
notebook1 = self.builder.get_object("notebook1")
for name,y,z,a in (self._p.available_page):
if name == 'intro': continue
dbg("loading glade page REFERENCE:%s TITLE:%s INIT STATE: %s STATE:%s"% (name,y,z,a),mtype="glade")
if not z:
self.add_placeholder_page(name)
page = self.builder.get_object('label_%s'%name)
notebook1.append_page(page)
continue
self.builder.add_from_file(os.path.join(self._p.DATADIR, '%s.glade'%name))
page = self.builder.get_object(name)
notebook1.append_page(page)
self.pbar.set_fraction(bar_size)
while gtk.events_pending():
gtk.main_iteration()
bar_size += .0555
if not 'dev' in dbgstate:
notebook1.set_show_tabs(False)
self.widgets = Widgets(self.builder)
self.TESTS = tests.TESTS(self)
self.p = pages.Pages(self)
self.INI = build_INI.INI(self)
self.HAL = build_HAL.HAL(self)
self.builder.set_translation_domain(domain) # for locale translations
self.builder.connect_signals( self.p ) # register callbacks from Pages class
wiz_pic = gtk.gdk.pixbuf_new_from_file(self._p.WIZARD)
self.widgets.wizard_image.set_from_pixbuf(wiz_pic)
self.window.hide()
axisdiagram = os.path.join(self._p.HELPDIR,"axisdiagram1.png")
self.widgets.helppic0.set_from_file(axisdiagram)
axisdiagram = os.path.join(self._p.HELPDIR,"lathe_diagram.png")
self.widgets.helppic1.set_from_file(axisdiagram)
axisdiagram = os.path.join(self._p.HELPDIR,"HomeAxisTravel_V2.png")
self.widgets.helppic2.set_from_file(axisdiagram)
axisdiagram = os.path.join(self._p.HELPDIR,"HomeAxisTravel_V3.png")
self.widgets.helppic3.set_from_file(axisdiagram)
self.map_7i76 = gtk.gdk.pixbuf_new_from_file(os.path.join(self._p.HELPDIR,"7i76_map.png"))
self.widgets.map_7i76_image.set_from_pixbuf(self.map_7i76)
self.map_7i77 = gtk.gdk.pixbuf_new_from_file(os.path.join(self._p.HELPDIR,"7i77_map.png"))
self.widgets.map_7i77_image.set_from_pixbuf(self.map_7i77)
#self.widgets.openloopdialog.hide()
self.p.initialize()
window.show()
self.axis_under_test = False
self.jogminus = self.jogplus = 0
# set preferences if they exist
link = short = advanced = show_pages = False
filename = os.path.expanduser("~/.pncconf-preferences")
if os.path.exists(filename):
match = open(filename).read()
textbuffer = self.widgets.textoutput.get_buffer()
try :
textbuffer.set_text("%s\n\n"% filename)
textbuffer.insert_at_cursor(match)
except:
pass
version = 0.0
d = xml.dom.minidom.parse(open(filename, "r"))
for n in d.getElementsByTagName("property"):
name = n.getAttribute("name")
text = n.getAttribute('value')
if name == "version":
version = eval(text)
elif name == "always_shortcut":
short = eval(text)
elif name == "always_link":
link = eval(text)
elif name == "use_ini_substitution":
self.widgets.useinisubstitution.set_active(eval(text))
elif name == "show_advanced_pages":
show_pages = eval(text)
elif name == "machinename":
self.d._lastconfigname = text
elif name == "chooselastconfig":
self.d._chooselastconfig = eval(text)
elif name == "MESABLACKLIST":
if version == self.d._preference_version:
self._p.MESABLACKLIST = eval(text)
elif name == "EXTRA_MESA_FIRMWAREDATA":
self.d._customfirmwarefilename = text
rcfile = os.path.expanduser(self.d._customfirmwarefilename)
print rcfile
if os.path.exists(rcfile):
try:
execfile(rcfile)
except:
print _("**** PNCCONF ERROR: custom firmware loading error")
self._p.EXTRA_MESA_FIRMWAREDATA = []
if not self._p.EXTRA_MESA_FIRMWAREDATA == []:
print _("**** PNCCONF INFO: Found extra firmware in file")
# these are set from the hidden preference file
self.widgets.createsymlink.set_active(link)
self.widgets.createshortcut.set_active(short)
self.widgets.advancedconfig.set_active(show_pages)
tempfile = os.path.join(self._p.DISTDIR, "configurable_options/ladder/TEMP.clp")
if os.path.exists(tempfile):
os.remove(tempfile)
def add_placeholder_page(self,name):
string = '''
<?xml version="1.0"?>
<interface>
<requires lib="gtk+" version="2.16"/>
<!-- interface-naming-policy project-wide -->
<object class="GtkLabel" id="label_%s">
<property name="visible">True</property>
<property name="label" translatable="yes">%s</property>
</object>
</interface>
'''%(name,name)
self.builder.add_from_string(string)
# build functions
def makedirs(self, path):
makedirs(path)
def build_base(self):
base = os.path.expanduser("~/linuxcnc/configs/%s" % self.d.machinename)
ncfiles = os.path.expanduser("~/linuxcnc/nc_files")
if not os.path.exists(ncfiles):
self.makedirs(ncfiles)
examples = os.path.join(BASE, "share", "linuxcnc", "ncfiles")
if not os.path.exists(examples):
examples = os.path.join(BASE, "nc_files")
if os.path.exists(examples):
os.symlink(examples, os.path.join(ncfiles, "examples"))
self.makedirs(base)
return base
def copy(self, base, filename):
dest = os.path.join(base, filename)
if not os.path.exists(dest):
shutil.copy(os.path.join(self._p.DISTDIR, filename), dest)
def buid_config(self):
base = self.build_base()
self.d.save(base)
#self.write_readme(base)
self.INI.write_inifile(base)
self.HAL.write_halfile(base)
self.copy(base, "tool.tbl")
if self.warning_dialog(self._p.MESS_QUIT,False):
gtk.main_quit()
# helper functions
def get_discovery_meta(self):
self.widgets.boarddiscoverydialog.set_title(_("Discovery metadata update"))
#self.widgets.cardname_label.set_text('Boardname: %s'%name)
self.widgets.boarddiscoverydialog.show_all()
self.widgets.window1.set_sensitive(0)
result = self.widgets.boarddiscoverydialog.run()
self.widgets.boarddiscoverydialog.hide()
self.widgets.window1.set_sensitive(1)
if result == gtk.RESPONSE_OK:
n = self.widgets.discovery_name_entry.get_text()
itr = self.widgets.discovery_interface_combobox.get_active_iter()
d = self.widgets.discovery_interface_combobox.get_model().get_value(itr, 1)
a = self.widgets.discovery_address_entry.get_text()
print 'discovery:',n,d,a
return n,d,a
def discovery_interface_combobox_changed(self,w):
itr = w.get_active_iter()
d = w.get_model().get_value(itr, 1)
if d == '--addr':
self.widgets.discovery_address_entry.set_sensitive(True)
else:
self.widgets.discovery_address_entry.set_sensitive(False)
def get_board_meta(self, name):
name = name.lower()
meta = _PD.MESA_BOARD_META.get(name)
if meta:
return meta
else:
for key in _PD.MESA_BOARD_META:
if key in name:
return _PD.MESA_BOARD_META.get(key)
print 'boardname %s not found in hardware metadata array'% name
self.widgets.boardmetadialog.set_title(_("%s metadata update") % name)
self.widgets.cardname_label.set_text('Boardname: %s'%name)
self.widgets.boardmetadialog.show_all()
self.widgets.window1.set_sensitive(0)
result = self.widgets.boardmetadialog.run()
self.widgets.boardmetadialog.hide()
self.widgets.window1.set_sensitive(1)
if result == gtk.RESPONSE_OK:
itr = self.widgets.interface_combobox.get_active_iter()
d = self.widgets.interface_combobox.get_model().get_value(itr, 1)
ppc = int(self.widgets.ppc_combobox.get_active_text())
tp = int(self.widgets.noc_spinbutton.get_value())
_PD.MESA_BOARD_META[name] = {'DRIVER':d,'PINS_PER_CONNECTOR':ppc,'TOTAL_CONNECTORS':tp}
meta = _PD.MESA_BOARD_META.get(name)
if meta:
return meta
def splash_screen(self):
self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
self.window.set_type_hint(gtk.gdk.WINDOW_TYPE_HINT_SPLASHSCREEN)
self.window.set_title(_("Pncconf setup"))
self.window.set_border_width(10)
vbox = gtk.VBox(False, 5)
vbox.set_border_width(10)
self.window.add(vbox)
vbox.show()
align = gtk.Alignment(0.5, 0.5, 0, 0)
vbox.pack_start(align, False, False, 5)
align.show()
self.pbar = gtk.ProgressBar()
self.pbar.set_text(_("Pncconf is setting up"))
self.pbar.set_fraction(.1)
align.add(self.pbar)
self.pbar.show()
self.window.show()
while gtk.events_pending():
gtk.main_iteration()
def dbg(self,message,mtype='all'):
for hint in _DEBUGSTRING:
if "all" in hint or mtype in hint:
print(message)
if "step" in _DEBUGSTRING:
c = raw_input(_("\n**** Debug Pause! ****"))
return
def query_dialog(self,title, message):
def responseToDialog(entry, dialog, response):
dialog.response(response)
label = gtk.Label(message)
#label.modify_font(pango.FontDescription("sans 20"))
entry = gtk.Entry()
dialog = gtk.MessageDialog(self.widgets.window1,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_WARNING, gtk.BUTTONS_OK_CANCEL, title)
dialog.vbox.pack_start(label)
dialog.vbox.add(entry)
#allow the user to press enter to do ok
entry.connect("activate", responseToDialog, dialog, gtk.RESPONSE_OK)
dialog.show_all()
result = dialog.run()
text = entry.get_text()
dialog.destroy()
if result == gtk.RESPONSE_OK:
return text
else:
return None
def warning_dialog(self,message,is_ok_type):
if is_ok_type:
dialog = gtk.MessageDialog(self.widgets.window1,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_WARNING, gtk.BUTTONS_OK,message)
dialog.show_all()
result = dialog.run()
dialog.destroy()
return True
else:
dialog = gtk.MessageDialog(self.widgets.window1,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_QUESTION, gtk.BUTTONS_YES_NO,message)
dialog.show_all()
result = dialog.run()
dialog.destroy()
if result == gtk.RESPONSE_YES:
return True
else:
return False
def show_help(self):
helpfilename = os.path.join(self._p.HELPDIR, "%s"% self.d.help)
textbuffer = self.widgets.helpview.get_buffer()
try :
infile = open(helpfilename, "r")
if infile:
string = infile.read()
infile.close()
textbuffer.set_text(string)
except:
text = _("Specific Help page is unavailable\n")
self.warning_dialog(text,True)
self.widgets.help_window.set_title(_("Help Pages") )
self.widgets.helpnotebook.set_current_page(0)
self.widgets.help_window.show_all()
if self.debugstate:
self.widgets.input_tab.set_visible(True)
else:
self.widgets.input_tab.set_visible(False)
self.widgets.help_window.present()
def print_page(self,print_dialog, context, n, imagename):
ctx = context.get_cairo_context()
gdkcr = gtk.gdk.CairoContext(ctx)
gdkcr.set_source_pixbuf(self[imagename], 0,0)
gdkcr.paint ()
def print_image(self,image_name):
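        # Print the named pixbuf image on a single landscape page via a GTK print dialog.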
print 'print image'
print_dialog = gtk.PrintOperation()
print_dialog.set_n_pages(1)
settings = gtk.PrintSettings()
settings.set_orientation(gtk.PAGE_ORIENTATION_LANDSCAPE)
print_dialog.set_print_settings(settings)
print_dialog.connect("draw-page", self.print_page, image_name)
res = print_dialog.run(gtk.PRINT_OPERATION_ACTION_PRINT_DIALOG, self.widgets.help_window)
if res == gtk.PRINT_OPERATION_RESULT_APPLY:
settings = print_dialog.get_print_settings()
# check for realtime kernel
def check_for_rt(self):
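        # Verify we are running on a realtime system and that the running kernel
        # matches the one HAL was built for; in debug mode the check always passes.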
actual_kernel = os.uname()[2]
if hal.is_sim :
self.warning_dialog(self._p.MESS_NO_REALTIME,True)
if self.debugstate:
return True
else:
return False
elif hal.is_kernelspace and hal.kernel_version != actual_kernel:
self.warning_dialog(self._p.MESS_KERNEL_WRONG + '%s'%hal.kernel_version,True)
if self.debugstate:
return True
else:
return False
else:
return True
def add_external_folder_boardnames(self):
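        # Build the list of selectable Mesa board names from the firmware folders
        # on disk (warning and falling back to internal data if none are found),
        # plus pncconf's internal firmware boards and any extras named in the
        # user's .pncconf-preference file.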
if os.path.exists(self._p.FIRMDIR):
self._p.MESA_BOARDNAMES = []
for root, dirs, files in os.walk(self._p.FIRMDIR):
folder = root.lstrip(self._p.FIRMDIR)
if folder in self._p.MESABLACKLIST:continue
if folder == "":continue
dbg("****folder added :%s"%folder,mtype='firmware')
self._p.MESA_BOARDNAMES.append(folder)
else:
            #TODO what if there is no external firmware - is this enough?
self.warning_dialog(_("You have no hostmot2 firmware downloaded in folder:\n%s\n\
PNCconf will use internal firmware data"%self._p.FIRMDIR),True)
for firmware in self._p.MESA_INTERNAL_FIRMWAREDATA:
if 'internal' in firmware[0].lower():
if firmware[0] in self._p.MESA_BOARDNAMES:
continue
self._p.MESA_BOARDNAMES.append(firmware[0])
if self.d.advanced_option:
self._p.MESA_BOARDNAMES.append('Discovery Option')
# add any extra firmware boardnames from .pncconf-preference file
if not self._p.EXTRA_MESA_FIRMWAREDATA == []:
for search, item in enumerate(self._p.EXTRA_MESA_FIRMWAREDATA):
d = self._p.EXTRA_MESA_FIRMWAREDATA[search]
if not d[_PD._BOARDTITLE] in self._p.MESA_BOARDNAMES:
self._p.MESA_BOARDNAMES.append(d[_PD._BOARDTITLE])
model = self.widgets.mesa_boardname_store
model.clear()
for search,item in enumerate(self._p.MESA_BOARDNAMES):
#print search,item
model.append((item,))
def fill_pintype_model(self):
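        # Build one gtk.ListStore per pin type (GPIO, stepper, encoder, PWM, ...)
        # used to populate the pintype comboboxes on the Mesa pages.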
# notused
self.d._notusedliststore = gtk.ListStore(str,int)
self.d._notusedliststore.append([_PD.pintype_notused[0],0])
self.d._ssrliststore = gtk.ListStore(str,int)
self.d._ssrliststore.append([_PD.pintype_ssr[0],0])
# gpio
self.d._gpioliststore = gtk.ListStore(str,int)
for number,text in enumerate(_PD.pintype_gpio):
self.d._gpioliststore.append([text,0])
# stepper
self.d._stepperliststore = gtk.ListStore(str,int)
for number,text in enumerate(_PD.pintype_stepper):
self.d._stepperliststore.append([text,number])
# encoder
self.d._encoderliststore = gtk.ListStore(str,int)
for number,text in enumerate(_PD.pintype_encoder):
self.d._encoderliststore.append([text,number])
# mux encoder
self.d._muxencoderliststore = gtk.ListStore(str,int)
for number,text in enumerate(_PD.pintype_muxencoder):
self.d._muxencoderliststore.append([text,number])
# resolver
self.d._resolverliststore = gtk.ListStore(str,int)
for number,text in enumerate(_PD.pintype_resolver):
self.d._resolverliststore.append([text,number])
# 8i20 AMP
self.d._8i20liststore = gtk.ListStore(str,int)
for number,text in enumerate(_PD.pintype_8i20):
self.d._8i20liststore.append([text,number])
# potentiometer output
self.d._potliststore = gtk.ListStore(str,int)
for number,text in enumerate(_PD.pintype_potentiometer):
self.d._potliststore.append([text,number])
# analog input
self.d._analoginliststore = gtk.ListStore(str,int)
for number,text in enumerate(_PD.pintype_analog_in):
self.d._analoginliststore.append([text,number])
# pwm
self.d._pwmrelatedliststore = gtk.ListStore(str,int)
for number,text in enumerate(_PD.pintype_pwm):
self.d._pwmrelatedliststore.append([text,number])
self.d._pwmcontrolliststore = gtk.ListStore(str,int)
self.d._pwmcontrolliststore.append([_PD.pintype_pwm[0],0])
self.d._pwmcontrolliststore.append([_PD.pintype_pdm[0],0])
self.d._pwmcontrolliststore.append([_PD.pintype_udm[0],0])
# pdm
self.d._pdmrelatedliststore = gtk.ListStore(str,int)
for number,text in enumerate(_PD.pintype_pdm):
self.d._pdmrelatedliststore.append([text,number])
self.d._pdmcontrolliststore = gtk.ListStore(str,int)
self.d._pdmcontrolliststore.append([_PD.pintype_pwm[0],0])
self.d._pdmcontrolliststore.append([_PD.pintype_pdm[0],0])
self.d._pdmcontrolliststore.append([_PD.pintype_udm[0],0])
# udm
self.d._udmrelatedliststore = gtk.ListStore(str,int)
for number,text in enumerate(_PD.pintype_udm):
self.d._udmrelatedliststore.append([text,number])
self.d._udmcontrolliststore = gtk.ListStore(str,int)
self.d._udmcontrolliststore.append([_PD.pintype_pwm[0],0])
self.d._udmcontrolliststore.append([_PD.pintype_pdm[0],0])
self.d._udmcontrolliststore.append([_PD.pintype_udm[0],0])
#tppwm
self.d._tppwmliststore = gtk.ListStore(str,int)
for number,text in enumerate(_PD.pintype_tp_pwm):
self.d._tppwmliststore.append([text,number])
#sserial
self.d._sserialliststore = gtk.ListStore(str,int)
for number,text in enumerate(_PD.pintype_sserial):
self.d._sserialliststore.append([text,number])
# comboboxes with 3 levels
def fill_combobox_models2(self):
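        # Build three-level gtk.TreeStores for the signal comboboxes that need
        # parent/child/grandchild grouping (GPIO inputs, steppers, encoders, PWM).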
templist = [ ["_gpioisignaltree",_PD.human_input_names,1,'hal_input_names'],
["_steppersignaltree",_PD.human_stepper_names,1,'hal_stepper_names'],
["_encodersignaltree",_PD.human_encoder_input_names,1,'hal_encoder_input_names'],
["_muxencodersignaltree",_PD.human_encoder_input_names,1,'hal_encoder_input_names'],
["_pwmsignaltree",_PD.human_pwm_output_names,1,'hal_pwm_output_names'],]
for item in templist:
#print "\ntype",item[0]
count = 0
end = len(item[1])-1
# treestore(parentname,parentnum,signalname,signaltreename,signal index number)
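            # e.g. a selectable leaf row might look like ('Home X', 23, 'home-x', 'hal_input_names', 23)
            # (illustrative values only - the real index is looked up in the signal name arrays)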
self.d[item[0]]= gtk.TreeStore(str,int,str,str,int)
for i,parent in enumerate(item[1]):
############################
# if there are no children:
############################
if not isinstance(parent[1], list):
signame = parent[1]
index = _PD[item[3]].index(parent[1])
#print 'no children:', signame, index
# add parent and get reference for child
# This entry is selectable it has a signal attached to it
piter = self.d[item[0]].append(None, [parent[0], index,signame,item[3],0])
#print parent,parentnum,count,signame,item[3],i,signame,count
else:
                    # If the list is empty it's a custom signal - with no signals yet
if len(parent[1]) == 0:
piter = self.d[item[0]].append(None, [parent[0], 0,'none',item[3],0])
else:
#print "parsing child",parent[1]
# add parent title
##########################
# if there are children:
# add an entry to first list that cannot be selected
# (well it always gives the unused signal - 0)
# because we need users to select from the next column
##########################
piter = self.d[item[0]].append(None, [parent[0],0,signame,item[3],0])
for j,child in enumerate(parent[1]):
#############################
# If grandchildren
#############################
if isinstance(child[1], list):
##########################
# if there are children:
# add an entry to second list that cannot be selected
# (well it always gives the unused signal - 0)
# because we need users to select from the next column
##########################
citer = self.d[item[0]].append(piter, [child[0], 0,signame,item[3],0])
#print 'add to CHILD list',child[0]
#print 'String:',child[1]
for k,grandchild in enumerate(child[1]):
#print 'raw grand: ', grandchild
#############################
# If GREAT children
#############################
#print grandchild[0],grandchild[1]
if isinstance(grandchild[1], list):
#print 'ERROR combo boxes can not have GREAT children yet add'
#print 'skipping'
continue
else:
#############################
# If No GREAT children
############################
humanName = grandchild[0]
sigName = grandchild[1]
index = _PD[item[3]].index(grandchild[1])
halNameArray = item[3]
#print 'adding to grandchild to childlist: ', humanName,index,sigName,halNameArray,index
self.d[item[0]].append(citer, [humanName, index,sigName,halNameArray,index])
####################
# No grandchildren
####################
else:
#print' add to child - no grandchild',child
humanName = child[0]
sigName = child[1]
index = _PD[item[3]].index(child[1])
halNameArray = item[3]
#print child[0],index,sigName,item[3],index
self.d[item[0]].append(piter, [humanName, index,sigName,halNameArray,index])
count +=item[2]
# combobox with 2 levels
def fill_combobox_models(self):
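        # Two-level version of fill_combobox_models2 for the output, resolver,
        # TPPWM, 8i20, potentiometer, analog-in and smart-serial signal comboboxes.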
templist = [ ["_gpioosignaltree",_PD.human_output_names,1,'hal_output_names'],
["_resolversignaltree",_PD.human_resolver_input_names,1,'hal_resolver_input_names'],
["_tppwmsignaltree",_PD.human_tppwm_output_names,8,'hal_tppwm_output_names'],
["_8i20signaltree",_PD.human_8i20_input_names,1,'hal_8i20_input_names'],
["_potsignaltree",_PD.human_pot_output_names,2,'hal_pot_output_names'],
["_analoginsignaltree",_PD.human_analog_input_names,1,'hal_analog_input_names'],
["_sserialsignaltree",_PD.human_sserial_names,3,'hal_sserial_names']
]
for item in templist:
#print "\ntype",item[0]
count = 0
end = len(item[1])-1
# treestore(parentname,parentnum,signalname,signaltreename,signal index number)
self.d[item[0]]= gtk.TreeStore(str,int,str,str,int)
for i,parent in enumerate(item[1]):
############################
# if there are no children:
############################
if len(parent[1]) == 0:
# if combobox has a 'custom' signal choice then the index must be 0
if i == end and not item[0] =="_sserialsignaltree":parentnum = 0
else:parentnum = count
#print "length of human names:",len(parent[1])
# this adds the index number (parentnum) of the signal
try:
signame=_PD[item[3]][count]
except:
signame = 'none'
# add parent and get reference for child
piter = self.d[item[0]].append(None, [parent[0], parentnum,signame,item[3],count])
#print parent,parentnum,count,signame,item[3],i,signame,count
if count == 0: count = 1
else: count +=item[2]
##########################
# if there are children:
##########################
else:
#print "parsing child",signame
# add parent title
piter = self.d[item[0]].append(None, [parent[0],0,signame,item[3],count])
for j,child in enumerate(parent[1]):
#print len(child[1]), child[0]
#if item[0] =='_gpioisignaltree':
#print item[0], child[0],len(child[1])
#############################
# If grandchildren
#############################
if len(child[1]) > 1:
# add child and get reference
citer = self.d[item[0]].append(piter, [child[0], 0,signame,item[3],count])
#if item[0] =='_gpioisignaltree':
#print 'add to CHILD list',child[0]
#print 'Strig:',child[1]
for k,grandchild in enumerate(child[1]):
#print 'raw grand: ', grandchild
#############################
# If greatchildren
#############################
#print grandchild[0],grandchild[1]
if len(grandchild) > 1:
#print 'add to grandchild child list',grandchild[0]
index = _PD[item[3]].index(grandchild[1])
self.d[item[0]].append(citer, [grandchild[0],index,grandchild[1],item[3],index])
continue
else:
#############################
# If No greatchildren
#############################
try:
signame=_PD[item[3]][count]
except:
signame = 'none'
#print 'adding to grandchild to childlist: ', grandchild,signame,item[3],count
# add grandchild
self.d[item[0]].append(piter, [child,0,signame,item[3],count])
#count +=item[2]
####################
# No grandchildren
####################
else:
#print' add to child - no grandchild',child
signame=_PD[item[3]][count]
#print i,count,parent[0],child,signame,item[3], _PD[item[3]].index(signame),count
self.d[item[0]].append(piter, [child, count,signame,item[3],count])
count +=item[2]
self.fill_combobox_models2()
self.d._notusedsignaltree = gtk.TreeStore(str,int,str,str,int)
self.d._notusedsignaltree.append(None, [_PD.human_notused_names[0][0],0,'unused-unused','_notusedsignaltree',0])
# make a filter for sserial encoder as they can't be used for AXES
self.d._encodersignalfilter = self.d._encodersignaltree.filter_new()
self.d._enc_filter_list = ['Axis Encoder']
self.d._encodersignalfilter.set_visible_func(self.visible_cb, self.d._enc_filter_list)
        # build filters for the 'controlling' sserial combobox
# We need to limit selections often
for channel in range(0,_PD._NUM_CHANNELS):
self.d['_sserial%d_filter_list'%channel] =[]
self.d['_sserial%d_signalfilter'%channel] = self.d._sserialsignaltree.filter_new()
self.d['_sserial%d_signalfilter'%channel].set_visible_func(self.filter_cb,self.d['_sserial%d_filter_list'%channel])
self.set_filter('_sserial%d'%channel,'ALL')
# Filter out any matching names in a list
def visible_cb(self, model, iter, data ):
#print model.get_value(iter, 0) ,data
return not model.get_value(iter, 0) in data
# filter out anything not in one of the lists, the list depending on a keyword
def set_filter(self,sserial,data):
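        # Rebuild the filter list for one smart-serial channel so its combobox
        # only offers cards that match the given keyword (e.g. '7I77').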
keyword = data.upper()
if keyword == '7I77':
f_list = ['Unused','7i77']
elif keyword == '7I76':
f_list = ['Unused','7i76']
else:
f_list = ['Unused','7i73','7i69','8i20','7i64','7i71','7i70','7i84']
del self.d['%s_filter_list'%sserial][:]
for i in(f_list):
self.d['%s_filter_list'%sserial].append(i)
#print '\n',filterlist,self.d[filterlist]
self.d['%s_signalfilter'%sserial].refilter()
# Filter callback
def filter_cb(self, model, iter, data ):
#print model.get_value(iter, 0) ,data
for i in data:
if i in model.get_value(iter, 0):
return True
return False
def load_config(self):
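        # Ask for an existing .pncconf file, load it, and sanity check that the
        # stored firmware data is recent enough for this version of pncconf.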
filter = gtk.FileFilter()
filter.add_pattern("*.pncconf")
filter.set_name(_("LinuxCNC 'PNCconf' configuration files"))
dialog = gtk.FileChooserDialog(_("Modify Existing Configuration"),
self.widgets.window1, gtk.FILE_CHOOSER_ACTION_OPEN,
(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
gtk.STOCK_OPEN, gtk.RESPONSE_OK))
dialog.set_default_response(gtk.RESPONSE_OK)
dialog.add_filter(filter)
if not self.d._lastconfigname == "" and self.d._chooselastconfig:
dialog.set_filename(os.path.expanduser("~/linuxcnc/configs/%s.pncconf"% self.d._lastconfigname))
dialog.add_shortcut_folder(os.path.expanduser("~/linuxcnc/configs"))
dialog.set_current_folder(os.path.expanduser("~/linuxcnc/configs"))
dialog.show_all()
result = dialog.run()
if result == gtk.RESPONSE_OK:
filename = dialog.get_filename()
dialog.destroy()
self.d.load(filename, self)
self.d._mesa0_configured = False
self.d._mesa1_configured = False
try:
# check that the firmware is current enough by checking the length of a sub element and that the other is an integer.
for boardnum in(0,1):
i=j=None
i = len(self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._NUMOFCNCTRS])
j = self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._HIFREQ]+100 # throws an error if not an integer.
if not i > 1:
print i,j,boardnum
raise UserWarning
except :
print i,j,boardnum
self.warning_dialog(_("It seems data in this file is from too old of a version of PNCConf to continue.\n."),True)
return True
else:
dialog.destroy()
return True
def mesa_firmware_search(self,boardtitle,*args):
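        # Walk the external firmware directory for this board and parse every
        # firmware XML file found, appending the results to MESA_FIRMWAREDATA.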
#TODO if no firm packages set up for internal data?
#TODO don't do this if the firmware is already loaded
self.pbar.set_text("Loading external firmware")
self.pbar.set_fraction(0)
self.window.show()
while gtk.events_pending():
gtk.main_iteration()
firmlist = []
for root, dirs, files in os.walk(self._p.FIRMDIR):
folder = root.lstrip(self._p.FIRMDIR)
#dbg('Firmware folder:%s'% folder)
if folder in self._p.MESABLACKLIST:continue
if not folder == boardtitle:continue
for n,name in enumerate(files):
if name in self._p.MESABLACKLIST:continue
if ".xml" in name:
dbg('%s'% name)
temp = name.rstrip(".xml")
firmlist.append(temp)
dbg("\nXML list:%s"%firmlist,mtype="firmname")
for n,currentfirm in enumerate(firmlist):
self.pbar.set_fraction(n*1.0/len(firmlist))
while gtk.events_pending():
gtk.main_iteration()
# XMLs don't tell us the driver type so set to None (parse will guess)
firmdata = self.parse_xml(None,boardtitle, currentfirm,os.path.join(
self._p.FIRMDIR,boardtitle,currentfirm+".xml"))
self._p.MESA_FIRMWAREDATA.append(firmdata)
self.window.hide()
def parse_xml(self, driver, boardtitle, firmname, xml_path):
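        # Parse a hostmot2 firmware XML description and return a flat list in
        # pncconf's internal firmware-data layout: counts of each module type
        # followed by one entry per pin.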
def search(elementlist):
for i in elementlist:
temp = root.find(i)
if temp is not None:
return temp.text
return temp
root = xml.etree.ElementTree.parse(xml_path)
watchdog = encoder = resolver = pwmgen = led = muxedqcount = 0
stepgen = tppwmgen = sserialports = sserialchannels = 0
numencoderpins = numpwmpins = 3; numstepperpins = 2; numttpwmpins = 0; numresolverpins = 10
text = search(('boardname','BOARDNAME'))
if text == None:
print 'Missing info: boardname'
return
boardname = text.lower()
#dbg("\nBoard and firmwarename: %s %s\n"%( boardname, firmname), "firmraw")
text = search(("IOPORTS","ioports")) ; #print numcnctrs
if text == None:
print 'Missing info: ioports'
return
numcnctrs = int(text)
text = search(("PORTWIDTH","portwidth"))
if text == None:
print 'Missing info: portwidth'
return
portwidth = int(text)
maxgpio = numcnctrs * portwidth ; #print maxgpio
placeholders = 24-portwidth
text = search(("CLOCKLOW","clocklow")) ; #print lowfreq
if text == None:
print 'Missing info: clocklow'
return
lowfreq = int(text)/1000000
text = search(("CLOCKHIGH","clockhigh")); #print hifreq
if text == None:
print 'Missing info: clockhigh'
return
hifreq = int(text)/1000000
modules = root.findall(".//modules")[0]
if driver == None:
meta = self.get_board_meta(boardname)
driver = meta.get('DRIVER')
for i,j in enumerate(modules):
k = modules[i].find("tagname").text
print k
if k in ("Watchdog","WatchDog","WATCHDOG"):
l = modules[i].find("numinstances").text;#print l,k
watchdog = int(l)
elif k in ("Encoder","QCOUNT"):
l = modules[i].find("numinstances").text;#print l,k
encoder = int(l)
elif k in ("ResolverMod","RESOLVERMOD"):
l = modules[i].find("numinstances").text;#print l,k
resolver = int(l)
elif k in ("PWMGen","PWMGEN","PWM"):
l = modules[i].find("numinstances").text;#print l,k
pwmgen = int(l)
elif k == "LED":
l = modules[i].find("numinstances").text;#print l,k
led = int(l)
elif k in ("MuxedQCount","MUXEDQCOUNT"):
l = modules[i].find("numinstances").text;#print l,k
muxedqcount = int(l)
elif k in ("StepGen","STEPGEN"):
l = modules[i].find("numinstances").text;#print l,k
stepgen = int(l)
elif k in ("TPPWM","TPPWM"):
l = modules[i].find("numinstances").text;#print l,k
tppwmgen = int(l)
elif k in ("SSerial","SSERIAL"):
l = modules[i].find("numinstances").text;#print l,k
sserialports = int(l)
elif k in ("None","NONE"):
l = modules[i].find("numinstances").text;#print l,k
elif k in ("ssr","SSR"):
l = modules[i].find("numinstances").text;#print l,k
elif k in ("IOPort","AddrX","MuxedQCountSel"):
continue
else:
print "**** WARNING: Pncconf parsing firmware: tagname (%s) not reconized"% k
discov_sserial = []
ssname = root.findall("SSERIALDEVICES/SSERIALFUNCTION")
for i in (ssname):
port = i.find("PORT").text
dev = i.find("DEVICE").text
chan = i.find("CHANNEL").text
discov_sserial.append((int(port),int(chan),dev))
print 'discovered sserial:', discov_sserial
pins = root.findall(".//pins")[0]
temppinlist = []
tempconlist = []
pinconvertenc = {"PHASE A":_PD.ENCA,"PHASE B":_PD.ENCB,"INDEX":_PD.ENCI,"INDEXMASK":_PD.ENCM,
"QUAD-A":_PD.ENCA,"QUAD-B":_PD.ENCB,"QUAD-IDX":_PD.ENCI,
"MUXED PHASE A":_PD.MXE0,"MUXED PHASE B":_PD.MXE1,"MUXED INDEX":_PD.MXEI,
"MUXED INDEX MASK":_PD.MXEM,"MUXED ENCODER SELECT 0":_PD.MXES,"MUXED ENCODER SELEC":_PD.MXES,
"MUXQ-A":_PD.MXE0,"MUXQ-B":_PD.MXE1,"MUXQ-IDX":_PD.MXEI,"MUXSEL0":_PD.MXES}
pinconvertresolver = {"RESOLVER POWER ENABLE":_PD.RESU,"RESOLVER SPIDI 0":_PD.RES0,
"RESOLVER SPIDI 1":_PD.RES1,"RESOLVER ADC CHANNEL 2":_PD.RES2,"RESOLVER ADC CHANNEL 1":_PD.RES3,
"RESOLVER ADC CHANNEL 0":_PD.RES4,"RESOLVER SPI CLK":_PD.RES5,"RESOLVER SPI CHIP SELECT":_PD.RESU,
"RESOLVER PDMM":_PD.RESU,"RESOLVER PDMP":_PD.RESU}
pinconvertstep = {"STEP":_PD.STEPA,"DIR":_PD.STEPB,"STEP/TABLE1":_PD.STEPA,"DIR/TABLE2":_PD.STEPB}
#"StepTable 2":STEPC,"StepTable 3":STEPD,"StepTable 4":STEPE,"StepTable 5":STEPF
pinconvertppwm = {"PWM/UP":_PD.PWMP,"DIR/DOWN":_PD.PWMD,"ENABLE":_PD.PWME,
"PWM":_PD.PWMP,"DIR":_PD.PWMD,"/ENABLE":_PD.PWME}
pinconverttppwm = {"PWM A":_PD.TPPWMA,"PWM B":_PD.TPPWMB,"PWM C":_PD.TPPWMC,
"PWM /A":_PD.TPPWMAN,"PWM /B":_PD.TPPWMBN,"PWM /C":_PD.TPPWMCN,
"FAULT":_PD.TPPWMF,"ENABLE":_PD.TPPWME}
pinconvertsserial = {"RXDATA0":_PD.RXDATA0,"TXDATA0":_PD.TXDATA0,"TXE0":_PD.TXEN0,"TXEN0":_PD.TXEN0,
"RXDATA1":_PD.RXDATA0,"TXDATA1":_PD.TXDATA0,"TXE1":_PD.TXEN0,"TXEN1":_PD.TXEN0,
"RXDATA2":_PD.RXDATA1,"TXDATA2":_PD.TXDATA1,"TXE2":_PD.TXEN1,"TXEN2":_PD.TXEN1,
"RXDATA3":_PD.RXDATA2,"TXDATA3":_PD.TXDATA2,"TXE3":_PD.TXEN2,"TXEN3":_PD.TXEN2,
"RXDATA4":_PD.RXDATA3,"TXDATA4":_PD.TXDATA3,"TXE4":_PD.TXEN3,"TXEN4":_PD.TXEN3,
"RXDATA5":_PD.RXDATA4,"TXDATA5":_PD.TXDATA4,"TXE5":_PD.TXEN4,"TXEN4":_PD.TXEN4,
"RXDATA6":_PD.RXDATA5,"TXDATA6":_PD.TXDATA5,"TXE6":_PD.TXEN5,"TXEN6":_PD.TXEN5,
"RXDATA7":_PD.RXDATA6,"TXDATA7":_PD.TXDATA6,"TXE7":_PD.TXEN6,"TXEN7":_PD.TXEN6,
"RXDATA8":_PD.RXDATA7,"TXDATA8":_PD.TXDATA7,"TXE8":_PD.TXEN7,"TXEN8":_PD.TXEN7}
pinconvertnone = {"NOT USED":_PD.GPIOI}
count = 0
fakecon = 0
for i,j in enumerate(pins):
instance_num = 9999
iocode = None
temppinunit = []
temp = pins[i].find("connector").text
if 'P' in temp:
tempcon = int(temp.strip("P"))
else:
tempcon = temp
tempfunc = pins[i].find("secondaryfunctionname").text
            tempfunc = tempfunc.upper().strip() # normalise capitalization: Peter's XMLs differ from LinuxCNC's
if "(IN)" in tempfunc:
tempfunc = tempfunc.rstrip(" (IN)")
elif "(OUT" in tempfunc:
tempfunc = tempfunc.rstrip(" (OUT)")
convertedname = "Not Converted"
            # this converts the XML file component names to pncconf's names
try:
secmodname = pins[i].find("secondarymodulename")
modulename = secmodname.text.upper().strip()
dbg("secondary modulename: %s, %s."%( tempfunc,modulename), "firmraw")
if modulename in ("ENCODER","QCOUNT","MUXEDQCOUNT","MUXEDQCOUNTSEL"):
convertedname = pinconvertenc[tempfunc]
elif modulename in ("ResolverMod","RESOLVERMOD"):
convertedname = pinconvertresolver[tempfunc]
elif modulename in ("PWMGen","PWMGEN","PWM"):
convertedname = pinconvertppwm[tempfunc]
elif modulename in ("StepGen","STEPGEN"):
convertedname = pinconvertstep[tempfunc]
elif modulename in ("TPPWM","TPPWM"):
convertedname = pinconverttppwm[tempfunc]
elif modulename in ("SSerial","SSERIAL"):
temp = pins[i].find("foundsserialdevice")
if temp is not None:
founddevice = temp.text.upper()
else:
founddevice = None
#print tempfunc,founddevice
# this auto selects the sserial 7i76 mode 0 card for sserial 0 and 2
                    # as the 5i25/7i76 uses some of the sserial channels for its pins.
if boardname in ("5i25","7i92"):
if "7i77_7i76" in firmname:
if tempfunc == "TXDATA1": convertedname = _PD.SS7I77M0
elif tempfunc == "TXDATA2": convertedname = _PD.SS7I77M1
elif tempfunc == "TXDATA4": convertedname = _PD.SS7I76M3
else: convertedname = pinconvertsserial[tempfunc]
#print "XML ",firmname, tempfunc,convertedname
elif "7i76x2" in firmname or "7i76x1" in firmname:
if tempfunc == "TXDATA1": convertedname = _PD.SS7I76M0
elif tempfunc == "TXDATA3": convertedname = _PD.SS7I76M2
else: convertedname = pinconvertsserial[tempfunc]
#print "XML ",firmname, tempfunc,convertedname
elif "7i77x2" in firmname or "7i77x1" in firmname:
if tempfunc == "TXDATA1": convertedname = _PD.SS7I77M0
elif tempfunc == "TXDATA2": convertedname = _PD.SS7I77M1
elif tempfunc == "TXDATA4": convertedname = _PD.SS7I77M3
elif tempfunc == "TXDATA5": convertedname = _PD.SS7I77M4
else: convertedname = pinconvertsserial[tempfunc]
#print "XML ",firmname, tempfunc,convertedname
elif founddevice == "7I77-0": convertedname = _PD.SS7I77M0
elif founddevice == "7I77-1": convertedname = _PD.SS7I77M1
elif founddevice == "7I77-3": convertedname = _PD.SS7I77M3
elif founddevice == "7I77-4": convertedname = _PD.SS7I77M4
elif founddevice == "7I76-0": convertedname = _PD.SS7I76M0
elif founddevice == "7I76-2": convertedname = _PD.SS7I76M2
elif founddevice == "7I76-3": convertedname = _PD.SS7I76M3
else: convertedname = pinconvertsserial[tempfunc]
else:
convertedname = pinconvertsserial[tempfunc]
elif modulename in ('SSR','SSR'):
if tempfunc == 'AC':
convertedname = _PD.NUSED
elif 'OUT-' in tempfunc:
convertedname = _PD.SSR0
# ssr outputs encode the HAL number in the XML name
                        # add 100 so it can not be changed from an output
iocode = 100 + int(tempfunc[4:])
elif modulename in ("None","NONE"):
iocode = 0
#convertedname = pinconvertnone[tempfunc]
else:
                    print 'unknown module - setting to unusable',modulename, tempfunc
convertedname = _PD.NUSED
except:
iocode = 0
exc_type, exc_value, exc_traceback = sys.exc_info()
formatted_lines = traceback.format_exc().splitlines()
print
print "****pncconf verbose XML parse debugging:",formatted_lines[0]
traceback.print_tb(exc_traceback, limit=1, file=sys.stdout)
print formatted_lines[-1]
if iocode == 0:
                # must be GPIO pins if there is no secondary module name
                # or if pinconvert fails, e.g. StepTable instances default to GPIO
temppinunit.append(_PD.GPIOI)
                temppinunit.append(0) # 0 signals to pncconf that GPIO can be changed to input or output
elif iocode >= 100:
temppinunit.append(_PD.SSR0)
temppinunit.append(iocode)
else:
instance_num = int(pins[i].find("secondaryinstance").text)
                # this is a workaround for the 7i77_7i76 firmware. it uses a mux encoder for the 7i76 but only uses half of it
# this is because of a limitation of hostmot2 - it can't have mux encoders and regular encoders
# so in pncconf we look for this and change it to a regular encoder.
if boardname == "5i25" and firmname == "7i77_7i76":
if modulename in ("MuxedQCount","MUXEDQCOUNT") and instance_num == 3:
instance_num = 6
encoder =-1
if convertedname == _PD.MXE0: convertedname = _PD.ENCA
elif convertedname == _PD.MXE1: convertedname = _PD.ENCB
elif convertedname == _PD.MXEI: convertedname = _PD.ENCI
temppinunit.append(convertedname)
if tempfunc in("MUXED ENCODER SELECT 0","MUXEDQCOUNTSEL") and instance_num == 6:
instance_num = 3
temppinunit.append(instance_num)
tempmod = pins[i].find("secondarymodulename").text
tempfunc = tempfunc.upper()# normalize capitalization
#dbg("secondary modulename, function: %s, %s."%( tempmod,tempfunc), "firmraw")
if tempmod in("Encoder","MuxedQCount") and tempfunc in ("MUXED INDEX MASK (IN)","INDEXMASK (IN)"):
numencoderpins = 4
if tempmod in("SSerial","SSERIAL") and tempfunc in ("TXDATA1","TXDATA2","TXDATA3",
"TXDATA4","TXDATA5","TXDATA6","TXDATA7","TXDATA8"):
sserialchannels +=1
#dbg("temp: %s, converted name: %s. num %d"%( tempfunc,convertedname,instance_num), "firmraw")
if not tempcon in tempconlist:
tempconlist.append(tempcon)
temppinlist.append(temppinunit)
            # add NONE place holders for boards with fewer than 24 pins per connector.
if not placeholders == 0:
#print i,portwidth*numcnctrs
if i == (portwidth + count-1) or i == portwidth*numcnctrs-1:
#print "loop %d %d"% (i,portwidth + count-1)
                    count += portwidth
#print "count %d" % count
for k in range(0,placeholders):
#print "%d fill here with %d parts"% (k,placeholders)
temppinlist.append((_PD.NUSED,0))
if not sserialchannels == 0:
sserialchannels +=1
# 7i96 doesn't number the connectors with P numbers so we fake it
# TODO
        # probably should move the connector numbers to board data rather than firmware
for j in tempconlist:
if not isinstance(j, (int, long)):
tempconlist = [i for i in range(1,len(tempconlist)+1)]
break
temp = [boardtitle,boardname,firmname,boardtitle,driver,encoder + muxedqcount,
numencoderpins,resolver,numresolverpins,pwmgen,numpwmpins,
tppwmgen,numttpwmpins,stepgen,numstepperpins,
sserialports,sserialchannels,discov_sserial,0,0,0,0,0,0,0,watchdog,maxgpio,
lowfreq,hifreq,tempconlist]
for i in temppinlist:
temp.append(i)
if "5i25" in boardname :
dbg("5i25 firmware:\n%s\n"%( temp), mtype="5i25")
print 'firm added:\n',temp
return temp
def discover_mesacards(self):
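        # Run mesaflash (or, in debug mode, read pasted text from the help input
        # tab) to discover a connected Mesa card and return the raw output.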
name, interface, address = self.get_discovery_meta()
if name is None: return
if not name:
name = '5i25'
if self.debugstate:
print 'try to discover board by reading help text input:',name
buf = self.widgets.textinput.get_buffer()
info = buf.get_text(buf.get_start_iter(),
buf.get_end_iter(),
True)
else:
info = self.call_mesaflash(name,interface,address)
print 'INFO:',info,'<-'
if info is None: return None
lines = info.splitlines()
try:
if 'ERROR' in lines[0]:
raise ValueError('Mesaflash Error')
except ValueError as err:
text = err.args
self.warning_dialog(text[0],True)
return
except:
self.warning_dialog('Unspecified Error with Mesaflash',True)
return
if 'No' in lines[0] and 'board found' in lines[0] :
text = _("No board was found\n")
self.warning_dialog(text,True)
print 'OOPS no board found!'
return None
return info
def call_mesaflash(self, devicename, interface, address):
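        # Build and run the mesaflash command line for the chosen device and
        # interface, showing the output (or the error) in the help text view.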
if address == ' ':
address = None
textbuffer = self.widgets.textoutput.get_buffer()
print 'DEVICE NAME SPECIFIED',devicename, interface, address
        # 7i43 needs its firmware loaded before it can be 'discovered'
if '7i43' in devicename.lower():
halrun = os.popen("halrun -Is > /dev/null", "w")
halrun.write("echo\n")
load,read,write = self.hostmot2_command_string()
# do I/O load commands
for i in load:
halrun.write('%s\n'%i)
halrun.flush()
time.sleep(.001)
halrun.close()
if interface == '--addr' and address:
board_command = '--device %s %s %s' %(devicename, interface, address)
elif interface == '--epp':
board_command = '--device %s %s' %(devicename, interface)
else:
board_command = '--device %s' %(devicename)
#cmd ="""pkexec "sh -c 'mesaflash %s';'mesaflash %s --sserial';'mesaflash %s --readhmid' " """%(board_command, board_command, board_command)
cmd =""" mesaflash -%s;mesaflash %s --sserial;mesaflash %s --readhmid """%(board_command, board_command, board_command)
discover = subprocess.Popen([cmd], shell=True,stdin=subprocess.PIPE,stdout=subprocess.PIPE,stderr=subprocess.PIPE )
output, error = discover.communicate()
if output == '':
text = _("Discovery is got an error\n\n Is mesaflash installed?\n\n %s"%error)
self.warning_dialog(text,True)
try :
textbuffer.set_text('Command:\n%s\n gave:\n%s'%(cmd,error))
self.widgets.helpnotebook.set_current_page(2)
except Exception as e :
print e
return None
try :
textbuffer.set_text(output)
self.widgets.helpnotebook.set_current_page(2)
self.widgets.help_window.show_all()
except:
text = _("Discovery is unavailable\n")
self.warning_dialog(text,True)
print 'cmd=',cmd
return output
def parse_discovery(self,info,boardnum=0):
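        # Convert mesaflash's readhmid text output into an XML document with the
        # same layout as the firmware XML files, save it as ~/mesaN_discovered.xml
        # and return the driver, board name, firmware name and file path.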
DRIVER = BOARDNAME = ''
WATCHDOG = NUMCONS = NUMCONPINS = ENCODERS = MUXENCODERS = 0
RESOLVERS = NUMSSCHANNELS = SSERIALPORTS = 0
PWMGENS = LEDS = STEPGENS = TPPWMGEN = 0
NUMENCODERPINS = NUMPWMPINS = 3; NUMSTEPPERPINS = 2
NUMTPPWMPINS = 0;NUMRESOLVERPINS = 10
DOC = xml.dom.minidom.getDOMImplementation().createDocument(
None, 'hostmot2', None)
ELEMENT = DOC.documentElement
def add_element(ELEMENT,name):
n1 = DOC.createElement(name)
ELEMENT.appendChild(n1)
return n1
def add_text(root,title,value):
n = DOC.createElement(title)
root.appendChild(n)
nodeText = DOC.createTextNode( value )
n.appendChild(nodeText)
return n
info = info.upper()
lines = info.splitlines()
sserial=[]
ssflag = pinsflag = True
dev7i77flag = dev7i76flag = False
for l_num,i in enumerate(lines):
i = i.lstrip()
temp2 = i.split(" ")
#print i,temp2
if 'ETH' in i:
DRIVER = 'hm2_eth'
if 'PCI' in i:
DRIVER = 'hm2_pci'
if 'BOARDNAME' in i:
BOARDNAME = temp2[2].strip('MESA').lower()
add_text(ELEMENT,'BOARDNAME',BOARDNAME)
if 'DEVICE AT' in i:
if ssflag:
n1 = add_element(ELEMENT,'SSERIALDEVICES')
ssflag = False
for num,i in enumerate(temp2):
if i =="CHANNEL":
sserial.append((temp2[num+1].strip(':'),temp2[num+2]))
n2 = add_element(n1,'SSERIALFUNCTION')
add_text(n2,'PORT','0')
add_text(n2,'CHANNEL',temp2[num+1].strip(':'))
add_text(n2,'DEVICE',temp2[num+2])
if '7I77' in(temp2[num+2]):
dev7i77flag = True
elif '7I76' in(temp2[num+2]):
dev7i76flag = True
if 'SSLBP CHANNELS:' in i:
NUMSSCHANNELS = temp2[2]
if 'CLOCK LOW FREQUENCY: ' in i:
add_text(ELEMENT,'CLOCKLOW',str(int(float(temp2[3])*1000000)))
if 'CLOCK HIGH FREQUENCY:' in i:
add_text(ELEMENT,'CLOCKHIGH',str(int(float(temp2[3])*1000000)))
if 'NUMBER OF IO PORTS:' in i:
NUMCONS = temp2[4]
add_text(ELEMENT,'IOPORTS',NUMCONS)
if 'WIDTH OF ONE I/O PORT:' in i:
NUMCONPINS = temp2[5]
add_text(ELEMENT,'PORTWIDTH',NUMCONPINS)
if 'MODULES IN CONFIGURATION:' in i:
mod_ele = add_element(ELEMENT,'modules')
modflag = True
if 'MODULE: WATCHDOG' in i:
tline = lines[l_num+1].split(" ")
new = add_element(mod_ele,'module')
add_text(new,'tagname','WATCHDOG')
add_text(new,'numinstances',tline[4].lstrip())
if 'MODULE: QCOUNT' in i:
tline = lines[l_num+1].split(" ")
ENCODERS = tline[4].lstrip()
new = add_element(mod_ele,'module')
add_text(new,'tagname','QCOUNT')
add_text(new,'numinstances',tline[4].lstrip())
if 'MODULE: MUXEDQCOUNTSEL' in i:
continue
if 'MODULE: MUXEDQCOUNT' in i:
tline = lines[l_num+1].split(" ")
MUXENCODERS = tline[4].lstrip()
new = add_element(mod_ele,'module')
add_text(new,'tagname','MUXEDQCOUNT')
add_text(new,'numinstances',tline[4].lstrip())
if 'MODULE: SSERIAL' in i:
tline = lines[l_num+1].split(" ")
SSERIALPORTS = tline[4].lstrip()
new = add_element(mod_ele,'module')
add_text(new,'tagname','SSERIAL')
add_text(new,'numinstances',tline[4].lstrip())
if 'MODULE: RESOLVERMOD' in i:
tline = lines[l_num+1].split(" ")
                RESOLVERS = tline[4].lstrip()
new = add_element(mod_ele,'module')
add_text(new,'tagname','RESOLVERMOD')
add_text(new,'numinstances',tline[4].lstrip())
if 'MODULE: PWM' in i:
tline = lines[l_num+1].split(" ")
PWMGENS = tline[4].lstrip()
new = add_element(mod_ele,'module')
add_text(new,'tagname','PWMGEN')
add_text(new,'numinstances',tline[4].lstrip())
if 'MODULE: TPPWM' in i:
tline = lines[l_num+1].split(" ")
TPPWMGENS = tline[4].lstrip()
new = add_element(mod_ele,'module')
add_text(new,'tagname','TPPWMGEN')
add_text(new,'numinstances',tline[4].lstrip())
if 'MODULE: STEPGEN' in i:
tline = lines[l_num+1].split(" ")
STEPGENS = tline[4].lstrip()
new = add_element(mod_ele,'module')
add_text(new,'tagname','STEPGEN')
add_text(new,'numinstances',tline[4].lstrip())
if 'MODULE: LED' in i:
tline = lines[l_num+1].split(" ")
LEDS = tline[4].lstrip()
new = add_element(mod_ele,'module')
add_text(new,'tagname','LED')
add_text(new,'numinstances',tline[4].lstrip())
if 'MODULE: SSR' in i:
tline = lines[l_num+1].split(" ")
LEDS = tline[4].lstrip()
new = add_element(mod_ele,'module')
add_text(new,'tagname','SSR')
add_text(new,'numinstances',tline[4].lstrip())
if 'IO CONNECTIONS FOR' in i:
if pinsflag:
n1 = add_element(ELEMENT,'pins')
pinsflag = False
CON = temp2[3]
print CON
for num in range(l_num+3,l_num+3+int(NUMCONPINS)):
CHAN = PINFNCTN = ''
pin_line = ' '.join(lines[num].split()).split()
PINNO = pin_line[0]
IO = pin_line[1]
SECFNCTN = pin_line[3]
n2 = add_element(n1,'pin')
add_text(n2,'index',IO)
add_text(n2,'connector',CON)
add_text(n2,'pinno',PINNO)
add_text(n2,'secondarymodulename',SECFNCTN)
if not SECFNCTN == 'NONE':
CHAN = pin_line[4]
PINFNCTN = pin_line[5]
if PINFNCTN in("TXDATA1","TXDATA2","TXDATA3",
"TXDATA4","TXDATA5","TXDATA6","TXDATA7","TXDATA8"):
num = int(PINFNCTN[6])-1
print num
for idnum,dev in sserial:
print idnum,dev,num
if int(idnum) == num:
NEW_FNCTN = '%s-%d'% (dev,num)
add_text(n2,'foundsserialdevice',NEW_FNCTN)
add_text(n2,'secondaryfunctionname',PINFNCTN)
add_text(n2,'secondaryinstance',CHAN)
else:
add_text(n2,'secondaryfunctionname','NOT USED')
print ' I/O ',IO, ' function ',SECFNCTN,' CHANNEL:',CHAN,'PINFUNCTION:',PINFNCTN
print 'Sserial CARDS FOUND:',sserial
print NUMCONS,NUMCONPINS,ENCODERS,MUXENCODERS,SSERIALPORTS,NUMSSCHANNELS
print RESOLVERS,PWMGENS,LEDS
firmname = "~/mesa%d_discovered.xml"%boardnum
filename = os.path.expanduser(firmname)
DOC.writexml(open(filename, "wb"), addindent=" ", newl="\n")
return DRIVER, BOARDNAME, firmname, filename
# update all the firmware/boardname arrays and comboboxes
def discovery_selection_update(self, info, bdnum):
driver, boardname, firmname, path = self.parse_discovery(info,boardnum=bdnum)
boardname = 'Discovered:%s'% boardname
firmdata = self.parse_xml( driver,boardname,firmname,path)
self._p.MESA_FIRMWAREDATA.append(firmdata)
self._p.MESA_INTERNAL_FIRMWAREDATA.append(firmdata)
self._p.MESA_BOARDNAMES.append(boardname)
# add firmname to combo box if it's not there
model = self.widgets["mesa%s_firmware"%bdnum].get_model()
flag = True
for search,item in enumerate(model):
if model[search][0] == firmname:
flag = False
break
if flag:
model.append((firmname,))
search = 0
model = self.widgets["mesa%s_firmware"%bdnum].get_model()
for search,item in enumerate(model):
if model[search][0] == firmname:
self.widgets["mesa%s_firmware"%bdnum].set_active(search)
break
# add boardtitle
model = self.widgets["mesa%s_boardtitle"%bdnum].get_model()
flag2 = True
for search,item in enumerate(model):
if model[search][0] == boardname:
flag2 = False
break
if flag2:
model.append((boardname,))
search = 0
model = self.widgets["mesa%s_boardtitle"%bdnum].get_model()
for search,item in enumerate(model):
#print model[search][0], boardname
if model[search][0] == boardname:
self.widgets["mesa%s_boardtitle"%bdnum].set_active(search)
break
# update if there was a change
if flag or flag2:
self.on_mesa_component_value_changed(None,0)
def add_device_rule(self):
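        # Guide the user through creating udev rules so hal_input can access a
        # USB input device: add a general rule, then diff /proc/bus/input/devices
        # with the device unplugged and plugged in to write a device-specific rule.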
text = []
sourcefile = "/tmp/"
if os.path.exists("/etc/udev/rules.d/50-LINUXCNC-general.rules"):
text.append( "General rule already exists\n")
else:
text.append("adding a general rule first\nso your device will be found\n")
filename = os.path.join(sourcefile, "LINUXCNCtempGeneral.rules")
file = open(filename, "w")
print >>file, ("# This is a rule for LinuxCNC's hal_input\n")
print >>file, ("""SUBSYSTEM="input", MODE="0660", GROUP="plugdev" """)
file.close()
p=os.popen("gksudo cp %sLINUXCNCtempGeneral.rules /etc/udev/rules.d/50-LINUXCNC-general.rules"% sourcefile )
time.sleep(.1)
p.flush()
p.close()
os.remove('%sLINUXCNCtempGeneral.rules'% sourcefile)
text.append(("disconect USB device please\n"))
if not self.warning_dialog("\n".join(text),False):return
os.popen('less /proc/bus/input/devices >> %sLINUXCNCnojoytemp.txt'% sourcefile)
text = ["Plug in USB device please"]
if not self.warning_dialog("\n".join(text),False):return
time.sleep(1)
os.popen('less /proc/bus/input/devices >> %sLINUXCNCjoytemp.txt'% sourcefile).read()
diff = os.popen (" less /proc/bus/input/devices | diff %sLINUXCNCnojoytemp.txt %sLINUXCNCjoytemp.txt "%(sourcefile, sourcefile) ).read()
self.widgets.help_window.set_title(_("USB device Info Search"))
os.remove('%sLINUXCNCnojoytemp.txt'% sourcefile)
os.remove('%sLINUXCNCjoytemp.txt'% sourcefile)
if diff =="":
text = ["No new USB device found"]
if not self.warning_dialog("\n".join(text),True):return
else:
textbuffer = self.widgets.textoutput.get_buffer()
try :
textbuffer.set_text(diff)
self.widgets.helpnotebook.set_current_page(2)
self.widgets.help_window.show_all()
except:
text = _("USB device page is unavailable\n")
self.warning_dialog(text,True)
linelist = diff.split("\n")
for i in linelist:
if "Name" in i:
temp = i.split("\"")
name = temp[1]
temp = name.split(" ")
self.widgets.usbdevicename.set_text(temp[0])
infolist = diff.split()
for i in infolist:
if "Vendor" in i:
temp = i.split("=")
vendor = temp[1]
if "Product" in i:
temp = i.split("=")
product = temp[1]
text =[ "Vendor = %s\n product = %s\n name = %s\nadding specific rule"%(vendor,product,name)]
if not self.warning_dialog("\n".join(text),False):return
tempname = sourcefile+"LINUXCNCtempspecific.rules"
file = open(tempname, "w")
print >>file, ("# This is a rule for LINUXCNC's hal_input\n")
print >>file, ("# For devicename=%s\n"% name)
print >>file, ("""SYSFS{idProduct}=="%s", SYSFS{idVendor}=="%s", MODE="0660", GROUP="plugdev" """%(product,vendor))
file.close()
# remove illegal filename characters
for i in ("(",")"):
temp = name.replace(i,"")
name = temp
newname = "50-LINUXCNC-%s.rules"% name.replace(" ","_")
os.popen("gksudo cp %s /etc/udev/rules.d/%s"% (tempname,newname) )
time.sleep(1)
os.remove('%sLINUXCNCtempspecific.rules'% sourcefile)
text = ["Please unplug and plug in your device again"]
if not self.warning_dialog("\n".join(text),True):return
def test_joystick(self):
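        # Load hal_input against the requested device name and list the digital
        # and analog HAL pins it exports, so the user can confirm it works.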
halrun = subprocess.Popen("halrun -I ", shell=True,stdin=subprocess.PIPE,stdout=subprocess.PIPE )
#print "requested devicename = ",self.widgets.usbdevicename.get_text()
halrun.stdin.write("loadusr hal_input -W -KRAL +%s\n"% self.widgets.usbdevicename.get_text())
halrun.stdin.write("loadusr halmeter -g 0 500\n")
time.sleep(1.5)
halrun.stdin.write("show pin\n")
self.warning_dialog("Close me When done.\n",True)
halrun.stdin.write("exit\n")
output = halrun.communicate()[0]
temp2 = output.split(" ")
temp=[]
for i in temp2:
if i =="": continue
temp.append(i)
buttonlist=""
for index,i in enumerate(temp):
if "bit" in i and "OUT" in temp[index+1]:
buttonlist = buttonlist + " Digital: %s"% ( temp[index+3] )
if "float" in i and "OUT" in temp[index+1]:
buttonlist = buttonlist + " Analog: %s"% ( temp[index+3] )
if buttonlist =="": return
textbuffer = self.widgets.textoutput.get_buffer()
try :
textbuffer.set_text(buttonlist)
self.widgets.helpnotebook.set_current_page(2)
self.widgets.help_window.show_all()
except:
text = _("Pin names are unavailable\n")
self.warning_dialog(text,True)
def search_for_device_rule(self):
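        # List any pncconf-made udev rules (50-LINUXCNC-*) already installed in
        # /etc/udev/rules.d and show the device names they cover.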
flag = False
textbuffer = self.widgets.textoutput.get_buffer()
textbuffer.set_text("Searching for device rules in folder: /etc/udev/rules.d\n\n")
for entry in os.listdir("/etc/udev/rules.d"):
if fnmatch.fnmatch( entry,"50-LINUXCNC-*"):
temp = open("/etc/udev/rules.d/" + entry, "r").read()
templist = temp.split("\n")
for i in templist:
if "devicename=" in i:
flag = True
temp = i.split("=")
name = temp[1]
try:
textbuffer.insert_at_cursor( "File name: %s\n"% entry)
textbuffer.insert_at_cursor( "Device name: %s\n\n"% name)
self.widgets.helpnotebook.set_current_page(2)
self.widgets.help_window.show_all()
except:
self.show_try_errors()
text = _("Device names are unavailable\n")
self.warning_dialog(text,True)
if flag == False:
text = _("No Pncconf made device rules were found\n")
textbuffer.insert_at_cursor(text)
self.warning_dialog(text,True)
def read_touchy_preferences(self):
# This reads the Touchy preference file directly
tempdict = {"touchyabscolor":"abs_textcolor","touchyrelcolor":"rel_textcolor",
"touchydtgcolor":"dtg_textcolor","touchyerrcolor":"err_textcolor"}
for key,value in tempdict.iteritems():
data = prefs.getpref(value, 'default', str)
if data == "default":
self.widgets[key].set_active(False)
else:
self.widgets[key].set_active(True)
self.widgets[key+"button"].set_color(gtk.gdk.color_parse(data))
self.widgets.touchyforcemax.set_active(bool(prefs.getpref('window_force_max')))
def get_installed_themes(self):
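        # Fill the theme comboboxes from the installed theme directory and
        # preselect the saved gladevcp, Touchy and gmcpy themes.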
data1 = self.d.gladevcptheme
data2 = prefs.getpref('gtk_theme', 'Follow System Theme', str)
data3 = self.d.gmcpytheme
model = self.widgets.themestore
model.clear()
model.append((_("Follow System Theme"),))
model2 = self.widgets.glade_themestore
model2.clear()
model2.append((_("Follow System Theme"),))
temp1 = temp2 = temp3 = 0
names = os.listdir(_PD.THEMEDIR)
names.sort()
for search,dirs in enumerate(names):
model.append((dirs,))
model2.append((dirs,))
if dirs == data1:
temp1 = search+1
if dirs == data2:
temp2 = search+1
if dirs == data3:
temp3 = search+1
self.widgets.gladevcptheme.set_active(temp1)
self.widgets.touchytheme.set_active(temp2)
self.widgets.gmcpy_theme.set_active(temp3)
def gladevcp_sanity_check(self):
if os.path.exists(os.path.expanduser("~/linuxcnc/configs/%s/gvcp-panel.ui" % self.d.machinename)):
if not self.warning_dialog(_("OK to replace existing glade panel ?\
\nIt will be renamed and added to 'backups' folder.\n Clicking 'existing custom program' will avoid this warning, but \
if you change related options later -such as spindle feedback- the HAL connection will not update"),False):
return True
def pyvcp_sanity_check(self):
if os.path.exists(os.path.expanduser("~/linuxcnc/configs/%s/pyvcp-panel.xml" % self.d.machinename)):
if not self.warning_dialog(_("OK to replace existing custom pyvcp panel?\
\nExisting pyvcp-panel.xml will be renamed and added to 'backups' folder\n\
Clicking 'existing custom program' will avoid this warning. "),False):
return True
# disallow some signal combinations
def do_exclusive_inputs(self, widget,portnum,pinname):
# If initializing the Pport pages we don't want the signal calls to register here.
# if we are working in here we don't want signal calls because of changes made in here
# GTK supports signal blocking but then you can't assign signal block name references in GLADE -slaps head
if self._p.prepare_block or self.recursive_block: return
if 'mesa' in pinname:
ptype = '%stype'%pinname
if not self.widgets[ptype].get_active_text() == _PD.pintype_gpio[0]: return
self.recursive_block = True
SIG = self._p
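        # Map each input signal to the signals it cannot coexist with; when the
        # user picks a signal, any pin already carrying a conflicting one is
        # reset to 'unused' further below.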
exclusive = {
SIG.HOME_X: (SIG.MAX_HOME_X, SIG.MIN_HOME_X, SIG.BOTH_HOME_X, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.HOME_Y: (SIG.MAX_HOME_Y, SIG.MIN_HOME_Y, SIG.BOTH_HOME_Y, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.HOME_Z: (SIG.MAX_HOME_Z, SIG.MIN_HOME_Z, SIG.BOTH_HOME_Z, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.HOME_A: (SIG.MAX_HOME_A, SIG.MIN_HOME_A, SIG.BOTH_HOME_A, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.MAX_HOME_X: (SIG.HOME_X, SIG.MIN_HOME_X, SIG.MAX_HOME_X, SIG.BOTH_HOME_X, SIG.ALL_LIMIT, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.MAX_HOME_Y: (SIG.HOME_Y, SIG.MIN_HOME_Y, SIG.MAX_HOME_Y, SIG.BOTH_HOME_Y, SIG.ALL_LIMIT, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.MAX_HOME_Z: (SIG.HOME_Z, SIG.MIN_HOME_Z, SIG.MAX_HOME_Z, SIG.BOTH_HOME_Z, SIG.ALL_LIMIT, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.MAX_HOME_A: (SIG.HOME_A, SIG.MIN_HOME_A, SIG.MAX_HOME_A, SIG.BOTH_HOME_A, SIG.ALL_LIMIT, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.MIN_HOME_X: (SIG.HOME_X, SIG.MAX_HOME_X, SIG.BOTH_HOME_X, SIG.ALL_LIMIT, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.MIN_HOME_Y: (SIG.HOME_Y, SIG.MAX_HOME_Y, SIG.BOTH_HOME_Y, SIG.ALL_LIMIT, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.MIN_HOME_Z: (SIG.HOME_Z, SIG.MAX_HOME_Z, SIG.BOTH_HOME_Z, SIG.ALL_LIMIT, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.MIN_HOME_A: (SIG.HOME_A, SIG.MAX_HOME_A, SIG.BOTH_HOME_A, SIG.ALL_LIMIT, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.BOTH_HOME_X: (SIG.HOME_X, SIG.MAX_HOME_X, SIG.MIN_HOME_X, SIG.ALL_LIMIT, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.BOTH_HOME_Y: (SIG.HOME_Y, SIG.MAX_HOME_Y, SIG.MIN_HOME_Y, SIG.ALL_LIMIT, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.BOTH_HOME_Z: (SIG.HOME_Z, SIG.MAX_HOME_Z, SIG.MIN_HOME_Z, SIG.ALL_LIMIT, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.BOTH_HOME_A: (SIG.HOME_A, SIG.MAX_HOME_A, SIG.MIN_HOME_A, SIG.ALL_LIMIT, SIG.ALL_HOME, SIG.ALL_LIMIT_HOME),
SIG.MIN_X: (SIG.BOTH_X, SIG.BOTH_HOME_X, SIG.MIN_HOME_X, SIG.ALL_LIMIT, SIG.ALL_LIMIT_HOME),
SIG.MIN_Y: (SIG.BOTH_Y, SIG.BOTH_HOME_Y, SIG.MIN_HOME_Y, SIG.ALL_LIMIT, SIG.ALL_LIMIT_HOME),
SIG.MIN_Z: (SIG.BOTH_Z, SIG.BOTH_HOME_Z, SIG.MIN_HOME_Z, SIG.ALL_LIMIT, SIG.ALL_LIMIT_HOME),
SIG.MIN_A: (SIG.BOTH_A, SIG.BOTH_HOME_A, SIG.MIN_HOME_A, SIG.ALL_LIMIT, SIG.ALL_LIMIT_HOME),
SIG.MAX_X: (SIG.BOTH_X, SIG.BOTH_HOME_X, SIG.MIN_HOME_X, SIG.ALL_LIMIT, SIG.ALL_LIMIT_HOME),
SIG.MAX_Y: (SIG.BOTH_Y, SIG.BOTH_HOME_Y, SIG.MIN_HOME_Y, SIG.ALL_LIMIT, SIG.ALL_LIMIT_HOME),
SIG.MAX_Z: (SIG.BOTH_Z, SIG.BOTH_HOME_Z, SIG.MIN_HOME_Z, SIG.ALL_LIMIT, SIG.ALL_LIMIT_HOME),
SIG.MAX_A: (SIG.BOTH_A, SIG.BOTH_HOME_A, SIG.MIN_HOME_A, SIG.ALL_LIMIT, SIG.ALL_LIMIT_HOME),
SIG.BOTH_X: (SIG.MIN_X, SIG.MAX_X, SIG.MIN_HOME_X, SIG.MAX_HOME_X, SIG.BOTH_HOME_X, SIG.ALL_LIMIT, SIG.ALL_LIMIT_HOME),
SIG.BOTH_Y: (SIG.MIN_Y, SIG.MAX_Y, SIG.MIN_HOME_Y, SIG.MAX_HOME_Y, SIG.BOTH_HOME_Y, SIG.ALL_LIMIT, SIG.ALL_LIMIT_HOME),
SIG.BOTH_Z: (SIG.MIN_Z, SIG.MAX_Z, SIG.MIN_HOME_Z, SIG.MAX_HOME_Z, SIG.BOTH_HOME_Z, SIG.ALL_LIMIT, SIG.ALL_LIMIT_HOME),
SIG.BOTH_A: (SIG.MIN_A, SIG.MAX_A, SIG.MIN_HOME_A, SIG.MAX_HOME_A, SIG.BOTH_HOME_A, SIG.ALL_LIMIT, SIG.ALL_LIMIT_HOME),
SIG.ALL_LIMIT: (
SIG.MIN_X, SIG.MAX_X, SIG.BOTH_X, SIG.MIN_HOME_X, SIG.MAX_HOME_X, SIG.BOTH_HOME_X,
SIG.MIN_Y, SIG.MAX_Y, SIG.BOTH_Y, SIG.MIN_HOME_Y, SIG.MAX_HOME_Y, SIG.BOTH_HOME_Y,
SIG.MIN_Z, SIG.MAX_Z, SIG.BOTH_Z, SIG.MIN_HOME_Z, SIG.MAX_HOME_Z, SIG.BOTH_HOME_Z,
SIG.MIN_A, SIG.MAX_A, SIG.BOTH_A, SIG.MIN_HOME_A, SIG.MAX_HOME_A, SIG.BOTH_HOME_A,
SIG.ALL_LIMIT_HOME),
SIG.ALL_HOME: (
SIG.HOME_X, SIG.MIN_HOME_X, SIG.MAX_HOME_X, SIG.BOTH_HOME_X,
SIG.HOME_Y, SIG.MIN_HOME_Y, SIG.MAX_HOME_Y, SIG.BOTH_HOME_Y,
SIG.HOME_Z, SIG.MIN_HOME_Z, SIG.MAX_HOME_Z, SIG.BOTH_HOME_Z,
SIG.HOME_A, SIG.MIN_HOME_A, SIG.MAX_HOME_A, SIG.BOTH_HOME_A,
SIG.ALL_LIMIT_HOME),
SIG.ALL_LIMIT_HOME: (
SIG.HOME_X, SIG.MIN_HOME_X, SIG.MAX_HOME_X, SIG.BOTH_HOME_X,
SIG.HOME_Y, SIG.MIN_HOME_Y, SIG.MAX_HOME_Y, SIG.BOTH_HOME_Y,
SIG.HOME_Z, SIG.MIN_HOME_Z, SIG.MAX_HOME_Z, SIG.BOTH_HOME_Z,
SIG.HOME_A, SIG.MIN_HOME_A, SIG.MAX_HOME_A, SIG.BOTH_HOME_A,
SIG.MIN_X, SIG.MAX_X, SIG.BOTH_X, SIG.MIN_HOME_X, SIG.MAX_HOME_X, SIG.BOTH_HOME_X,
SIG.MIN_Y, SIG.MAX_Y, SIG.BOTH_Y, SIG.MIN_HOME_Y, SIG.MAX_HOME_Y, SIG.BOTH_HOME_Y,
SIG.MIN_Z, SIG.MAX_Z, SIG.BOTH_Z, SIG.MIN_HOME_Z, SIG.MAX_HOME_Z, SIG.BOTH_HOME_Z,
SIG.MIN_A, SIG.MAX_A, SIG.BOTH_A, SIG.MIN_HOME_A, SIG.MAX_HOME_A, SIG.BOTH_HOME_A,
SIG.ALL_LIMIT, SIG.ALL_HOME),
}
model = self.widgets[pinname].get_model()
piter = self.widgets[pinname].get_active_iter()
try:
dummy, index,signame,sig_group = model.get(piter, 0,1,2,3)
except:
self.recursive_block = False
return
dbg('exclusive: current:%s %d %s %s'%(pinname,index,signame,sig_group),mtype='excl')
ex = exclusive.get(signame, ())
if self.d.number_mesa > 0:
dbg( 'looking for %s in mesa'%signame,mtype='excl')
            # check mesa main board - only if the tab is shown and the ptype is GPIO
for boardnum in range(0,int(self.d.number_mesa)):
for concount,connector in enumerate(self.d["mesa%d_currentfirmwaredata"% (boardnum)][_PD._NUMOFCNCTRS]) :
try:
if not self.widgets['mesa%dcon%dtable'%(boardnum,connector)].get_visible():continue
except:
break
break
for s in range(0,24):
p = "mesa%dc%dpin%d"% (boardnum,connector,s)
ptype = "mesa%dc%dpin%dtype"% (boardnum,connector,s)
#print p,self.widgets[ptype].get_active_text(),_PD.pintype_gpio[0]
try:
if not self.widgets[ptype].get_active_text() == _PD.pintype_gpio[0]: continue
if self.widgets[p] == widget:continue
except:
break
break
break
model = self.widgets[p].get_model()
piter = self.widgets[p].get_active_iter()
dummy, index,v1,sig_group = model.get(piter, 0,1,2,3)
#print 'check mesa signals',v1
if v1 in ex or v1 == signame:
dbg( 'found %s, at %s'%(signame,p),mtype='excl')
self.widgets[p].set_active(self._p.hal_input_names.index(SIG.UNUSED_INPUT))
self.d[p] = SIG.UNUSED_INPUT
port = 0
dbg( 'looking for %s in mesa sserial'%signame,mtype='excl')
for channel in range (0,self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._MAXSSERIALCHANNELS]):
if channel == _PD._NUM_CHANNELS: break # TODO may not have all channels worth of glade widgets
if not self.widgets['mesa%dsserial%d_%d'%(boardnum,port,channel)].get_visible():continue
#print "sserial data transfering"
for s in range (0,_PD._SSCOMBOLEN):
p = 'mesa%dsserial%d_%dpin%d' % (boardnum, port, channel, s)
ptype = 'mesa%dsserial%d_%dpin%dtype' % (boardnum, port, channel, s)
try:
if not self.widgets[ptype].get_active_text() == _PD.pintype_gpio[0]: continue
if self.widgets[p] == widget:continue
except:
break
break
model = self.widgets[p].get_model()
piter = self.widgets[p].get_active_iter()
dummy, index,v1,sig_group = model.get(piter, 0,1,2,3)
#print 'check mesa signals',v1
if v1 in ex or v1 == signame:
dbg( 'found %s, at %s'%(signame,p),mtype='excl')
self.widgets[p].set_active(self._p.hal_input_names.index(SIG.UNUSED_INPUT))
self.d[p] = SIG.UNUSED_INPUT
if self.d.number_pports >0:
# search pport1 for the illegal signals and change them to unused.
dbg( 'looking for %s in pport1'%signame,mtype='excl')
for pin1 in (2,3,4,5,6,7,8,9,10,11,12,13,15):
p = 'pp1_Ipin%d' % pin1
# pport2 may not be loaded yet
try:
if self.widgets[p] == widget:continue
except:
self.recursive_block = False
return
model = self.widgets[p].get_model()
piter = self.widgets[p].get_active_iter()
dummy, index,v1,sig_group = model.get(piter, 0,1,2,3)
#print 'check pport1 signals',v1
if v1 in ex or v1 == signame:
dbg( 'found %s, at %s'%(signame,p),mtype='excl')
self.widgets[p].set_active(self._p.hal_input_names.index(SIG.UNUSED_INPUT))
self.d[p] = SIG.UNUSED_INPUT
if self.d.number_pports >1:
# search pport2 for the illegal signals and change them to unused.
dbg( 'looking for %s in pport2'%signame,mtype='excl')
for pin1 in (2,3,4,5,6,7,8,9,10,11,12,13,15):
p2 = 'pp2_Ipin%d' % pin1
# pport2 may not be loaded yet
try:
if self.widgets[p2] == widget: continue
except:
self.recursive_block = False
return
                model = self.widgets[p2].get_model()
                piter = self.widgets[p2].get_active_iter()
dummy, index,v2,sig_group = model.get(piter, 0,1,2,3)
#print 'check pport2 signals',v1
if v2 in ex or v2 == signame:
dbg( 'found %s, at %s'%(signame,p2),mtype='excl')
self.widgets[p2].set_active(self._p.hal_input_names.index(SIG.UNUSED_INPUT))
self.d[p2] = SIG.UNUSED_INPUT
self.recursive_block = False
# MESA SIGNALS
# connect signals with pin designation data to mesa signal comboboxes and pintype comboboxes
# record the signal ID numbers so we can block the signals later in the mesa routines
# have to do it here manually (instead of autoconnect) because glade doesn't handle added
# user info (board/connector/pin number designations) and doesn't record the signal ID numbers
# none of this is done if mesa is not checked off in pncconf
    # TODO we should check to see if signals are already present as each time the user goes through this page
    # the signals get added again causing multiple calls to the functions.
def init_mesa_signals(self,boardnum):
cb = "mesa%d_discovery"% (boardnum)
i = "_mesa%dsignalhandler_discovery"% (boardnum)
self.d[i] = int(self.widgets[cb].connect("clicked", self.p['on_mesa%d_discovery_clicked'%boardnum]))
cb = "mesa%d_comp_update"% (boardnum)
i = "_mesa%dsignalhandler_comp_update"% (boardnum)
self.d[i] = int(self.widgets[cb].connect("clicked", self.on_mesa_component_value_changed,boardnum))
cb = "mesa%d_boardtitle"% (boardnum)
i = "_mesa%dsignalhandler_boardname_change"% (boardnum)
self.d[i] = int(self.widgets[cb].connect("changed", self.on_mesa_boardname_changed,boardnum))
cb = "mesa%d_firmware"% (boardnum)
i = "_mesa%dsignalhandler_firmware_change"% (boardnum)
self.d[i] = int(self.widgets[cb].connect("changed", self.on_mesa_firmware_changed,boardnum))
for connector in (1,2,3,4,5,6,7,8,9):
for pin in range(0,24):
cb = "mesa%dc%ipin%i"% (boardnum,connector,pin)
i = "_mesa%dsignalhandlerc%ipin%i"% (boardnum,connector,pin)
self.d[i] = int(self.widgets[cb].connect("changed",
self.on_general_pin_changed,"mesa",boardnum,connector,None,pin,False))
i = "_mesa%dactivatehandlerc%ipin%i"% (boardnum,connector,pin)
self.d[i] = int(self.widgets[cb].child.connect("activate",
self.on_general_pin_changed,"mesa",boardnum,connector,None,pin,True))
self.widgets[cb].connect('changed', self.do_exclusive_inputs,boardnum,cb)
cb = "mesa%dc%ipin%itype"% (boardnum,connector,pin)
i = "_mesa%dptypesignalhandlerc%ipin%i"% (boardnum,connector,pin)
self.d[i] = int(self.widgets[cb].connect("changed", self.on_mesa_pintype_changed,boardnum,connector,None,pin))
# SmartSerial signals
port = 0 #TODO we only support one serial port
for channel in range (0,self._p._NUM_CHANNELS):
for pin in range (0,self._p._SSCOMBOLEN):
cb = "mesa%dsserial%i_%ipin%i"% (boardnum,port,channel,pin)
i = "_mesa%dsignalhandlersserial%i_%ipin%i"% (boardnum,port,channel,pin)
self.d[i] = int(self.widgets[cb].connect("changed",
self.on_general_pin_changed,"sserial",boardnum,port,channel,pin,False))
i = "_mesa%dactivatehandlersserial%i_%ipin%i"% (boardnum,port,channel,pin)
self.d[i] = int(self.widgets[cb].child.connect("activate",
self.on_general_pin_changed,"sserial",boardnum,port,channel,pin,True))
self.widgets[cb].connect('changed', self.do_exclusive_inputs,boardnum,cb)
cb = "mesa%dsserial%i_%ipin%itype"% (boardnum,port,channel,pin)
i = "_mesa%dptypesignalhandlersserial%i_%ipin%i"% (boardnum,port,channel,pin)
self.d[i] = int(self.widgets[cb].connect("changed", self.on_mesa_pintype_changed,boardnum,port,channel,pin))
self.widgets["mesa%d_7i29_sanity_check"%boardnum].connect('clicked', self.daughter_board_sanity_check)
self.widgets["mesa%d_7i30_sanity_check"%boardnum].connect('clicked', self.daughter_board_sanity_check)
self.widgets["mesa%d_7i33_sanity_check"%boardnum].connect('clicked', self.daughter_board_sanity_check)
self.widgets["mesa%d_7i40_sanity_check"%boardnum].connect('clicked', self.daughter_board_sanity_check)
self.widgets["mesa%d_7i48_sanity_check"%boardnum].connect('clicked', self.daughter_board_sanity_check)
def init_mesa_options(self,boardnum):
#print 'init mesa%d options'%boardnum
i = self.widgets['mesa%d_boardtitle'%boardnum].get_active_text()
# check for installed firmware
#print i,self.d['mesa%d_boardtitle'%boardnum]
if 1==1:#if not self.d['_mesa%d_arrayloaded'%boardnum]:
#print boardnum,self._p.FIRMDIR,i
# add any extra firmware data from .pncconf-preference file
#if not customself._p.MESA_FIRMWAREDATA == []:
# for i,j in enumerate(customself._p.MESA_FIRMWAREDATA):
# self._p.MESA_FIRMWAREDATA.append(customself._p.MESA_FIRMWAREDATA[i])
# ok set up mesa info
dbg('Looking for firmware data %s'%self.d["mesa%d_firmware"% boardnum])
found = False
search = 0
model = self.widgets["mesa%d_firmware"% boardnum].get_model()
for search,item in enumerate(model):
dbg('%d,%s'%(search,model[search][0]))
if model[search][0] == self.d["mesa%d_firmware"% boardnum]:
self.widgets["mesa%d_firmware"% boardnum].set_active(search)
found = True
dbg('found firmware # %d'% search)
break
if not found:
dbg('firmware not found')
cur_firm = self.d['mesa%d_currentfirmwaredata'% boardnum][_PD._FIRMWARE]
dbg('looking for: %s'% cur_firm )
#self.widgets["mesa%d_firmware"% boardnum].set_active(0)
self._p.MESA_FIRMWAREDATA.append(self.d['mesa%d_currentfirmwaredata'% boardnum])
model.append((cur_firm,))
self.init_mesa_options(boardnum)
return
else:
self.widgets["mesa%d_pwm_frequency"% boardnum].set_value(self.d["mesa%d_pwm_frequency"% boardnum])
self.widgets["mesa%d_pdm_frequency"% boardnum].set_value(self.d["mesa%d_pdm_frequency"% boardnum])
self.widgets["mesa%d_3pwm_frequency"% boardnum].set_value(self.d["mesa%d_3pwm_frequency"% boardnum])
self.widgets["mesa%d_watchdog_timeout"% boardnum].set_value(self.d["mesa%d_watchdog_timeout"% boardnum])
self.widgets["mesa%d_numof_encodergens"% boardnum].set_value(self.d["mesa%d_numof_encodergens"% boardnum])
self.widgets["mesa%d_numof_pwmgens"% boardnum].set_value(self.d["mesa%d_numof_pwmgens"% boardnum])
self.widgets["mesa%d_numof_tppwmgens"% boardnum].set_value(self.d["mesa%d_numof_tppwmgens"% boardnum])
self.widgets["mesa%d_numof_stepgens"% boardnum].set_value(self.d["mesa%d_numof_stepgens"% boardnum])
self.widgets["mesa%d_numof_sserialports"% boardnum].set_value(self.d["mesa%d_numof_sserialports"% boardnum])
self.widgets["mesa%d_numof_sserialchannels"% boardnum].set_value(self.d["mesa%d_numof_sserialchannels"% boardnum])
if not self.widgets.createconfig.get_active() and not self.d['_mesa%d_configured'%boardnum]:
bt = self.d['mesa%d_boardtitle'%boardnum]
firm = self.d['mesa%d_firmware'%boardnum]
pgens = self.d['mesa%d_numof_pwmgens'%boardnum]
tpgens = self.d['mesa%d_numof_tppwmgens'%boardnum]
stepgens = self.d['mesa%d_numof_stepgens'%boardnum]
enc = self.d['mesa%d_numof_encodergens'%boardnum]
ssports = self.d['mesa%d_numof_sserialports'%boardnum]
sschannels = self.d['mesa%d_numof_sserialchannels'%boardnum]
self.set_mesa_options(boardnum,bt,firm,pgens,tpgens,stepgens,enc,ssports,sschannels)
elif not self.d._mesa0_configured:
self.widgets['mesa%dcon2table'%boardnum].hide()
self.widgets['mesa%dcon3table'%boardnum].hide()
self.widgets['mesa%dcon4table'%boardnum].hide()
self.widgets['mesa%dcon5table'%boardnum].hide()
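# Called when the user selects a different board title: show/hide the discovery button,
# reset the connector tab labels, hide the sserial tabs and refill the firmware list.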
def on_mesa_boardname_changed(self, widget,boardnum):
#print "**** INFO boardname %d changed"% boardnum
model = self.widgets["mesa%d_boardtitle"% boardnum].get_model()
title = self.widgets["mesa%d_boardtitle"% boardnum].get_active_text()
if title:
if 'Discovery Option' in title:
self.widgets["mesa%d_discovery"% boardnum].show()
else:
self.widgets["mesa%d_discovery"% boardnum].hide()
for i in(1,2,3,4,5,6,7,8,9):
self.widgets['mesa%dcon%dtable'%(boardnum,i)].hide()
self.widgets["mesa{}con{}tab".format(boardnum,i)].set_text('I/O\n Connector %d'%i)
for i in(0,1,2,3,4,5):
self.widgets["mesa%dsserial0_%d"%(boardnum,i)].hide()
if title == None: return
if 'Discovery Option' not in title:
meta = self.get_board_meta(title)
names = meta.get('TAB_NAMES')
tnums = meta.get('TAB_NUMS')
if names and tnums:
for index, tabnum in enumerate(tnums):
self.widgets["mesa{}con{}tab".format(boardnum,tabnum)].set_text(names[index])
#print 'title',title
self.fill_firmware(boardnum)
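# Rebuild the firmware combobox model for the selected board title, merging firmware
# found on disk (mesa_firmware_search) with the built-in MESA_INTERNAL_FIRMWAREDATA list.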
def fill_firmware(self,boardnum):
#print 'fill firmware'
self.firmware_block = True
title = self.widgets["mesa%d_boardtitle"% boardnum].get_active_text()
#print title
self._p.MESA_FIRMWAREDATA = []
if os.path.exists(os.path.join(self._p.FIRMDIR,title)):
self.mesa_firmware_search(title)
self.d['_mesa%d_arrayloaded'%boardnum] = True
for i in self._p.MESA_INTERNAL_FIRMWAREDATA:
self._p.MESA_FIRMWAREDATA.append(i)
model = self.widgets["mesa%d_firmware"% boardnum].get_model()
model.clear()
temp=[]
for search, item in enumerate(self._p.MESA_FIRMWAREDATA):
d = self._p.MESA_FIRMWAREDATA[search]
if not d[self._p._BOARDTITLE] == title:continue
temp.append(d[self._p._FIRMWARE])
temp.sort()
for i in temp:
#print i
model.append((i,))
self.widgets["mesa%d_firmware"% boardnum].set_active(0)
self.firmware_block = False
self.on_mesa_firmware_changed(None,boardnum)
#print "firmware-",self.widgets["mesa%d_firmware"% boardnum].get_active_text(),self.widgets["mesa%d_firmware"% boardnum].get_active()
#print "boardname-" + d[_PD._BOARDNAME]
def on_mesa_firmware_changed(self, widget,boardnum):
if self.firmware_block:
return
print "**** INFO firmware %d changed"% boardnum
model = self.widgets["mesa%d_boardtitle"% boardnum].get_model()
active = self.widgets["mesa%d_boardtitle"% boardnum].get_active()
if active < 0:
title = None
else: title = model[active][0]
firmware = self.widgets["mesa%d_firmware"% boardnum].get_active_text()
for search, item in enumerate(self._p.MESA_FIRMWAREDATA):
d = self._p.MESA_FIRMWAREDATA[search]
#print firmware,d[_PD._FIRMWARE],title,d[_PD._BOARDTITLE]
if not d[_PD._BOARDTITLE] == title:continue
if d[_PD._FIRMWARE] == firmware:
self.widgets["mesa%d_numof_encodergens"%boardnum].set_range(0,d[_PD._MAXENC])
self.widgets["mesa%d_numof_encodergens"% boardnum].set_value(d[_PD._MAXENC])
self.widgets["mesa%d_numof_pwmgens"% boardnum].set_range(0,d[_PD._MAXPWM])
self.widgets["mesa%d_numof_pwmgens"% boardnum].set_value(d[_PD._MAXPWM])
if d[_PD._MAXTPPWM]:
self.widgets["mesa%d_numof_tppwmgens"% boardnum].show()
self.widgets["mesa%d_numof_tpp_label"% boardnum].show()
self.widgets["mesa%d_3pwm_freq_label"% boardnum].show()
self.widgets["mesa%d_3pwm_freq_units"% boardnum].show()
self.widgets["mesa%d_3pwm_frequency"% boardnum].show()
else:
self.widgets["mesa%d_numof_tppwmgens"% boardnum].hide()
self.widgets["mesa%d_numof_tpp_label"% boardnum].hide()
self.widgets["mesa%d_3pwm_freq_label"% boardnum].hide()
self.widgets["mesa%d_3pwm_freq_units"% boardnum].hide()
self.widgets["mesa%d_3pwm_frequency"% boardnum].hide()
self.widgets["mesa%d_numof_tppwmgens"% boardnum].set_range(0,d[_PD._MAXTPPWM])
self.widgets["mesa%d_numof_tppwmgens"% boardnum].set_value(d[_PD._MAXTPPWM])
self.widgets["mesa%d_numof_stepgens"% boardnum].set_range(0,d[_PD._MAXSTEP])
self.widgets["mesa%d_numof_stepgens"% boardnum].set_value(d[_PD._MAXSTEP])
self.d["mesa%d_numof_resolvers"% boardnum] = (d[_PD._MAXRES]) # TODO fix this hack should be selectable
if d[_PD._MAXRES]:
self.widgets["mesa%d_numof_resolvers"% boardnum].show()
self.widgets["mesa%d_numof_resolvers"% boardnum].set_value(d[_PD._MAXRES]*6)
self.widgets["mesa%d_numof_resolvers"% boardnum].set_sensitive(False)
self.widgets["mesa%d_numof_resolvers_label"% boardnum].show()
self.widgets["mesa%d_pwm_frequency"% boardnum].set_value(24000)
else:
self.widgets["mesa%d_numof_resolvers"% boardnum].hide()
self.widgets["mesa%d_numof_resolvers_label"% boardnum].hide()
self.widgets["mesa%d_numof_resolvers"% boardnum].set_value(0)
if d[_PD._MAXSSERIALPORTS]:
self.widgets["mesa%d_numof_sserialports"% boardnum].show()
self.widgets["mesa%d_numof_sserialports_label"% boardnum].show()
self.widgets["mesa%d_numof_sserialchannels"% boardnum].show()
self.widgets["mesa%d_numof_sserialchannels_label"% boardnum].show()
else:
self.widgets["mesa%d_numof_sserialports"% boardnum].hide()
self.widgets["mesa%d_numof_sserialports_label"% boardnum].hide()
self.widgets["mesa%d_numof_sserialchannels"% boardnum].hide()
self.widgets["mesa%d_numof_sserialchannels_label"% boardnum].hide()
self.widgets["mesa%d_numof_sserialports"% boardnum].set_range(0,d[_PD._MAXSSERIALPORTS])
self.widgets["mesa%d_numof_sserialports"% boardnum].set_value(d[_PD._MAXSSERIALPORTS])
self.widgets["mesa%d_numof_sserialchannels"% boardnum].set_range(1,d[_PD._MAXSSERIALCHANNELS])
self.widgets["mesa%d_numof_sserialchannels"% boardnum].set_value(d[_PD._MAXSSERIALCHANNELS])
self.widgets["mesa%d_totalpins"% boardnum].set_text("%s"% d[_PD._MAXGPIO])
self.widgets["mesa%d_3pwm_frequency"% boardnum].set_sensitive(d[_PD._MAXTPPWM])
if d[_PD._MAXRES]:
self.widgets["mesa%d_pwm_frequency"% boardnum].set_sensitive(False)
else:
self.widgets["mesa%d_pwm_frequency"% boardnum].set_sensitive(d[_PD._MAXPWM])
self.widgets["mesa%d_pdm_frequency"% boardnum].set_sensitive(d[_PD._MAXPWM])
if 'eth' in d[_PD._HALDRIVER] or "7i43" in title or '7i90' in title:
self.widgets["mesa%d_card_addrs_hbox"% boardnum].show()
if '7i43' in title or '7i90' in title:
self.widgets["mesa%d_parportaddrs"% boardnum].show()
self.widgets["mesa%d_card_addrs"% boardnum].hide()
else:
self.widgets["mesa%d_parportaddrs"% boardnum].hide()
self.widgets["mesa%d_card_addrs"% boardnum].show()
self.widgets["mesa%d_parporttext"% boardnum].show()
else:
self.widgets["mesa%d_card_addrs_hbox"% boardnum].hide()
self.widgets["mesa%d_parporttext"% boardnum].hide()
break
# This method converts data from the GUI page to signal names for pncconf's mesa data variables.
# It starts by checking the pin type to set up the proper lists to search,
# then, depending on the pin type, the widget data is converted to signal names.
# If the signal name is not in the list, it is added to the Human_names, signal_names
# and disc-saved signalname lists.
# For encoder, PWM, or stepper pins the related pins are also set properly:
# it does this by searching the current firmware array, finding the
# other related pin numbers, and changing them to the appropriate signal names.
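# For example, connector 3 pin 7 on board 0 uses the widget/data keys
# 'mesa0c3pin7', 'mesa0c3pin7inv' and 'mesa0c3pin7type'.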
def mesa_data_transfer(self,boardnum):
for concount,connector in enumerate(self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._NUMOFCNCTRS]) :
for pin in range(0,24):
p = 'mesa%dc%dpin%d' % (boardnum,connector,pin)
pinv = 'mesa%dc%dpin%dinv' % (boardnum,connector,pin)
ptype = 'mesa%dc%dpin%dtype' % (boardnum,connector,pin)
self.data_transfer(boardnum,connector,None,pin,p,pinv,ptype)
self.d["mesa%d_pwm_frequency"% boardnum] = self.widgets["mesa%d_pwm_frequency"% boardnum].get_value()
self.d["mesa%d_pdm_frequency"% boardnum] = self.widgets["mesa%d_pdm_frequency"% boardnum].get_value()
self.d["mesa%d_3pwm_frequency"% boardnum] = self.widgets["mesa%d_3pwm_frequency"% boardnum].get_value()
self.d["mesa%d_watchdog_timeout"% boardnum] = self.widgets["mesa%d_watchdog_timeout"% boardnum].get_value()
port = 0
for channel in range (0,self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._MAXSSERIALCHANNELS]):
if channel == _PD._NUM_CHANNELS: break # TODO may not have all channels worth of glade widgets
subboardname = self.d["mesa%dsserial%d_%dsubboard"% (boardnum, port, channel)]
#print "data transfer-channel ",channel," subboard name",subboardname
if subboardname == "none":
#print "no subboard for %s"% subboardname
continue
#print "sserial data transfering"
for pin in range (0,_PD._SSCOMBOLEN):
p = 'mesa%dsserial%d_%dpin%d' % (boardnum, port, channel, pin)
pinv = 'mesa%dsserial%d_%dpin%dinv' % (boardnum, port, channel, pin)
ptype = 'mesa%dsserial%d_%dpin%dtype' % (boardnum, port, channel, pin)
self.data_transfer(boardnum,port,channel,pin,p,pinv,ptype)
#print "sserial data transfer",p
def data_transfer(self,boardnum,connector,channel,pin,p,pinv,ptype):
foundit = False
piter = self.widgets[p].get_active_iter()
ptiter = self.widgets[ptype].get_active_iter()
pintype = self.widgets[ptype].get_active_text()
selection = self.widgets[p].get_active_text()
signaltree = self.widgets[p].get_model()
#if "serial" in p:
# print "**** INFO mesa-data-transfer:",p," selection: ",selection," pintype: ",pintype
# print "**** INFO mesa-data-transfer:",ptiter,piter
# type NOTUSED
if pintype == _PD.NUSED:
self.d[p] = _PD.UNUSED_UNUSED
self.d[ptype] = _PD.NUSED
self.d[pinv] = False
return
# type GPIO input
if pintype == _PD.GPIOI:
ptypetree = self.d._gpioliststore
signaltocheck = _PD.hal_input_names
# type gpio output and open drain
elif pintype in (_PD.GPIOO,_PD.GPIOD):
ptypetree = self.d._gpioliststore
signaltocheck = _PD.hal_output_names
elif pintype == _PD.SSR0:
ptypetree = self.d._ssrliststore
signaltocheck = _PD.hal_output_names
#type encoder
elif pintype in (_PD.ENCA,_PD.ENCB,_PD.ENCI,_PD.ENCM):
ptypetree = self.d._encoderliststore
signaltocheck = _PD.hal_encoder_input_names
# resolvers
elif pintype in (_PD.RES0,_PD.RES1,_PD.RES2,_PD.RES3,_PD.RES4,_PD.RES5,_PD.RESU):
ptypetree = self.d._resolverliststore
signaltocheck = _PD.hal_resolver_input_names
# 8i20 amplifier card
elif pintype == _PD.AMP8I20:
ptypetree = self.d._8i20liststore
signaltocheck = _PD.hal_8i20_input_names
# potentiometer output
elif pintype in (_PD.POTO,_PD.POTE):
ptypetree = self.d._potliststore
signaltocheck = _PD.hal_pot_output_names
# analog in
elif pintype == (_PD.ANALOGIN):
ptypetree = self.d._analoginliststore
signaltocheck = _PD.hal_analog_input_names
#type mux encoder
elif pintype in (_PD.MXE0, _PD.MXE1, _PD.MXEI, _PD.MXEM, _PD.MXES):
ptypetree = self.d._muxencoderliststore
signaltocheck = _PD.hal_encoder_input_names
# type PDM gen
elif pintype in( _PD.PDMP,_PD.PDMD,_PD.PDME):
if pintype == _PD.PDMP:
ptypetree = self.d._pdmcontrolliststore
else:
ptypetree = self.d._pdmrelatedliststore
signaltocheck = _PD.hal_pwm_output_names
# type PWM gen
elif pintype in( _PD.PWMP,_PD.PWMD,_PD.PWME):
if pintype == _PD.PWMP:
ptypetree = self.d._pwmcontrolliststore
else:
ptypetree = self.d._pwmrelatedliststore
signaltocheck = _PD.hal_pwm_output_names
# Up/Down mode
elif pintype in( _PD.UDMU,_PD.UDMD,_PD.UDME):
if pintype == _PD.UDMU:
ptypetree = self.d._udmcontrolliststore
else:
ptypetree = self.d._udmrelatedliststore
signaltocheck = _PD.hal_pwm_output_names
# type tp pwm
elif pintype in (_PD.TPPWMA,_PD.TPPWMB,_PD.TPPWMC,_PD.TPPWMAN,_PD.TPPWMBN,_PD.TPPWMCN,_PD.TPPWME,_PD.TPPWMF):
ptypetree = self.d._tppwmliststore
signaltocheck = _PD.hal_tppwm_output_names
# type step gen
elif pintype in (_PD.STEPA,_PD.STEPB):
ptypetree = self.d._stepperliststore
signaltocheck = _PD.hal_stepper_names
# type sserial
elif pintype in (_PD.RXDATA0,_PD.TXDATA0,_PD.TXEN0,_PD.RXDATA1,_PD.TXDATA1,_PD.TXEN1,_PD.RXDATA2,
_PD.TXDATA2,_PD.TXEN2,_PD.RXDATA3,_PD.TXDATA3,_PD.TXEN3,
_PD.RXDATA4,_PD.TXDATA4,_PD.TXEN4,_PD.RXDATA5,_PD.TXDATA5,_PD.TXEN5,_PD.RXDATA6,_PD.TXDATA6,
_PD.TXEN6,_PD.RXDATA7,_PD.TXDATA7,_PD.TXEN7,
_PD.SS7I76M0,_PD.SS7I76M2,_PD.SS7I76M3,_PD.SS7I77M0,_PD.SS7I77M1,_PD.SS7I77M3,_PD.SS7I77M4):
ptypetree = self.d._sserialliststore
signaltocheck = _PD.hal_sserial_names
# this suppresses errors because of unused and uninitialized sserial instances
elif pintype == None and "sserial" in ptype: return
else :
print "**** ERROR mesa-data-transfer: error unknown pin type:",pintype,"of ",ptype
return
# **Start widget-to-data conversion**
# for encoder pins
if piter == None:
#print "callin pin changed !!!"
name ="mesa"
if "sserial" in p: name = "sserial"
self.on_general_pin_changed(None,name,boardnum,connector,channel,pin,True)
selection = self.widgets[p].get_active_text()
piter = self.widgets[p].get_active_iter()
if piter == None:
print "****ERROR PNCCONF: no custom name available"
return
#print "found signame -> ",selection," "
# ok we have a piter with a signal type now; let's convert it to a signal name
#if not "serial" in p:
# self.debug_iter(piter,p,"signal")
dummy, index = signaltree.get(piter,0,1)
#if not "serial" in p:
# print "signaltree: ",dummy
# self.debug_iter(ptiter,ptype,"ptype")
widgetptype, index2 = ptypetree.get(ptiter,0,1)
#if not "serial" in p:
# print "ptypetree: ",widgetptype
if pintype in (_PD.GPIOI,_PD.GPIOO,_PD.GPIOD,_PD.SSR0,_PD.MXE0,_PD.MXE1,_PD.RES1,_PD.RES2,_PD.RES3,_PD.RES4,_PD.RES5,_PD.RESU,_PD.SS7I76M0,
_PD.SS7I76M2,_PD.SS7I76M3,_PD.SS7I77M0,_PD.SS7I77M1,_PD.SS7I77M3,_PD.SS7I77M4) or (index == 0):
index2 = 0
elif pintype in ( _PD.TXDATA0,_PD.RXDATA0,_PD.TXEN0,_PD.TXDATA1,_PD.RXDATA1,_PD.TXEN1,_PD.TXDATA2,_PD.RXDATA2,
_PD.TXEN2,_PD.TXDATA3,_PD.RXDATA3,_PD.TXEN3,_PD.TXDATA4,_PD.RXDATA4,_PD.TXEN4,
_PD.TXDATA5,_PD.RXDATA5,_PD.TXEN5,_PD.TXDATA6,_PD.RXDATA6,_PD.TXEN6,_PD.TXDATA7,_PD.RXDATA7,_PD.TXEN7 ):
index2 = 0
#print index,index2,signaltocheck[index+index2]
self.d[p] = signaltocheck[index+index2]
self.d[ptype] = widgetptype
self.d[pinv] = self.widgets[pinv].get_active()
#if "serial" in p:
# print "*** INFO PNCCONF mesa pin:",p,"signalname:",self.d[p],"pin type:",widgetptype
def on_mesa_pintype_changed(self, widget,boardnum,connector,channel,pin):
#print "mesa pintype changed:",boardnum,connector,channel,pin
if not channel == None:
port = connector
p = 'mesa%dsserial%d_%dpin%d' % (boardnum, port, channel, pin)
ptype = 'mesa%dsserial%d_%dpin%dtype' % (boardnum, port, channel, pin)
blocksignal = "_mesa%dsignalhandlersserial%i_%ipin%i" % (boardnum, port, channel, pin)
ptypeblocksignal = "_mesa%dptypesignalhandlersserial%i_%ipin%i"% (boardnum, port, channel, pin)
else:
p = 'mesa%dc%dpin%d' % (boardnum,connector,pin)
ptype = 'mesa%dc%dpin%dtype' % (boardnum,connector,pin)
blocksignal = "_mesa%dsignalhandlerc%ipin%i"% (boardnum,connector,pin)
ptypeblocksignal = "_mesa%dptypesignalhandlerc%ipin%i" % (boardnum, connector,pin)
modelcheck = self.widgets[p].get_model()
modelptcheck = self.widgets[ptype].get_model()
new = self.widgets[ptype].get_active_text()
#print "pintypechanged",p
# switch GPIO input to GPIO output
# here we switch the available signal names in the combobox
# we block signals so the pin-changed method is not called
if modelcheck == self.d._gpioisignaltree and new in (_PD.GPIOO,_PD.GPIOD):
#print "switch GPIO input ",p," to output",new
self.widgets[p].handler_block(self.d[blocksignal])
self.widgets[p].set_model(self.d._gpioosignaltree)
self.widgets[p].set_active(0)
self.widgets[p].handler_unblock(self.d[blocksignal])
# switch GPIO output to input
elif modelcheck == self.d._gpioosignaltree:
if new == _PD.GPIOI:
#print "switch GPIO output ",p,"to input"
self.widgets[p].handler_block(self.d[blocksignal])
self.widgets[p].set_model(self.d._gpioisignaltree)
self.widgets[p].set_active(0)
self.widgets[p].handler_unblock(self.d[blocksignal])
# switch between pulse width, pulse density or up/down mode analog modes
# here we search the firmware for related pins (eg PWMP,PWMD,PWME ) and change them too.
# we block signals so we don't call this routine again.
elif modelptcheck in (self.d._pwmcontrolliststore, self.d._pdmcontrolliststore, self.d._udmcontrolliststore):
relatedpins = [_PD.PWMP,_PD.PWMD,_PD.PWME]
if new == _PD.PWMP:
display = 0
relatedliststore = self.d._pwmrelatedliststore
controlliststore = self.d._pwmcontrolliststore
elif new == _PD.PDMP:
display = 1
relatedliststore = self.d._pdmrelatedliststore
controlliststore = self.d._pdmcontrolliststore
elif new == _PD.UDMU:
display = 2
relatedliststore = self.d._udmrelatedliststore
controlliststore = self.d._udmcontrolliststore
else:print "**** WARNING PNCCONF: pintype error-PWM type not found";return
self.widgets[ptype].handler_block(self.d[ptypeblocksignal])
self.widgets[ptype].set_model(controlliststore)
self.widgets[ptype].set_active(display)
self.widgets[ptype].handler_unblock(self.d[ptypeblocksignal])
pinlist = self.list_related_pins(relatedpins, boardnum, connector, channel, pin, 1)
for i in (pinlist):
relatedptype = i[0]
if relatedptype == ptype :continue
if not channel == None:
ptypeblocksignal = "_mesa%dptypesignalhandlersserial%i_%ipin%i"% (i[1], i[2],i[3],i[4])
else:
ptypeblocksignal = "_mesa%dptypesignalhandlerc%ipin%i" % (i[1], i[2],i[4])
self.widgets[relatedptype].handler_block(self.d[ptypeblocksignal])
j = self.widgets[relatedptype].get_active()
self.widgets[relatedptype].set_model(relatedliststore)
self.widgets[relatedptype].set_active(j)
self.widgets[relatedptype].handler_unblock(self.d[ptypeblocksignal])
else: print "**** WARNING PNCCONF: pintype error in pintypechanged method new ",new," pinnumber ",p
def on_mesa_component_value_changed(self, widget,boardnum):
self.in_mesa_prepare = True
self.d["mesa%d_pwm_frequency"% boardnum] = self.widgets["mesa%d_pwm_frequency"% boardnum].get_value()
self.d["mesa%d_pdm_frequency"% boardnum] = self.widgets["mesa%d_pdm_frequency"% boardnum].get_value()
self.d["mesa%d_watchdog_timeout"% boardnum] = self.widgets["mesa%d_watchdog_timeout"% boardnum].get_value()
numofpwmgens = self.d["mesa%d_numof_pwmgens"% boardnum] = int(self.widgets["mesa%d_numof_pwmgens"% boardnum].get_value())
numoftppwmgens = self.d["mesa%d_numof_tppwmgens"% boardnum] = int(self.widgets["mesa%d_numof_tppwmgens"% boardnum].get_value())
numofstepgens = self.d["mesa%d_numof_stepgens"% boardnum] = int(self.widgets["mesa%d_numof_stepgens"% boardnum].get_value())
numofencoders = self.d["mesa%d_numof_encodergens"% boardnum] = int(self.widgets["mesa%d_numof_encodergens"% boardnum].get_value())
numofsserialports = self.d["mesa%d_numof_sserialports"% boardnum] = int(self.widgets["mesa%d_numof_sserialports"% boardnum].get_value())
numofsserialchannels = self.d["mesa%d_numof_sserialchannels"% boardnum] = \
int(self.widgets["mesa%d_numof_sserialchannels"% boardnum].get_value())
title = self.d["mesa%d_boardtitle"% boardnum] = self.widgets["mesa%d_boardtitle"% boardnum].get_active_text()
firmware = self.d["mesa%d_firmware"% boardnum] = self.widgets["mesa%d_firmware"% boardnum].get_active_text()
self.set_mesa_options(boardnum,title,firmware,numofpwmgens,numoftppwmgens,numofstepgens,numofencoders,numofsserialports,numofsserialchannels)
return True
# This method sets up the mesa GUI page and is used when changing component values, firmware or boards from the config page.
# It changes the component comboboxes according to the firmware maximums and user-requested amounts.
# It adds signal names to the signal-name comboboxes according to component type and, in the
# case of GPIO, options selected on the basic page such as limit/homing types.
# It will grey out I/O tabs according to the selected board type.
# It uses GTK signal blocking to block the on_general_pin_changed and on_mesa_pintype_changed methods.
# Since this method is for initialization, there is no need to check for changes and this speeds up
# the update.
# 'self._p.MESA_FIRMWAREDATA' holds all the firmware data.
# 'self.d.mesaX_currentfirmwaredata' holds the currently selected firmware data (X is 0 or 1).
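# The page is built in two passes: first every pin's handlers are blocked and
# firmware_to_widgets() configures the comboboxes, then the handlers are unblocked
# and mesa_mainboard_data_to_widgets() restores the saved signal selections.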
def set_mesa_options(self,boardnum,title,firmware,numofpwmgens,numoftppwmgens,numofstepgens,numofencoders,numofsserialports,numofsserialchannels):
_PD.prepare_block = True
self.p.set_buttons_sensitive(0,0)
self.pbar.set_text("Setting up Mesa tabs")
self.pbar.set_fraction(0)
self.window.show()
while gtk.events_pending():
gtk.main_iteration()
for search, item in enumerate(self._p.MESA_FIRMWAREDATA):
d = self._p.MESA_FIRMWAREDATA[search]
if not d[_PD._BOARDTITLE] == title:continue
if d[_PD._FIRMWARE] == firmware:
self.d["mesa%d_currentfirmwaredata"% boardnum] = self._p.MESA_FIRMWAREDATA[search]
break
dbg('current firmware:\n%r'%self._p.MESA_FIRMWAREDATA[search],mtype='curfirm')
self.widgets["mesa%dcon2table"% boardnum].hide()
self.widgets["mesa%dcon3table"% boardnum].hide()
self.widgets["mesa%dcon4table"% boardnum].hide()
self.widgets["mesa%dcon5table"% boardnum].hide()
self.widgets["mesa%dcon6table"% boardnum].hide()
self.widgets["mesa%dcon7table"% boardnum].hide()
self.widgets["mesa%dcon8table"% boardnum].hide()
self.widgets["mesa%dcon9table"% boardnum].hide()
self.widgets["mesa%dsserial0_0"% boardnum].hide()
self.widgets["mesa%dsserial0_1"% boardnum].hide()
self.widgets["mesa%dsserial0_2"% boardnum].hide()
self.widgets["mesa%dsserial0_3"% boardnum].hide()
self.widgets["mesa%dsserial0_4"% boardnum].hide()
currentboard = self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._BOARDNAME]
for i in self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._NUMOFCNCTRS]:
self.widgets["mesa%dcon%dtable"% (boardnum,i)].show()
# self.widgets["mesa%d"%boardnum].set_title("Mesa%d Configuration-Board: %s firmware: %s"% (boardnum,self.d["mesa%d_boardtitle"%boardnum],
# self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._FIRMWARE]))
temp = "/usr/share/doc/hostmot2-firmware-%s/%s.PIN"% (self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._DIRECTORY],
self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._FIRMWARE] )
filename = os.path.expanduser(temp)
if os.path.exists(filename):
match = open(filename).read()
textbuffer = self.widgets.textoutput.get_buffer()
try :
textbuffer.set_text("%s\n\n"% filename)
textbuffer.insert_at_cursor(match)
except:
pass
currentboard = self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._BOARDNAME]
meta = self.get_board_meta(currentboard)
ppc = meta.get('PINS_PER_CONNECTOR')
for concount,connector in enumerate(self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._NUMOFCNCTRS]) :
for pin in range (0,24):
self.pbar.set_fraction((pin+1)/24.0)
while gtk.events_pending():
gtk.main_iteration()
firmptype,compnum = self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._STARTOFDATA+pin+(concount*24)]
p = 'mesa%dc%dpin%d' % (boardnum, connector, pin)
ptype = 'mesa%dc%dpin%dtype' % (boardnum, connector , pin)
#print "**** INFO set-mesa-options DATA:",self.d[p],p,self.d[ptype]
#print "**** INFO set-mesa-options FIRM:",firmptype
#print "**** INFO set-mesa-options WIDGET:",self.widgets[p].get_active_text(),self.widgets[ptype].get_active_text()
complabel = 'mesa%dc%dpin%dnum' % (boardnum, connector , pin)
pinv = 'mesa%dc%dpin%dinv' % (boardnum, connector , pin)
blocksignal = "_mesa%dsignalhandlerc%ipin%i" % (boardnum, connector, pin)
ptypeblocksignal = "_mesa%dptypesignalhandlerc%ipin%i" % (boardnum, connector,pin)
actblocksignal = "_mesa%dactivatehandlerc%ipin%i" % (boardnum, connector, pin)
# kill all widget signals:
self.widgets[ptype].handler_block(self.d[ptypeblocksignal])
self.widgets[p].handler_block(self.d[blocksignal])
self.widgets[p].child.handler_block(self.d[actblocksignal])
self.firmware_to_widgets(boardnum,firmptype,p,ptype,pinv,complabel,compnum,concount,ppc,pin,numofencoders,
numofpwmgens,numoftppwmgens,numofstepgens,None,numofsserialports,numofsserialchannels,False)
self.d["mesa%d_numof_stepgens"% boardnum] = numofstepgens
self.d["mesa%d_numof_pwmgens"% boardnum] = numofpwmgens
self.d["mesa%d_numof_encodergens"% boardnum] = numofencoders
self.d["mesa%d_numof_sserialports"% boardnum] = numofsserialports
self.d["mesa%d_numof_sserialchannels"% boardnum] = numofsserialchannels
self.widgets["mesa%d_numof_stepgens"% boardnum].set_value(numofstepgens)
self.widgets["mesa%d_numof_encodergens"% boardnum].set_value(numofencoders)
self.widgets["mesa%d_numof_pwmgens"% boardnum].set_value(numofpwmgens)
self.in_mesa_prepare = False
self.d["_mesa%d_configured"% boardnum] = True
# unblock all the widget signals again
for concount,connector in enumerate(self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._NUMOFCNCTRS]) :
for pin in range (0,24):
p = 'mesa%dc%dpin%d' % (boardnum, connector, pin)
ptype = 'mesa%dc%dpin%dtype' % (boardnum, connector , pin)
blocksignal = "_mesa%dsignalhandlerc%ipin%i" % (boardnum, connector, pin)
ptypeblocksignal = "_mesa%dptypesignalhandlerc%ipin%i" % (boardnum, connector,pin)
actblocksignal = "_mesa%dactivatehandlerc%ipin%i" % (boardnum, connector, pin)
self.widgets[ptype].handler_unblock(self.d[ptypeblocksignal])
self.widgets[p].handler_unblock(self.d[blocksignal])
self.widgets[p].child.handler_unblock(self.d[actblocksignal])
self.mesa_mainboard_data_to_widgets(boardnum)
self.window.hide()
self.p.set_buttons_sensitive(1,1)
_PD.prepare_block = False
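# Set up one smart-serial channel's tab: look up the sub-board in MESA_DAUGHTERDATA
# and run firmware_to_widgets()/data_to_widgets() over its pins, blocking and
# unblocking the widget handlers around the update just like set_mesa_options().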
def set_sserial_options(self,boardnum,port,channel):
numofsserialports = self.d["mesa%d_numof_sserialports"% boardnum]
numofsserialchannels = self.d["mesa%d_numof_sserialchannels"% boardnum]
subboardname = self.d["mesa%dsserial%d_%dsubboard"% (boardnum, port, channel)]
if subboardname == "none":return
self.pbar.set_text("Setting up Mesa Smart Serial tabs")
self.pbar.set_fraction(0)
self.window.show()
while gtk.events_pending():
gtk.main_iteration()
for subnum,temp in enumerate(self._p.MESA_DAUGHTERDATA):
#print self._p.MESA_DAUGHTERDATA[subnum][self._p._SUBFIRMNAME],subboardname
if self._p.MESA_DAUGHTERDATA[subnum][self._p._SUBFIRMNAME] == subboardname: break
#print "found subboard name:",self._p.MESA_DAUGHTERDATA[subnum][self._p._SUBFIRMNAME],subboardname,subnum,"channel:",channel
for pin in range (0,self._p._SSCOMBOLEN):
self.pbar.set_fraction((pin+1)/60.0)
while gtk.events_pending():
gtk.main_iteration()
p = 'mesa%dsserial%d_%dpin%d' % (boardnum, port, channel, pin)
ptype = 'mesa%dsserial%d_%dpin%dtype' % (boardnum, port, channel, pin)
pinv = 'mesa%dsserial%d_%dpin%dinv' % (boardnum, port, channel, pin)
complabel = 'mesa%dsserial%d_%dpin%dnum' % (boardnum, port, channel, pin)
blocksignal = "_mesa%dsignalhandlersserial%i_%ipin%i" % (boardnum, port, channel, pin)
ptypeblocksignal = "_mesa%dptypesignalhandlersserial%i_%ipin%i" % (boardnum, port, channel, pin)
actblocksignal = "_mesa%dactivatehandlersserial%i_%ipin%i" % (boardnum, port, channel, pin)
firmptype,compnum = self._p.MESA_DAUGHTERDATA[subnum][self._p._SUBSTARTOFDATA+pin]
#print "sserial set options",p
# kill all widget signals:
self.widgets[ptype].handler_block(self.d[ptypeblocksignal])
self.widgets[p].handler_block(self.d[blocksignal])
self.widgets[p].child.handler_block(self.d[actblocksignal])
ppc = 0
concount = 0
numofencoders = 10
numofpwmgens = 12
numoftppwmgens = 0
numofstepgens = 0
self.firmware_to_widgets(boardnum,firmptype,p,ptype,pinv,complabel,compnum,concount,ppc,pin,numofencoders,
numofpwmgens,numoftppwmgens,numofstepgens,subboardname,numofsserialports,numofsserialchannels,True)
# all this to unblock signals
for pin in range (0,self._p._SSCOMBOLEN):
firmptype,compnum = self._p.MESA_DAUGHTERDATA[0][self._p._SUBSTARTOFDATA+pin]
p = 'mesa%dsserial%d_%dpin%d' % (boardnum, port, channel, pin)
ptype = 'mesa%dsserial%d_%dpin%dtype' % (boardnum, port, channel, pin)
pinv = 'mesa%dsserial%d_%dpin%dinv' % (boardnum, port, channel, pin)
complabel = 'mesa%dsserial%d_%dpin%dnum' % (boardnum, port, channel, pin)
blocksignal = "_mesa%dsignalhandlersserial%i_%ipin%i" % (boardnum, port, channel, pin)
ptypeblocksignal = "_mesa%dptypesignalhandlersserial%i_%ipin%i" % (boardnum, port, channel, pin)
actblocksignal = "_mesa%dactivatehandlersserial%i_%ipin%i" % (boardnum, port, channel, pin)
# unblock all widget signals:
self.widgets[ptype].handler_unblock(self.d[ptypeblocksignal])
self.widgets[p].handler_unblock(self.d[blocksignal])
self.widgets[p].child.handler_unblock(self.d[actblocksignal])
# now that the widgets are set up as per firmware, change them as per the loaded data and add signals
for pin in range (0,self._p._SSCOMBOLEN):
firmptype,compnum = self._p.MESA_DAUGHTERDATA[subnum][self._p._SUBSTARTOFDATA+pin]
p = 'mesa%dsserial%d_%dpin%d' % (boardnum, port, channel, pin)
#print "INFO: data to widget smartserial- ",p, firmptype
ptype = 'mesa%dsserial%d_%dpin%dtype' % (boardnum, port, channel, pin)
pinv = 'mesa%dsserial%d_%dpin%dinv' % (boardnum, port, channel, pin)
self.data_to_widgets(boardnum,firmptype,compnum,p,ptype,pinv)
#print "sserial data-widget",p
self.widgets["mesa%d_numof_sserialports"% boardnum].set_value(numofsserialports)
self.widgets["mesa%d_numof_sserialchannels"% boardnum].set_value(numofsserialchannels)
self.window.hide()
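# Configure a single pin's widgets (signal combobox, pin-type combobox, invert
# checkbox and number label) according to the component the firmware assigns to it.
# Components beyond the user-requested counts are downgraded to GPIO by forcing
# firmptype to GPIOI with compnum = 0.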
def firmware_to_widgets(self,boardnum,firmptype,p,ptype,pinv,complabel,compnum,concount,ppc, pin,numofencoders,numofpwmgens,numoftppwmgens,
numofstepgens,subboardname,numofsserialports,numofsserialchannels,sserialflag):
currentboard = self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._BOARDNAME]
# *** convert widget[ptype] to component specified in firmwaredata ***
# if the board has fewer than 24 pins, hide the extra comboboxes
if firmptype == _PD.NUSED:
self.widgets[p].hide()
self.widgets[ptype].hide()
self.widgets[pinv].hide()
self.widgets[complabel].hide()
self.widgets[ptype].set_model(self.d._notusedliststore)
self.widgets[ptype].set_active(0)
self.widgets[p].set_model(self.d._notusedsignaltree)
self.widgets[p].set_active(0)
return
else:
self.widgets[p].show()
self.widgets[ptype].show()
self.widgets[pinv].show()
self.widgets[complabel].show()
self.widgets[p].child.set_editable(True)
# ---SETUP GUI FOR ENCODER FAMILY COMPONENT---
# check that we are not converting more encoders than the user requested;
# if we are, then we trick this routine into thinking the firmware asked for GPIO:
# we can do that by changing the variable 'firmptype' to ask for GPIO
if firmptype in ( _PD.ENCA,_PD.ENCB,_PD.ENCI,_PD.ENCM ):
if numofencoders >= (compnum+1):
# if the combobox is not already displaying the right component:
# then we need to set up the comboboxes for this pin, otherwise skip it
if self.widgets[ptype].get_model():
widgetptype = self.widgets[ptype].get_active_text()
else: widgetptype = None
if not widgetptype == firmptype or not self.d["_mesa%d_configured"%boardnum]:
self.widgets[pinv].set_sensitive(0)
self.widgets[pinv].set_active(0)
self.widgets[ptype].set_model(self.d._encoderliststore)
# serial encoders are not for AXES - filter AXES selections out
if sserialflag:
self.widgets[p].set_model(self.d._encodersignalfilter)
else:
self.widgets[p].set_model(self.d._encodersignaltree)
# we only add every 4th human name so the user can only select
# the encoder's 'A' signal name. For the other signals
# we can add them all, because pncconf controls what the user sees
if firmptype == _PD.ENCA:
self.widgets[complabel].set_text("%d:"%compnum)
self.widgets[p].set_active(0)
self.widgets[p].set_sensitive(1)
self.widgets[ptype].set_sensitive(0)
self.widgets[ptype].set_active(0)
# pncconf controls what the user sees with these ones:
elif firmptype in(_PD.ENCB,_PD.ENCI,_PD.ENCM):
self.widgets[complabel].set_text("")
self.widgets[p].set_active(0)
self.widgets[p].set_sensitive(0)
self.widgets[ptype].set_sensitive(0)
for i,j in enumerate((_PD.ENCB,_PD.ENCI,_PD.ENCM)):
if firmptype == j:break
self.widgets[ptype].set_active(i+1)
else:
# user requested this encoder component to be GPIO instead
# We cheat a little and tell the rest of the method that the firmware says
# it should be GPIO and compnum is changed to signify that the GPIO can be changed
# from input to output
# Right now only mainboard GPIO can be changed
# sserial I/O can not
firmptype = _PD.GPIOI
compnum = 0
# --- mux encoder ---
elif firmptype in (_PD.MXE0,_PD.MXE1,_PD.MXEI,_PD.MXEM,_PD.MXES):
#print "**** INFO: MUX ENCODER:",firmptype,compnum,numofencoders
if numofencoders >= (compnum*2+1) or (firmptype == _PD.MXES and numofencoders >= compnum*2+1) or \
(firmptype == _PD.MXEM and numofencoders >= compnum +1):
# if the combobox is not already displaying the right component:
# then we need to set up the comboboxes for this pin, otherwise skip it
self.widgets[pinv].set_sensitive(0)
self.widgets[pinv].set_active(0)
self.widgets[p].set_model(self.d._muxencodersignaltree)
self.widgets[ptype].set_model(self.d._muxencoderliststore)
self.widgets[ptype].set_active(_PD.pintype_muxencoder.index(firmptype))
self.widgets[ptype].set_sensitive(0)
self.widgets[p].set_active(0)
if firmptype in(_PD.MXE0,_PD.MXE1):
temp = 0
if firmptype == _PD.MXE1: temp = 1
self.widgets[complabel].set_text("%d:"%(compnum *2 + temp))
self.widgets[p].set_sensitive(1)
self.widgets[ptype].show()
self.widgets[p].show()
elif firmptype == _PD.MXEM:
self.widgets[complabel].set_text("%d:"%compnum)
self.widgets[p].set_sensitive(0)
self.widgets[ptype].show()
self.widgets[p].hide()
else:
self.widgets[complabel].set_text("")
self.widgets[p].set_sensitive(0)
self.widgets[ptype].hide()
self.widgets[p].hide()
else:
firmptype = _PD.GPIOI
compnum = 0
# ---SETUP GUI FOR RESOLVER FAMILY COMPONENTS---
elif firmptype in (_PD.RES0,_PD.RES1,_PD.RES2,_PD.RES3,_PD.RES4,_PD.RES5,_PD.RESU):
if 0 == 0:
self.widgets[pinv].set_sensitive(0)
self.widgets[pinv].set_active(0)
self.widgets[p].set_model(self.d._resolversignaltree)
self.widgets[ptype].set_model(self.d._resolverliststore)
self.widgets[ptype].set_sensitive(0)
self.widgets[ptype].set_active(0)
if firmptype == _PD.RESU:
self.widgets[complabel].set_text("")
self.widgets[p].hide()
self.widgets[p].set_sensitive(0)
self.widgets[p].set_active(0)
self.widgets[ptype].set_active(6)
else:
temp = (_PD.RES0,_PD.RES1,_PD.RES2,_PD.RES3,_PD.RES4,_PD.RES5)
self.widgets[p].show()
for num,i in enumerate(temp):
if firmptype == i:break
self.widgets[complabel].set_text("%d:"% (compnum*6+num))
self.widgets[p].set_sensitive(1)
self.widgets[p].set_active(0)
self.widgets[ptype].set_active(num)
# ---SETUP 8i20 amp---
elif firmptype == _PD.AMP8I20:
self.widgets[ptype].set_model(self.d._8i20liststore)
self.widgets[p].set_model(self.d._8i20signaltree)
self.widgets[complabel].set_text("%d:"%compnum)
self.widgets[p].set_active(0)
self.widgets[p].set_sensitive(1)
self.widgets[pinv].set_sensitive(1)
self.widgets[pinv].set_active(0)
self.widgets[ptype].set_sensitive(0)
self.widgets[ptype].set_active(0)
# --- SETUP potentiometer output
elif firmptype in (_PD.POTO,_PD.POTE):
self.widgets[ptype].set_model(self.d._potliststore)
self.widgets[p].set_model(self.d._potsignaltree)
self.widgets[complabel].set_text("%d:"%compnum)
self.widgets[p].set_active(0)
self.widgets[pinv].set_sensitive(1)
self.widgets[pinv].set_active(0)
self.widgets[ptype].set_sensitive(0)
if firmptype == _PD.POTO:
self.widgets[ptype].set_active(0)
self.widgets[p].set_sensitive(1)
else:
self.widgets[ptype].set_active(1)
self.widgets[p].set_sensitive(0)
# --- SETUP analog input
elif firmptype == (_PD.ANALOGIN):
self.widgets[ptype].set_model(self.d._analoginliststore)
self.widgets[p].set_model(self.d._analoginsignaltree)
self.widgets[complabel].set_text("%d:"%compnum)
self.widgets[p].set_active(0)
self.widgets[pinv].set_sensitive(1)
self.widgets[pinv].set_active(0)
self.widgets[ptype].set_sensitive(0)
self.widgets[ptype].set_active(0)
self.widgets[p].set_sensitive(1)
# ---SETUP GUI FOR PWM FAMILY COMPONENT---
# the user has a choice of pulse width or pulse density modulation
elif firmptype in ( _PD.PWMP,_PD.PWMD,_PD.PWME,_PD.PDMP,_PD.PDMD,_PD.PDME ):
if numofpwmgens >= (compnum+1):
self.widgets[pinv].set_sensitive(1)
self.widgets[pinv].set_active(0)
self.widgets[p].set_model(self.d._pwmsignaltree)
# only add the -pulse signal names for the user to see
if firmptype in(_PD.PWMP,_PD.PDMP):
self.widgets[complabel].set_text("%d:"%compnum)
#print "firmptype = controlling"
self.widgets[ptype].set_model(self.d._pwmcontrolliststore)
self.widgets[ptype].set_sensitive(not sserialflag) # sserial pwm cannot be changed
self.widgets[p].set_sensitive(1)
self.widgets[p].set_active(0)
self.widgets[ptype].set_active(0)
# add them all here
elif firmptype in (_PD.PWMD,_PD.PWME,_PD.PDMD,_PD.PDME):
self.widgets[complabel].set_text("")
#print "firmptype = related"
if firmptype in (_PD.PWMD,_PD.PWME):
self.widgets[ptype].set_model(self.d._pwmrelatedliststore)
else:
self.widgets[ptype].set_model(self.d._pdmrelatedliststore)
self.widgets[p].set_sensitive(0)
self.widgets[p].set_active(0)
self.widgets[ptype].set_sensitive(0)
temp = 1
if firmptype in (_PD.PWME,_PD.PDME):
self.widgets[pinv].set_sensitive(0)
temp = 2
self.widgets[ptype].set_active(temp)
else:
firmptype = _PD.GPIOI
compnum = 0
# ---SETUP GUI FOR TP PWM FAMILY COMPONENT---
elif firmptype in ( _PD.TPPWMA,_PD.TPPWMB,_PD.TPPWMC,_PD.TPPWMAN,_PD.TPPWMBN,_PD.TPPWMCN,_PD.TPPWME,_PD.TPPWMF ):
if numoftppwmgens >= (compnum+1):
if not self.widgets[ptype].get_active_text() == firmptype or not self.d["_mesa%d_configured"%boardnum]:
self.widgets[p].set_model(self.d._tppwmsignaltree)
self.widgets[ptype].set_model(self.d._tppwmliststore)
self.widgets[pinv].set_sensitive(0)
self.widgets[pinv].set_active(0)
self.widgets[ptype].set_sensitive(0)
self.widgets[ptype].set_active(_PD.pintype_tp_pwm.index(firmptype))
self.widgets[p].set_active(0)
# only add the -a signal names for the user to change
if firmptype == _PD.TPPWMA:
self.widgets[complabel].set_text("%d:"%compnum)
self.widgets[p].set_sensitive(1)
# the rest the user can't change
else:
self.widgets[complabel].set_text("")
self.widgets[p].set_sensitive(0)
else:
firmptype = _PD.GPIOI
compnum = 0
# ---SETUP SMART SERIAL COMPONENTS---
# smart serial has port numbers (0-3) and channels (0-7).
# so the component-number check is different from other components: it checks the port number and channel number
elif firmptype in (_PD.TXDATA0,_PD.RXDATA0,_PD.TXEN0,_PD.TXDATA1,_PD.RXDATA1,_PD.TXEN1,
_PD.TXDATA2,_PD.RXDATA2,_PD.TXEN2,_PD.TXDATA3,_PD.RXDATA3,_PD.TXEN3,
_PD.TXDATA4,_PD.RXDATA4,_PD.TXEN4,_PD.TXDATA5,_PD.RXDATA5,_PD.TXEN5,
_PD.TXDATA6,_PD.RXDATA6,_PD.TXEN6,_PD.TXDATA7,_PD.RXDATA7,_PD.TXEN7,
_PD.SS7I76M0,_PD.SS7I76M2,_PD.SS7I76M3,_PD.SS7I77M0,_PD.SS7I77M1,_PD.SS7I77M3,_PD.SS7I77M4):
channelnum = 1
if firmptype in (_PD.TXDATA1,_PD.RXDATA1,_PD.TXEN1,_PD.SS7I77M1): channelnum = 2
if firmptype in (_PD.TXDATA2,_PD.RXDATA2,_PD.TXEN2,_PD.SS7I76M2): channelnum = 3
if firmptype in (_PD.TXDATA3,_PD.RXDATA3,_PD.TXEN3,_PD.SS7I76M3,_PD.SS7I77M3): channelnum = 4
if firmptype in (_PD.TXDATA4,_PD.RXDATA4,_PD.TXEN4,_PD.SS7I77M4): channelnum = 5
if firmptype in (_PD.TXDATA5,_PD.RXDATA5,_PD.TXEN5): channelnum = 6
if firmptype in (_PD.TXDATA6,_PD.RXDATA6,_PD.TXEN6): channelnum = 7
if firmptype in (_PD.TXDATA7,_PD.RXDATA7,_PD.TXEN7): channelnum = 8
# the control combobox is the one the user can select from; the others are desensitized
CONTROL = False
if firmptype in (_PD.TXDATA0,_PD.TXDATA1,_PD.TXDATA2,_PD.TXDATA3,_PD.TXDATA4,_PD.TXDATA5,
_PD.TXDATA6,_PD.TXDATA7,_PD.SS7I76M0,_PD.SS7I76M2,_PD.SS7I76M3,
_PD.SS7I77M0,_PD.SS7I77M1,_PD.SS7I77M3,_PD.SS7I77M4):
CONTROL = True
#print "**** INFO: SMART SERIAL ENCODER:",firmptype," compnum = ",compnum," channel = ",channelnum
#print "sserial channel:%d"% numofsserialchannels
if numofsserialports >= (compnum + 1) and numofsserialchannels >= (channelnum):
# if the combobox is not already displaying the right component:
# then we need to set up the comboboxes for this pin, otherwise skip it
#if compnum < _PD._NUM_CHANNELS: # TODO not all channels available
# self.widgets["mesa%dsserialtab%d"% (boardnum,compnum)].show()
self.widgets[pinv].set_sensitive(0)
self.widgets[pinv].set_active(0)
# Filter the selections that the user can choose from,
# e.g. only show two modes for the 7i77 and 7i76, or
# don't offer those selections on regular sserial channels
if CONTROL:
self.widgets[p].set_model(self.d['_sserial%d_signalfilter'%(channelnum-1)])
if firmptype in (_PD.SS7I77M0,_PD.SS7I77M1,_PD.SS7I77M3,_PD.SS7I77M4):
self.set_filter('_sserial%d'% (channelnum-1),'7I77')
elif firmptype in (_PD.SS7I76M0,_PD.SS7I76M2,_PD.SS7I76M3):
self.set_filter('_sserial%d'% (channelnum-1),'7I76')
else:
self.set_filter('_sserial%d'% (channelnum-1),'ALL')
else:
self.widgets[p].set_model(self.d._sserialsignaltree)
self.widgets[ptype].set_model(self.d._sserialliststore)
self.widgets[ptype].set_active(_PD.pintype_sserial.index(firmptype))
self.widgets[ptype].set_sensitive(0)
self.widgets[p].set_active(0)
self.widgets[p].child.set_editable(False) # sserial cannot have custom names
# controlling combobox
if CONTROL:
self.widgets[complabel].set_text("%d:"% (channelnum -1))
if channelnum <= _PD._NUM_CHANNELS:#TODO not all channels available
self.widgets[p].set_sensitive(1)
else:
self.widgets[p].set_sensitive(0)
# This is a bit of a hack to make 7i77 and 7i76 firmware automatically choose
# the appropriate sserial component and allow the user to select different modes.
# If the sserial ptype is 7i76 or 7i77 then the data must be set to a 7i76/7i77 signal,
# as that sserial instance can only be used for the 7i76/7i77 I/O points.
# 7i76:
if firmptype in (_PD.SS7I76M0,_PD.SS7I76M2,_PD.SS7I76M3):
if not self.d[p] in (_PD.I7I76_M0_T,_PD.I7I76_M2_T):
self.d[p] = _PD.I7I76_M0_T
self.d[ptype] = firmptype
self.widgets[p].set_sensitive(self.d.advanced_option)
# 7i77:
elif firmptype in (_PD.SS7I77M0,_PD.SS7I77M1,_PD.SS7I77M3,_PD.SS7I77M4):
if not self.d[p] in (_PD.I7I77_M3_T,_PD.I7I77_M0_T):
self.d[p] = _PD.I7I77_M0_T
if not firmptype in( _PD.SS7I77M1,_PD.SS7I77M4):
self.widgets[p].set_sensitive(self.d.advanced_option)
else:
self.widgets[p].set_sensitive(0)
self.d[ptype] = firmptype
else:
print 'found a sserial channel'
ssdevice = self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._SSDEVICES]
for port,channel,device in (ssdevice):
print port,channel,device,channelnum
if port == 0 and channel+1 == channelnum:
print 'configure for: %s device'% device
if '7I64' in device:
if not '7i64' in self.d[p]:
self.d[p] = _PD.I7I64_T
elif '7I73' in device:
if not '7i73' in self.d[p]:
self.d[p] = _PD.I7I73_M0_T
else:
self.widgets[complabel].set_text("")
self.widgets[p].set_sensitive(0)
else:
firmptype = _PD.GPIOI
compnum = 0
# ---SETUP FOR STEPPER FAMILY COMPONENT---
elif firmptype in (_PD.STEPA,_PD.STEPB):
if numofstepgens >= (compnum+1):
self.widgets[ptype].set_model(self.d._stepperliststore)
self.widgets[p].set_model(self.d._steppersignaltree)
self.widgets[pinv].set_sensitive(1)
self.widgets[pinv].set_active(0)
self.widgets[ptype].set_sensitive(0)
self.widgets[ptype].set_active( _PD.pintype_stepper.index(firmptype) )
self.widgets[p].set_active(0)
#self.widgets[p].set_active(0)
if firmptype == _PD.STEPA:
self.widgets[complabel].set_text("%d:"%compnum)
self.widgets[p].set_sensitive(1)
elif firmptype == _PD.STEPB:
self.widgets[complabel].set_text("")
self.widgets[p].set_sensitive(0)
else:
firmptype = _PD.GPIOI
compnum = 0
# ---SETUP FOR GPIO FAMILY COMPONENT---
# first check to see if firmware says it should be in GPIO family
# (note this can be because the firmware says it should be some other
# type but the user wants to deselect it so as to use it as GPIO;
# this is done in the firmptype checks before this check,
# which change the firmptype variable to GPIOI)
# check if firmptype is in GPIO family
# check if widget is already configured
# we now set everything in a known state.
if firmptype in (_PD.GPIOI,_PD.GPIOO,_PD.GPIOD,_PD.SSR0):
if self.widgets[ptype].get_model():
widgettext = self.widgets[ptype].get_active_text()
else:
widgettext = None
if sserialflag:
if "7i77" in subboardname or "7i76" in subboardname or "7i84" in subboardname:
if pin <16:
self.widgets[complabel].set_text("%02d:"%(pin)) # sserial input
elif (pin >23 and pin < 40):
self.widgets[complabel].set_text("%02d:"%(pin-8)) # sserial input
elif pin >15 and pin < 24:
self.widgets[complabel].set_text("%02d:"%(pin-16)) #sserial output
elif pin >39:
self.widgets[complabel].set_text("%02d:"%(pin-32)) #sserial output
elif "7i70" in subboardname or "7i71" in subboardname:
self.widgets[complabel].set_text("%02d:"%(pin))
else:
if pin <24 :
self.widgets[complabel].set_text("%02d:"%(concount*24+pin)) # sserial input
else:
self.widgets[complabel].set_text("%02d:"%(concount*24+pin-24)) #sserial output
else:
if firmptype == _PD.SSR0:
self.widgets[complabel].set_text("%02d:"%(compnum - 100))
else:
self.widgets[complabel].set_text("%03d:"%(concount*ppc+pin))# mainboard GPIO
if compnum >= 100 and widgettext == firmptype:
return
elif not compnum >= 100 and (widgettext in (_PD.GPIOI,_PD.GPIOO,_PD.GPIOD)):
return
else:
#self.widgets[ptype].show()
#self.widgets[p].show()
self.widgets[p].set_sensitive(1)
self.widgets[pinv].set_sensitive(1)
self.widgets[ptype].set_sensitive(not compnum >= 100) # compnum = 100 means GPIO cannot be changed by user
if firmptype == _PD.SSR0:
self.widgets[ptype].set_model(self.d._ssrliststore)
else:
self.widgets[ptype].set_model(self.d._gpioliststore)
if firmptype == _PD.GPIOI:
# set pin treestore to gpioi signals
if not self.widgets[p].get_model() == self.d._gpioisignaltree:
self.widgets[p].set_model(self.d._gpioisignaltree)
# set ptype gpioi
self.widgets[ptype].set_active(0)
# set p unused signal
self.widgets[p].set_active(0)
# set pinv unset
self.widgets[pinv].set_active(False)
elif firmptype == _PD.SSR0:
if not self.widgets[p].get_model() == self.d._gpioosignaltree:
self.widgets[p].set_model(self.d._gpioosignaltree)
# set ptype gpioo
self.widgets[ptype].set_active(0)
# set p unused signal
self.widgets[p].set_active(0)
# set pinv unset
self.widgets[pinv].set_active(False)
else:
if not self.widgets[p].get_model() == self.d._gpioosignaltree:
self.widgets[p].set_model(self.d._gpioosignaltree)
# set ptype gpioo
self.widgets[ptype].set_active(1)
# set p unused signal
self.widgets[p].set_active(0)
# set pinv unset
self.widgets[pinv].set_active(False)
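# Search a signal tree model (up to three levels deep) for the row whose
# column 2 matches signal_name; fall back to the first row if nothing matches.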
def find_sig_name_iter(self,model, signal_name):
for i, k in enumerate(model):
itr = model.get_iter(i)
title = model.get_value(itr,2)
#print 'first:',title
# check first set
if title == signal_name :return itr
cld_itr = model.iter_children(itr)
if cld_itr != None:
while cld_itr != None:
gcld_itr = model.iter_children(cld_itr)
if gcld_itr != None:
while gcld_itr != None:
title = model.get_value(gcld_itr,2)
#print title
# check third set
if title == signal_name :return gcld_itr
gcld_itr = model.iter_next(gcld_itr)
title = model.get_value(cld_itr,2)
#print title
# check second set
if title == signal_name :return cld_itr
cld_itr = model.iter_next(cld_itr)
# return first entry if no signal name is found
return model.get_iter_first()
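# Push the saved configuration (self.d) back into the mainboard pin widgets,
# one connector/pin at a time, via data_to_widgets().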
def mesa_mainboard_data_to_widgets(self,boardnum):
for concount,connector in enumerate(self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._NUMOFCNCTRS]) :
for pin in range (0,24):
firmptype,compnum = self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._STARTOFDATA+pin+(concount*24)]
p = 'mesa%dc%dpin%d' % (boardnum, connector, pin)
ptype = 'mesa%dc%dpin%dtype' % (boardnum, connector , pin)
pinv = 'mesa%dc%dpin%dinv' % (boardnum, connector , pin)
self.data_to_widgets(boardnum,firmptype,compnum,p,ptype,pinv)
# by now the widgets should be right according to the firmware (and user-deselected components)
# now we apply the data: setting signal names and possibly changing the pintype choice (eg pwm to pdm)
# We only need to set the 'controlling' signal name; the pin-changed method will be called
# immediately and set the 'related' pins (if there are related pins)
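# For example, for p = 'mesa0c2pin3' with self.d[p] holding a saved GPIO input signal name,
# the matching row is looked up in the GPIO input signal tree and made the active selection.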
def data_to_widgets(self,boardnum,firmptype,compnum,p,ptype,pinv):
debug = False
datap = self.d[p]
dataptype = self.d[ptype]
datapinv = self.d[pinv]
widgetp = self.widgets[p].get_active_text()
widgetptype = self.widgets[ptype].get_active_text()
#print "**** INFO set-data-options DATA:",p,datap,dataptype
#print "**** INFO set-data-options WIDGET:",p,widgetp,widgetptype
# ignore related pins
if widgetptype in (_PD.ENCB,_PD.ENCI,_PD.ENCM,
_PD.MXEI,_PD.MXEM,_PD.MXES,
_PD.RESU,
_PD.STEPB,_PD.STEPC,_PD.STEPD,_PD.STEPE,_PD.STEPF,
_PD.PDMD,_PD.PDME,_PD.PWMD,_PD.PWME,_PD.UDMD,_PD.UDME,
_PD.TPPWMB,_PD.TPPWMC,_PD.TPPWMAN,_PD.TPPWMBN,_PD.TPPWMCN,_PD.TPPWME,_PD.TPPWMF,
_PD.NUSED,_PD.POTD,_PD.POTE,
_PD.RXDATA0,_PD.TXEN0,_PD.RXDATA1,_PD.TXEN1,_PD.RXDATA2,_PD.TXEN2,_PD.RXDATA3,_PD.TXEN3,
_PD.RXDATA4,_PD.TXEN4,_PD.RXDATA5,_PD.TXEN5,_PD.RXDATA6,_PD.TXEN6,_PD.RXDATA7,_PD.TXEN7
):
self.widgets[pinv].set_active(datapinv)
return
# TODO fix this for comboboxes with grandchildren
# we are searching through human names; why not just search the model?
# type GPIO
# if compnum = 100 then it means that the GPIO type can not
# be changed from what the firmware designates it as.
if widgetptype in (_PD.GPIOI,_PD.GPIOO,_PD.GPIOD,_PD.SSR0):
#print "data ptype index:",_PD.pintype_gpio.index(dataptype)
#self.debug_iter(0,p,"data to widget")
#self.debug_iter(0,ptype,"data to widget")
# signal names for GPIO INPUT
#print "compnum = ",compnum
if compnum >= 100: dataptype = widgetptype
self.widgets[pinv].set_active(self.d[pinv])
if widgetptype == _PD.SSR0:
self.widgets[ptype].set_active(0)
else:
try:
self.widgets[ptype].set_active( _PD.pintype_gpio.index(dataptype) )
except:
self.widgets[ptype].set_active( _PD.pintype_gpio.index(widgetptype) )
# if GPIOI or dataptype not in GPIO family force it GPIOI
if dataptype == _PD.GPIOI or dataptype not in(_PD.GPIOO,_PD.GPIOI,_PD.GPIOD,_PD.SSR0):
human = _PD.human_input_names
signal = _PD.hal_input_names
tree = self.d._gpioisignaltree
# signal names for GPIO OUTPUT and OPEN DRAIN OUTPUT
elif dataptype in (_PD.GPIOO,_PD.GPIOD,_PD.SSR0):
human = _PD.human_output_names
signal = _PD.hal_output_names
tree = self.d._gpioosignaltree
self.widgets[p].set_model(tree)
itr = self.find_sig_name_iter(tree, datap)
self.widgets[p].set_active_iter(itr)
# type encoder / mux encoder
# we find the data's signal index,
# then we search through the combobox's actual model's 4th array index;
# this contains the combobox's signal index number.
# when they match, that is the row to show in the combobox.
# this is done differently because the sserial combobox's model
# can be filtered, and that breaks the relationship between the
# signalname array and the model row
elif widgetptype == _PD.ENCA or widgetptype in(_PD.MXE0,_PD.MXE1):
#print "ENC ->dataptype:",self.d[ptype]," dataptype:",self.d[p],signalindex
pinmodel = self.widgets[p].get_model()
itr = self.find_sig_name_iter(pinmodel, datap)
self.widgets[p].set_active_iter(itr)
# type resolver
elif widgetptype in(_PD.RES0,_PD.RES1,_PD.RES2,_PD.RES3,_PD.RES4,_PD.RES5,_PD.RESU):
try:
signalindex = _PD.hal_resolver_input_names.index(datap)
except:
if debug: print "**** INFO: PNCCONF warning no resolver signal named: %s\n found for pin %s"% (datap ,p)
signalindex = 0
#print "dataptype:",self.d[ptype]," dataptype:",self.d[p],signalindex
count = 0
temp = (0) # set unused resolver
if signalindex > 0:
for row,parent in enumerate(_PD.human_resolver_input_names):
if row == 0: continue
if len(parent[1]) == 0:
count +=1
#print row,count,"parent-",parent[0]
if count == signalindex:
#print "match",row
temp = (row)
break
continue
for column,child in enumerate(parent[1]):
count +=1
#print row,column,count,parent[0],child
if count == signalindex:
#print "match",row
temp = (row,column)
break
if count >= signalindex:break
#print "temp",temp
treeiter = self.d._resolversignaltree.get_iter(temp)
self.widgets[p].set_active_iter(treeiter)
# Type 8i20 AMP
elif widgetptype == _PD.AMP8I20:
try:
signalindex = _PD.hal_8i20_input_names.index(datap)
except:
if debug: print "**** INFO: PNCCONF warning no 8i20 signal named: %s\n found for pin %s"% (datap ,p)
signalindex = 0
#print "dataptype:",self.d[ptype]," dataptype:",self.d[p],signalindex
count = 0
temp = (0) # set unused 8i20 amp
if signalindex > 0:
for row,parent in enumerate(_PD.human_8i20_input_names):
if row == 0: continue
if len(parent[1]) == 0:
count +=1
#print row,count,"parent-",parent[0]
if count == signalindex:
#print "match",row
temp = (row)
break
continue
for column,child in enumerate(parent[1]):
count +=1
#print row,column,count,parent[0],child
if count == signalindex:
#print "match",row
temp = (row,column)
break
if count >= signalindex:break
#print "temp",temp
treeiter = self.d._8i20signaltree.get_iter(temp)
self.widgets[p].set_active_iter(treeiter)
# Type potentiometer (7i76's spindle control)
elif widgetptype in (_PD.POTO,_PD.POTE):
self.widgets[pinv].set_active(self.d[pinv])
try:
signalindex = _PD.hal_pot_output_names.index(datap)
except:
if debug: print "**** INFO: PNCCONF warning no potentiometer signal named: %s\n found for pin %s"% (datap ,p)
signalindex = 0
#print "dataptype:",self.d[ptype]," dataptype:",self.d[p],signalindex
count = -1
temp = (0) # set unused potentiometer
if signalindex > 0:
for row,parent in enumerate(_PD.human_pot_output_names):
if row == 0: continue
if len(parent[1]) == 0:
count +=2
#print row,count,"parent-",parent[0]
if count == signalindex:
#print "match",row
temp = (row)
break
continue
for column,child in enumerate(parent[1]):
count +=2
#print row,column,count,parent[0],child
if count == signalindex:
#print "match",row
temp = (row,column)
break
if count >= signalindex:break
#print "temp",temp
treeiter = self.d._potsignaltree.get_iter(temp)
self.widgets[p].set_active_iter(treeiter)
# Type analog in
elif widgetptype == _PD.ANALOGIN:
try:
signalindex = _PD.hal_analog_input_names.index(datap)
except:
if debug: print "**** INFO: PNCCONF warning no analog in signal named: %s\n found for pin %s"% (datap ,p)
signalindex = 0
#print "dataptype:",self.d[ptype]," dataptype:",self.d[p],signalindex
count = 0
temp = (0) # set unused analog input
if signalindex > 0:
for row,parent in enumerate(_PD.human_analog_input_names):
if row == 0: continue
if len(parent[1]) == 0:
count +=1
#print row,count,"parent-",parent[0]
if count == signalindex:
#print "match",row
temp = (row)
break
continue
for column,child in enumerate(parent[1]):
count +=1
#print row,column,count,parent[0],child
if count == signalindex:
#print "match",row
temp = (row,column)
break
if count >= signalindex:break
#print "temp",temp
treeiter = self.d._analoginsignaltree.get_iter(temp)
self.widgets[p].set_active_iter(treeiter)
# type PWM gen
elif widgetptype in (_PD.PDMP,_PD.PWMP,_PD.UDMU):
self.widgets[pinv].set_active(datapinv)
if self.widgets["mesa%d_numof_resolvers"% boardnum].get_value(): dataptype = _PD.UDMU # hack resolver board needs UDMU
if dataptype == _PD.PDMP:
#print "pdm"
self.widgets[ptype].set_model(self.d._pdmcontrolliststore)
self.widgets[ptype].set_active(1)
elif dataptype == _PD.PWMP:
#print "pwm",self.d._pwmcontrolliststore
self.widgets[ptype].set_model(self.d._pwmcontrolliststore)
self.widgets[ptype].set_active(0)
elif dataptype == _PD.UDMU:
#print "udm",self.d._udmcontrolliststore
self.widgets[ptype].set_model(self.d._udmcontrolliststore)
self.widgets[ptype].set_active(2)
itr = self.find_sig_name_iter(self.d._pwmsignaltree, datap)
self.widgets[p].set_active_iter(itr)
                # type tppwm - 3 phase pwm for direct brushless motor control
elif widgetptype == _PD.TPPWMA:
#print "3 pwm"
count = -7
try:
signalindex = _PD.hal_tppwm_output_names.index(datap)
except:
if debug: print "**** INFO: PNCCONF warning no THREE PWM signal named: %s\n found for pin %s"% (datap ,p)
signalindex = 0
#print "3 PWw ,dataptype:",self.d[ptype]," dataptype:",self.d[p],signalindex
                    temp = (0) # set unused tppwm
if signalindex > 0:
for row,parent in enumerate(_PD.human_tppwm_output_names):
if row == 0:continue
if len(parent[1]) == 0:
count += 8
#print row,count,parent[0]
if count == signalindex:
#print "match",row
temp = (row)
break
continue
for column,child in enumerate(parent[1]):
count +=8
#print row,column,count,parent[0],child
if count == signalindex:
#print "match",row
temp = (row,column)
break
if count >= signalindex:break
treeiter = self.d._tppwmsignaltree.get_iter(temp)
self.widgets[p].set_active_iter(treeiter)
# type step gen
elif widgetptype == _PD.STEPA:
#print "stepper", dataptype
self.widgets[ptype].set_active(0)
self.widgets[p].set_active(0)
self.widgets[pinv].set_active(datapinv)
itr = self.find_sig_name_iter(self.d._steppersignaltree, datap)
self.widgets[p].set_active_iter(itr)
                # type smartserial
                # we do things differently here:
                # we find the data's signal index,
                # then we search through the combobox model's 4th array index,
                # which contains the combobox signal's index number.
                # when they match, that is the row to show in the combobox.
                # this is different because the sserial combobox's model
                # can be filtered, and that breaks the relationship between
                # the signal name array and the model row.
elif widgetptype in( _PD.TXDATA0,_PD.SS7I76M0,_PD.SS7I77M0,_PD.SS7I77M3,_PD.TXDATA1,
_PD.TXDATA2,_PD.TXDATA3,_PD.TXDATA4,_PD.TXDATA5,_PD.TXDATA6,_PD.TXDATA7,
_PD.SS7I76M2,_PD.SS7I76M3,_PD.SS7I77M1,_PD.SS7I77M4):
#print "SMART SERIAL", dataptype,widgetptype
self.widgets[pinv].set_active(datapinv)
try:
signalindex = _PD.hal_sserial_names.index(self.d[p])
except:
if debug: print "**** INFO: PNCCONF warning no SMART SERIAL signal named: %s\n found for pin %s"% (datap ,p)
signalindex = 0
pinmodel = self.widgets[p].get_model()
for row,parent in enumerate(pinmodel):
#print row,parent[0],parent[2],parent[3],parent[4]
if parent[4] == signalindex:
#print 'FOUND',parent[2],parent[4]
treeiter = pinmodel.get_iter(row)
self.widgets[p].set_active_iter(treeiter)
else:
print "**** WARNING: PNCCONF data to widget: ptype not recognized/match:",dataptype,widgetptype
# This is for when a user picks a signal name or creates a custom signal (by pressing enter)
    # it searches for the 'related pins' of a component so it can update them too
# it also handles adding and updating custom signal names
# it is used for mesa boards and parport boards according to boardtype
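    # boardtype is "mesa", "sserial" or "parport"; boardnum, connector, channel and pin identify
    # the combobox that changed, and custom indicates a typed (custom) signal name should be
    # handled rather than a pick from the existing list.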
def on_general_pin_changed(self, widget, boardtype, boardnum, connector, channel, pin, custom):
self.p.set_buttons_sensitive(0,0)
if boardtype == "sserial":
p = 'mesa%dsserial%d_%dpin%d' % (boardnum,connector,channel,pin)
ptype = 'mesa%dsserial%d_%dpin%dtype' % (boardnum,connector,channel,pin)
widgetptype = self.widgets[ptype].get_active_text()
#print "pinchanged-",p
elif boardtype == "mesa":
p = 'mesa%dc%dpin%d' % (boardnum,connector,pin)
ptype = 'mesa%dc%dpin%dtype' % (boardnum,connector,pin)
widgetptype = self.widgets[ptype].get_active_text()
elif boardtype == "parport":
p = '%s_%s%d' % (boardnum,connector, pin)
#print p
if "I" in p: widgetptype = _PD.GPIOI
else: widgetptype = _PD.GPIOO
pinchanged = self.widgets[p].get_active_text()
piter = self.widgets[p].get_active_iter()
signaltree = self.widgets[p].get_model()
try:
basetree = signaltree.get_model()
except:
basetree = signaltree
#print "generalpin changed",p
#print "*** INFO ",boardtype,"-pin-changed: pin:",p,"custom:",custom
#print "*** INFO ",boardtype,"-pin-changed: ptype:",widgetptype,"pinchaanged:",pinchanged
if piter == None and not custom:
#print "*** INFO ",boardtype,"-pin-changed: no iter and not custom"
self.p.set_buttons_sensitive(1,1)
return
if widgetptype in (_PD.ENCB,_PD.ENCI,_PD.ENCM,
_PD.MXEI,_PD.MXEM,_PD.MXES,
_PD.RESU,
_PD.STEPB,_PD.STEPC,_PD.STEPD,_PD.STEPE,_PD.STEPF,
_PD.PDMD,_PD.PDME,_PD.PWMD,_PD.PWME,_PD.UDMD,_PD.UDME,
_PD.TPPWMB,_PD.TPPWMC,_PD.TPPWMAN,_PD.TPPWMBN,_PD.TPPWMCN,_PD.TPPWME,_PD.TPPWMF,
_PD.RXDATA0,_PD.TXEN0,_PD.RXDATA1,_PD.TXEN1,_PD.RXDATA2,_PD.TXEN2,_PD.RXDATA3,_PD.TXEN3,
_PD.POTE,_PD.POTD, _PD.SSR0):
self.p.set_buttons_sensitive(1,1)
return
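        # Each branch below picks the lookup tables for the changed pin's type:
        # halsignallist / humansignallist are the HAL and human-readable signal name lists,
        # addsignalto collects custom names, relatedsearch / relatedending give the companion
        # pin types and the endings appended to a custom base name, and customindex is the
        # row of the 'custom' group in the human name list.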
# for GPIO output
if widgetptype in (_PD.GPIOO,_PD.GPIOD):
#print"ptype GPIOO\n"
halsignallist = 'hal_output_names'
humansignallist = _PD.human_output_names
addsignalto = self.d.haloutputsignames
relatedsearch = ["dummy"]
relatedending = [""]
customindex = len(humansignallist)-1
# for GPIO input
elif widgetptype == _PD.GPIOI:
#print"ptype GPIOI\n"
halsignallist = 'hal_input_names'
humansignallist = _PD.human_input_names
addsignalto = self.d.halinputsignames
relatedsearch = ["dummy"]
relatedending = [""]
customindex = len(humansignallist)-1
# for stepgen pins
elif widgetptype == _PD.STEPA:
#print"ptype step\n"
halsignallist = 'hal_stepper_names'
humansignallist = _PD.human_stepper_names
addsignalto = self.d.halsteppersignames
relatedsearch = [_PD.STEPA,_PD.STEPB,_PD.STEPC,_PD.STEPD,_PD.STEPE,_PD.STEPF]
relatedending = ["-step","-dir","-c","-d","-e","-f"]
customindex = len(humansignallist)-1
# for encoder pins
elif widgetptype == _PD.ENCA:
#print"\nptype encoder"
halsignallist = 'hal_encoder_input_names'
humansignallist = _PD.human_encoder_input_names
addsignalto = self.d.halencoderinputsignames
relatedsearch = [_PD.ENCA,_PD.ENCB,_PD.ENCI,_PD.ENCM]
relatedending = ["-a","-b","-i","-m"]
customindex = len(humansignallist)-1
# for mux encoder pins
elif widgetptype in(_PD.MXE0,_PD.MXE1):
#print"\nptype encoder"
halsignallist = 'hal_encoder_input_names'
humansignallist = _PD.human_encoder_input_names
addsignalto = self.d.halencoderinputsignames
relatedsearch = ["dummy","dummy","dummy","dummy",]
relatedending = ["-a","-b","-i","-m"]
customindex = len(humansignallist)-1
# resolvers
elif widgetptype in (_PD.RES0,_PD.RES1,_PD.RES2,_PD.RES3,_PD.RES4,_PD.RES5):
halsignallist = 'hal_resolver_input_names'
humansignallist = _PD.human_resolver_input_names
addsignalto = self.d.halresolversignames
relatedsearch = ["dummy"]
relatedending = [""]
customindex = len(humansignallist)-1
# 8i20 amplifier
elif widgetptype == _PD.AMP8I20:
halsignallist = 'hal_8i20_input_names'
humansignallist = _PD.human_8i20_input_names
addsignalto = self.d.hal8i20signames
relatedsearch = ["dummy"]
relatedending = [""]
customindex = len(humansignallist)-1
# potentiometer output
elif widgetptype == _PD.POTO:
halsignallist = 'hal_pot_output_names'
humansignallist = _PD.human_pot_output_names
addsignalto = self.d.halpotsignames
relatedsearch = [_PD.POTO,_PD.POTE]
relatedending = ["-output","-enable"]
customindex = 2
# analog input
elif widgetptype == _PD.ANALOGIN:
halsignallist = 'hal_analog_input_names'
humansignallist = _PD.human_analog_input_names
addsignalto = self.d.halanaloginsignames
relatedsearch = ["dummy"]
relatedending = [""]
customindex = len(humansignallist)-1
# for PWM,PDM,UDM pins
elif widgetptype in(_PD.PWMP,_PD.PDMP,_PD.UDMU):
#print"ptype pwmp\n"
halsignallist = 'hal_pwm_output_names'
humansignallist = _PD.human_pwm_output_names
addsignalto = self.d.halpwmoutputsignames
relatedsearch = [_PD.PWMP,_PD.PWMD,_PD.PWME]
relatedending = ["-pulse","-dir","-enable"]
customindex = len(humansignallist)-1
elif widgetptype == _PD.TPPWMA:
#print"ptype pdmp\n"
halsignallist = 'hal_tppwm_output_names'
humansignallist = _PD.human_tppwm_output_names
addsignalto = self.d.haltppwmoutputsignames
relatedsearch = [_PD.TPPWMA,_PD.TPPWMB,_PD.TPPWMC,_PD.TPPWMAN,_PD.TPPWMBN,_PD.TPPWMCN,_PD.TPPWME,_PD.TPPWMF]
relatedending = ["-a","-b","c","-anot","-bnot","cnot","-enable","-fault"]
customindex = len(humansignallist)-1
elif widgetptype in (_PD.TXDATA0,_PD.TXDATA1,_PD.TXDATA2,_PD.TXDATA3,_PD.TXDATA4,_PD.TXDATA5,_PD.SS7I76M0,_PD.SS7I76M3,
_PD.SS7I76M2,_PD.SS7I77M0,_PD.SS7I77M1,_PD.SS7I77M3,_PD.SS7I77M4):
portnum = 0 #TODO support more ports
for count,temp in enumerate(self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._NUMOFCNCTRS]) :
if connector == temp:
firmptype,portnum = self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._STARTOFDATA+pin+(count*24)]
if widgetptype in (_PD.TXDATA0,_PD.SS7I76M0,_PD.SS7I77M0): channelnum = 0
elif widgetptype in (_PD.TXDATA1,_PD.SS7I77M1): channelnum = 1
elif widgetptype in (_PD.TXDATA2,_PD.SS7I76M2): channelnum = 2
elif widgetptype in (_PD.TXDATA3,_PD.SS7I77M3,_PD.SS7I76M3): channelnum = 3
elif widgetptype in (_PD.TXDATA4,_PD.SS7I77M4): channelnum = 4
            elif widgetptype == _PD.TXDATA5: channelnum = 5
BASE = "mesa%dsserial0_%d"% (boardnum,channelnum)
if self.widgets[p].get_active_text() == _("Unused Channel"):
self.widgets[BASE].hide()
self.d[BASE+"subboard"] = "none"
self.p.set_buttons_sensitive(1,1)
return
else:
self.widgets[BASE].show()
                # TODO we should search for these names rather than use hard coded logic
# so as to make adding cards easier
temp = self.widgets[p].get_active_text()
table = BASE+"table2"
self.widgets[table].show()
table = BASE+"table3"
self.widgets[table].show()
if "7i76" in temp:
if 'Mode 2' in temp:
ssfirmname = "7i76-m2"
else:
ssfirmname = "7i76-m0"
self.d[BASE+"subboard"] = ssfirmname
self.widgets[BASE+'_tablabel'].set_text("7I76 I/O\n (SS# %d)"% channelnum)
elif "7i64" in temp:
self.d[BASE+"subboard"] = "7i64"
self.widgets[BASE+'_tablabel'].set_text("7I64 I/O\n (SS# %d)"% channelnum)
elif "7i69" in temp:
self.d[BASE+"subboard"] = "7i69"
self.widgets[table].hide()
self.widgets[BASE+'_tablabel'].set_text("7I69 I/O\n (SS# %d)"% channelnum)
elif "7i70" in temp:
self.d[BASE+"subboard"] = "7i70"
self.widgets[table].hide()
self.widgets[BASE+'_tablabel'].set_text("7I70 I/O\n (SS# %d)"% channelnum)
elif "7i71" in temp:
self.d[BASE+"subboard"] = "7i71"
self.widgets[table].hide()
self.widgets[BASE+'_tablabel'].set_text("7I71 I/O\n (SS# %d)"% channelnum)
elif "7i73" in temp:
self.d[BASE+"subboard"] = "7i73-m1"
self.widgets[BASE+'_tablabel'].set_text("7I73 I/O\n (SS# %d)"% channelnum)
elif "7i77" in temp:
print 'ssname',temp,'sschannel#',channelnum
if 'Mode 3' in temp:
ssfirmname = "7i77-m3"
else:
ssfirmname = "7i77-m0"
self.d[BASE+"subboard"] = ssfirmname
if channelnum in(0,3):
self.widgets[BASE+'_tablabel'].set_text("7I77 I/O\n (SS# %d)"% channelnum)
self.widgets[table].hide()
elif channelnum in(1,4):
self.widgets[BASE+'_tablabel'].set_text("7I77 PWM\n (SS# %d)"% channelnum)
table = BASE+"table2"
self.widgets[table].hide()
table = BASE+"table1"
self.widgets[table].hide()
elif "7i84" in temp:
print 'ssname',temp,'sschannel#',channelnum
if 'Mode 3' in temp:
ssfirmname = "7i84-m3"
else:
ssfirmname = "7i84-m0"
self.d[BASE+"subboard"] = ssfirmname
self.widgets[table].hide()
self.widgets[BASE+'_tablabel'].set_text("7I84 I/O\n (SS# %d)"%channelnum)
elif "8i20" in temp:
self.d[BASE+"subboard"] = "8i20"
self.widgets[table].hide()
table = BASE+"table2"
self.widgets[table].hide()
self.widgets[BASE+'_tablabel'].set_text("8I20\n (SS# %d)"% channelnum)
else:
self.d[BASE+"subboard"] = "none"
self.widgets[table].hide()
table = BASE+"table2"
self.widgets[table].hide()
table = BASE+"table1"
self.widgets[table].hide()
self.p.set_buttons_sensitive(1,1)
return
                # set sserial tab names to correspond to connector numbers so users have a clue
                # first we have to find the daughter board in pncconf's internal list
                # TODO here we search the list - this should also be done for the table names, see the TODO above
subfirmname = self.d[BASE+"subboard"]
for subnum,temp in enumerate(self._p.MESA_DAUGHTERDATA):
if self._p.MESA_DAUGHTERDATA[subnum][self._p._SUBFIRMNAME] == subfirmname: break
subconlist = self._p.MESA_DAUGHTERDATA[subnum][self._p._SUBCONLIST]
# now search the connector list and write it to the tab names
for tabnum in range(0,3):
conname = subconlist[tabnum]
tab = BASE+"tab%d"% tabnum
self.widgets[tab].set_text(conname)
#print p,temp," set at",self.d[BASE+"subboard"]
self.set_sserial_options(boardnum,portnum,channelnum)
self.p.set_buttons_sensitive(1,1)
return
self.p.set_buttons_sensitive(1,1)
return
else:
print"**** INFO: pncconf on_general_pin_changed: pintype not found:%s\n"% widgetptype
self.p.set_buttons_sensitive(1,1)
return
        # *** change the related pins' signal names ***
        # See if the piter is None - if it is, a custom name has been entered.
        # Otherwise find the signal name index number; if the index is zero set the piter to the unused signal.
        # This is a workaround for the combo box allowing the parent to be shown and selected in the
        # child column - haven't figured out how to stop that #TODO
        # Either way we have to search the current firmware array for the pin numbers of the related
        # pins so we can change them to the related signal name.
        # All signal names have related signals (eg encoders have A and B phase and index and index mask)
        # except the 'unused' signal - it is a special case as there are no related signal names with it.
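        # e.g. for an encoder A pin, relatedsearch/relatedending pair ENCA/ENCB/ENCI/ENCM with
        # "-a"/"-b"/"-i"/"-m", so the B, index and mask pins are updated along with the A pin.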
if piter == None or custom:
#print "*** INFO ",boardtype,"-pin-changed: PITER:",piter," length:",len(signaltree)
if pinchanged in (addsignalto):return
for i in (humansignallist):
if pinchanged == i[0]:return
if pinchanged in i[1]:return
length = len(signaltree)
index = len(_PD[halsignallist]) - len(relatedsearch)
customiter = signaltree.get_iter((length-1,))
childiter = signaltree.iter_nth_child(customiter, 0)
n = 0
while childiter:
dummy, index = signaltree.get(childiter, 0, 1)
n+=1
childiter = signaltree.iter_nth_child(customiter, n)
index += len(relatedsearch)
else:
dummy, index = signaltree.get(piter, 0, 1)
if index == 0:
piter = signaltree.get_iter_first()
#print "*** INFO ",boardtype,"-pin-changed: index",index
# This finds the pin type and component number of the pin that has changed
pinlist = []
        # these components have no related pins - fake the list
if widgetptype in(_PD.GPIOI,_PD.GPIOO,_PD.GPIOD,_PD.SSR0,_PD.MXE0,_PD.MXE1,_PD.RES0,_PD.RES1,
_PD.RES2,_PD.RES3,_PD.RES4,_PD.RES5,_PD.AMP8I20,_PD.ANALOGIN):
pinlist = [["%s"%p,boardnum,connector,channel,pin]]
else:
pinlist = self.list_related_pins(relatedsearch, boardnum, connector, channel, pin, 0)
#print pinlist
        # Now we have a list of pins that need to be updated.
        # First check if the name is a custom name; if it is,
        # add the legalized custom name to:
        # addsignalto -> for recording custom names for the next time the config is loaded
        # signals to check -> for making signal names (we add different endings for different signal names)
        # signaltree -> for display in the GUI - it is automatically added to all comboboxes that use this treestore
        # then go through the pinlist:
        # block signals
        # display the proper text depending on whether it is custom or not
        # then unblock signals
if custom:
legal_name = pinchanged.replace(" ","_")
addsignalto.append ((legal_name))
print "add: "+legal_name+" to human list",humansignallist[customindex][1]
humansignallist[customindex][1].append ((legal_name))
endoftree = len(basetree)-1
customiter = basetree.get_iter((endoftree,))
newiter = basetree.append(customiter, [legal_name,index,legal_name,halsignallist,index])
#print 'new signal:',legal_name,index,legal_name,halsignallist,endoftree,index
for offset,i in enumerate(relatedsearch):
with_endings = legal_name + relatedending[offset]
#print "new signal:",with_endings
_PD[halsignallist].append ((with_endings))
for data in(pinlist):
if boardtype == "mesa":
blocksignal1 = "_mesa%dsignalhandlerc%ipin%i" % (data[1], data[2], data[4])
blocksignal2 = "_mesa%dactivatehandlerc%ipin%i" % (data[1], data[2], data[4])
if boardtype == "sserial":
blocksignal1 = "_mesa%dsignalhandlersserial%i_%ipin%i" % (data[1], data[2], data[3], data[4])
blocksignal2 = "_mesa%dactivatehandlersserial%i_%ipin%i" % (data[1], data[2], data[3],data[4])
elif boardtype =="parport":
blocksignal1 = "_%s_%s%dsignalhandler" % (data[1], data[2], data[4])
blocksignal2 = "_%s_%s%dactivatehandler" % (data[1], data[2], data[4])
self.widgets[data[0]].handler_block(self.d[blocksignal1])
self.widgets[data[0]].child.handler_block(self.d[blocksignal2])
if custom:
if basetree == signaltree:
temp = newiter
else:
temp = signaltree.convert_child_iter_to_iter(newiter)
self.widgets[data[0]].set_active_iter(temp)
else:
self.widgets[data[0]].set_active_iter(piter)
self.widgets[data[0]].child.handler_unblock(self.d[blocksignal2])
self.widgets[data[0]].handler_unblock(self.d[blocksignal1])
#self.debug_iter(0,p,"pin changed")
#if boardtype == "mesa": self.debug_iter(0,ptype,"pin changed")
self.p.set_buttons_sensitive(1,1)
def pport_push_data(self,port,direction,pin,pinv,signaltree,signaltocheck):
p = '%s_%s%d' % (port, direction, pin)
piter = self.widgets[p].get_active_iter()
selection = self.widgets[p].get_active_text()
        # **Start widget to data Conversion**
if piter == None:# means new custom signal name and user never pushed enter
#print "callin pin changed !!!"
self.on_general_pin_changed( None,"parport", port, direction, None, pin, True)
selection = self.widgets[p].get_active_text()
piter = self.widgets[p].get_active_iter()
#print "found signame -> ",selection," "
# ok we have a piter with a signal type now- lets convert it to a signalname
#print "**** INFO parport-data-transfer piter:",piter
#self.debug_iter(piter,p,"signal")
dummy, index = signaltree.get(piter,0,1)
#print "signaltree: ",dummy
return p, signaltocheck[index], self.widgets[pinv].get_active()
def set_pport_combo(self,pinname):
#print pinname
# signal names for GPIO INPUT
datap = self.d[pinname]
if '_Ipin' in pinname:
human = self._p.human_input_names
signal = self._p.hal_input_names
tree = self.d._gpioisignaltree
# signal names for GPIO OUTPUT and OPEN DRAIN OUTPUT
elif 'Opin'in pinname:
human = self._p.human_output_names
signal =self._p.hal_output_names
tree = self.d._gpioosignaltree
#self.w[pinname].set_model(tree)
        # an error probably means the signal name cannot be found
        # set it as unused rather than error
itr = self.find_sig_name_iter(tree, datap)
self.widgets[pinname].set_active_iter(itr)
return
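        # NOTE: the code below this return is unreachable; find_sig_name_iter() above
        # supersedes the manual index search that follows.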
try:
signalindex = signal.index(datap)
except:
signalindex = 0
            print "**** INFO: PNCCONF warning no GPIO signal named: %s\n found for pin %s"% (datap , pinname)
#print "gpio temp ptype:",pinname,datap,signalindex
count = 0
temp = (0) # set unused gpio if no match
if signalindex > 0:
for row,parent in enumerate(human):
#print row,parent
if len(parent[1]) == 0:continue
for column,child in enumerate(parent[1]):
count +=1
#print row,column,count,parent[0],child
if count == signalindex:
#print "match",row,column
break
if count >= signalindex:break
temp = (row,column)
treeiter = tree.get_iter(temp)
self.widgets[pinname].set_active_iter(treeiter)
def signal_sanity_check(self, *args):
warnings = []
do_warning = False
do_error = False
for i in self.d.available_axes:
tppwm = pwm = amp_8i20 = False
step = self.findsignal(i+"-stepgen-step")
step2 = self.findsignal(i+"2-stepgen-step")
enc = self.findsignal(i+"-encoder-a")
resolver = self.findsignal(i+"-resolver")
if self.findsignal("%s-8i20"% i): amp_8i20 = pwm =True
if self.findsignal(i+"-pwm-pulse"): pwm = True
if self.findsignal(i+"-tppwm-a"): tppwm = pwm = True
#print "signal sanity check: axis",i,"\n pwm = ",pwm,"\n 3pwm =",tppwm,"\n encoder =",enc,"\n step=",step
if i == 's':
if step and pwm:
warnings.append(_("You can not have both steppers and pwm signals for spindle control\n") )
do_error = True
continue
if not step and not pwm:
warnings.append(_("You forgot to designate a stepper or pwm signal for axis %s\n")% i)
do_error = True
if pwm and not (enc or resolver):
warnings.append(_("You forgot to designate an encoder /resolver signal for axis %s servo\n")% i)
do_error = True
if enc and not pwm and not step:
warnings.append(_("You forgot to designate a pwm signal or stepper signal for axis %s\n")% i)
do_error = True
if step and pwm:
warnings.append(_("You can not have both steppers and pwm signals for axis %s\n")% i)
do_error = True
if step2 and not step:
warnings.append(_("If using a tandem axis stepper, you must select a master stepgen for axis %s\n")% i)
do_error = True
if self.d.frontend == _PD._TOUCHY:# TOUCHY GUI
abort = self.findsignal("abort")
cycle = self.findsignal("cycle-start")
single = self.findsignal("single-step")
mpg = self.findsignal("select-mpg-a")
            if not cycle:
                warnings.append(_("Touchy requires an external cycle start signal\n"))
                do_warning = True
            if not abort:
                warnings.append(_("Touchy requires an external abort signal\n"))
                do_warning = True
            if not single:
                warnings.append(_("Touchy requires an external single-step signal\n"))
                do_warning = True
            if not mpg:
                warnings.append(_("Touchy requires an external multi handwheel MPG encoder signal on the mesa page\n"))
                do_warning = True
            if not self.d.externalmpg:
                warnings.append(_("Touchy requires 'external mpg jogging' to be selected on the external control page\n"))
                do_warning = True
            if self.d.multimpg:
                warnings.append(_("Touchy requires the external mpg to be in 'shared mpg' mode on the external controls page\n"))
                do_warning = True
            if self.d.incrselect:
                warnings.append(_("Touchy requires selectable increments to be unchecked on the external controls page\n"))
                do_warning = True
if do_warning or do_error:
self.warning_dialog("\n".join(warnings),True)
if do_error: return True
return False
def daughter_board_sanity_check(self,widget):
warnings = []
do_warning = False
for boardnum in range(0,int(self.d.number_mesa)):
if widget == self.widgets["mesa%d_7i29_sanity_check"%boardnum]:
warnings.append(_("The 7i29 daughter board requires PWM type generators and a PWM base frequency of 20 khz\n"))
do_warning = True
if widget == self.widgets["mesa%d_7i30_sanity_check"%boardnum]:
warnings.append(_("The 7i30 daughter board requires PWM type generators and a PWM base frequency of 20 khz\n"))
do_warning = True
if widget == self.widgets["mesa%d_7i33_sanity_check"%boardnum]:
warnings.append(_("The 7i33 daughter board requires PDM type generators and a PDM base frequency of 6 Mhz\n"))
do_warning = True
if widget == self.widgets["mesa%d_7i40_sanity_check"%boardnum]:
warnings.append(_("The 7i40 daughter board requires PWM type generators and a PWM base frequency of 50 khz\n"))
do_warning = True
if widget == self.widgets["mesa%d_7i48_sanity_check"%boardnum]:
warnings.append(_("The 7i48 daughter board requires UDM type generators and a PWM base frequency of 24 khz\n"))
do_warning = True
if do_warning:
self.warning_dialog("\n".join(warnings),True)
def axis_prepare(self, axis):
d = self.d
w = self.widgets
def set_text_from_text(n): w[axis + n].set_text("%s" % d[axis + n])
def set_text(n): w[axis + n].set_text(locale.format("%.4f", (d[axis + n])))
def set_value(n): w[axis + n].set_value(d[axis + n])
def set_active(n): w[axis + n].set_active(d[axis + n])
stepdriven = encoder = pwmgen = resolver = tppwm = digital_at_speed = amp_8i20 = False
spindlepot = sserial_scaling = False
vfd_spindle = self.d.serial_vfd and (self.d.mitsub_vfd or self.d.gs2_vfd)
if self.findsignal("%s-8i20"% axis):amp_8i20 = True
if self.findsignal("spindle-at-speed"): digital_at_speed = True
if self.findsignal(axis+"-stepgen-step"): stepdriven = True
if self.findsignal(axis+"-encoder-a"): encoder = True
if self.findsignal(axis+"-resolver"): encoder = resolver = True
temp = self.findsignal(axis+"-pwm-pulse")
if temp:
pwmgen = True
pinname = self.make_pinname(temp)
if "analog" in pinname: sserial_scaling = True
if self.findsignal(axis+"-tppwm-a"): pwmgen = tppwm = True
if self.findsignal(axis+"-pot-output"): spindlepot = sserial_scaling = True
model = w[axis+"drivertype"].get_model()
model.clear()
for i in _PD.alldrivertypes:
model.append((i[1],))
model.append((_("Custom"),))
w["steprev"].set_text("%s" % d[axis+"steprev"])
w["microstep"].set_text("%s" % d[axis +"microstep"])
        # The P setting needs to default to different values based on
        # stepper vs servo configs, but we still want to allow the user to set it.
        # If the value is None then we should set a default value; if not then
        # that means it's been set to something already...hopefully right.
        # TODO this should be smarter - after going through a config once it
        # always uses the value set here - if it is set to a default value
        # it should keep checking that the value is still right,
        # but that's a bigger change than we want now.
        # We check for None and 'None' because when None is saved
        # it's saved as a string.
if not d[axis + "P"] == None and not d[axis + "P"] == 'None':
set_value("P")
elif stepdriven == True:
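            # default P for stepper systems: the servo thread frequency in Hz
            # (servoperiod is in nanoseconds, so 1/(period/1e9) equals 1e9/period)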
w[axis + "P"].set_value(1/(d.servoperiod/1000000000))
else:
w[axis + "P"].set_value(50)
set_value("I")
set_value("D")
set_value("FF0")
set_value("FF1")
set_value("FF2")
set_value("bias")
set_value("deadband")
set_value("steptime")
set_value("stepspace")
set_value("dirhold")
set_value("dirsetup")
set_value("outputscale")
set_value("3pwmscale")
set_value("3pwmdeadtime")
set_active("invertmotor")
set_active("invertencoder")
set_value("maxoutput")
if amp_8i20:
w[axis + "bldc_option"].set_active(True)
else:
set_active("bldc_option")
set_active("bldc_no_feedback")
set_active("bldc_absolute_feedback")
set_active("bldc_incremental_feedback")
set_active("bldc_use_hall")
set_active("bldc_use_encoder" )
set_active("bldc_use_index")
set_active("bldc_fanuc_alignment")
set_active("bldc_digital_output")
set_active("bldc_six_outputs")
set_active("bldc_emulated_feedback")
set_active("bldc_output_hall")
set_active("bldc_output_fanuc")
set_active("bldc_force_trapz")
set_active("bldc_reverse")
set_value("bldc_scale")
set_value("bldc_poles")
set_value("bldc_lead_angle")
set_value("bldc_inital_value")
set_value("bldc_encoder_offset")
set_value("bldc_drive_offset")
set_value("bldc_pattern_out")
set_value("bldc_pattern_in")
set_value("8i20maxcurrent")
w["encoderline"].set_value((d[axis+"encodercounts"]/4))
set_value("stepscale")
set_value("encoderscale")
w[axis+"maxvel"].set_value(d[axis+"maxvel"]*60)
set_value("maxacc")
        if not axis == "s" or (axis == "s" and (encoder and (pwmgen or tppwm or stepdriven or sserial_scaling))):
w[axis + "servo_info"].show()
else:
w[axis + "servo_info"].hide()
if stepdriven or not (pwmgen or spindlepot):
w[axis + "output_info"].hide()
else:
w[axis + "output_info"].show()
w[axis + "invertencoder"].set_sensitive(encoder)
w[axis + "encoderscale"].set_sensitive(encoder)
w[axis + "stepscale"].set_sensitive(stepdriven)
if stepdriven:
w[axis + "stepper_info"].show()
else:
w[axis + "stepper_info"].hide()
if pwmgen or sserial_scaling:
w[axis + "outputscale"].show()
w[axis + "outputscalelabel"].show()
else:
w[axis + "outputscale"].hide()
w[axis + "outputscalelabel"].hide()
if amp_8i20 or pwmgen and d.advanced_option == True:
w[axis + "bldcframe"].show()
else: w[axis + "bldcframe"].hide()
if tppwm:
w[axis + "3pwmdeadtime"].show()
w[axis + "3pwmscale"].show()
w[axis + "3pwmdeadtimelabel"].show()
w[axis + "3pwmscalelabel"].show()
else:
w[axis + "3pwmdeadtime"].hide()
w[axis + "3pwmscale"].hide()
w[axis + "3pwmdeadtimelabel"].hide()
w[axis + "3pwmscalelabel"].hide()
w[axis + "drivertype"].set_active(self.drivertype_toindex(axis))
if w[axis + "drivertype"].get_active_text() == _("Custom"):
w[axis + "steptime"].set_value(d[axis + "steptime"])
w[axis + "stepspace"].set_value(d[axis + "stepspace"])
w[axis + "dirhold"].set_value(d[axis + "dirhold"])
w[axis + "dirsetup"].set_value(d[axis + "dirsetup"])
gobject.idle_add(lambda: self.motor_encoder_sanity_check(None,axis))
if axis == "s":
unit = "rev"
pitchunit =_("Gearbox Reduction Ratio")
elif axis == "a":
unit = "degree"
pitchunit = _("Reduction Ratio")
elif d.units ==_PD._METRIC:
unit = "mm"
pitchunit =_("Leadscrew Pitch")
else:
unit = "inch"
pitchunit =_("Leadscrew TPI")
if axis == "s" or axis =="a":
w["labelmotor_pitch"].set_text(pitchunit)
w["labelencoder_pitch"].set_text(pitchunit)
w["motor_screwunits"].set_text(_("("+unit+" / rev)"))
w["encoder_screwunits"].set_text(_("("+unit+" / rev)"))
w[axis + "velunits"].set_text(_(unit+" / min"))
w[axis + "accunits"].set_text(_(unit+" / sec²"))
w["accdistunits"].set_text(unit)
if stepdriven:
w[ "resolutionunits1"].set_text(_(unit+" / Step"))
w["scaleunits"].set_text(_("Steps / "+unit))
else:
w["resolutionunits1"].set_text(_(unit+" / encoder pulse"))
w["scaleunits"].set_text(_("Encoder pulses / "+unit))
if not axis =="s":
w[axis + "homevelunits"].set_text(_(unit+" / min"))
w[axis + "homelatchvelunits"].set_text(_(unit+" / min"))
w[axis + "homefinalvelunits"].set_text(_(unit+" / min"))
w[axis + "minfollowunits"].set_text(unit)
w[axis + "maxfollowunits"].set_text(unit)
if resolver:
w[axis + "encoderscale_label"].set_text(_("Resolver Scale:"))
if axis == 's':
if vfd_spindle:
w.serial_vfd_info.show()
else:
w.serial_vfd_info.hide()
set_value("outputscale2")
w.ssingleinputencoder.set_sensitive(encoder)
w["sinvertencoder"].set_sensitive(encoder)
w["ssingleinputencoder"].show()
w["saxistest"].set_sensitive(pwmgen or spindlepot)
w["sstepper_info"].set_sensitive(stepdriven)
w["smaxvel"].set_sensitive(stepdriven)
w["smaxacc"].set_sensitive(stepdriven)
w["suseatspeed"].set_sensitive(not digital_at_speed and encoder)
if encoder or resolver:
if (self.d.pyvcp and self.d.pyvcphaltype == 1 and self.d.pyvcpconnect == 1) or (self.d.gladevcp
and self.d.spindlespeedbar):
w["sfiltergain"].set_sensitive(True)
set_active("useatspeed")
w.snearrange_button.set_active(d.susenearrange)
w["snearscale"].set_value(d["snearscale"]*100)
w["snearrange"].set_value(d["snearrange"])
set_value("filtergain")
set_active("singleinputencoder")
set_value("outputmaxvoltage")
set_active("usenegativevoltage")
set_active("useoutputrange2")
self.useoutputrange2_toggled()
else:
if sserial_scaling:
w[axis + "outputminlimit"].show()
w[axis + "outputminlimitlabel"].show()
w[axis + "outputmaxlimit"].show()
w[axis + "outputmaxlimitlabel"].show()
else:
w[axis + "outputminlimit"].hide()
w[axis + "outputminlimitlabel"].hide()
w[axis + "outputmaxlimit"].hide()
w[axis + "outputmaxlimitlabel"].hide()
set_value("outputminlimit")
set_value("outputmaxlimit")
set_text("encodercounts")
w[axis+"maxferror"].set_sensitive(True)
w[axis+"minferror"].set_sensitive(True)
set_value("maxferror")
set_value("minferror")
set_text_from_text("compfilename")
set_active("comptype")
set_active("usebacklash")
set_value("backlash")
set_active("usecomp")
set_text("homepos")
set_text("minlim")
set_text("maxlim")
set_text("homesw")
w[axis+"homesearchvel"].set_text("%d" % (d[axis+"homesearchvel"]*60))
w[axis+"homelatchvel"].set_text("%d" % (d[axis+"homelatchvel"]*60))
w[axis+"homefinalvel"].set_text("%d" % (d[axis+"homefinalvel"]*60))
w[axis+"homesequence"].set_text("%d" % abs(d[axis+"homesequence"]))
set_active("searchdir")
set_active("latchdir")
set_active("usehomeindex")
thisaxishome = set(("all-home", "home-" + axis, "min-home-" + axis,"max-home-" + axis, "both-home-" + axis))
homes = False
for i in thisaxishome:
test = self.findsignal(i)
if test: homes = True
w[axis + "homesw"].set_sensitive(homes)
w[axis + "homesearchvel"].set_sensitive(homes)
w[axis + "searchdir"].set_sensitive(homes)
w[axis + "latchdir"].set_sensitive(homes)
w[axis + "usehomeindex"].set_sensitive(encoder and homes)
w[axis + "homefinalvel"].set_sensitive(homes)
w[axis + "homelatchvel"].set_sensitive(homes)
i = d[axis + "usecomp"]
w[axis + "comptype"].set_sensitive(i)
w[axis + "compfilename"].set_sensitive(i)
i = d[axis + "usebacklash"]
w[axis + "backlash"].set_sensitive(i)
self.p.set_buttons_sensitive(1,0)
self.motor_encoder_sanity_check(None,axis)
def driver_changed(self, axis):
d = self.d
w = self.widgets
v = w[axis + "drivertype"].get_active()
if v < len(_PD.alldrivertypes):
d = _PD.alldrivertypes[v]
w[axis + "steptime"].set_value(d[2])
w[axis + "stepspace"].set_value(d[3])
w[axis + "dirhold"].set_value(d[4])
w[axis + "dirsetup"].set_value(d[5])
w[axis + "steptime"].set_sensitive(0)
w[axis + "stepspace"].set_sensitive(0)
w[axis + "dirhold"].set_sensitive(0)
w[axis + "dirsetup"].set_sensitive(0)
else:
w[axis + "steptime"].set_sensitive(1)
w[axis + "stepspace"].set_sensitive(1)
w[axis + "dirhold"].set_sensitive(1)
w[axis + "dirsetup"].set_sensitive(1)
def drivertype_toindex(self, axis, what=None):
if what is None: what = self.d[axis + "drivertype"]
for i, d in enumerate(_PD.alldrivertypes):
if d[0] == what: return i
return len(_PD.alldrivertypes)
def drivertype_toid(self, axis, what=None):
if not isinstance(what, int): what = self.drivertype_toindex(axis, what)
if what < len(_PD.alldrivertypes): return _PD.alldrivertypes[what][0]
return "custom"
def drivertype_fromindex(self, axis):
i = self.widgets[axis + "drivertype"].get_active()
if i < len(_PD.alldrivertypes): return _PD.alldrivertypes[i][1]
return _("Custom")
def comp_toggle(self, axis):
i = self.widgets[axis + "usecomp"].get_active()
self.widgets[axis + "compfilename"].set_sensitive(i)
self.widgets[axis + "comptype"].set_sensitive(i)
if i:
self.widgets[axis + "backlash"].set_sensitive(0)
self.widgets[axis + "usebacklash"].set_active(0)
def bldc_toggled(self, axis):
i = self.widgets[axis + "bldc_option"].get_active()
self.widgets[axis + "bldcoptionbox"].set_sensitive(i)
def useatspeed_toggled(self):
i = self.widgets.suseatspeed.get_active()
self.widgets.snearscale.set_sensitive(self.widgets.snearscale_button.get_active() and i)
self.widgets.snearrange.set_sensitive(self.widgets.snearrange_button.get_active() and i)
def useoutputrange2_toggled(self):
i = self.widgets.suseoutputrange2.get_active()
self.widgets.soutputscale2.set_sensitive(i)
def bldc_update(self,Widgets,axis):
w = self.widgets
i = False
if w[axis+"bldc_incremental_feedback"].get_active():
i = True
w[axis+"bldc_pattern_in"].set_sensitive(i and w[axis+"bldc_use_hall"].get_active() )
w[axis+"bldc_inital_value"].set_sensitive(i and w[axis+"bldc_use_encoder"].get_active() and not w[axis+"bldc_use_hall"].get_active() )
w[axis+"bldc_use_hall"].set_sensitive(i)
w[axis+"bldc_use_encoder"].set_sensitive(i)
w[axis+"bldc_use_index"].set_sensitive(i)
w[axis+"bldc_fanuc_alignment"].set_sensitive(i)
i = False
if w[axis+"bldc_emulated_feedback"].get_active():
i = True
w[axis+"bldc_output_hall"].set_sensitive(i)
w[axis+"bldc_output_fanuc"].set_sensitive(i)
w[axis+"bldc_pattern_out"].set_sensitive(i and w[axis+"bldc_output_hall"].get_active() )
def backlash_toggle(self, axis):
i = self.widgets[axis + "usebacklash"].get_active()
self.widgets[axis + "backlash"].set_sensitive(i)
if i:
self.widgets[axis + "compfilename"].set_sensitive(0)
self.widgets[axis + "comptype"].set_sensitive(0)
self.widgets[axis + "usecomp"].set_active(0)
def axis_done(self, axis):
d = self.d
w = self.widgets
def get_text(n): d[axis + n] = get_value(w[axis + n])
def get_pagevalue(n): d[axis + n] = get_value(w[axis + n])
def get_active(n): d[axis + n] = w[axis + n].get_active()
stepdrive = self.findsignal(axis+"-stepgen-step")
encoder = self.findsignal(axis+"-encoder-a")
resolver = self.findsignal(axis+"-resolver")
get_pagevalue("P")
get_pagevalue("I")
get_pagevalue("D")
get_pagevalue("FF0")
get_pagevalue("FF1")
get_pagevalue("FF2")
get_pagevalue("bias")
get_pagevalue("deadband")
if stepdrive:
d[axis + "maxoutput"] = (get_value(w[axis + "maxvel"])/60) *1.25 # TODO should be X2 if using backlash comp ?
if axis == "s":
d[axis + "maxoutput"] = (get_value(w[axis +"outputscale"]))
else:
get_pagevalue("maxoutput")
get_pagevalue("steptime")
get_pagevalue("stepspace")
get_pagevalue("dirhold")
get_pagevalue("dirsetup")
get_pagevalue("outputscale")
get_pagevalue("3pwmscale")
get_pagevalue("3pwmdeadtime")
get_active("bldc_option")
get_active("bldc_reverse")
get_pagevalue("bldc_scale")
get_pagevalue("bldc_poles")
get_pagevalue("bldc_encoder_offset")
get_pagevalue("bldc_drive_offset")
get_pagevalue("bldc_pattern_out")
get_pagevalue("bldc_pattern_in")
get_pagevalue("bldc_lead_angle")
get_pagevalue("bldc_inital_value")
get_pagevalue("8i20maxcurrent")
get_active("bldc_no_feedback")
get_active("bldc_absolute_feedback")
get_active("bldc_incremental_feedback")
get_active("bldc_use_hall")
get_active("bldc_use_encoder" )
get_active("bldc_use_index")
get_active("bldc_fanuc_alignment")
get_active("bldc_digital_output")
get_active("bldc_six_outputs")
get_active("bldc_emulated_feedback")
get_active("bldc_output_hall")
get_active("bldc_output_fanuc")
get_active("bldc_force_trapz")
if w[axis + "bldc_option"].get_active():
self.configure_bldc(axis)
d[axis + "encodercounts"] = int(float(w["encoderline"].get_text())*4)
if stepdrive: get_pagevalue("stepscale")
if encoder: get_pagevalue("encoderscale")
if resolver: get_pagevalue("encoderscale")
get_active("invertmotor")
get_active("invertencoder")
d[axis + "maxvel"] = (get_value(w[axis + "maxvel"])/60)
get_pagevalue("maxacc")
d[axis + "drivertype"] = self.drivertype_toid(axis, w[axis + "drivertype"].get_active())
if not axis == "s":
get_pagevalue("outputminlimit")
get_pagevalue("outputmaxlimit")
get_pagevalue("maxferror")
get_pagevalue("minferror")
get_text("homepos")
get_text("minlim")
get_text("maxlim")
get_text("homesw")
d[axis + "homesearchvel"] = (get_value(w[axis + "homesearchvel"])/60)
d[axis + "homelatchvel"] = (get_value(w[axis + "homelatchvel"])/60)
d[axis + "homefinalvel"] = (get_value(w[axis + "homefinalvel"])/60)
d[axis+"homesequence"] = (abs(get_value(w[axis+"homesequence"])))
get_active("searchdir")
get_active("latchdir")
get_active("usehomeindex")
d[axis + "compfilename"] = w[axis + "compfilename"].get_text()
get_active("comptype")
d[axis + "backlash"]= w[axis + "backlash"].get_value()
get_active("usecomp")
get_active("usebacklash")
else:
get_active("useatspeed")
d.susenearrange = w.snearrange_button.get_active()
get_pagevalue("nearscale")
d["snearscale"] = w["snearscale"].get_value()/100
d["snearrange"] = w["snearrange"].get_value()
get_pagevalue("filtergain")
get_active("singleinputencoder")
get_pagevalue("outputscale2")
self.d.gsincrvalue0 = self.d.soutputscale
self.d.gsincrvalue1 = self.d.soutputscale2
get_active("useoutputrange2")
self.d.scaleselect = self.d.suseoutputrange2
get_active("usenegativevoltage")
get_pagevalue("outputmaxvoltage")
def configure_bldc(self,axis):
d = self.d
string = ""
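        # build the bldc component config string, one letter per selected option
        # (e.g. hall + encoder + index incremental feedback gives "hqi")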
# Inputs
if d[axis + "bldc_no_feedback"]: string = string + "n"
elif d[axis +"bldc_absolute_feedback"]: string = string + "a"
elif d[axis + "bldc_incremental_feedback"]:
if d[axis + "bldc_use_hall"]: string = string + "h"
if d[axis + "bldc_use_encoder" ]: string = string + "q"
if d[axis + "bldc_use_index"]: string = string + "i"
if d[axis + "bldc_fanuc_alignment"]: string = string + "f"
# Outputs
if d[axis + "bldc_digital_output"]: string = string + "B"
if d[axis + "bldc_six_outputs"]: string = string + "6"
if d[axis + "bldc_emulated_feedback"]:
if d[axis + "bldc_output_hall"]: string = string + "H"
if d[axis + "bldc_output_fanuc"]: string = string +"F"
if d[axis + "bldc_force_trapz"]: string = string + "T"
#print "axis ",axis,"bldc config ",string
d[axis+"bldc_config"] = string
def calculate_spindle_scale(self):
def get(n): return get_value(self.widgets[n])
stepdrive = bool(self.findsignal("s-stepgen-step"))
encoder = bool(self.findsignal("s-encoder-a"))
resolver = bool(self.findsignal("s-resolver"))
twoscales = self.widgets.suseoutputrange2.get_active()
data_list=[ "steprev","microstep","motor_pulleydriver","motor_pulleydriven","motor_gear1driver","motor_gear1driven",
"motor_gear2driver","motor_gear2driven","motor_max"]
templist1 = ["encoderline","steprev","microstep","motor_gear1driven","motor_gear1driver","motor_gear2driven","motor_gear2driver",
"motor_pulleydriven","motor_pulleydriver","motor_max"]
checkbutton_list = ["cbmicrosteps","cbmotor_gear1","cbmotor_gear2","cbmotor_pulley","rbvoltage_5"
]
self.widgets.spindle_cbmicrosteps.set_sensitive(stepdrive)
self.widgets.spindle_microstep.set_sensitive(stepdrive)
self.widgets.spindle_steprev.set_sensitive(stepdrive)
self.widgets.label_steps_per_rev.set_sensitive(stepdrive)
self.widgets.spindle_motor_max.set_sensitive(not stepdrive)
self.widgets.label_motor_at_max_volt.set_sensitive(not stepdrive)
self.widgets.label_volt_at_max_rpm.set_sensitive(not stepdrive)
self.widgets.spindle_rbvoltage_10.set_sensitive(not stepdrive)
self.widgets.spindle_rbvoltage_5.set_sensitive(not stepdrive)
self.widgets.spindle_cbnegative_rot.set_sensitive(not stepdrive)
# pre set data
for i in data_list:
self.widgets['spindle_'+i].set_value(self.d['s'+i])
for i in checkbutton_list:
self.widgets['spindle_'+i].set_active(self.d['s'+i])
self.widgets.spindle_encoderline.set_value(self.widgets.sencoderscale.get_value()/4)
self.widgets.spindle_cbmotor_gear2.set_active(twoscales)
self.widgets.spindle_cbnegative_rot.set_active(self.widgets.susenegativevoltage.get_active())
        # temporarily add signals
for i in templist1:
self.d[i] = self.widgets['spindle_'+i].connect("value-changed", self.update_spindle_calculation)
for i in checkbutton_list:
self.d[i] = self.widgets['spindle_'+i].connect("toggled", self.update_spindle_calculation)
self.update_spindle_calculation(None)
# run dialog
self.widgets.spindle_scaledialog.set_title(_("Spindle Scale Calculation"))
self.widgets.spindle_scaledialog.show_all()
result = self.widgets.spindle_scaledialog.run()
self.widgets.spindle_scaledialog.hide()
# remove signals
for i in templist1:
self.widgets['spindle_'+i].disconnect(self.d[i])
for i in checkbutton_list:
self.widgets['spindle_'+i].disconnect(self.d[i])
if not result: return
# record data values
for i in data_list:
self.d['s'+i] = get('spindle_'+i)
for i in checkbutton_list:
self.d['s'+i] = self.widgets['spindle_'+i].get_active()
# set the widgets on the spindle page as per calculations
self.widgets.susenegativevoltage.set_active(self.widgets.spindle_cbnegative_rot.get_active())
if self.widgets.spindle_rbvoltage_5.get_active():
self.widgets.soutputmaxvoltage.set_value(5)
else:
self.widgets.soutputmaxvoltage.set_value(10)
self.widgets.soutputscale.set_value(self.temp_max_motor_speed1)
self.widgets.soutputscale2.set_value(self.temp_max_motor_speed2)
self.widgets.smaxoutput.set_value(self.temp_max_motor_speed1)
self.widgets.sencoderscale.set_value(self.widgets.spindle_encoderline.get_value()*4)
self.widgets.suseoutputrange2.set_active(self.widgets.spindle_cbmotor_gear2.get_active())
if stepdrive:
motor_steps = get_value(self.widgets.spindle_steprev)
if self.widgets.spindle_cbmicrosteps.get_active():
microstepfactor = get_value(self.widgets.spindle_microstep)
else:
microstepfactor = 1
self.widgets.sstepscale.set_value(motor_steps * microstepfactor)
if encoder or resolver:
self.widgets.sencoderscale.set_value(get("spindle_encoderline")*4)
def update_spindle_calculation(self,widget):
w= self.widgets
def get(n): return get_value(w[n])
motor_pulley_ratio = gear1_ratio = gear2_ratio = 1
motor_rpm = get("spindle_motor_max")
volts_at_max_rpm = 5
if self.widgets.spindle_rbvoltage_10.get_active():
volts_at_max_rpm = 10
if w["spindle_cbmotor_pulley"].get_active():
w["spindle_motor_pulleydriver"].set_sensitive(True)
w["spindle_motor_pulleydriven"].set_sensitive(True)
motor_pulley_ratio = (get("spindle_motor_pulleydriver") / get("spindle_motor_pulleydriven"))
else:
w["spindle_motor_pulleydriver"].set_sensitive(False)
w["spindle_motor_pulleydriven"].set_sensitive(False)
motor_pulley_ratio = 1
if w["spindle_cbmotor_gear1"].get_active():
w["spindle_motor_gear1driver"].set_sensitive(True)
w["spindle_motor_gear1driven"].set_sensitive(True)
gear1_ratio = (get("spindle_motor_gear1driver") / get("spindle_motor_gear1driven"))
else:
w["spindle_motor_gear1driver"].set_sensitive(False)
w["spindle_motor_gear1driven"].set_sensitive(False)
gear1_ratio = 1
i = w["spindle_cbmotor_gear2"].get_active()
w["spindle_motor_gear2driver"].set_sensitive(i)
w["spindle_motor_gear2driven"].set_sensitive(i)
w["label_rpm_at_max_motor2"].set_sensitive(i)
w["label_gear2_max_speed"].set_sensitive(i)
if i:
gear2_ratio = (get("spindle_motor_gear2driver") / get("spindle_motor_gear2driven"))
else:
gear2_ratio = 1
w["spindle_microstep"].set_sensitive(w["spindle_cbmicrosteps"].get_active())
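        # max spindle speed for each gear range = motor rpm * pulley ratio * gear ratio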
self.temp_max_motor_speed1 = (motor_pulley_ratio * gear1_ratio * motor_rpm)
self.temp_max_motor_speed2 = (motor_pulley_ratio * gear2_ratio * motor_rpm)
w["label_motor_at_max_volt"].set_markup(" <b>MOTOR</b> RPM at %d Volt Command"% volts_at_max_rpm)
w["label_volt_at_max_rpm"].set_text(" Voltage for %d Motor RPM:"% motor_rpm)
w["label_rpm_at_max_motor1"].set_text("Spindle RPM at %d Motor RPM -gear 1:"% motor_rpm)
w["label_rpm_at_max_motor2"].set_text("Spindle RPM at %d Motor RPM -gear 2:"% motor_rpm)
w["label_gear1_max_speed"].set_text("%d" % (motor_pulley_ratio * gear1_ratio * motor_rpm))
w["label_gear2_max_speed"].set_text("%d" % (motor_pulley_ratio * gear2_ratio * motor_rpm))
def calculate_scale(self,axis):
def get(n): return get_value(self.widgets[n])
stepdrive = self.findsignal(axis+"-stepgen-step")
encoder = self.findsignal(axis+"-encoder-a")
resolver = self.findsignal(axis+"-resolver")
data_list=[ "steprev","microstep","motor_pulleydriver","motor_pulleydriven","motor_wormdriver","motor_wormdriven",
"encoder_pulleydriver","encoder_pulleydriven","encoder_wormdriver","encoder_wormdriven","motor_leadscrew",
"encoder_leadscrew","motor_leadscrew_tpi","encoder_leadscrew_tpi",
]
templist1 = ["encoderline","encoder_leadscrew","encoder_leadscrew_tpi","encoder_wormdriven",
"encoder_wormdriver","encoder_pulleydriven","encoder_pulleydriver","steprev","motor_leadscrew","motor_leadscrew_tpi",
"microstep","motor_wormdriven","motor_wormdriver","motor_pulleydriven","motor_pulleydriver"
]
checkbutton_list = [ "cbencoder_pitch","cbencoder_tpi","cbencoder_worm","cbencoder_pulley","cbmotor_pitch",
"cbmotor_tpi","cbmicrosteps","cbmotor_worm","cbmotor_pulley"
]
# pre set data
for i in data_list:
self.widgets[i].set_value(self.d[axis+i])
for i in checkbutton_list:
self.widgets[i].set_active(self.d[axis+i])
        # temporarily add signals
for i in templist1:
self.d[i] = self.widgets[i].connect("value-changed", self.update_scale_calculation,axis)
for i in checkbutton_list:
self.d[i] = self.widgets[i].connect("toggled", self.update_scale_calculation,axis)
# pre calculate
self.update_scale_calculation(self.widgets,axis)
# run dialog
self.widgets.scaledialog.set_title(_("Axis Scale Calculation"))
self.widgets.scaledialog.show_all()
result = self.widgets.scaledialog.run()
self.widgets.scaledialog.hide()
# remove signals
for i in templist1:
self.widgets[i].disconnect(self.d[i])
for i in checkbutton_list:
self.widgets[i].disconnect(self.d[i])
if not result: return
# record data values
for i in data_list:
self.d[axis+i] = self.widgets[i].get_value()
for i in checkbutton_list:
self.d[axis+i] = self.widgets[i].get_active()
# set the calculations result
if encoder or resolver:
self.widgets[axis+"encoderscale"].set_value(get("calcencoder_scale"))
if stepdrive:
self.widgets[axis+"stepscale"].set_value(get("calcmotor_scale"))
def update_scale_calculation(self,widget,axis):
w = self.widgets
d = self.d
def get(n): return get_value(w[n])
stepdrive = self.findsignal(axis+"-stepgen-step")
encoder = self.findsignal(axis+"-encoder-a")
resolver = self.findsignal(axis+"-resolver")
motor_pulley_ratio = encoder_pulley_ratio = 1
motor_worm_ratio = encoder_worm_ratio = 1
encoder_scale = motor_scale = 0
microstepfactor = motor_pitch = encoder_pitch = motor_steps = 1
if axis == "a": rotary_scale = 360
else: rotary_scale = 1
try:
if stepdrive:
# stepmotor scale
w["calcmotor_scale"].set_sensitive(True)
w["stepscaleframe"].set_sensitive(True)
if w["cbmotor_pulley"].get_active():
w["motor_pulleydriver"].set_sensitive(True)
w["motor_pulleydriven"].set_sensitive(True)
motor_pulley_ratio = (get("motor_pulleydriven") / get("motor_pulleydriver"))
else:
w["motor_pulleydriver"].set_sensitive(False)
w["motor_pulleydriven"].set_sensitive(False)
if w["cbmotor_worm"].get_active():
w["motor_wormdriver"].set_sensitive(True)
w["motor_wormdriven"].set_sensitive(True)
motor_worm_ratio = (get("motor_wormdriver") / get("motor_wormdriven"))
else:
w["motor_wormdriver"].set_sensitive(False)
w["motor_wormdriven"].set_sensitive(False)
if w["cbmicrosteps"].get_active():
w["microstep"].set_sensitive(True)
microstepfactor = get("microstep")
else:
w["microstep"].set_sensitive(False)
if w["cbmotor_pitch"].get_active():
w["motor_leadscrew"].set_sensitive(True)
w["cbmotor_tpi"].set_active(False)
if self.d.units == _PD._METRIC:
motor_pitch = 1./ get("motor_leadscrew")
else:
motor_pitch = 1./ (get("motor_leadscrew")* .03937008)
else: w["motor_leadscrew"].set_sensitive(False)
if w["cbmotor_tpi"].get_active():
w["motor_leadscrew_tpi"].set_sensitive(True)
w["cbmotor_pitch"].set_active(False)
if self.d.units == _PD._METRIC:
motor_pitch = (get("motor_leadscrew_tpi")* .03937008)
else:
motor_pitch = get("motor_leadscrew_tpi")
else: w["motor_leadscrew_tpi"].set_sensitive(False)
motor_steps = get("steprev")
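                # motor_scale = steps per machine unit (per degree for a rotary axis, hence rotary_scale)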
motor_scale = (motor_steps * microstepfactor * motor_pulley_ratio * motor_worm_ratio * motor_pitch) / rotary_scale
w["calcmotor_scale"].set_text(locale.format("%.4f", (motor_scale)))
else:
w["calcmotor_scale"].set_sensitive(False)
w["stepscaleframe"].set_sensitive(False)
# encoder scale
if encoder or resolver:
w["calcencoder_scale"].set_sensitive(True)
w["encoderscaleframe"].set_sensitive(True)
if w["cbencoder_pulley"].get_active():
w["encoder_pulleydriver"].set_sensitive(True)
w["encoder_pulleydriven"].set_sensitive(True)
encoder_pulley_ratio = (get("encoder_pulleydriven") / get("encoder_pulleydriver"))
else:
w["encoder_pulleydriver"].set_sensitive(False)
w["encoder_pulleydriven"].set_sensitive(False)
if w["cbencoder_worm"].get_active():
w["encoder_wormdriver"].set_sensitive(True)
w["encoder_wormdriven"].set_sensitive(True)
encoder_worm_ratio = (get("encoder_wormdriver") / get("encoder_wormdriven"))
else:
w["encoder_wormdriver"].set_sensitive(False)
w["encoder_wormdriven"].set_sensitive(False)
if w["cbencoder_pitch"].get_active():
w["encoder_leadscrew"].set_sensitive(True)
w["cbencoder_tpi"].set_active(False)
if self.d.units == _PD._METRIC:
encoder_pitch = 1./ get("encoder_leadscrew")
else:
encoder_pitch = 1./ (get("encoder_leadscrew")*.03937008)
else: w["encoder_leadscrew"].set_sensitive(False)
if w["cbencoder_tpi"].get_active():
w["encoder_leadscrew_tpi"].set_sensitive(True)
w["cbencoder_pitch"].set_active(False)
if self.d.units == _PD._METRIC:
encoder_pitch = (get("encoder_leadscrew_tpi")*.03937008)
else:
encoder_pitch = get("encoder_leadscrew_tpi")
else: w["encoder_leadscrew_tpi"].set_sensitive(False)
encoder_cpr = get_value(w[("encoderline")]) * 4
encoder_scale = (encoder_pulley_ratio * encoder_worm_ratio * encoder_pitch * encoder_cpr) / rotary_scale
w["calcencoder_scale"].set_text(locale.format("%.4f", (encoder_scale)))
else:
w["calcencoder_scale"].set_sensitive(False)
w["encoderscaleframe"].set_sensitive(False)
#new stuff
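            # derive the step rate (kHz), acceleration time/distance and max rpm shown on the page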
if stepdrive: scale = motor_scale
else: scale = encoder_scale
maxvps = (get_value(w[axis+"maxvel"]))/60
pps = (scale * (maxvps))/1000
if pps == 0: raise ValueError
pps = abs(pps)
w["khz"].set_text("%.1f" % pps)
acctime = (maxvps) / get_value(w[axis+"maxacc"])
accdist = acctime * .5 * (maxvps)
if encoder or resolver:
maxrpm = int(maxvps * 60 * (scale/encoder_cpr))
else:
maxrpm = int(maxvps * 60 * (scale/(microstepfactor * motor_steps)))
w["acctime"].set_text("%.4f" % acctime)
w["accdist"].set_text("%.4f" % accdist)
w["chartresolution"].set_text("%.7f" % (1.0 / scale))
w["calscale"].set_text(str(scale))
w["maxrpm"].set_text("%d" % maxrpm)
except (ValueError, ZeroDivisionError):
w["calcmotor_scale"].set_text("200")
w["calcencoder_scale"].set_text("1000")
w["chartresolution"].set_text("")
w["acctime"].set_text("")
if not axis == 's':
w["accdist"].set_text("")
w["khz"].set_text("")
w["calscale"].set_text("")
def motor_encoder_sanity_check(self,widgets,axis):
stepdrive = encoder = bad = resolver = pot = False
if self.findsignal(axis+"-stepgen-step"): stepdrive = True
if self.findsignal(axis+"-encoder-a"): encoder = True
if self.findsignal(axis+"-resolver"): resolver = True
        if self.findsignal(axis+"-pot-output"): pot = True
if encoder or resolver:
if self.widgets[axis+"encoderscale"].get_value() < 1:
self.widgets[axis+"encoderscale"].modify_bg(gtk.STATE_NORMAL, self.widgets[axis+"encoderscale"].get_colormap().alloc_color("red"))
dbg('encoder resolver scale bad %f'%self.widgets[axis+"encoderscale"].get_value())
bad = True
if stepdrive:
if self.widgets[axis+"stepscale"].get_value() < 1:
self.widgets[axis+"stepscale"].modify_bg(gtk.STATE_NORMAL, self.widgets[axis+"stepscale"].get_colormap().alloc_color("red"))
dbg('step scale bad')
bad = True
if not (encoder or resolver) and not stepdrive and not axis == "s":
dbg('encoder %s resolver %s stepper %s axis %s'%(encoder,resolver,stepdrive,axis))
bad = True
        if self.widgets[axis+"maxvel"].get_value() < 1:
dbg('max vel low')
bad = True
        if self.widgets[axis+"maxacc"].get_value() < 1:
dbg('max accl low')
bad = True
if bad:
dbg('motor %s_encoder sanity check -bad'%axis)
self.p.set_buttons_sensitive(1,0)
self.widgets[axis + "axistune"].set_sensitive(0)
self.widgets[axis + "axistest"].set_sensitive(0)
else:
dbg('motor %s_encoder sanity check - good'%axis)
self.widgets[axis+"encoderscale"].modify_bg(gtk.STATE_NORMAL, self.origbg)
self.widgets[axis+"stepscale"].modify_bg(gtk.STATE_NORMAL, self.origbg)
self.p.set_buttons_sensitive(1,1)
self.widgets[axis + "axistune"].set_sensitive(1)
self.widgets[axis + "axistest"].set_sensitive(1)
def update_gladevcp(self):
i = self.widgets.gladevcp.get_active()
self.widgets.gladevcpbox.set_sensitive( i )
if self.d.frontend == _PD._TOUCHY:
self.widgets.centerembededgvcp.set_active(True)
self.widgets.centerembededgvcp.set_sensitive(True)
self.widgets.sideembededgvcp.set_sensitive(False)
self.widgets.standalonegvcp.set_sensitive(False)
elif self.d.frontend == _PD._GMOCCAPY or self.d.frontend == _PD._AXIS:
self.widgets.sideembededgvcp.set_sensitive(True)
self.widgets.centerembededgvcp.set_sensitive(True)
self.widgets.standalonegvcp.set_sensitive(False)
if not self.widgets.centerembededgvcp.get_active() and not self.widgets.sideembededgvcp.get_active():
self.widgets.centerembededgvcp.set_active(True)
else:
self.widgets.sideembededgvcp.set_sensitive(False)
self.widgets.centerembededgvcp.set_sensitive(False)
self.widgets.standalonegvcp.set_sensitive(True)
self.widgets.standalonegvcp.set_active(True)
i = self.widgets.standalonegvcp.get_active()
self.widgets.gladevcpsize.set_sensitive(i)
self.widgets.gladevcpposition.set_sensitive(i)
self.widgets.gladevcpforcemax.set_sensitive(i)
if not i:
self.widgets.gladevcpsize.set_active(False)
self.widgets.gladevcpposition.set_active(False)
self.widgets.gladevcpforcemax.set_active(False)
i = self.widgets.gladevcpsize.get_active()
self.widgets.gladevcpwidth.set_sensitive(i)
self.widgets.gladevcpheight.set_sensitive(i)
i = self.widgets.gladevcpposition.get_active()
self.widgets.gladevcpxpos.set_sensitive(i)
self.widgets.gladevcpypos.set_sensitive(i)
for i in (("zerox","x"),("zeroy","y"),("zeroz","z"),("zeroa","a"),("autotouchz","z")):
if not i[1] in(self.d.available_axes):
self.widgets[i[0]].set_active(False)
self.widgets[i[0]].set_sensitive(False)
else:
self.widgets[i[0]].set_sensitive(True)
def has_spindle_speed_control(self):
for test in ("s-stepgen-step", "s-pwm-pulse", "s-encoder-a", "spindle-enable", "spindle-cw", "spindle-ccw", "spindle-brake",
"s-pot-output"):
has_spindle = self.findsignal(test)
print test,has_spindle
if has_spindle:
return True
if self.d.serial_vfd and (self.d.mitsub_vfd or self.d.gs2_vfd):
return True
return False
def clean_unused_ports(self, *args):
# if parallel ports not used clear all signals
parportnames = ("pp1","pp2","pp3")
for check,connector in enumerate(parportnames):
if self.d.number_pports >= (check+1):continue
# initialize parport input / inv pins
for i in (1,2,3,4,5,6,7,8,10,11,12,13,15):
pinname ="%s_Ipin%d"% (connector,i)
self.d[pinname] = _PD.UNUSED_INPUT
pinname ="%s_Ipin%d_inv"% (connector,i)
self.d[pinname] = False
# initialize parport output / inv pins
for i in (1,2,3,4,5,6,7,8,9,14,16,17):
pinname ="%s_Opin%d"% (connector,i)
self.d[pinname] = _PD.UNUSED_OUTPUT
pinname ="%s_Opin%d_inv"% (connector,i)
self.d[pinname] = False
# clear all unused mesa signals
for boardnum in(0,1):
for connector in(1,2,3,4,5,6,7,8,9):
if self.d.number_mesa >= boardnum + 1 :
if connector in(self.d["mesa%d_currentfirmwaredata"% (boardnum)][_PD._NUMOFCNCTRS]) :
continue
# This initializes GPIO input pins
for i in range(0,16):
pinname ="mesa%dc%dpin%d"% (boardnum,connector,i)
self.d[pinname] = _PD.UNUSED_INPUT
pinname ="mesa%dc%dpin%dtype"% (boardnum,connector,i)
self.d[pinname] = _PD.GPIOI
# This initializes GPIO output pins
for i in range(16,24):
pinname ="mesa%dc%dpin%d"% (boardnum,connector,i)
self.d[pinname] = _PD.UNUSED_OUTPUT
pinname ="mesa%dc%dpin%dtype"% (boardnum,connector,i)
self.d[pinname] = _PD.GPIOO
# This initializes the mesa inverse pins
for i in range(0,24):
pinname ="mesa%dc%dpin%dinv"% (boardnum,connector,i)
self.d[pinname] = False
# clear unused sserial signals
keeplist =[]
# if the current firmware supports sserial better check for used channels
# and make a 'keeplist'. we don't want to clear them
if self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._MAXSSERIALPORTS]:
#search all pins for sserial port
for concount,connector in enumerate(self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._NUMOFCNCTRS]) :
for pin in range (0,24):
firmptype,compnum = self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._STARTOFDATA+pin+(concount*24)]
p = 'mesa%dc%dpin%d' % (boardnum, connector, pin)
ptype = 'mesa%dc%dpin%dtype' % (boardnum, connector , pin)
if self.d[ptype] in (_PD.TXDATA0,_PD.TXDATA1,_PD.TXDATA2,_PD.TXDATA3,_PD.TXDATA4,_PD.SS7I76M0,_PD.SS7I76M2,_PD.SS7I76M3,
_PD.SS7I77M0,_PD.SS7I77M1,_PD.SS7I77M3,_PD.SS7I77M4) and not self.d[p] == _PD.UNUSED_SSERIAL:
if self.d[ptype] in (_PD.TXDATA0,_PD.SS7I76M0,_PD.SS7I77M0): channelnum = 0
elif self.d[ptype] in (_PD.TXDATA1,_PD.SS7I77M1): channelnum = 1
elif self.d[ptype] == _PD.TXDATA2: channelnum = 2
elif self.d[ptype] in (_PD.TXDATA3,_PD.SS7I76M3,_PD.SS7I77M3): channelnum = 3
elif self.d[ptype] in (_PD.TXDATA4,_PD.SS7I77M4): channelnum = 4
keeplist.append(channelnum)
#print "board # %d sserial keeplist"%(boardnum),keeplist
# ok clear the sserial pins unless they are in the keeplist
port = 0# TODO hard code at only 1 sserial port
for channel in range(0,_PD._NUM_CHANNELS): #TODO hardcoded at 5 sserial channels instead of 8
if channel in keeplist: continue
# This initializes pins
for i in range(0,self._p._SSCOMBOLEN):
pinname ="mesa%dsserial%d_%dpin%d"% (boardnum, port,channel,i)
if i < 24:
self.d[pinname] = _PD.UNUSED_INPUT
else:
self.d[pinname] = _PD.UNUSED_OUTPUT
pinname ="mesa%dsserial%d_%dpin%dtype"% (boardnum, port,channel,i)
if i < 24:
self.d[pinname] = _PD.GPIOI
else:
self.d[pinname] = _PD.GPIOO
pinname ="mesa%dsserial%d_%dpin%dinv"% (boardnum, port,channel,i)
self.d[pinname] = False
def debug_iter(self,test,testwidget,message=None):
print "#### DEBUG :",message
for i in ("_gpioosignaltree","_gpioisignaltree","_steppersignaltree","_encodersignaltree","_muxencodersignaltree",
"_pwmcontrolsignaltree","_pwmrelatedsignaltree","_tppwmsignaltree",
"_gpioliststore","_encoderliststore","_muxencoderliststore","_pwmliststore","_tppwmliststore"):
modelcheck = self.widgets[testwidget].get_model()
if modelcheck == self.d[i]:print i;break
#********************
# Common Helper functions
#********************
def tandem_check(self, letter):
tandem_stepper = self.make_pinname(self.stepgen_sig("%s2"%letter))
tandem_pwm = self.make_pinname(self.pwmgen_sig("%s2"%letter))
print letter, bool(tandem_stepper or tandem_pwm), tandem_stepper, tandem_pwm
return bool(tandem_stepper or tandem_pwm)
def stepgen_sig(self, axis):
thisaxisstepgen = axis + "-stepgen-step"
test = self.findsignal(thisaxisstepgen)
return test
    # find the individual pins related to the step gens
    # so that we can check if they were inverted
def stepgen_invert_pins(self,pinnumber):
# sample pinname = mesa0c0pin11
signallist_a = []
signallist_b = []
pin = int(pinnumber[10:])
connector = int(pinnumber[6:7])
boardnum = int(pinnumber[4:5])
channel = None
pinlist = self.list_related_pins([_PD.STEPA,_PD.STEPB], boardnum, connector, channel, pin, 0)
#print pinlist
for num,i in enumerate(pinlist):
if self.d[i[0]+"inv"]:
gpioname = self.make_pinname(self.findsignal( self.d[i[0]] ),True)
#print gpioname
if num:
signallist_b.append(gpioname)
else:
signallist_a.append(gpioname)
return [signallist_a, signallist_b]
def spindle_invert_pins(self,pinnumber):
# sample pinname = mesa0sserial0_0pin11
signallist = []
pin = int(pinnumber[18:])
port = int(pinnumber[12:13])
boardnum = int(pinnumber[4:5])
channel = int(pinnumber[14:15])
pinlist = self.list_related_pins([_PD.POTO,_PD.POTE], boardnum, port, channel, pin, 0)
for i in pinlist:
if self.d[i[0]+"inv"]:
name = self.d[i[0]+"type"]
signallist.append(name)
return signallist
def encoder_sig(self, axis):
thisaxisencoder = axis +"-encoder-a"
test = self.findsignal(thisaxisencoder)
return test
def resolver_sig(self, axis):
thisaxisresolver = axis +"-resolver"
test = self.findsignal(thisaxisresolver)
return test
def amp_8i20_sig(self, axis):
thisaxis8i20 = "%s-8i20"% axis
test = self.findsignal(thisaxis8i20)
return test
def potoutput_sig(self,axis):
thisaxispot = "%s-pot-output"% axis
test = self.findsignal(thisaxispot)
return test
def pwmgen_sig(self, axis):
thisaxispwmgen = axis + "-pwm-pulse"
test = self.findsignal( thisaxispwmgen)
return test
def pwmgen_invert_pins(self,pinnumber):
print "list pwm invert pins",pinnumber
# sample pinname = mesa0c0pin11
signallist = []
pin = int(pinnumber[10:])
connector = int(pinnumber[6:7])
boardnum = int(pinnumber[4:5])
channel = None
pinlist = self.list_related_pins([_PD.PWMP, _PD.PWMD, _PD.PWME], boardnum, connector, channel, pin, 0)
print pinlist
for i in pinlist:
if self.d[i[0]+"inv"]:
gpioname = self.make_pinname(self.findsignal( self.d[i[0]] ),True)
print gpioname
signallist.append(gpioname)
return signallist
def tppwmgen_sig(self, axis):
thisaxispwmgen = axis + "-tppwm-a"
test = self.findsignal(thisaxispwmgen)
return test
def tppwmgen_has_6(self, axis):
thisaxispwmgen = axis + "-tppwm-anot"
test = self.findsignal(thisaxispwmgen)
return test
def home_sig(self, axis):
thisaxishome = set(("all-home", "home-" + axis, "min-home-" + axis, "max-home-" + axis, "both-home-" + axis))
for i in thisaxishome:
if self.findsignal(i): return i
return None
def min_lim_sig(self, axis):
thisaxishome = set(("all-limit", "min-" + axis,"min-home-" + axis, "both-" + axis, "both-home-" + axis))
for i in thisaxishome:
if self.findsignal(i): return i
return None
def max_lim_sig(self, axis):
thisaxishome = set(("all-limit", "max-" + axis, "max-home-" + axis, "both-" + axis, "both-home-" + axis))
for i in thisaxishome:
if self.findsignal(i): return i
return None
def get_value(self,w):
return get_value(w)
def show_try_errors(self):
exc_type, exc_value, exc_traceback = sys.exc_info()
formatted_lines = traceback.format_exc().splitlines()
print
print "****Pncconf verbose debugging:",formatted_lines[0]
traceback.print_tb(exc_traceback, limit=1, file=sys.stdout)
print formatted_lines[-1]
def hostmot2_command_string(self, substitution = False):
def make_name(bname,bnum):
if substitution:
return "[HMOT](CARD%d)"% (bnum)
else:
return "hm2_%s.%d"% (bname,bnum)
# mesa stuff
load_cmnds = []
board0 = self.d.mesa0_currentfirmwaredata[_PD._BOARDNAME]
board1 = self.d.mesa1_currentfirmwaredata[_PD._BOARDNAME]
driver0 = ' %s'% self.d.mesa0_currentfirmwaredata[_PD._HALDRIVER]
driver1 = ' %s'% self.d.mesa1_currentfirmwaredata[_PD._HALDRIVER]
directory0 = self.d.mesa0_currentfirmwaredata[_PD._DIRECTORY]
directory1 = self.d.mesa1_currentfirmwaredata[_PD._DIRECTORY]
firm0 = self.d.mesa0_currentfirmwaredata[_PD._FIRMWARE]
firm1 = self.d.mesa1_currentfirmwaredata[_PD._FIRMWARE]
firmstring0 = firmstring1 = board0_ip = board1_ip = ""
mesa0_3pwm = mesa1_3pwm = ''
mesa0_ioaddr = mesa1_ioaddr = ''
load_cmnds.append("loadrt hostmot2")
if '7i43' in board0:
mesa0_ioaddr = ' ioaddr=%s ioaddr_hi=0 epp_wide=1'% self.d.mesa0_parportaddrs
if '7i43' in board1:
mesa1_ioaddr = ' ioaddr=%s ioaddr_hi=0 epp_wide=1'% self.d.mesa1_parportaddrs
if 'eth' in driver0:
firmstring0 =''
if self.d.mesa0_card_addrs:
board0_ip = ''' board_ip="%s"''' % self.d.mesa0_card_addrs
elif not "5i25" in board0:
firmstring0 = "firmware=hm2/%s/%s.BIT" % (directory0, firm0)
if 'eth' in driver1:
firmstring1 =''
if self.d.mesa1_card_addrs:
board1_ip = ''' board_ip="%s"'''% self.d.mesa1_card_addrs
elif not "5i25" in board1:
firmstring1 = "firmware=hm2/%s/%s.BIT" % (directory1, firm1)
# TODO fix this hardcoded hack: only one serialport
ssconfig0 = ssconfig1 = resolver0 = resolver1 = temp = ""
if self.d.mesa0_numof_sserialports:
for i in range(1,_PD._NUM_CHANNELS+1):
if i <= self.d.mesa0_numof_sserialchannels:
# m number in the name signifies the required sserial mode
for j in ("123456789"):
if ("m"+j) in self.d["mesa0sserial0_%dsubboard"% (i-1)]:
temp = temp + j
break
else: temp = temp + "0" # default case
else:
temp = temp + "x"
ssconfig0 = " sserial_port_0=%s"% temp
        if self.d.number_mesa == 2 and self.d.mesa1_numof_sserialports:
            temp = ""  # reset the mode string so board 0's channels do not leak into board 1's config
for i in range(1,_PD._NUM_CHANNELS+1):
if i <= self.d.mesa1_numof_sserialchannels:
# m number in the name signifies the required sserial mode
for j in ("123456789"):
if ("m"+j) in self.d["mesa1sserial0_%dsubboard"% (i-1)]:
temp = temp + j
break
else: temp = temp + "0" # default case
else:
temp = temp + "x"
ssconfig1 = " sserial_port_0=%s"% temp
if self.d.mesa0_numof_resolvers:
resolver0 = " num_resolvers=%d"% self.d.mesa0_numof_resolvers
if self.d.mesa1_numof_resolvers:
resolver1 = " num_resolvers=%d"% self.d.mesa1_numof_resolvers
if self.d.mesa0_numof_tppwmgens:
mesa0_3pwm = ' num_3pwmgens=%d' %self.d.mesa0_numof_tppwmgens
if self.d.mesa1_numof_tppwmgens:
mesa1_3pwm = ' num_3pwmgens=%d' %self.d.mesa1_numof_tppwmgens
if self.d.number_mesa == 1:
load_cmnds.append( """loadrt%s%s%s config="%s num_encoders=%d num_pwmgens=%d%s num_stepgens=%d%s%s" """ % (
driver0, board0_ip, mesa0_ioaddr, firmstring0, self.d.mesa0_numof_encodergens, self.d.mesa0_numof_pwmgens,
mesa0_3pwm, self.d.mesa0_numof_stepgens, ssconfig0, resolver0))
elif self.d.number_mesa == 2 and (driver0 == driver1):
load_cmnds.append( """loadrt%s%s%s config="%s num_encoders=%d num_pwmgens=%d%s num_stepgens=%d%s%s,\
%s%s num_encoders=%d num_pwmgens=%d%s num_stepgens=%d%s%s" """ % (
driver0, board0_ip, mesa0_ioaddr, firmstring0, self.d.mesa0_numof_encodergens, self.d.mesa0_numof_pwmgens,
mesa0_3pwm, self.d.mesa0_numof_stepgens, ssconfig0, resolver0, mesa1_ioaddr, firmstring1,
self.d.mesa1_numof_encodergens, self.d.mesa1_numof_pwmgens, mesa1_3pwm,
self.d.mesa1_numof_stepgens, ssconfig1, resolver1))
elif self.d.number_mesa == 2:
load_cmnds.append( """loadrt%s%s%s config="%s num_encoders=%d num_pwmgens=%d%s num_stepgens=%d%s%s" """ % (
driver0, board0_ip, mesa0_ioaddr, firmstring0, self.d.mesa0_numof_encodergens, self.d.mesa0_numof_pwmgens,
mesa0_3pwm, self.d.mesa0_numof_stepgens, ssconfig0, resolver0 ))
load_cmnds.append( """loadrt%s%s%s config="%s num_encoders=%d num_pwmgens=%d%s num_stepgens=%d%s%s" """ % (
driver1, board1_ip, mesa1_ioaddr, firmstring1, self.d.mesa1_numof_encodergens, self.d.mesa1_numof_pwmgens,
                mesa1_3pwm, self.d.mesa1_numof_stepgens, ssconfig1, resolver1 ))
for boardnum in range(0,int(self.d.number_mesa)):
if boardnum == 1 and (board0 == board1):
halnum = 1
else:
halnum = 0
prefix = make_name(self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._BOARDNAME],halnum)
if self.d["mesa%d_numof_pwmgens"% boardnum] > 0:
load_cmnds.append( "setp %s.pwmgen.pwm_frequency %d"% (prefix, self.d["mesa%d_pwm_frequency"% boardnum] ))
load_cmnds.append( "setp %s.pwmgen.pdm_frequency %d"% (prefix, self.d["mesa%d_pdm_frequency"% boardnum] ))
load_cmnds.append( "setp %s.watchdog.timeout_ns %d"% (prefix, self.d["mesa%d_watchdog_timeout"% boardnum] ))
# READ
read_cmnds = []
for boardnum in range(0,int(self.d.number_mesa)):
if boardnum == 1 and (self.d.mesa0_currentfirmwaredata[_PD._BOARDNAME] == self.d.mesa1_currentfirmwaredata[_PD._BOARDNAME]):
halnum = 1
else:
halnum = 0
prefix = make_name(self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._BOARDNAME],halnum)
read_cmnds.append( "addf %s.read servo-thread"% (prefix))
# WRITE
write_cmnds = []
for boardnum in range(0,int(self.d.number_mesa)):
if boardnum == 1 and (self.d.mesa0_currentfirmwaredata[_PD._BOARDNAME] == self.d.mesa1_currentfirmwaredata[_PD._BOARDNAME]):
halnum = 1
else:
halnum = 0
prefix = make_name(self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._BOARDNAME],halnum)
write_cmnds.append( "addf %s.write servo-thread"% (prefix))
if '7i76e' in self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._BOARDNAME] or \
'7i92' in self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._BOARDNAME]:
write_cmnds.append( "setp %s.dpll.01.timer-us -50"% (prefix))
write_cmnds.append( "setp %s.stepgen.timer-number 1"% (prefix))
return load_cmnds,read_cmnds,write_cmnds
def pport_command_string(self):
# LOAD
load_cmnds = []
# parport stuff
port3name = port2name = port1name = port3dir = port2dir = port1dir = ""
if self.d.number_pports>2:
port3name = " " + self.d.ioaddr3
if self.d.pp3_direction:
port3dir =" out"
else:
port3dir =" in"
if self.d.number_pports>1:
port2name = " " + self.d.ioaddr2
if self.d.pp2_direction:
port2dir =" out"
else:
port2dir =" in"
port1name = self.d.ioaddr1
if self.d.pp1_direction:
port1dir =" out"
else:
port1dir =" in"
load_cmnds.append("loadrt hal_parport cfg=\"%s%s%s%s%s%s\"" % (port1name, port1dir, port2name, port2dir, port3name, port3dir))
# READ
read_cmnds = []
read_cmnds.append( "addf parport.0.read servo-thread")
if self.d.number_pports > 1:
read_cmnds.append( "addf parport.1.read servo-thread")
if self.d.number_pports > 2:
read_cmnds.append( "addf parport.2.read servo-thread")
# WRITE
write_cmnds = []
write_cmnds.append( "addf parport.0.write servo-thread")
if self.d.number_pports > 1:
write_cmnds.append( "addf parport.1.write servo-thread")
if self.d.number_pports > 2:
write_cmnds.append( "addf parport.2.write servo-thread")
return load_cmnds,read_cmnds,write_cmnds
    # This method returns the I/O pin designation (name and number) of a given HAL signalname.
    # It does not check to see if the signalname is in the list more than once.
    # If parports are not used, parport signals are not searched.
def findsignal(self, sig):
if self.d.number_pports:
ppinput = {}
ppoutput = {}
for i in (1,2,3):
for s in (2,3,4,5,6,7,8,9,10,11,12,13,15):
key = self.d["pp%d_Ipin%d" %(i,s)]
ppinput[key] = "pp%d_Ipin%d" %(i,s)
for s in (1,2,3,4,5,6,7,8,9,14,16,17):
key = self.d["pp%d_Opin%d" %(i,s)]
ppoutput[key] = "pp%d_Opin%d" %(i,s)
mesa = {}
for boardnum in range(0,int(self.d.number_mesa)):
for concount,connector in enumerate(self.d["mesa%d_currentfirmwaredata"% (boardnum)][_PD._NUMOFCNCTRS]) :
for s in range(0,24):
key = self.d["mesa%dc%dpin%d"% (boardnum,connector,s)]
mesa[key] = "mesa%dc%dpin%d" %(boardnum,connector,s)
if self.d["mesa%d_numof_sserialports"% boardnum]:
sserial = {}
port = 0
for channel in range (0,self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._MAXSSERIALCHANNELS]):
if channel ==_PD._NUM_CHANNELS: break # TODO may not be all channels available
for pin in range (0,_PD._SSCOMBOLEN):
key = self.d['mesa%dsserial%d_%dpin%d' % (boardnum, port, channel, pin)]
sserial[key] = 'mesa%dsserial%d_%dpin%d' % (boardnum, port, channel, pin)
try:
return mesa[sig]
except:
try:
return sserial[sig]
except:
pass
if self.d.number_pports:
try:
return ppinput[sig]
except:
try:
return ppoutput[sig]
except:
return None
else: return None
    # Search the current firmware array for related pins.
    # If a pin is not the same component number as the pin that changed, or
    # is not in the related component type, keep searching.
    # If it is the right component type and number, check the relatedsearch array for a match.
    # If it's a match, add it to the list of pins (pinlist) that need to be updated.
def list_related_pins(self, relatedsearch, boardnum, connector, channel, pin, style):
#print relatedsearch, boardnum, connector, channel, pin, style
pinlist =[]
if not channel == None:
subfirmname = self.d["mesa%dsserial%d_%dsubboard"% (boardnum, connector, channel)]
for subnum,temp in enumerate(_PD.MESA_DAUGHTERDATA):
if _PD.MESA_DAUGHTERDATA[subnum][_PD._SUBFIRMNAME] == subfirmname: break
subboardname = _PD.MESA_DAUGHTERDATA[subnum][_PD._SUBBOARDNAME]
currentptype,currentcompnum = _PD.MESA_DAUGHTERDATA[subnum][_PD._SUBSTARTOFDATA+pin]
for t_pin in range (0,_PD._SSCOMBOLEN):
comptype,compnum = _PD.MESA_DAUGHTERDATA[subnum][_PD._SUBSTARTOFDATA+t_pin]
if compnum != currentcompnum: continue
if comptype not in (relatedsearch): continue
if style == 0:
tochange = ['mesa%dsserial%d_%dpin%d'% (boardnum,connector,channel,t_pin),boardnum,connector,channel,t_pin]
if style == 1:
tochange = ['mesa%dsserial%d_%dpin%dtype'% (boardnum,connector,channel,t_pin),boardnum,connector,channel,t_pin]
if style == 2:
tochange = ['mesa%dsserial%d_%dpin%dinv'% (boardnum,connector,channel,t_pin),boardnum,connector,channel,t_pin]
pinlist.append(tochange)
else:
for concount,i in enumerate(self.d["mesa%d_currentfirmwaredata"% (boardnum)][_PD._NUMOFCNCTRS]):
if i == connector:
currentptype,currentcompnum = self.d["mesa%d_currentfirmwaredata"% (boardnum)][_PD._STARTOFDATA+pin+(concount*24)]
for t_concount,t_connector in enumerate(self.d["mesa%d_currentfirmwaredata"% (boardnum)][_PD._NUMOFCNCTRS]):
for t_pin in range (0,24):
comptype,compnum = self.d["mesa%d_currentfirmwaredata"% (boardnum)][_PD._STARTOFDATA+t_pin+(t_concount*24)]
if compnum != currentcompnum: continue
if comptype not in (relatedsearch): continue
if style == 0:
tochange = ['mesa%dc%dpin%d'% (boardnum,t_connector,t_pin),boardnum,t_connector,None,t_pin]
if style == 1:
tochange = ['mesa%dc%dpin%dtype'% (boardnum,t_connector,t_pin),boardnum,t_connector,None,t_pin]
if style == 2:
tochange = ['mesa%dc%dpin%dinv'% (boardnum,t_connector,t_pin),boardnum,t_connector,None,t_pin]
pinlist.append(tochange)
return pinlist
    # This method takes a signal name data pin (eg mesa0c3pin1)
    # and converts it to a HAL pin name (eg hm2_5i20.0.gpio.01).
    # Component number conversion is for adjustment of the position of pins related to the
    # 'controlling pin', eg encoder-a (controlling pin), encoder-b, encoder-i
    # (a, b, i are related pins of the encoder component).
    # gpionumber is a flag to return a gpio pin name instead of the component pin name;
    # this is used when we want to invert the pins of a component output (such as a stepper)
    # because you actually must invert the GPIO that would be in that position.
    # The prefixonly flag is used when we want the pin name without the component name,
    # used with sserial when we want the sserial port and channel so we can add our own name (eg enable pins).
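    # Illustrative call forms (hypothetical pin names; the actual output depends on the loaded firmware):
    #   self.make_pinname("mesa0c3pin1")                              # full component pin name
    #   self.make_pinname("mesa0c3pin1", gpionumber = True)           # force the gpio name, e.g. for inverting
    #   self.make_pinname("mesa0sserial0_0pin24", prefixonly = True)  # "hm2_<board>.<n>.<subboard>.<port>.<channel>."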
def make_pinname(self, pin, gpionumber = False, prefixonly = False, substitution = False):
def make_name(bname,bnum):
if substitution:
return "[HMOT](CARD%d)"% (bnum)
else:
return "hm2_%s.%d"% (bname, bnum)
test = str(pin)
halboardnum = 0
if test == "None": return None
elif 'mesa' in test:
type_name = { _PD.GPIOI:"gpio", _PD.GPIOO:"gpio", _PD.GPIOD:"gpio", _PD.SSR0:"ssr",
_PD.ENCA:"encoder", _PD.ENCB:"encoder",_PD.ENCI:"encoder",_PD.ENCM:"encoder",
_PD.RES0:"resolver",_PD.RES1:"resolver",_PD.RES2:"resolver",_PD.RES3:"resolver",_PD.RES4:"resolver",_PD.RES5:"resolver",
_PD.MXE0:"encoder", _PD.MXE1:"encoder",
_PD.PWMP:"pwmgen",_PD.PWMD:"pwmgen", _PD.PWME:"pwmgen", _PD.PDMP:"pwmgen", _PD.PDMD:"pwmgen", _PD.PDME:"pwmgen",
_PD.UDMU:"pwmgen",_PD.UDMD:"pwmgen", _PD.UDME:"pwmgen",_PD.STEPA:"stepgen", _PD.STEPB:"stepgen",
_PD.TPPWMA:"tppwmgen",_PD.TPPWMB:"tppwmgen",_PD.TPPWMC:"tppwmgen",
_PD.TPPWMAN:"tppwmgen",_PD.TPPWMBN:"tppwmgen",_PD.TPPWMCN:"tppwmgen",
_PD.TPPWME:"tppwmgen",_PD.TPPWMF:"tppwmgen",_PD.AMP8I20:"8i20",_PD.POTO:"spinout",
_PD.POTE:"spinena",_PD.POTD:"spindir",_PD.ANALOGIN:"analog","Error":"None" }
boardnum = int(test[4:5])
boardname = self.d["mesa%d_currentfirmwaredata"% boardnum][_PD._BOARDNAME]
meta = self.get_board_meta(boardname)
num_of_pins = meta.get('PINS_PER_CONNECTOR')
ptype = self.d[pin+"type"]
if boardnum == 1 and self.d.mesa1_currentfirmwaredata[_PD._BOARDNAME] == self.d.mesa0_currentfirmwaredata[_PD._BOARDNAME]:
halboardnum = 1
if 'serial' in test:
# sample pin name = mesa0sserial0_0pin24
pinnum = int(test[18:])
portnum = int(test[12:13])
channel = int(test[14:15])
subfirmname = self.d["mesa%dsserial%d_%dsubboard"% (boardnum, portnum, channel)]
for subnum,temp in enumerate(_PD.MESA_DAUGHTERDATA):
#print "pinname search -",_PD.MESA_DAUGHTERDATA[subnum][_PD._SUBFIRMNAME],subfirmname
if _PD.MESA_DAUGHTERDATA[subnum][_PD._SUBFIRMNAME] == subfirmname: break
#print "pinname -found subboard name:",_PD.MESA_DAUGHTERDATA[subnum][_PD._SUBFIRMNAME],subfirmname,subnum,"channel:",channel
subboardname = _PD.MESA_DAUGHTERDATA[subnum][_PD._SUBBOARDNAME]
firmptype,compnum = _PD.MESA_DAUGHTERDATA[subnum][_PD._SUBSTARTOFDATA+pinnum]
                # we iterate over this dict because of locale translation problems when using
# comptype = type_name[ptype]
comptype = "ERROR FINDING COMPONENT TYPE"
for key,value in type_name.iteritems():
if key == ptype:
comptype = value
break
if value == "Error":
print "**** ERROR PNCCONF: pintype error in make_pinname: (sserial) ptype = ",ptype
return None
# if gpionumber flag is true - convert to gpio pin name
if gpionumber or ptype in(_PD.GPIOI,_PD.GPIOO,_PD.GPIOD,_PD.SSR0):
if "7i77" in (subboardname) or "7i76" in(subboardname)or "7i84" in(subboardname):
if ptype in(_PD.GPIOO,_PD.GPIOD):
comptype = "output"
if pinnum >15 and pinnum <24:
pinnum = pinnum-16
elif pinnum >39:
pinnum = pinnum -32
elif ptype == _PD.GPIOI:
comptype = "input"
if pinnum >23 and pinnum < 40:
pinnum = pinnum-8
return "%s.%s.%d.%d."% (make_name(boardname,halboardnum),subboardname,portnum,channel) + comptype+"-%02d"% (pinnum)
elif "7i69" in (subboardname) or "7i73" in (subboardname) or "7i64" in(subboardname):
if ptype in(_PD.GPIOO,_PD.GPIOD):
comptype = "output"
pinnum -= 24
elif ptype == _PD.GPIOI:
comptype = "input"
return "%s.%s.%d.%d."% (make_name(boardname,halboardnum),subboardname,portnum,channel) + comptype+"-%02d"% (pinnum)
elif "7i70" in (subboardname) or "7i71" in (subboardname):
if ptype in(_PD.GPIOO,_PD.GPIOD):
comptype = "output"
elif ptype == _PD.GPIOI:
comptype = "input"
return "%s.%s.%d.%d."% (make_name(boardname,halboardnum),subboardname,portnum,channel) + comptype+"-%02d"% (pinnum)
else:
print "**** ERROR PNCCONF: subboard name ",subboardname," in make_pinname: (sserial) ptype = ",ptype,pin
return None
elif ptype in (_PD.AMP8I20,_PD.POTO,_PD.POTE,_PD.POTD) or prefixonly:
return "%s.%s.%d.%d."% (make_name(boardname,halboardnum),subboardname,portnum,channel)
elif ptype in(_PD.PWMP,_PD.PDMP,_PD.UDMU):
comptype = "analogout"
return "%s.%s.%d.%d."% (make_name(boardname,halboardnum),subboardname,portnum,channel) + comptype+"%d"% (compnum)
elif ptype == (_PD.ANALOGIN):
if "7i64" in(subboardname):
comptype = "analog"
else:
comptype = "analogin"
return "%s.%s.%d.%d."% (make_name(boardname,halboardnum),subboardname,portnum,channel) + comptype+"%d"% (compnum)
elif ptype == (_PD.ENCA):
comptype = "enc"
return "%s.%s.%d.%d."% (make_name(boardname,halboardnum),subboardname,portnum,channel) + comptype+"%d"% (compnum)
else:
print "**** ERROR PNCCONF: pintype error in make_pinname: (sserial) ptype = ",ptype,pin
return None
else:
# sample pin name = mesa0c3pin1
pinnum = int(test[10:])
connum = int(test[6:7])
                # we iterate over this dict because of locale translation problems when using
# comptype = type_name[ptype]
comptype = "ERROR FINDING COMPONENT TYPE"
                # we need concount (connector designations are not in numerical order, pin names are) and compnum from this
for concount,i in enumerate(self.d["mesa%d_currentfirmwaredata"% (boardnum)][_PD._NUMOFCNCTRS]):
if i == connum:
dummy,compnum = self.d["mesa%d_currentfirmwaredata"% (boardnum)][_PD._STARTOFDATA+pinnum+(concount*24)]
break
for key,value in type_name.iteritems():
if key == ptype: comptype = value
if value == "Error":
print "**** ERROR PNCCONF: pintype error in make_pinname: (mesa) ptype = ",ptype
return None
# if gpionumber flag is true - convert to gpio pin name
if gpionumber or ptype in(_PD.GPIOI,_PD.GPIOO,_PD.GPIOD,_PD.SSR0):
print '->',ptype,dummy,compnum,pin
if ptype == _PD.SSR0:
compnum -= 100
return "%s."% (make_name(boardname,halboardnum)) + "ssr.00.out-%02d"% (compnum)
else:
compnum = int(pinnum)+(concount* num_of_pins )
return "%s."% (make_name(boardname,halboardnum)) + "gpio.%03d"% (compnum)
elif ptype in (_PD.ENCA,_PD.ENCB,_PD.ENCI,_PD.ENCM,_PD.PWMP,_PD.PWMD,_PD.PWME,_PD.PDMP,_PD.PDMD,_PD.PDME,_PD.UDMU,_PD.UDMD,_PD.UDME,
_PD.STEPA,_PD.STEPB,_PD.STEPC,_PD.STEPD,_PD.STEPE,_PD.STEPF,
_PD.TPPWMA,_PD.TPPWMB,_PD.TPPWMC,_PD.TPPWMAN,_PD.TPPWMBN,_PD.TPPWMCN,_PD.TPPWME,_PD.TPPWMF):
return "%s."% (make_name(boardname,halboardnum)) + comptype+".%02d"% (compnum)
elif ptype in (_PD.RES0,_PD.RES1,_PD.RES2,_PD.RES3,_PD.RES4,_PD.RES5):
temp = (_PD.RES0,_PD.RES1,_PD.RES2,_PD.RES3,_PD.RES4,_PD.RES5)
for num,dummy in enumerate(temp):
if ptype == dummy:break
return "%s."% (make_name(boardname,halboardnum)) + comptype+".%02d"% (compnum*6+num)
elif ptype in (_PD.MXE0,_PD.MXE1):
num = 0
if ptype == _PD.MXE1: num = 1
return "%s."% (make_name(boardname,halboardnum)) + comptype+".%02d"% ((compnum * 2 + num))
elif 'pp' in test:
print test
ending = "-out"
test = str(pin)
print self.d[pin]
pintype = str(test[4:5])
print pintype
pinnum = int(test[8:])
print pinnum
connum = int(test[2:3])-1
print connum
if pintype == 'I': ending = "-in"
return "parport."+str(connum)+".pin-%02d"%(pinnum)+ending
else:
print "pintype error in make_pinname: pinname = ",test
return None
# Boiler code
def __getitem__(self, item):
return getattr(self, item)
def __setitem__(self, item, value):
return setattr(self, item, value)
# starting with 'pncconf -d' gives debug messages
if __name__ == "__main__":
usage = "usage: pncconf -h for options"
parser = OptionParser(usage=usage)
parser.add_option("-d", action="store", metavar='all', dest="debug",
help="Print debug info and ignore realtime/kernel tests.\nUse 'alldev' to show all the page tabs. 'step' to stop at each debug print,'excl','5i25','rawfirm','curfirm'")
(options, args) = parser.parse_args()
if options.debug:
app = App(dbgstate=options.debug)
else:
app = App('')
gtk.main()
| lgpl-2.1 | -1,847,330,764,788,389,000 | 54.005267 | 192 | 0.527267 | false | 3.77671 | false | false | false |
HaebinShin/tensorflow | tensorflow/python/ops/nn_ops.py | 1 | 56530 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Wrappers for primitive Neural Net (NN) Operations."""
# pylint: disable=invalid-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_nn_ops import *
# pylint: enable=wildcard-import
# Aliases for some automatically-generated names.
local_response_normalization = gen_nn_ops.lrn
def atrous_conv2d(value, filters, rate, padding, name=None):
"""Atrous convolution (a.k.a. convolution with holes or dilated convolution).
Computes a 2-D atrous convolution, also known as convolution with holes or
dilated convolution, given 4-D `value` and `filters` tensors. If the `rate`
parameter is equal to one, it performs regular 2-D convolution. If the `rate`
parameter is greater than one, it performs convolution with holes, sampling
the input values every `rate` pixels in the `height` and `width` dimensions.
This is equivalent to convolving the input with a set of upsampled filters,
produced by inserting `rate - 1` zeros between two consecutive values of the
filters along the `height` and `width` dimensions, hence the name atrous
convolution or convolution with holes (the French word trous means holes in
English).
More specifically:
output[b, i, j, k] = sum_{di, dj, q} filters[di, dj, q, k] *
value[b, i + rate * di, j + rate * dj, q]
Atrous convolution allows us to explicitly control how densely to compute
feature responses in fully convolutional networks. Used in conjunction with
bilinear interpolation, it offers an alternative to `conv2d_transpose` in
dense prediction tasks such as semantic image segmentation, optical flow
computation, or depth estimation. It also allows us to effectively enlarge
the field of view of filters without increasing the number of parameters or
the amount of computation.
For a description of atrous convolution and how it can be used for dense
feature extraction, please see: [Semantic Image Segmentation with Deep
Convolutional Nets and Fully Connected CRFs](http://arxiv.org/abs/1412.7062).
The same operation is investigated further in [Multi-Scale Context Aggregation
by Dilated Convolutions](http://arxiv.org/abs/1511.07122). Previous works
that effectively use atrous convolution in different ways are, among others,
[OverFeat: Integrated Recognition, Localization and Detection using
Convolutional Networks](http://arxiv.org/abs/1312.6229) and [Fast Image
Scanning with Deep Max-Pooling Convolutional Neural Networks]
(http://arxiv.org/abs/1302.1700). Atrous convolution is also closely related
to the so-called noble identities in multi-rate signal processing.
There are many different ways to implement atrous convolution (see the refs
above). The implementation here reduces
atrous_conv2d(value, filters, rate, padding=padding)
to the following three operations:
paddings = ...
net = space_to_batch(value, paddings, block_size=rate)
net = conv2d(net, filters, strides=[1, 1, 1, 1], padding="VALID")
crops = ...
net = batch_to_space(net, crops, block_size=rate)
Advanced usage. Note the following optimization: A sequence of `atrous_conv2d`
operations with identical `rate` parameters, 'SAME' `padding`, and filters
  with odd heights/widths:
net = atrous_conv2d(net, filters1, rate, padding="SAME")
net = atrous_conv2d(net, filters2, rate, padding="SAME")
...
net = atrous_conv2d(net, filtersK, rate, padding="SAME")
can be equivalently performed cheaper in terms of computation and memory as:
pad = ... # padding so that the input dims are multiples of rate
net = space_to_batch(net, paddings=pad, block_size=rate)
net = conv2d(net, filters1, strides=[1, 1, 1, 1], padding="SAME")
net = conv2d(net, filters2, strides=[1, 1, 1, 1], padding="SAME")
...
net = conv2d(net, filtersK, strides=[1, 1, 1, 1], padding="SAME")
net = batch_to_space(net, crops=pad, block_size=rate)
because a pair of consecutive `space_to_batch` and `batch_to_space` ops with
the same `block_size` cancel out when their respective `paddings` and `crops`
inputs are identical.
Args:
value: A 4-D `Tensor` of type `float`. It needs to be in the default "NHWC"
format. Its shape is `[batch, in_height, in_width, in_channels]`.
filters: A 4-D `Tensor` with the same type as `value` and shape
`[filter_height, filter_width, in_channels, out_channels]`. `filters`'
`in_channels` dimension must match that of `value`. Atrous convolution is
equivalent to standard convolution with upsampled filters with effective
height `filter_height + (filter_height - 1) * (rate - 1)` and effective
width `filter_width + (filter_width - 1) * (rate - 1)`, produced by
inserting `rate - 1` zeros along consecutive elements across the
`filters`' spatial dimensions.
rate: A positive int32. The stride with which we sample input values across
the `height` and `width` dimensions. Equivalently, the rate by which we
upsample the filter values by inserting zeros across the `height` and
`width` dimensions. In the literature, the same parameter is sometimes
called `input stride` or `dilation`.
padding: A string, either `'VALID'` or `'SAME'`. The padding algorithm.
name: Optional name for the returned tensor.
Returns:
A `Tensor` with the same type as `value`.
Raises:
ValueError: If input/output depth does not match `filters`' shape, or if
padding is other than `'VALID'` or `'SAME'`.
"""
with ops.op_scope([value, filters], name, "atrous_conv2d") as name:
value = ops.convert_to_tensor(value, name="value")
filters = ops.convert_to_tensor(filters, name="filters")
if not value.get_shape()[3].is_compatible_with(filters.get_shape()[2]):
raise ValueError(
"value's input channels does not match filters' input channels, "
"{} != {}".format(value.get_shape()[3], filters.get_shape()[2]))
if rate < 1:
raise ValueError("rate {} cannot be less than one".format(rate))
if rate == 1:
value = gen_nn_ops.conv2d(input=value,
filter=filters,
strides=[1, 1, 1, 1],
padding=padding)
return value
# We have two padding contributions. The first is used for converting "SAME"
# to "VALID". The second is required so that the height and width of the
# zero-padded value tensor are multiples of rate.
# Padding required to reduce to "VALID" convolution
if padding == "SAME":
# Handle filters whose shape is unknown during graph creation.
if filters.get_shape().is_fully_defined():
filter_shape = filters.get_shape().as_list()
else:
filter_shape = array_ops.shape(filters)
filter_height, filter_width = filter_shape[0], filter_shape[1]
# Spatial dimensions of the filters and the upsampled filters in which we
# introduce (rate - 1) zeros between consecutive filter values.
filter_height_up = filter_height + (filter_height - 1) * (rate - 1)
filter_width_up = filter_width + (filter_width - 1) * (rate - 1)
pad_height = filter_height_up - 1
pad_width = filter_width_up - 1
# When pad_height (pad_width) is odd, we pad more to bottom (right),
# following the same convention as conv2d().
pad_top = pad_height // 2
pad_bottom = pad_height - pad_top
pad_left = pad_width // 2
pad_right = pad_width - pad_left
elif padding == "VALID":
pad_top = 0
pad_bottom = 0
pad_left = 0
pad_right = 0
else:
raise ValueError("Invalid padding")
# Handle input whose shape is unknown during graph creation.
if value.get_shape().is_fully_defined():
value_shape = value.get_shape().as_list()
else:
value_shape = array_ops.shape(value)
in_height = value_shape[1] + pad_top + pad_bottom
in_width = value_shape[2] + pad_left + pad_right
# More padding so that rate divides the height and width of the input.
pad_bottom_extra = (rate - in_height % rate) % rate
pad_right_extra = (rate - in_width % rate) % rate
# The paddings argument to space_to_batch includes both padding components.
space_to_batch_pad = [[pad_top, pad_bottom + pad_bottom_extra],
[pad_left, pad_right + pad_right_extra]]
value = array_ops.space_to_batch(input=value,
paddings=space_to_batch_pad,
block_size=rate)
value = gen_nn_ops.conv2d(input=value,
filter=filters,
strides=[1, 1, 1, 1],
padding="VALID",
name=name)
# The crops argument to batch_to_space is just the extra padding component.
batch_to_space_crop = [[0, pad_bottom_extra], [0, pad_right_extra]]
value = array_ops.batch_to_space(input=value,
crops=batch_to_space_crop,
block_size=rate)
return value
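# Illustrative usage sketch (not part of the library API, never called by this
# module): how atrous_conv2d above might be used. The shapes and tensor names
# are assumptions chosen for the example.
def _example_atrous_conv2d_usage():
  """Builds a small graph applying a 3x3 filter with rate=2 (illustration only)."""
  # Assumed NHWC input: a batch of 8 RGB images of size 64x64.
  images = random_ops.random_normal([8, 64, 64, 3])
  # A 3x3 kernel mapping 3 input channels to 16 output channels.
  kernel = random_ops.random_normal([3, 3, 3, 16])
  # With rate=2 the effective receptive field is 5x5 while the parameter count
  # stays that of a 3x3 filter; "SAME" padding keeps the spatial size at 64x64.
  return atrous_conv2d(images, kernel, rate=2, padding="SAME")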
def conv2d_transpose(value,
filter,
output_shape,
strides,
padding="SAME",
name=None):
"""The transpose of `conv2d`.
This operation is sometimes called "deconvolution" after [Deconvolutional
Networks](http://www.matthewzeiler.com/pubs/cvpr2010/cvpr2010.pdf), but is
actually the transpose (gradient) of `conv2d` rather than an actual
deconvolution.
Args:
value: A 4-D `Tensor` of type `float` and shape
`[batch, height, width, in_channels]`.
filter: A 4-D `Tensor` with the same type as `value` and shape
`[height, width, output_channels, in_channels]`. `filter`'s
`in_channels` dimension must match that of `value`.
output_shape: A 1-D `Tensor` representing the output shape of the
deconvolution op.
strides: A list of ints. The stride of the sliding window for each
dimension of the input tensor.
padding: A string, either `'VALID'` or `'SAME'`. The padding algorithm.
See the [comment here](https://www.tensorflow.org/api_docs/python/nn.html#convolution)
name: Optional name for the returned tensor.
Returns:
A `Tensor` with the same type as `value`.
Raises:
ValueError: If input/output depth does not match `filter`'s shape, or if
padding is other than `'VALID'` or `'SAME'`.
"""
with ops.op_scope([value, filter, output_shape], name,
"conv2d_transpose") as name:
value = ops.convert_to_tensor(value, name="value")
filter = ops.convert_to_tensor(filter, name="filter")
if not value.get_shape()[3].is_compatible_with(filter.get_shape()[3]):
raise ValueError("input channels does not match filter's input channels, "
"{} != {}".format(value.get_shape()[3], filter.get_shape(
)[3]))
output_shape_ = ops.convert_to_tensor(output_shape, name="output_shape")
if not output_shape_.get_shape().is_compatible_with(tensor_shape.vector(4)):
raise ValueError("output_shape must have shape (4,), got {}"
.format(output_shape_.get_shape()))
if isinstance(output_shape, (list, np.ndarray)):
# output_shape's shape should be == [4] if reached this point.
if not filter.get_shape()[2].is_compatible_with(output_shape[3]):
raise ValueError(
"output_shape does not match filter's output channels, "
"{} != {}".format(output_shape[3], filter.get_shape()[2]))
if padding != "VALID" and padding != "SAME":
raise ValueError("padding must be either VALID or SAME:"
" {}".format(padding))
return gen_nn_ops.conv2d_backprop_input(input_sizes=output_shape_,
filter=filter,
out_backprop=value,
strides=strides,
padding=padding,
name=name)
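# Illustrative usage sketch (not part of the library API, never called by this
# module): upsampling a feature map 2x with conv2d_transpose. All shapes below
# are assumptions chosen for the example.
def _example_conv2d_transpose_usage():
  """Builds a graph that doubles spatial resolution (illustration only)."""
  feature_map = random_ops.random_normal([4, 16, 16, 32])  # [batch, h, w, in_channels]
  # Note the filter layout: [height, width, output_channels, in_channels].
  kernel = random_ops.random_normal([3, 3, 8, 32])
  # With stride 2 and "SAME" padding the output spatial size is twice the
  # input's, so output_shape must be stated explicitly as [4, 32, 32, 8].
  return conv2d_transpose(feature_map, kernel,
                          output_shape=[4, 32, 32, 8],
                          strides=[1, 2, 2, 1], padding="SAME")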
def conv3d_transpose(value,
filter,
output_shape,
strides,
padding="SAME",
name=None):
"""The transpose of `conv3d`.
This operation is sometimes called "deconvolution" after [Deconvolutional
Networks](http://www.matthewzeiler.com/pubs/cvpr2010/cvpr2010.pdf), but is
actually the transpose (gradient) of `conv3d` rather than an actual
deconvolution.
Args:
value: A 5-D `Tensor` of type `float` and shape
`[batch, depth, height, width, in_channels]`.
filter: A 5-D `Tensor` with the same type as `value` and shape
`[depth, height, width, output_channels, in_channels]`. `filter`'s
`in_channels` dimension must match that of `value`.
output_shape: A 1-D `Tensor` representing the output shape of the
deconvolution op.
strides: A list of ints. The stride of the sliding window for each
dimension of the input tensor.
padding: A string, either `'VALID'` or `'SAME'`. The padding algorithm.
See the [comment here](https://www.tensorflow.org/api_docs/python/nn.html#convolution)
name: Optional name for the returned tensor.
Returns:
A `Tensor` with the same type as `value`.
Raises:
ValueError: If input/output depth does not match `filter`'s shape, or if
padding is other than `'VALID'` or `'SAME'`.
"""
with ops.op_scope([value, filter, output_shape], name,
"conv3d_transpose") as name:
value = ops.convert_to_tensor(value, name="value")
filter = ops.convert_to_tensor(filter, name="filter")
if not value.get_shape()[4].is_compatible_with(filter.get_shape()[4]):
raise ValueError("input channels does not match filter's input channels, "
"{} != {}".format(value.get_shape()[4], filter.get_shape(
)[4]))
output_shape_ = ops.convert_to_tensor(output_shape, name="output_shape")
if not output_shape_.get_shape().is_compatible_with(tensor_shape.vector(5)):
raise ValueError("output_shape must have shape (5,), got {}"
.format(output_shape_.get_shape()))
if isinstance(output_shape, (list, np.ndarray)):
# output_shape's shape should be == [5] if reached this point.
if not filter.get_shape()[3].is_compatible_with(output_shape[4]):
raise ValueError(
"output_shape does not match filter's output channels, "
"{} != {}".format(output_shape[4], filter.get_shape()[3]))
if padding != "VALID" and padding != "SAME":
raise ValueError("padding must be either VALID or SAME:"
" {}".format(padding))
return gen_nn_ops.conv3d_backprop_input_v2(input_sizes=output_shape_,
filter=filter,
out_backprop=value,
strides=strides,
padding=padding,
name=name)
# pylint: disable=protected-access
def bias_add(value, bias, data_format=None, name=None):
"""Adds `bias` to `value`.
This is (mostly) a special case of `tf.add` where `bias` is restricted to 1-D.
Broadcasting is supported, so `value` may have any number of dimensions.
Unlike `tf.add`, the type of `bias` is allowed to differ from `value` in the
case where both types are quantized.
Args:
value: A `Tensor` with type `float`, `double`, `int64`, `int32`, `uint8`,
`int16`, `int8`, `complex64`, or `complex128`.
bias: A 1-D `Tensor` with size matching the last dimension of `value`.
Must be the same type as `value` unless `value` is a quantized type,
in which case a different quantized type may be used.
data_format: A string. 'NHWC' and 'NCHW' are supported.
name: A name for the operation (optional).
Returns:
A `Tensor` with the same type as `value`.
"""
with ops.op_scope([value, bias], name, "BiasAdd") as name:
value = ops.convert_to_tensor(value, name="input")
bias = ops.convert_to_tensor(bias, dtype=value.dtype, name="bias")
return gen_nn_ops._bias_add(value, bias, data_format=data_format, name=name)
ops.RegisterShape("BiasAdd")(common_shapes.bias_add_shape)
ops.RegisterShape("BiasAddGrad")(common_shapes.bias_add_grad_shape)
# pylint: disable=protected-access
def bias_add_v1(value, bias, name=None):
"""Adds `bias` to `value`.
  This is a deprecated version of bias_add and will soon be removed.
This is (mostly) a special case of `tf.add` where `bias` is restricted to 1-D.
Broadcasting is supported, so `value` may have any number of dimensions.
Unlike `tf.add`, the type of `bias` is allowed to differ from `value` in the
case where both types are quantized.
Args:
value: A `Tensor` with type `float`, `double`, `int64`, `int32`, `uint8`,
`int16`, `int8`, `complex64`, or `complex128`.
bias: A 1-D `Tensor` with size matching the last dimension of `value`.
Must be the same type as `value` unless `value` is a quantized type,
in which case a different quantized type may be used.
name: A name for the operation (optional).
Returns:
A `Tensor` with the same type as `value`.
"""
with ops.op_scope([value, bias], name, "BiasAddV1") as name:
value = ops.convert_to_tensor(value, name="input")
bias = ops.convert_to_tensor(bias, dtype=value.dtype, name="bias")
return gen_nn_ops._bias_add_v1(value, bias, name=name)
ops.RegisterShape("BiasAddV1")(common_shapes.bias_add_shape)
ops.RegisterShape("BiasAddGradV1")(common_shapes.bias_add_grad_shape)
def relu6(features, name=None):
"""Computes Rectified Linear 6: `min(max(features, 0), 6)`.
Args:
features: A `Tensor` with type `float`, `double`, `int32`, `int64`, `uint8`,
`int16`, or `int8`.
name: A name for the operation (optional).
Returns:
A `Tensor` with the same type as `features`.
"""
with ops.op_scope([features], name, "Relu6") as name:
features = ops.convert_to_tensor(features, name="features")
return gen_nn_ops._relu6(features, name=name)
def softmax_cross_entropy_with_logits(logits, labels, name=None):
"""Computes softmax cross entropy between `logits` and `labels`.
Measures the probability error in discrete classification tasks in which the
classes are mutually exclusive (each entry is in exactly one class). For
example, each CIFAR-10 image is labeled with one and only one label: an image
can be a dog or a truck, but not both.
**NOTE:** While the classes are mutually exclusive, their probabilities
need not be. All that is required is that each row of `labels` is
a valid probability distribution. If they are not, the computation of the
gradient will be incorrect.
If using exclusive `labels` (wherein one and only
one class is true at a time), see `sparse_softmax_cross_entropy_with_logits`.
**WARNING:** This op expects unscaled logits, since it performs a `softmax`
on `logits` internally for efficiency. Do not call this op with the
output of `softmax`, as it will produce incorrect results.
`logits` and `labels` must have the same shape `[batch_size, num_classes]`
and the same dtype (either `float32` or `float64`).
Args:
logits: Unscaled log probabilities.
labels: Each row `labels[i]` must be a valid probability distribution.
name: A name for the operation (optional).
Returns:
A 1-D `Tensor` of length `batch_size` of the same type as `logits` with the
softmax cross entropy loss.
"""
# TODO(pcmurray) Raise an error when the labels do not sum to 1. Note: This
# could break users who call this with bad labels, but disregard the bad
# results.
# The second output tensor contains the gradients. We use it in
# _CrossEntropyGrad() in nn_grad but not here.
cost, unused_backprop = gen_nn_ops._softmax_cross_entropy_with_logits(
logits, labels, name=name)
return cost
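# Illustrative usage sketch (not part of the library API, never called by this
# module): reducing the per-example cross entropy to a scalar training loss.
# The logits and the uniform label distribution below are assumptions for the example.
def _example_softmax_cross_entropy_usage():
  """Computes a scalar loss from unscaled logits and soft labels (illustration only)."""
  logits = random_ops.random_normal([32, 10])     # unscaled scores, NOT softmax outputs
  labels = array_ops.ones([32, 10]) * 0.1         # each row is a valid probability distribution
  cost = softmax_cross_entropy_with_logits(logits, labels)   # shape [32]
  return math_ops.reduce_mean(cost)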
def sparse_softmax_cross_entropy_with_logits(logits, labels, name=None):
"""Computes sparse softmax cross entropy between `logits` and `labels`.
Measures the probability error in discrete classification tasks in which the
classes are mutually exclusive (each entry is in exactly one class). For
example, each CIFAR-10 image is labeled with one and only one label: an image
can be a dog or a truck, but not both.
**NOTE:** For this operation, the probability of a given label is considered
exclusive. That is, soft classes are not allowed, and the `labels` vector
must provide a single specific index for the true class for each row of
`logits` (each minibatch entry). For soft softmax classification with
a probability distribution for each entry, see
`softmax_cross_entropy_with_logits`.
**WARNING:** This op expects unscaled logits, since it performs a softmax
on `logits` internally for efficiency. Do not call this op with the
output of `softmax`, as it will produce incorrect results.
A common use case is to have logits of shape `[batch_size, num_classes]` and
labels of shape `[batch_size]`. But higher dimensions are supported.
Args:
logits: Unscaled log probabilities of rank `r` and shape
`[d_0, d_1, ..., d_{r-2}, num_classes]` and dtype `float32` or `float64`.
labels: `Tensor` of shape `[d_0, d_1, ..., d_{r-2}]` and dtype `int32` or
`int64`. Each entry in `labels` must be an index in `[0, num_classes)`.
Other values will result in a loss of 0, but incorrect gradient
computations.
name: A name for the operation (optional).
Returns:
A `Tensor` of the same shape as `labels` and of the same type as `logits`
with the softmax cross entropy loss.
Raises:
    ValueError: If logits are scalars (need to have rank >= 1) or if the rank
      of the labels is not equal to the rank of the logits minus one.
"""
# TODO(pcmurray) Raise an error when the label is not an index in
# [0, num_classes). Note: This could break users who call this with bad
# labels, but disregard the bad results.
# Reshape logits and labels to rank 2.
with ops.op_scope([labels, logits], name,
"SparseSoftmaxCrossEntropyWithLogits"):
labels = ops.convert_to_tensor(labels)
logits = ops.convert_to_tensor(logits)
# Store label shape for result later.
labels_static_shape = labels.get_shape()
labels_shape = array_ops.shape(labels)
if logits.get_shape().ndims is not None and logits.get_shape().ndims == 0:
      raise ValueError("Logits cannot be scalars - received shape %s." %
                       logits.get_shape())
if logits.get_shape().ndims is not None and (
labels_static_shape.ndims is not None and
labels_static_shape.ndims != logits.get_shape().ndims - 1):
      raise ValueError("Rank mismatch: Labels rank (received %s) should equal "
                       "logits rank (received %s) - 1." %
                       (labels_static_shape.ndims, logits.get_shape().ndims))
# Check if no reshapes are required.
if logits.get_shape().ndims == 2:
cost, _ = gen_nn_ops._sparse_softmax_cross_entropy_with_logits(
logits, labels, name=name)
return cost
# Reshape logits to 2 dim, labels to 1 dim.
num_classes = array_ops.gather(array_ops.shape(logits),
array_ops.rank(logits) - 1)
logits = array_ops.reshape(logits, [-1, num_classes])
labels = array_ops.reshape(labels, [-1])
# The second output tensor contains the gradients. We use it in
# _CrossEntropyGrad() in nn_grad but not here.
cost, _ = gen_nn_ops._sparse_softmax_cross_entropy_with_logits(
logits, labels, name=name)
cost = array_ops.reshape(cost, labels_shape)
cost.set_shape(labels_static_shape)
return cost
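# Illustrative usage sketch (not part of the library API, never called by this
# module): the sparse variant takes integer class indices instead of one-hot or
# soft labels. The shapes and number of classes are assumptions for the example.
def _example_sparse_softmax_cross_entropy_usage():
  """Computes per-example losses from logits and integer labels (illustration only)."""
  logits = random_ops.random_normal([32, 10])
  # Random integer class indices in [0, 10).
  labels = random_ops.random_uniform([32], minval=0, maxval=10, dtype=dtypes.int64)
  return sparse_softmax_cross_entropy_with_logits(logits, labels)  # shape [32]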
@ops.RegisterShape("SparseSoftmaxCrossEntropyWithLogits")
def _SparseSoftmaxCrossEntropyWithLogitsShape(op):
"""Shape function for SparseSoftmaxCrossEntropyWithLogits op."""
logits_shape = op.inputs[0].get_shape()
input_shape = logits_shape.with_rank(2)
batch_size = input_shape[0]
# labels_shape
op.inputs[1].get_shape().merge_with(tensor_shape.vector(batch_size))
return [tensor_shape.vector(batch_size.value), input_shape]
@ops.RegisterShape("SoftmaxCrossEntropyWithLogits")
def _SoftmaxCrossEntropyWithLogitsShape(op):
"""Shape function for SoftmaxCrossEntropyWithLogits op."""
logits_shape = op.inputs[0].get_shape()
labels_shape = op.inputs[1].get_shape()
input_shape = logits_shape.merge_with(labels_shape).with_rank(2)
batch_size = input_shape[0]
return [tensor_shape.vector(batch_size.value), input_shape]
def avg_pool(value, ksize, strides, padding, data_format="NHWC", name=None):
"""Performs the average pooling on the input.
Each entry in `output` is the mean of the corresponding size `ksize`
window in `value`.
Args:
value: A 4-D `Tensor` of shape `[batch, height, width, channels]` and type
`float32`, `float64`, `qint8`, `quint8`, or `qint32`.
ksize: A list of ints that has length >= 4.
The size of the window for each dimension of the input tensor.
strides: A list of ints that has length >= 4.
The stride of the sliding window for each dimension of the
input tensor.
padding: A string, either `'VALID'` or `'SAME'`. The padding algorithm.
See the [comment here](https://www.tensorflow.org/api_docs/python/nn.html#convolution)
data_format: A string. 'NHWC' and 'NCHW' are supported.
name: Optional name for the operation.
Returns:
A `Tensor` with the same type as `value`. The average pooled output tensor.
"""
with ops.op_scope([value], name, "AvgPool") as name:
value = ops.convert_to_tensor(value, name="input")
return gen_nn_ops._avg_pool(value,
ksize=ksize,
strides=strides,
padding=padding,
data_format=data_format,
name=name)
def max_pool(value, ksize, strides, padding, data_format="NHWC", name=None):
"""Performs the max pooling on the input.
Args:
value: A 4-D `Tensor` with shape `[batch, height, width, channels]` and
type `tf.float32`.
ksize: A list of ints that has length >= 4. The size of the window for
each dimension of the input tensor.
strides: A list of ints that has length >= 4. The stride of the sliding
window for each dimension of the input tensor.
padding: A string, either `'VALID'` or `'SAME'`. The padding algorithm.
See the [comment here](https://www.tensorflow.org/api_docs/python/nn.html#convolution)
data_format: A string. 'NHWC' and 'NCHW' are supported.
name: Optional name for the operation.
Returns:
A `Tensor` with type `tf.float32`. The max pooled output tensor.
"""
with ops.op_scope([value], name, "MaxPool") as name:
value = ops.convert_to_tensor(value, name="input")
return gen_nn_ops._max_pool(value,
ksize=ksize,
strides=strides,
padding=padding,
data_format=data_format,
name=name)
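# Illustrative usage sketch (not part of the library API, never called by this
# module): the common 2x2, stride-2 max pool that halves height and width.
# The input shape is an assumption chosen for the example.
def _example_max_pool_usage():
  """Halves the spatial dimensions of an NHWC tensor (illustration only)."""
  features = random_ops.random_normal([8, 28, 28, 16])
  # ksize and strides are given per dimension of the NHWC input.
  return max_pool(features, ksize=[1, 2, 2, 1],
                  strides=[1, 2, 2, 1], padding="VALID")  # -> [8, 14, 14, 16]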
ops.RegisterShape("Relu")(common_shapes.unchanged_shape)
ops.RegisterShape("Relu6")(common_shapes.unchanged_shape)
ops.RegisterShape("Elu")(common_shapes.unchanged_shape)
ops.RegisterShape("Softplus")(common_shapes.unchanged_shape)
ops.RegisterShape("Softsign")(common_shapes.unchanged_shape)
@ops.RegisterShape("ReluGrad")
@ops.RegisterShape("Relu6Grad")
@ops.RegisterShape("EluGrad")
@ops.RegisterShape("SoftplusGrad")
@ops.RegisterShape("SoftsignGrad")
def _BinaryElementwiseShape(op):
"""Returns same shape as both inputs to op.
Args:
op: Input operation.
Returns:
Shape of both inputs to `op`.
"""
return [op.inputs[0].get_shape().merge_with(op.inputs[1].get_shape())]
ops.RegisterShape("L2Loss")(common_shapes.scalar_shape)
ops.RegisterShape("LRN")(common_shapes.unchanged_shape_with_rank(4))
@ops.RegisterShape("LRNGrad")
def _LRNGradShape(op):
"""Shape function for LRNGrad op."""
in_grads_shape = op.inputs[0].get_shape().with_rank(4)
in_image_shape = op.inputs[1].get_shape().with_rank(4)
out_image_shape = op.inputs[2].get_shape().with_rank(4)
return [in_grads_shape.merge_with(in_image_shape).merge_with(out_image_shape)]
ops.RegisterShape("Softmax")(common_shapes.unchanged_shape_with_rank(2))
ops.RegisterShape("LogSoftmax")(common_shapes.unchanged_shape_with_rank(2))
@ops.RegisterShape("InTopK")
def _InTopKShape(op):
"""Shape function for InTopK op."""
predictions_shape = op.inputs[0].get_shape().with_rank(2)
targets_shape = op.inputs[1].get_shape().with_rank(1)
batch_size = predictions_shape[0].merge_with(targets_shape[0])
return [tensor_shape.vector(batch_size.value)]
@ops.RegisterShape("TopK")
@ops.RegisterShape("TopKV2")
def _TopKShape(op):
"""Shape function for TopK and TopKV2 ops."""
input_shape = op.inputs[0].get_shape().with_rank_at_least(1)
if len(op.inputs) >= 2:
k = tensor_util.constant_value(op.inputs[1])
else:
k = op.get_attr("k")
last = input_shape[-1].value
if last is not None and k is not None and last < k:
raise ValueError("input.shape %s must have last dimension >= k = %d" %
(input_shape, k))
output_shape = input_shape[:-1].concatenate([k])
return [output_shape, output_shape]
@ops.RegisterShape("BatchNormWithGlobalNormalization")
def _BatchNormShape(op):
"""Shape function for BatchNormWithGlobalNormalization op."""
input_shape = op.inputs[0].get_shape().with_rank(4)
mean_shape = op.inputs[1].get_shape().with_rank(1)
var_shape = op.inputs[2].get_shape().with_rank(1)
beta_shape = op.inputs[3].get_shape().with_rank(1)
gamma_shape = op.inputs[4].get_shape().with_rank(1)
mean_shape[0].merge_with(input_shape[3])
var_shape[0].merge_with(input_shape[3])
beta_shape[0].merge_with(input_shape[3])
gamma_shape[0].merge_with(input_shape[3])
return [input_shape]
@ops.RegisterShape("BatchNormWithGlobalNormalizationGrad")
def _BatchNormGradShape(op):
"""Shape function for BatchNormWithGlobalNormalizationGrad op."""
input_shape = op.inputs[0].get_shape().with_rank(4)
mean_shape = op.inputs[1].get_shape().with_rank(1)
var_shape = op.inputs[2].get_shape().with_rank(1)
beta_shape = op.inputs[3].get_shape().with_rank(1)
out_backprop_shape = op.inputs[4].get_shape().with_rank(4)
input_shape = input_shape.merge_with(out_backprop_shape)
vector_dim = input_shape[3]
vector_dim = vector_dim.merge_with(mean_shape[0])
vector_dim = vector_dim.merge_with(var_shape[0])
vector_dim = vector_dim.merge_with(beta_shape[0])
return [input_shape] + ([tensor_shape.vector(vector_dim)] * 4)
ops.RegisterShape("Conv2D")(common_shapes.conv2d_shape)
ops.RegisterShape("DepthwiseConv2dNative")(
common_shapes.depthwise_conv2d_native_shape)
ops.RegisterShape("AvgPool")(common_shapes.avg_pool_shape)
ops.RegisterShape("MaxPool")(common_shapes.max_pool_shape)
@ops.RegisterShape("MaxPoolWithArgmax")
def _MaxPoolWithArgMaxShape(op):
"""Shape function for MaxPoolWithArgmax op."""
return common_shapes.max_pool_shape(op) * 2
@ops.RegisterShape("AvgPoolGrad")
def _AvgPoolGradShape(op):
"""Shape function for the AvgPoolGrad op."""
orig_input_shape = tensor_util.constant_value(op.inputs[0])
if orig_input_shape is not None:
return [tensor_shape.TensorShape(orig_input_shape.tolist())]
else:
# NOTE(mrry): We could in principle work out the shape from the
# gradients and the attrs, but if we do not know orig_input_shape
# statically, then we are unlikely to know the shape of the
# gradients either.
return [tensor_shape.unknown_shape(ndims=4)]
@ops.RegisterShape("Conv2DBackpropFilter")
def _Conv2DBackpropFilterShape(op):
"""Shape function for the Conv2DBackpropFilter op."""
filter_shape = tensor_util.constant_value(op.inputs[1])
if filter_shape is not None:
return [tensor_shape.TensorShape(filter_shape.tolist())]
else:
# NOTE(mrry): We could in principle work out the shape from the
# gradients and the attrs, but if we do not know filter_shape
# statically, then we are unlikely to know the shape of the
# gradients either.
return [tensor_shape.unknown_shape(ndims=4)]
@ops.RegisterShape("Conv2DBackpropInput")
def _Conv2DBackpropInputShape(op):
"""Shape function for the Conv2DBackpropInput op."""
input_shape = tensor_util.constant_value(op.inputs[0])
if input_shape is not None:
return [tensor_shape.TensorShape(input_shape.tolist())]
else:
# NOTE(mrry): We could in principle work out the shape from the
# gradients and the attrs, but if we do not know input_shape
# statically, then we are unlikely to know the shape of the
# gradients either.
return [tensor_shape.unknown_shape(ndims=4)]
@ops.RegisterShape("DepthwiseConv2dNativeBackpropFilter")
def _DepthwiseConv2dNativeBackpropFilterShape(op):
"""Shape function for the DepthwiseConv2dNativeBackpropFilter op."""
filter_shape = tensor_util.constant_value(op.inputs[1])
if filter_shape is not None:
return [tensor_shape.TensorShape(filter_shape.tolist())]
else:
return [tensor_shape.unknown_shape(ndims=4)]
@ops.RegisterShape("DepthwiseConv2dNativeBackpropInput")
def _DepthwiseConv2dNativeBackpropInputShape(op):
"""Shape function for the DepthwiseConv2dNativeBackpropInput op."""
input_shape = tensor_util.constant_value(op.inputs[0])
if input_shape is not None:
return [tensor_shape.TensorShape(input_shape.tolist())]
else:
return [tensor_shape.unknown_shape(ndims=4)]
@ops.RegisterShape("MaxPoolGrad")
@ops.RegisterShape("MaxPoolGradWithArgmax")
def _MaxPoolGradShape(op):
"""Shape function for the MaxPoolGrad op."""
orig_input_shape = op.inputs[0].get_shape().with_rank(4)
return [orig_input_shape]
@ops.RegisterStatistics("Conv2D", "flops")
def _calc_conv_flops(graph, node):
"""Calculates the compute resources needed for Conv2D."""
input_shape = graph_util.tensor_shape_from_node_def_name(graph, node.input[0])
input_shape.assert_is_fully_defined()
filter_shape = graph_util.tensor_shape_from_node_def_name(graph,
node.input[1])
filter_shape.assert_is_fully_defined()
output_shape = graph_util.tensor_shape_from_node_def_name(graph, node.name)
output_shape.assert_is_fully_defined()
filter_height = int(filter_shape[0])
filter_width = int(filter_shape[1])
filter_in_depth = int(filter_shape[2])
output_count = np.prod(output_shape.as_list())
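  # Each output element requires filter_height * filter_width * filter_in_depth
  # multiply-accumulate pairs, counted here as 2 flops each.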
return ops.OpStats("flops", (output_count * filter_in_depth * filter_height *
filter_width * 2))
@ops.RegisterStatistics("Conv2D", "weight_parameters")
def _calc_conv_weight_params(graph, node):
"""Calculates the on-disk size of the weights for Conv2D."""
input_shape = graph_util.tensor_shape_from_node_def_name(graph, node.input[0])
input_shape.assert_is_fully_defined()
filter_shape = graph_util.tensor_shape_from_node_def_name(graph,
node.input[1])
filter_shape.assert_is_fully_defined()
output_shape = graph_util.tensor_shape_from_node_def_name(graph, node.name)
output_shape.assert_is_fully_defined()
filter_height = int(filter_shape[0])
filter_width = int(filter_shape[1])
filter_in_depth = int(filter_shape[2])
filter_out_depth = int(filter_shape[3])
return ops.OpStats("weight_parameters", (filter_height * filter_width *
filter_in_depth * filter_out_depth))
@ops.RegisterStatistics("DepthwiseConv2dNative", "flops")
def _calc_depthwise_conv_flops(graph, node):
"""Calculates the compute resources needed for DepthwiseConv2dNative."""
input_shape = graph_util.tensor_shape_from_node_def_name(graph, node.input[0])
input_shape.assert_is_fully_defined()
filter_shape = graph_util.tensor_shape_from_node_def_name(graph,
node.input[1])
filter_shape.assert_is_fully_defined()
output_shape = graph_util.tensor_shape_from_node_def_name(graph, node.name)
output_shape.assert_is_fully_defined()
filter_height = int(filter_shape[0])
filter_width = int(filter_shape[1])
output_count = np.prod(output_shape.as_list())
return ops.OpStats("flops", (output_count * filter_height * filter_width * 2))
@ops.RegisterStatistics("DepthwiseConv2dNative", "weight_parameters")
def _calc_depthwise_conv_weight_params(graph, node):
"""Calculates the on-disk size of the weights for DepthwiseConv2dNative."""
input_shape = graph_util.tensor_shape_from_node_def_name(graph, node.input[0])
input_shape.assert_is_fully_defined()
filter_shape = graph_util.tensor_shape_from_node_def_name(graph,
node.input[1])
filter_shape.assert_is_fully_defined()
output_shape = graph_util.tensor_shape_from_node_def_name(graph, node.name)
output_shape.assert_is_fully_defined()
filter_height = int(filter_shape[0])
filter_width = int(filter_shape[1])
filter_in_depth = int(filter_shape[2])
filter_channel_multiplier = int(filter_shape[3])
return ops.OpStats("weight_parameters", (filter_height * filter_width *
filter_in_depth *
filter_channel_multiplier))
@ops.RegisterShape("Conv3D")
def _Conv3DShape(op):
"""Shape function for Conv3D."""
input_shape = op.inputs[0].get_shape().with_rank(5)
filter_shape = op.inputs[1].get_shape().with_rank(5)
batch_size = input_shape[0]
out_channels = filter_shape[4]
# Check that the input number of channels is compatible between
# input data and filter size.
input_shape[4].assert_is_compatible_with(filter_shape[3])
stride_b, stride_p, stride_r, stride_c, stride_d = op.get_attr("strides")
assert stride_b == 1
assert stride_d == 1
padding_type = op.get_attr("padding")
out_planes, out_rows, out_cols = common_shapes.get_conv_output_size(
input_shape[1:4], filter_shape[0:3], (stride_p, stride_r, stride_c),
padding_type)
return [tensor_shape.TensorShape([batch_size, out_planes, out_rows, out_cols,
out_channels])]
@ops.RegisterShape("MaxPool3D")
@ops.RegisterShape("AvgPool3D")
def _Pool3DShape(op):
"""Shape function for Max/AvgPool3D."""
input_shape = op.inputs[0].get_shape().with_rank(5)
ksize_b, ksize_p, ksize_r, ksize_c, ksize_d = op.get_attr("ksize")
assert ksize_b == 1
assert ksize_d == 1
stride_b, stride_p, stride_r, stride_c, stride_d = op.get_attr("strides")
assert stride_b == 1
assert stride_d == 1
batch_size = input_shape[0]
channels = input_shape[4]
padding = op.get_attr("padding")
out_planes, out_rows, out_cols = common_shapes.get_conv_output_size(
input_shape[1:4], (ksize_p, ksize_r, ksize_c),
(stride_p, stride_r, stride_c), padding)
return [tensor_shape.TensorShape([batch_size, out_planes, out_rows, out_cols,
channels])]
@ops.RegisterShape("Conv3DBackpropFilter")
def _Conv3DBackpropFilterShape(op):
"""Shape function for the Conv3DBackpropFilter op."""
filter_shape = op.inputs[1].get_shape()
return [filter_shape.with_rank(5)]
@ops.RegisterShape("Conv3DBackpropInput")
def _Conv3DBackpropInputShape(op):
"""Shape function for the Conv3DBackpropInput op."""
input_shape = op.inputs[0].get_shape()
return [input_shape.with_rank(5)]
@ops.RegisterShape("Conv3DBackpropFilterV2")
def _Conv3DBackpropFilterShapeV2(op):
"""Shape function for the Conv3DBackpropFilterV2 op."""
filter_shape = tensor_util.constant_value(op.inputs[1])
return [tensor_shape.TensorShape(filter_shape).with_rank(5)]
@ops.RegisterShape("Conv3DBackpropInputV2")
def _Conv3DBackpropInputShapeV2(op):
"""Shape function for the Conv3DBackpropInputV2 op."""
input_shape = tensor_util.constant_value(op.inputs[0])
return [tensor_shape.TensorShape(input_shape).with_rank(5)]
@ops.RegisterShape("AvgPool3DGrad")
def _AvgPool3DGradShape(op):
"""Shape function for the AvgPool3DGrad op."""
orig_input_shape = tensor_util.constant_value(op.inputs[0])
return [tensor_shape.TensorShape(orig_input_shape).with_rank(5)]
@ops.RegisterShape("MaxPool3DGrad")
def _MaxPool3DGradShape(op):
"""Shape function for the MaxPoolGrad op."""
orig_input_shape = op.inputs[0].get_shape().with_rank(5)
return [orig_input_shape]
@ops.RegisterStatistics("BiasAdd", "flops")
def _calc_bias_add_flops(graph, node):
"""Calculates the computing needed for BiasAdd."""
input_shape = graph_util.tensor_shape_from_node_def_name(graph, node.input[0])
input_shape.assert_is_fully_defined()
input_count = np.prod(input_shape.as_list())
return ops.OpStats("flops", input_count)
@ops.RegisterStatistics("BiasAdd", "weight_parameters")
def _calc_bias_add_weight_params(graph, node):
"""Calculates the on-disk weight parameters for BiasAdd."""
bias_shape = graph_util.tensor_shape_from_node_def_name(graph, node.input[1])
bias_shape.assert_is_fully_defined()
bias_count = np.prod(bias_shape.as_list())
return ops.OpStats("weight_parameters", bias_count)
def xw_plus_b(x, weights, biases, name=None): # pylint: disable=invalid-name
"""Computes matmul(x, weights) + biases.
Args:
x: a 2D tensor. Dimensions typically: batch, in_units
weights: a 2D tensor. Dimensions typically: in_units, out_units
biases: a 1D tensor. Dimensions: out_units
name: A name for the operation (optional). If not specified
"xw_plus_b" is used.
Returns:
A 2-D Tensor computing matmul(x, weights) + biases.
Dimensions typically: batch, out_units.
"""
with ops.op_scope([x, weights, biases], name, "xw_plus_b") as name:
x = ops.convert_to_tensor(x, name="x")
weights = ops.convert_to_tensor(weights, name="weights")
biases = ops.convert_to_tensor(biases, name="biases")
mm = math_ops.matmul(x, weights)
return bias_add(mm, biases, name=name)
def xw_plus_b_v1(x, weights, biases, name=None): # pylint: disable=invalid-name
"""Computes matmul(x, weights) + biases.
  This is a deprecated version of `xw_plus_b` that will soon be removed.
Args:
x: a 2D tensor. Dimensions typically: batch, in_units
weights: a 2D tensor. Dimensions typically: in_units, out_units
biases: a 1D tensor. Dimensions: out_units
name: A name for the operation (optional). If not specified
"xw_plus_b_v1" is used.
Returns:
A 2-D Tensor computing matmul(x, weights) + biases.
Dimensions typically: batch, out_units.
"""
with ops.op_scope([x, weights, biases], name, "xw_plus_b_v1") as name:
x = ops.convert_to_tensor(x, name="x")
weights = ops.convert_to_tensor(weights, name="weights")
biases = ops.convert_to_tensor(biases, name="biases")
mm = math_ops.matmul(x, weights)
return bias_add_v1(mm, biases, name=name)
# pylint: disable=invalid-name
def dropout(x, keep_prob, noise_shape=None, seed=None, name=None):
"""Computes dropout.
With probability `keep_prob`, outputs the input element scaled up by
`1 / keep_prob`, otherwise outputs `0`. The scaling is so that the expected
sum is unchanged.
By default, each element is kept or dropped independently. If `noise_shape`
is specified, it must be
[broadcastable](http://docs.scipy.org/doc/numpy/user/basics.broadcasting.html)
to the shape of `x`, and only dimensions with `noise_shape[i] == shape(x)[i]`
will make independent decisions. For example, if `shape(x) = [k, l, m, n]`
and `noise_shape = [k, 1, 1, n]`, each batch and channel component will be
kept independently and each row and column will be kept or not kept together.
Args:
x: A tensor.
keep_prob: A scalar `Tensor` with the same type as x. The probability
that each element is kept.
noise_shape: A 1-D `Tensor` of type `int32`, representing the
shape for randomly generated keep/drop flags.
seed: A Python integer. Used to create random seeds. See
[`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed)
for behavior.
name: A name for this operation (optional).
Returns:
A Tensor of the same shape of `x`.
Raises:
ValueError: If `keep_prob` is not in `(0, 1]`.
"""
with ops.op_scope([x], name, "dropout") as name:
x = ops.convert_to_tensor(x, name="x")
if isinstance(keep_prob, float) and not 0 < keep_prob <= 1:
raise ValueError("keep_prob must be a scalar tensor or a float in the "
"range (0, 1], got %g" % keep_prob)
keep_prob = ops.convert_to_tensor(keep_prob,
dtype=x.dtype,
name="keep_prob")
keep_prob.get_shape().assert_is_compatible_with(tensor_shape.scalar())
noise_shape = noise_shape if noise_shape is not None else array_ops.shape(x)
# uniform [keep_prob, 1.0 + keep_prob)
random_tensor = keep_prob
random_tensor += random_ops.random_uniform(noise_shape,
seed=seed,
dtype=x.dtype)
# 0. if [keep_prob, 1.0) and 1. if [1.0, 1.0 + keep_prob)
binary_tensor = math_ops.floor(random_tensor)
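    # Keep the elements whose draw landed in [1.0, 1.0 + keep_prob) and scale
    # them by 1/keep_prob so the expected sum of the output equals the input.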
ret = x * math_ops.inv(keep_prob) * binary_tensor
ret.set_shape(x.get_shape())
return ret
def top_k(input, k=1, sorted=True, name=None):
"""Finds values and indices of the `k` largest entries for the last dimension.
If the input is a vector (rank-1), finds the `k` largest entries in the vector
and outputs their values and indices as vectors. Thus `values[j]` is the
`j`-th largest entry in `input`, and its index is `indices[j]`.
For matrices (resp. higher rank input), computes the top `k` entries in each
row (resp. vector along the last dimension). Thus,
values.shape = indices.shape = input.shape[:-1] + [k]
If two elements are equal, the lower-index element appears first.
Args:
input: 1-D or higher `Tensor` with last dimension at least `k`.
k: 0-D `int32` `Tensor`. Number of top elements to look for along the last
dimension (along each row for matrices).
sorted: If true the resulting `k` elements will be sorted by the values in
descending order.
name: Optional name for the operation.
Returns:
values: The `k` largest elements along each last dimensional slice.
indices: The indices of `values` within the last dimension of `input`.
"""
return gen_nn_ops._top_kv2(input, k=k, sorted=sorted, name=name)
def conv1d(value, filters, stride, padding,
use_cudnn_on_gpu=None, data_format=None,
name=None):
"""Computes a 1-D convolution given 3-D input and filter tensors.
Given an input tensor of shape [batch, in_width, in_channels]
and a filter / kernel tensor of shape
[filter_width, in_channels, out_channels], this op reshapes
the arguments to pass them to conv2d to perform the equivalent
convolution operation.
Internally, this op reshapes the input tensors and invokes
`tf.nn.conv2d`. A tensor of shape [batch, in_width, in_channels]
is reshaped to [batch, 1, in_width, in_channels], and the filter
is reshaped to [1, filter_width, in_channels, out_channels].
The result is then reshaped back to [batch, out_width, out_channels]
(where out_width is a function of the stride and padding as in
conv2d) and returned to the caller.
Args:
value: A 3D `Tensor`. Must be of type `float32` or `float64`.
filters: A 3D `Tensor`. Must have the same type as `input`.
stride: An `integer`. The number of entries by which
the filter is moved right at each step.
padding: 'SAME' or 'VALID'
use_cudnn_on_gpu: An optional `bool`. Defaults to `True`.
data_format: An optional `string` from `"NHWC", "NCHW"`. Defaults
to `"NHWC"`, the data is stored in the order of
[batch, in_width, in_channels]. The `"NCHW"` format stores
data as [batch, in_channels, in_width].
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as input.
"""
with ops.op_scope([value, filters], name, "conv1d") as name:
# Reshape the input tensor to [batch, 1, in_width, in_channels]
value = array_ops.expand_dims(value, 1)
# And reshape the filter to [1, filter_width, in_channels, out_channels]
filters = array_ops.expand_dims(filters, 0)
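    # With strides [1, 1, stride, 1] the 2-D convolution slides the filter
    # along the width dimension only.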
result = gen_nn_ops.conv2d(value, filters, [1, 1, stride, 1], padding,
use_cudnn_on_gpu=use_cudnn_on_gpu,
data_format=data_format)
return array_ops.squeeze(result, [1])
@ops.RegisterShape("Dilation2D")
def _Dilation2DShape(op):
"""Shape function for Dilation2D op."""
input_shape = op.inputs[0].get_shape().with_rank(4)
filter_shape = op.inputs[1].get_shape().with_rank(3)
batch_size = input_shape[0]
in_rows = input_shape[1]
in_cols = input_shape[2]
depth = input_shape[3]
filter_rows = filter_shape[0]
filter_cols = filter_shape[1]
# Check that the input depths are compatible.
input_shape[3].assert_is_compatible_with(filter_shape[2])
stride_b, stride_r, stride_c, stride_d = op.get_attr("strides")
if stride_b != 1 or stride_d != 1:
raise ValueError("Current implementation does not yet support "
"strides in the batch and depth dimensions.")
rate_b, rate_r, rate_c, rate_d = op.get_attr("rates")
if rate_b != 1 or rate_d != 1:
raise ValueError("Current implementation does not yet support "
"rates in the batch and depth dimensions.")
filter_rows_eff = filter_rows + (filter_rows - 1) * (rate_r - 1)
filter_cols_eff = filter_cols + (filter_cols - 1) * (rate_c - 1)
padding = op.get_attr("padding")
out_rows, out_cols = common_shapes.get2d_conv_output_size(in_rows, in_cols,
filter_rows_eff,
filter_cols_eff,
stride_r, stride_c,
padding)
output_shape = [batch_size, out_rows, out_cols, depth]
return [tensor_shape.TensorShape(output_shape)]
@ops.RegisterShape("Dilation2DBackpropInput")
def _Dilation2DBackpropInputShape(op):
"""Shape function for Dilation2DBackpropInput op."""
return [op.inputs[0].get_shape()]
@ops.RegisterShape("Dilation2DBackpropFilter")
def _Dilation2DBackpropFilterShape(op):
"""Shape function for Dilation2DBackpropFilter op."""
return [op.inputs[1].get_shape()]
@ops.RegisterStatistics("Dilation2D", "flops")
def _calc_dilation2d_flops(graph, node):
"""Calculates the compute resources needed for Dilation2D."""
input_shape = graph_util.tensor_shape_from_node_def_name(graph, node.input[0])
input_shape.assert_is_fully_defined()
filter_shape = graph_util.tensor_shape_from_node_def_name(graph,
node.input[1])
filter_shape.assert_is_fully_defined()
output_shape = graph_util.tensor_shape_from_node_def_name(graph, node.name)
output_shape.assert_is_fully_defined()
filter_height = int(filter_shape[0])
filter_width = int(filter_shape[1])
output_count = np.prod(output_shape.as_list())
return ops.OpStats("flops", (output_count * filter_height * filter_width * 2))
@ops.RegisterStatistics("Dilation2D", "weight_parameters")
def _calc_dilation2d_weight_params(graph, node):
"""Calculates the on-disk size of the weights for Dilation2D."""
filter_shape = graph_util.tensor_shape_from_node_def_name(graph,
node.input[1])
filter_shape.assert_is_fully_defined()
filter_height = int(filter_shape[0])
filter_width = int(filter_shape[1])
filter_depth = int(filter_shape[2])
return ops.OpStats("weight_parameters",
(filter_height * filter_width * filter_depth))
def erosion2d(value, kernel, strides, rates, padding, name=None):
"""Computes the grayscale erosion of 4-D `value` and 3-D `kernel` tensors.
The `value` tensor has shape `[batch, in_height, in_width, depth]` and the
`kernel` tensor has shape `[kernel_height, kernel_width, depth]`, i.e.,
each input channel is processed independently of the others with its own
structuring function. The `output` tensor has shape
`[batch, out_height, out_width, depth]`. The spatial dimensions of the
output tensor depend on the `padding` algorithm. We currently only support the
default "NHWC" `data_format`.
In detail, the grayscale morphological 2-D erosion is given by:
output[b, y, x, c] =
min_{dy, dx} value[b,
strides[1] * y - rates[1] * dy,
strides[2] * x - rates[2] * dx,
c] -
kernel[dy, dx, c]
Duality: The erosion of `value` by the `kernel` is equal to the negation of
the dilation of `-value` by the reflected `kernel`.
Args:
value: A `Tensor`. 4-D with shape `[batch, in_height, in_width, depth]`.
kernel: A `Tensor`. Must have the same type as `value`.
3-D with shape `[kernel_height, kernel_width, depth]`.
strides: A list of `ints` that has length `>= 4`.
1-D of length 4. The stride of the sliding window for each dimension of
the input tensor. Must be: `[1, stride_height, stride_width, 1]`.
rates: A list of `ints` that has length `>= 4`.
1-D of length 4. The input stride for atrous morphological dilation.
Must be: `[1, rate_height, rate_width, 1]`.
padding: A `string` from: `"SAME", "VALID"`.
The type of padding algorithm to use.
name: A name for the operation (optional). If not specified "erosion2d"
is used.
Returns:
A `Tensor`. Has the same type as `value`.
4-D with shape `[batch, out_height, out_width, depth]`.
Raises:
    ValueError: If the `value` depth does not match the `kernel`'s shape, or if
padding is other than `'VALID'` or `'SAME'`.
"""
with ops.op_scope([value, kernel], name, "erosion2d") as name:
# Reduce erosion to dilation by duality.
return math_ops.neg(gen_nn_ops.dilation2d(input=math_ops.neg(value),
filter=array_ops.reverse(
kernel, [True, True, False]),
strides=strides,
rates=rates,
padding=padding,
name=name))
# pylint: enable=invalid-name
| apache-2.0 | 5,910,611,416,458,937,000 | 41.535741 | 92 | 0.660375 | false | 3.586929 | false | false | false |
joseguerrero/sembrando | src/presentacion/paginas/pantalla9.py | 1 | 30062 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pygame
from librerias import pantalla
from librerias.boton import boton
from librerias.texto import texto
from librerias.popups import PopUp
from librerias.imagen import imagen
from librerias.contenido import cont
from librerias.imgfondo import fondo
from librerias.pixelperfect import *
from librerias.textopopups import p9
from librerias.objmask import object_mask
from paginas import menucfg
from paginas import pantalla2
from paginas import pantalla8
from paginas import pantalla10
class estado(pantalla.Pantalla):
def __init__(self, parent):
"""
        Class initializer.
        @param parent: Instance of the screen manager.
        @type parent: Manejador
"""
self.parent = parent
self.previa = True
self.deteccion_movimiento = False
self.fondo_texto = False
self.background = pygame.image.load(self.fondos + "fondo-mapa2.png")
self.banner_siembra = imagen(self.banners + "banner-siembra.png", 0, 0)
self.banner_inf = imagen(self.banners + "banner-inf.png", 0, 432)
self.mouse = object_mask("Cursor", 850, 512, self.varios + "puntero.png")
        # To keep the map pieces correctly placed, do not modify the x and y values of the regions, only those of zulia.
self.zulia = object_mask(u"región zuliana", 13, 140, self.varios + "zulia-des.png", self.varios + "zulia-act.png")
self.occ = object_mask(u"región occidental", self.zulia.rect.left + 55, self.zulia.rect.top - 6, self.varios + "occ-des.png", self.varios + "occ-act.png")
self.central = object_mask(u"región central", self.zulia.rect.left + 115, self.zulia.rect.top + 37, self.varios + "central-des.png", self.varios + "central-act.png")
self.capital = object_mask(u"región capital", self.zulia.rect.left + 152, self.zulia.rect.top + 32, self.varios + "capital-des.png", self.varios + "capital-act.png")
self.ori = object_mask(u"región nor oriental", self.zulia.rect.left +195, self.zulia.rect.top + 29, self.varios + "ori-des.png", self.varios + "ori-act.png")
self.andes = object_mask(u"región los andes", self.zulia.rect.left + 23, self.zulia.rect.top + 48, self.varios + "andes-des.png", self.varios + "andes-act.png")
self.llanos = object_mask(u"región los llanos", self.zulia.rect.left + 26, self.zulia.rect.top + 47, self.varios + "llanos-des.png", self.varios + "llanos-act.png")
self.guayana = object_mask(u"región guayana", self.zulia.rect.left + 140, self.zulia.rect.top + 48, self.varios + "guayana-des.png", self.varios + "guayana-act.png")
self.insu = object_mask(u"región insular", self.zulia.rect.left + 149, self.zulia.rect.top - 6, self.varios + "insular-des.png", self.varios + "insular-act.png")
self.limites1 = pygame.image.load(self.varios + "limitemar.png").convert_alpha()
self.limites2 = pygame.image.load(self.varios + "limitemar2.png").convert_alpha()
self.zona_r = pygame.image.load(self.varios + "zona-recla.png").convert_alpha()
self.n_estados = pygame.image.load(self.varios + "nombre-estados.png").convert_alpha()
self.cargar_botones()
self.cargar_textos()
self.resume()
self.bg = fondo(573, 377)
def cargar_textos(self):
"""
        Loads the texts used on this screen.
"""
self.texto9_2_1 = texto(490, 60, cont["texto9_2_1"] , self.parent.config.t_fuente, "normal", 1000)
self.texto9_2_2 = texto(490, self.texto9_2_1.y + self.texto9_2_1.ancho_final + 10, cont["texto9_2_2"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_2_3 = texto(490, self.texto9_2_2.y + self.texto9_2_2.ancho_final + 10, cont["texto9_2_3"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_2_4 = texto(490, self.texto9_2_3.y + self.texto9_2_3.ancho_final + 10, cont["texto9_2_4"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_3_1 = texto(490, 60, cont["texto9_3_1"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_3_2 = texto(490, self.texto9_3_1.y + self.texto9_3_1.ancho_final + 10, cont["texto9_3_2"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_3_3 = texto(490, self.texto9_3_2.y + self.texto9_3_2.ancho_final + 10, cont["texto9_3_3"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_4_1 = texto(490, 60, cont["texto9_4_1"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_4_2 = texto(490, self.texto9_4_1.y + self.texto9_4_1.ancho_final + 10, cont["texto9_4_2"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_4_3 = texto(490, self.texto9_4_2.y + self.texto9_4_2.ancho_final + 10, cont["texto9_4_3"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_5_1 = texto(490, 60, cont["texto9_5_1"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_5_2 = texto(490, self.texto9_5_1.y + self.texto9_5_1.ancho_final + 10, cont["texto9_5_2"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_5_3 = texto(490, self.texto9_5_2.y + self.texto9_5_2.ancho_final + 10, cont["texto9_5_3"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_6_1 = texto(490, 60, cont["texto9_6_1"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_6_2 = texto(490, self.texto9_6_1.y + self.texto9_6_1.ancho_final + 10, cont["texto9_6_2"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_6_3 = texto(490, self.texto9_6_2.y + self.texto9_6_2.ancho_final + 10, cont["texto9_6_3"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_7_1 = texto(490, 60, cont["texto9_7_1"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_7_2 = texto(490, self.texto9_7_1.y + self.texto9_7_1.ancho_final + 10, cont["texto9_7_2"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_7_3 = texto(490, self.texto9_7_2.y + self.texto9_7_2.ancho_final + 10, cont["texto9_7_3"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_8_1 = texto(490, 60, cont["texto9_8_1"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_8_2 = texto(490, self.texto9_8_1.y + self.texto9_8_1.ancho_final + 10, cont["texto9_8_2"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_8_3 = texto(490, self.texto9_8_2.y + self.texto9_8_2.ancho_final + 10, cont["texto9_8_3"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_9_1 = texto(490, 60, cont["texto9_9_1"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_9_2 = texto(490, self.texto9_9_1.y + self.texto9_9_1.ancho_final + 10, cont["texto9_9_2"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_9_3 = texto(490, self.texto9_9_2.y + self.texto9_9_2.ancho_final + 10, cont["texto9_9_3"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_10_1 = texto(490, 60, cont["texto9_10_1"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_10_2 = texto(490, self.texto9_10_1.y + self.texto9_10_1.ancho_final + 10, cont["texto9_10_2"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_10_3 = texto(490, self.texto9_10_2.y + self.texto9_10_2.ancho_final + 10, cont["texto9_10_3"], self.parent.config.t_fuente, "normal", 1000)
self.texto9_10_4 = texto(490, self.texto9_10_3.y + self.texto9_10_3.ancho_final + 10, cont["texto9_10_4"], self.parent.config.t_fuente, "normal", 1000)
self.popup_ins1 = PopUp(self.parent, (p9["texto1"] , ), "", None , self.grupo_popup, 1, 750, 400, -100)
self.popup_ins1.agregar_grupo()
def cargar_botones(self):
"""
        Loads the buttons used on this screen.
"""
self.home = boton("home", "Menú", self.botones + "boton-menu.png", 3, 889, 440, None, False, 1)
self.volver = boton("volver", "Regresar", self.botones + "boton-regresar.png", 3, 320, 445, None, False, 1)
self.config = boton("config", "Accesibilidad", self.botones + "boton-acc.png", 3 ,60, 445, None, False, 1)
def start(self):
pass
def cleanUp(self):
pass
def pause(self):
pass
def resume(self):
"""
        Checks whether changes were made to the configuration. Loads the initial values of this screen.
"""
if self.parent.config.texto_cambio == True:
self.cargar_botones()
self.cargar_textos()
self.parent.config.texto_cambio = False
self.popup_ins1.agregar_grupo()
self.capital.apagar()
self.ori.apagar()
self.zulia.apagar()
self.occ.apagar()
self.andes.apagar()
self.llanos.apagar()
self.central.apagar()
self.guayana.apagar()
self.grupo_banner.add(self.banner_siembra, self.banner_inf)
self.grupo_botones.add(self.config, self.volver, self.home)
self.grupo_mapa.add(self.zulia, self.occ, self.central, self.insu, self.capital, self.ori, self.andes, self.llanos, self.guayana)
self.spserver.processtext(u"Pantalla: La Agricultura en Venezuela: ", self.parent.config.activar_lector)
self.spserver.processtext(p9["lector1"], self.parent.config.activar_lector)
def handleEvents(self, events):
"""
        Evaluates the events generated on this screen.
        @param events: List of events.
@type events: list
"""
for event in events:
if event.type == pygame.QUIT:
self.parent.quit()
if event.type == pygame.KEYDOWN:
self.chequeo_mascaras(self.grupo_mapa)
self.chequeo_botones(self.grupo_botones)
self.lista_final = self.lista_palabra + self.lista_mascaras + self.lista_botones
self.numero_elementos = len(self.lista_final)
if event.key == pygame.K_RIGHT:
self.fondo_texto = False
self.grupo_palabras.empty()
self.deteccion_movimiento = True
self.controlador_lector_evento_K_RIGHT()
elif event.key == pygame.K_LEFT:
self.fondo_texto = False
self.grupo_palabras.empty()
self.controlador_lector_evento_K_LEFT()
if self.deteccion_movimiento:
if event.key == pygame.K_RETURN:
if self.x.tipo_objeto == "mapa":
self.fondo_texto = True
if self.x.id == u"región capital":
self.grupo_palabras.empty()
self.central.apagar()
self.llanos.apagar()
self.zulia.apagar()
self.ori.apagar()
self.occ.apagar()
self.andes.apagar()
self.llanos.apagar()
self.capital.iluminar()
self.insu.apagar()
self.grupo_palabras.add(self.texto9_2_1.img_palabras, self.texto9_2_2.img_palabras, self.texto9_2_3.img_palabras, self.texto9_2_4.img_palabras)
self.spserver.processtext(cont["texto9_2_1l"] + self.texto9_2_2.texto + self.texto9_2_3.texto + self.texto9_2_4.texto, self.parent.config.activar_lector)
elif self.x.id == u"región central":
self.grupo_palabras.empty()
self.capital.apagar()
self.llanos.apagar()
self.zulia.apagar()
self.ori.apagar()
self.occ.apagar()
self.andes.apagar()
self.llanos.apagar()
self.central.iluminar()
self.insu.apagar()
self.grupo_palabras.add(self.texto9_3_1.img_palabras, self.texto9_3_2.img_palabras, self.texto9_3_3.img_palabras)
self.spserver.processtext(cont["texto9_3_1l"] + self.texto9_3_2.texto + self.texto9_3_3.texto, self.parent.config.activar_lector)
if self.x.id == u"región los llanos":
self.grupo_palabras.empty()
self.capital.apagar()
self.central.apagar()
self.ori.apagar()
self.zulia.apagar()
self.occ.apagar()
self.andes.apagar()
self.llanos.iluminar()
self.insu.apagar()
self.grupo_palabras.add(self.texto9_4_1.img_palabras, self.texto9_4_2.img_palabras, self.texto9_4_3.img_palabras)
self.spserver.processtext(cont["texto9_4_1l"] + self.texto9_4_2.texto + self.texto9_4_3.texto, self.parent.config.activar_lector)
if self.x.id == u"región occidental":
self.grupo_palabras.empty()
self.capital.apagar()
self.llanos.apagar()
self.central.apagar()
self.ori.apagar()
self.zulia.apagar()
self.andes.apagar()
self.occ.iluminar()
self.llanos.apagar()
self.guayana.apagar()
self.insu.apagar()
self.grupo_palabras.add(self.texto9_5_1.img_palabras, self.texto9_5_2.img_palabras, self.texto9_5_3.img_palabras)
self.spserver.processtext(cont["texto9_5_1l"] + self.texto9_5_2.texto + self.texto9_5_3.texto, self.parent.config.activar_lector)
if self.x.id == u"región zuliana":
self.grupo_palabras.empty()
self.capital.apagar()
self.llanos.apagar()
self.central.apagar()
self.ori.apagar()
self.zulia.iluminar()
self.occ.apagar()
self.andes.apagar()
self.llanos.apagar()
self.guayana.apagar()
self.insu.apagar()
self.grupo_palabras.add(self.texto9_6_1.img_palabras, self.texto9_6_2.img_palabras, self.texto9_6_3.img_palabras)
self.spserver.processtext(cont["texto9_6_1l"] + self.texto9_6_2.texto + self.texto9_6_3.texto, self.parent.config.activar_lector)
if self.x.id == u"región los andes":
self.grupo_palabras.empty()
self.capital.apagar()
self.llanos.apagar()
self.central.apagar()
self.zulia.apagar()
self.ori.apagar()
self.occ.apagar()
self.andes.iluminar()
self.llanos.apagar()
self.guayana.apagar()
self.insu.apagar()
self.grupo_palabras.add(self.texto9_7_1.img_palabras, self.texto9_7_2.img_palabras, self.texto9_7_3.img_palabras)
self.spserver.processtext(cont["texto9_7_1l"] + self.texto9_7_2.texto + self.texto9_7_3.texto, self.parent.config.activar_lector)
if self.x.id == u"región nor oriental":
self.grupo_palabras.empty()
self.capital.apagar()
self.llanos.apagar()
self.central.apagar()
self.ori.iluminar()
self.zulia.apagar()
self.occ.apagar()
self.andes.apagar()
self.guayana.apagar()
self.insu.apagar()
self.grupo_palabras.add(self.texto9_8_1.img_palabras, self.texto9_8_2.img_palabras, self.texto9_8_3.img_palabras)
self.spserver.processtext(cont["texto9_8_1l"] + self.texto9_8_2.texto + self.texto9_8_3.texto, self.parent.config.activar_lector)
if self.x.id == u"región guayana":
self.grupo_palabras.empty()
self.capital.apagar()
self.llanos.apagar()
self.central.apagar()
self.ori.apagar()
self.occ.apagar()
self.zulia.apagar()
self.andes.apagar()
self.llanos.apagar()
self.insu.apagar()
self.guayana.iluminar()
self.grupo_palabras.add(self.texto9_9_1.img_palabras, self.texto9_9_2.img_palabras, self.texto9_9_3.img_palabras)
self.spserver.processtext(cont["texto9_9_1l"] + self.texto9_9_2.texto + self.texto9_9_3.texto, self.parent.config.activar_lector)
if self.x.id == u"región insular":
self.grupo_palabras.empty()
self.capital.apagar()
self.llanos.apagar()
self.central.apagar()
self.ori.apagar()
self.occ.apagar()
self.zulia.apagar()
self.andes.apagar()
self.llanos.apagar()
self.guayana.apagar()
self.insu.iluminar()
self.grupo_palabras.add(self.texto9_10_1.img_palabras, self.texto9_10_2.img_palabras, self.texto9_10_3.img_palabras, self.texto9_10_4.img_palabras )
self.spserver.processtext(cont["texto9_10_1l"] + self.texto9_10_2.texto + self.texto9_10_3.texto + self.texto9_10_4.texto, self.parent.config.activar_lector)
elif self.x.tipo_objeto == "boton":
if self.x.id == "volver":
self.limpiar_grupos()
self.parent.animacion = 3
self.parent.changeState(pantalla8.estado(self.parent, 3))
elif self.x.id == "config":
self.limpiar_grupos()
self.parent.pushState(menucfg.estado(self.parent, self.previa))
elif self.x.id == "home":
self.limpiar_grupos()
self.parent.changeState(pantalla2.estado(self.parent))
lista = spritecollide_pp(self.mouse, self.grupo_mapa)
if not lista == []:
if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
self.deteccion_movimiento = False
self.fondo_texto = True
if lista[0].id == u"región capital":
self.central.apagar()
self.llanos.apagar()
self.ori.apagar()
self.occ.apagar()
self.zulia.apagar()
self.andes.apagar()
self.llanos.apagar()
self.capital.iluminar()
self.insu.apagar()
self.grupo_palabras.empty()
self.grupo_palabras.add(self.texto9_2_1.img_palabras, self.texto9_2_2.img_palabras, self.texto9_2_3.img_palabras , self.texto9_2_4.img_palabras)
if lista[0].id == u"región central":
self.capital.apagar()
self.llanos.apagar()
self.ori.apagar()
self.occ.apagar()
self.zulia.apagar()
self.andes.apagar()
self.llanos.apagar()
self.central.iluminar()
self.insu.apagar()
self.grupo_palabras.empty()
self.grupo_palabras.add(self.texto9_3_1.img_palabras, self.texto9_3_2.img_palabras, self.texto9_3_3.img_palabras)
if lista[0].id == u"región los llanos":
self.capital.apagar()
self.central.apagar()
self.llanos.iluminar()
self.zulia.apagar()
self.ori.apagar()
self.occ.apagar()
self.andes.apagar()
self.insu.apagar()
self.grupo_palabras.empty()
self.grupo_palabras.add(self.texto9_4_1.img_palabras, self.texto9_4_2.img_palabras, self.texto9_4_3.img_palabras)
if lista[0].id == u"región occidental":
self.capital.apagar()
self.llanos.apagar()
self.ori.apagar()
self.central.apagar()
self.zulia.apagar()
self.occ.iluminar()
self.llanos.apagar()
self.guayana.apagar()
self.andes.apagar()
self.insu.apagar()
self.grupo_palabras.empty()
self.grupo_palabras.add(self.texto9_5_1.img_palabras, self.texto9_5_2.img_palabras, self.texto9_5_3.img_palabras)
if lista[0].id == u"región zuliana":
self.capital.apagar()
self.llanos.apagar()
self.central.apagar()
self.ori.apagar()
self.zulia.iluminar()
self.occ.apagar()
self.andes.apagar()
self.llanos.apagar()
self.guayana.apagar()
self.insu.apagar()
self.grupo_palabras.empty()
self.grupo_palabras.add(self.texto9_6_1.img_palabras, self.texto9_6_2.img_palabras, self.texto9_6_3.img_palabras)
if lista[0].id == u"región los andes":
self.capital.apagar()
self.llanos.apagar()
self.central.apagar()
self.guayana.apagar()
self.zulia.apagar()
self.ori.apagar()
self.occ.apagar()
self.andes.iluminar()
self.llanos.apagar()
self.insu.apagar()
self.grupo_palabras.empty()
self.grupo_palabras.add(self.texto9_7_1.img_palabras, self.texto9_7_2.img_palabras, self.texto9_7_3.img_palabras)
if lista[0].id == u"región nor oriental":
self.capital.apagar()
self.central.apagar()
self.ori.iluminar()
self.llanos.apagar()
self.guayana.apagar()
self.zulia.apagar()
self.occ.apagar()
self.andes.apagar()
self.insu.apagar()
self.grupo_palabras.empty()
self.grupo_palabras.add(self.texto9_8_1.img_palabras, self.texto9_8_2.img_palabras, self.texto9_8_3.img_palabras)
if lista[0].id == u"región guayana":
self.capital.apagar()
self.llanos.apagar()
self.central.apagar()
self.ori.apagar()
self.zulia.apagar()
self.occ.apagar()
self.andes.apagar()
self.guayana.iluminar()
self.insu.apagar()
self.grupo_palabras.empty()
self.grupo_palabras.add(self.texto9_9_1.img_palabras, self.texto9_9_2.img_palabras, self.texto9_9_3.img_palabras)
if lista[0].id == u"región insular":
self.capital.apagar()
self.llanos.apagar()
self.central.apagar()
self.ori.apagar()
self.zulia.apagar()
self.occ.apagar()
self.andes.apagar()
self.guayana.apagar()
self.insu.iluminar()
self.grupo_palabras.empty()
self.grupo_palabras.add(self.texto9_10_1.img_palabras, self.texto9_10_2.img_palabras, self.texto9_10_3.img_palabras, self.texto9_10_4.img_palabras)
elif not self.deteccion_movimiento:
self.fondo_texto = False
self.capital.apagar()
self.central.apagar()
self.guayana.apagar()
self.andes.apagar()
self.zulia.apagar()
self.occ.apagar()
self.ori.apagar()
self.llanos.apagar()
self.grupo_palabras.empty()
self.grupo_fondotexto.empty()
if pygame.sprite.spritecollideany(self.raton, self.grupo_botones):
sprite = pygame.sprite.spritecollide(self.raton, self.grupo_botones, False)
if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
if sprite[0].id == "volver":
self.limpiar_grupos()
self.parent.animacion = 3
self.parent.changeState(pantalla8.estado(self.parent, 3))
elif sprite[0].id == "config":
self.limpiar_grupos()
self.parent.pushState(menucfg.estado(self.parent, self.previa))
elif sprite[0].id == "home":
self.limpiar_grupos()
self.parent.changeState(pantalla2.estado(self.parent))
self.minimag(events)
def update(self):
"""
        Updates the cursor position, the screen magnifier if it is enabled, the
        button tooltips and the corresponding animations or texts.
"""
self.raton.update()
self.obj_magno.magnificar(self.parent.screen)
self.grupo_botones.update(self.grupo_tooltip)
self.mouse.rect.center = pygame.mouse.get_pos()
def draw(self):
"""
        Draws the background and the elements belonging to the sprite groups
        onto the screen manager's surface.
"""
self.parent.screen.blit(self.background, (0, 0))
self.grupo_banner.draw(self.parent.screen)
self.parent.screen.blit(self.zona_r, (320, 233))
self.parent.screen.blit(self.limites1, (50, 60))
self.parent.screen.blit(self.limites2, (305, 145))
self.grupo_mapa.draw(self.parent.screen)
self.grupo_popup.draw(self.parent.screen)
if self.fondo_texto:
self.parent.screen.blit(self.bg.img, (451, 55))
self.grupo_botones.draw(self.parent.screen)
self.grupo_fondotexto.draw(self.parent.screen)
self.grupo_palabras.draw(self.parent.screen)
self.grupo_tooltip.draw(self.parent.screen)
self.parent.screen.blit(self.n_estados, (40, 95))
if self.parent.habilitar:
self.grupo_magnificador.draw(self.parent.screen, self.enable)
if self.deteccion_movimiento:
self.dibujar_rect()
def ir_glosario(self):
self.parent.pushState(pantalla10.estado(self.parent))
| gpl-3.0 | 7,081,254,828,094,702,000 | 58.114173 | 189 | 0.497036 | false | 3.289877 | true | false | false |
barct/odoo-coop | infocoop_epec_consumos/tab_fact.py | 1 | 5194 | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields, api
from openerp.osv import osv
from collections import OrderedDict
class infocoop_tab_fact(models.Model):
_inherit = "infocoop_tab_fact"
class Values():
code = "(desconocido)"
conexiones = 1
consumo = 0
cargo_fijo = 0
monto_ee = 0
monto_ts = 0
consumo_ts = 0
monto_pe = 0
consumo_pe = 0
monto_impuestos = 0
monto_otros = 0
def __iadd__(self, vals):
self.conexiones += vals.conexiones
self.consumo += vals.consumo
self.cargo_fijo += vals.cargo_fijo
self.monto_ee += vals.monto_ee
self.monto_ts += vals.monto_ts
self.consumo_ts += vals.consumo_ts
self.monto_pe += vals.monto_pe
self.consumo_pe += vals.consumo_pe
self.monto_impuestos += vals.monto_impuestos
self.monto_otros += vals.monto_otros
return self
def __unicode__(self):
txt = """code %s
conexiones %s
consumo: %s
cargo_fijo: %s
monto_ee: %s
monto_ts: %s
consumo_ts: %s
monto_pe: %s
consumo_pe: %s
monto_impuestos: %s
monto_otros: %s """
return txt % (self.code,
self.conexiones,
self.consumo,
self.cargo_fijo,
self.monto_ee,
self.monto_ts,
self.consumo_ts,
self.monto_pe,
self.consumo_pe,
self.monto_impuestos,
self.monto_otros, )
class ParticularReport(models.AbstractModel):
_name = 'report.infocoop_epec_consumos.report_example_report_view'
def get_epec_data(self, docs):
data = list()
for r in docs:
values = dict()
liq_ids = self.env["infocoop_liquidac"].search([
("servicios", "=", "/E"),
("periodo", "=", r.periodo), ])
for l in liq_ids:
if l.service_category_id.group_id:
group, level = l.service_category_id.\
group_id.define_segment(l.cons_ee)
else:
group = l.service_category_id.group_id.code
level = None
v = Values()
v.consumo = float(l.cons_ee)
v.cargo_fijo = float(l.cargo_fijo)
v.monto_ee = float(l.imp_ee)
v.monto_impuestos = float(l.neto_imp)
v.consumo_ts = float(l.ts_kwh)
v.monto_ts = float(l.ts_amount)
v.consumo_pe = float(l.pe_kwh)
v.monto_pe = float(l.pe_amount)
v.monto_otros = l.neto_serv - \
(v.monto_ee + v.cargo_fijo + v.monto_ts + v.monto_pe)
code = None
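                # For residential ("UR") services, pick a billing code from the
                # pe_level/ts_level values and the consumption (cons_ee) brackets.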
if l.service_category_id.group_id.code == "UR":
if l.pe_level == 2:
code = "5010"
elif l.pe_level == 3:
code = "5020"
elif l.ts_level == 2:
if l.cons_ee <= 150:
code = "5500"
else:
code = "5510"
elif l.ts_level == 1:
if l.cons_ee <= 150:
code = "5500"
elif l.cons_ee <= 450:
code = "5530"
else:
code = "5540"
else:
code = "5000"
v.code = group + str(level) + "-" + code
else:
if group == "NR" and level == 3:
v.code = group + str(level) + \
"-" + l.service_category_id.code
else:
v.code = group + str(level)
if v.code in values:
values[v.code] += v
else:
values[v.code] = v
data.append(
{"doc": r,
"values": OrderedDict(sorted(values.items(),
key=lambda t: t[0])), })
return data
@api.multi
def render_html(self, data=None):
report_obj = self.env['report']
report = report_obj._get_report_from_name(
'infocoop_epec_consumos.report_example_report_view')
docs = self.env['infocoop_tab_fact'].browse(self._ids)
data = self.get_epec_data(docs)
docargs = {
'doc_ids': self._ids,
'doc_model': report.model,
'docs': docs,
'data': data,
}
return report_obj.render(
'infocoop_epec_consumos.report_example_report_view', docargs)
| gpl-3.0 | -7,498,251,093,839,075,000 | 33.85906 | 78 | 0.426646 | false | 3.799561 | false | false | false |
ufal/ker | server.py | 1 | 9594 | #!/usr/bin/env python
import flask
from flask import Flask
from flask import request
from werkzeug import secure_filename
import os, random, datetime, codecs
import sys, json, magic
import cPickle as pickle
import regex as re
import keywords
import argparse
import xml.etree.ElementTree
import zipfile
app = Flask(__name__)
upload_dir = "uploads"
cs_tagger = None
cs_idf_doc_count = None
cs_idf_table = None
en_tagger = None
en_idf_doc_count = None
en_idf_table = None
@app.route('/')
def index():
return "{}\n"
def root_dir(): # pragma: no cover
return os.path.abspath(os.path.dirname(__file__))
def get_file(file_name):
try:
src = os.path.join(root_dir(), file_name)
return open(src).read()
except IOError as exc:
return str(exc)
@app.route('/web', methods=['GET'])
def show_web():
content = get_file("web.html")
print content
return flask.Response(content, mimetype="text/html")
@app.route('/demo', methods=['GET'])
def show_simple_demo():
content = get_file("web.html")
content = re.sub(r"\$\(\'#header", "//", content)
content = re.sub(r"\$\(\'#footer", "//", content)
return flask.Response(content, mimetype="text/html")
@app.route('/', methods=['POST'])
def post_request():
start_time = datetime.datetime.now()
if 'file' in request.files:
file = request.files['file']
else:
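        # No uploaded file: wrap the raw form data so it can be saved to disk
        # like an uploaded file.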
class _file_wrapper(object):
def __init__(self, data):
self._data = data
import uuid
self.filename = str(uuid.uuid4())
def save(self, path):
with codecs.open(path, mode="w+", encoding="utf-8") as fout:
fout.write(self._data)
file = _file_wrapper(request.form["data"])
tagger = cs_tagger
idf_doc_count = cs_idf_doc_count
idf_table = cs_idf_table
json_response = None
try:
post_id = datetime.datetime.now().strftime("%Y-%m-%d/%H/%M-%S-")+\
str(random.randint(10000, 99999))
post_dir = os.path.join(upload_dir, post_id)
os.makedirs(post_dir)
if request.args.get('language') == 'en':
tagger = en_tagger
idf_doc_count = en_idf_doc_count
idf_table = en_idf_table
elif request.args.get('language') == 'cs':
pass
elif request.args.get('language'):
raise Exception('Unsupported language {}'.format(request.args.get('language')))
if request.args.get('threshold'):
try:
threshold = float(request.args.get('threshold'))
except:
raise Exception("Threshold \"{}\" is not valid float.".format(request.args.get("threshold")))
else:
threshold = 0.2
if request.args.get("maximum-words"):
try:
maximum_words = int(request.args.get('maximum-words'))
except:
raise Exception("Maximum number of words \"{}\" is not an integer.".format(request.args.get("maximum-words")))
else:
maximum_words = 15
file_name = secure_filename(file.filename)
file_path = os.path.join(post_dir, file_name)
file.save(os.path.join(file_path))
data, code = \
process_file(file_path, tagger, idf_doc_count, idf_table, threshold, maximum_words)
except Exception as e:
code = 400
data = {"error": e.message}
finally:
json_response = json.dumps(data)
print json_response.encode('unicode-escape')
log = {}
log['remote_addr'] = request.remote_addr
log['response_json'] = data
log['response_code'] = code
log['time'] = start_time.strftime("%Y-%m-%d %H:%M:%S")
log['duration'] = (datetime.datetime.now() - start_time).total_seconds()
f_log = open(os.path.join(post_dir, "log.json"), 'w')
json.dump(log, f_log)
f_log.close()
response = flask.Response(json_response,
content_type='application/json; charset=utf-8')
response.headers.add('content-length', len(json_response.encode('utf-8')))
response.status_code = code
return response
def process_file(file_path, tagger, idf_doc_count, idf_table, threshold, maximum_words):
"""
    Takes the uploaded file, detects its type (plain text, ALTO XML, zip)
    and calls a parsing function accordingly. If everything succeeds it
    returns the keywords and a 200 code; otherwise it returns an error.
"""
file_info = magic.from_file(file_path)
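    # `magic` returns a human-readable description of the file type
    # (e.g. "UTF-8 Unicode text"), which is matched against below.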
lines = []
if re.match("^UTF-8 Unicode (with BOM) text", file_info):
lines = lines_from_txt_file(file_path, encoding='utf-8-sig')
elif re.match("^UTF-8 Unicode", file_info):
lines = lines_from_txt_file(file_path, encoding='utf-8')
elif re.match("^ASCII text", file_info):
lines = lines_from_txt_file(file_path, encoding='utf-8')
elif re.match('^XML 1.0 document', file_info) and \
(file_path.endswith('.alto') or file_path.endswith('.xml')):
lines = lines_from_alto_file(file_path)
elif re.match('^Zip archive data', file_info):
lines = lines_from_zip_file(file_path)
else:
return {"eror": "Unsupported file type: {}".format(file_info)}, 400
if not lines:
return {"error": "Empty file"}, 400
return keywords.get_keywords(lines, tagger, idf_doc_count, idf_table, threshold, maximum_words), 200
def lines_from_txt_file(file_path, encoding='utf-8'):
"""
Loads lines of text from a plain text file.
    :param file_path: Path to the text file or a file-like object.
"""
if type(file_path) is str:
f = codecs.open(file_path, 'r', encoding)
else:
f = file_path
content = [l.strip() for l in f]
f.close()
return content
def lines_from_alto_file(file_path):
"""
Loads lines of text from a provided alto file.
:param file_path: Path to the alto file or a file-like object.
"""
e = xml.etree.ElementTree.parse(file_path).getroot()
layout = None
for c in e.getchildren():
if c.tag.endswith('Layout'):
layout = c
break
if layout is None:
raise Exception("XML is not ALTO file (does not contain layout object).")
for page in layout.getchildren():
if not page.tag.endswith("Page"):
continue
text_lines = layout.findall(".//{http://www.loc.gov/standards/alto/ns-v2#}TextLine")
for text_line in text_lines:
line_words = []
for string in text_line.getchildren():
if not string.tag.endswith('String'):
continue
line_words.append(string.attrib['CONTENT'])
yield " ".join(line_words)
def lines_from_zip_file(file_path):
"""
    Loads lines of text from a provided zip file. If it contains alto files, it
    uses them; otherwise it looks for txt files. Files can be at an arbitrary depth.
:param file_path: Path to the uploaded zip file.
:type file_path: str
"""
archive = zipfile.ZipFile(file_path)
alto_files = [n for n in archive.namelist() if n.endswith(".alto") or n.endswith(".xml")]
if alto_files:
for f_name in alto_files:
for line in lines_from_alto_file(archive.open(f_name)):
yield line
else:
txt_files = [n for n in archive.namelist() if n.endswith(".txt")]
if not txt_files:
raise Exception("Archive contains neither alto files nor text files.")
for f_name in txt_files:
for line in lines_from_txt_file(archive.open(f_name)):
yield line
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Runs the KER server.')
parser.add_argument("--cs-morphodita", help="Path to a Czech tagger model for Morphodita.", required=True)
parser.add_argument("--cs-idf", help="Czech idf model.", required=True)
parser.add_argument("--en-morphodita", help="Path to a English tagger model for Morphodita.", required=True)
parser.add_argument("--en-idf", help="English idf model.", required=True)
parser.add_argument("--port", help="Port the server runs on", type=int, default=5000)
parser.add_argument("--host", help="IP address the server will run at", type=str, default="127.0.0.1")
args = parser.parse_args()
if os.path.exists(args.cs_morphodita):
cs_tagger = keywords.Morphodita(args.cs_morphodita)
else:
print >> sys.stderr, "File with Czech Morphodita model does not exist: {}".format(args.cs_morphodita)
exit(1)
if os.path.exists(args.cs_idf):
f_idf = open(args.cs_idf, 'rb')
cs_idf_doc_count = float(pickle.load(f_idf))
cs_idf_table = pickle.load(f_idf)
f_idf.close()
else:
print >> sys.stderr, "File with Czech IDF model does not exist: {}".format(args.cs_idf)
exit(1)
if os.path.exists(args.en_morphodita):
en_tagger = keywords.Morphodita(args.en_morphodita)
else:
print >> sys.stderr, "File with English Morphodita model does not exist: {}".format(args.en_morphodita)
exit(1)
if os.path.exists(args.en_idf):
f_idf = open(args.en_idf, 'rb')
en_idf_doc_count = float(pickle.load(f_idf))
en_idf_table = pickle.load(f_idf)
f_idf.close()
else:
print >> sys.stderr, "File with English IDF model does not exist: {}".format(args.en_idf)
exit(1)
app.run(debug=True, host=args.host, port=args.port)
| lgpl-3.0 | 3,862,969,899,623,233,500 | 34.142857 | 126 | 0.603294 | false | 3.533702 | false | false | false |
Juanlu001/CBC.Solve | cbc/swing/fsinewton/solver/boundary_conditions.py | 1 | 4016 | """Module containing implementation of monolithic FSI boundary conditions"""
__author__ = "Gabriel Balaban"
__copyright__ = "Copyright (C) 2010 Simula Research Laboratory and %s" % __author__
__license__ = "GNU GPL Version 3 or any later version"
from dolfin import *
class FSIBC(object):
"""
Boundary Conditions class for Monolithic FSI
Arguments
problem
object of type pfsi.FsiNewtonTest
spaces
object of type FSISpaces
"""
def __init__(self,problem,spaces):
self.problem = problem
self.spaces = spaces
        #Time dependent BC are set on the initial guess and at each time step.
self.bcallU1_ini = self.create_all_dirichlet_conditions("Initial guess")
#Newton Increment BC are homogeneous
self.bcallI = self.create_all_dirichlet_conditions("Newton Step")
[bc.homogenize() for bc in self.bcallI]
def create_all_dirichlet_conditions(self, bcsetname = ""):
info_blue("\nCreating Dirichlet Boundary Conditions " + bcsetname)
return self.create_fluid_bc() + self.create_structure_bc() + \
self.create_mesh_bc()
def create_bc(self,space,boundaries,values,bcname):
#If Boundaries specified without values assume homogeneous
if boundaries is not None and (values == [] or values is None):
dim = space.num_sub_spaces()
#A Function Space returns dim 0 but really has dim 1.
if dim == 0:
dim = 1
zeros = tuple(["0.0" for i in range(dim)])
values = [zeros for i in range(len(boundaries))]
#Try to generate the BC
bcs = []
## try:
for boundary,value in zip(boundaries,values):
if boundary == 'GammaFSI':
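                # The FSI interface consists of interior facets, so the BC is
                # applied through the interior facet mesh function.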
fsibounds = self.problem.interiorboundarynums["FSI_bound"]
interiormeshfunc = self.problem.meshfunctions["interiorfacet"]
for fsibound in fsibounds:
print fsibound
bcs += [DirichletBC(space, value,interiormeshfunc,fsibound)]
else:
bcs += [DirichletBC(space, value, boundary)]
info("Created bc %s"%bcname)
## except:
## info("No Dirichlet bc created for %s"%bcname)
return bcs
def create_fluid_bc(self):
bcv = self.create_bc(self.spaces.V_F,self.problem.fluid_velocity_dirichlet_boundaries(),\
self.problem.fluid_velocity_dirichlet_values(),"Fluid Velocity")
bcp = self.create_fluid_pressure_bc()
return bcv + bcp
def create_fluid_pressure_bc(self):
return self.create_bc(self.spaces.Q_F,self.problem.fluid_pressure_dirichlet_boundaries(),\
self.problem.fluid_pressure_dirichlet_values(),"Fluid Pressure")
def create_structure_bc(self):
bcU = self.create_bc(self.spaces.C_S,self.problem.structure_dirichlet_boundaries(),\
self.problem.structure_dirichlet_values(),"Structure Displacement")
bcP = self.create_bc(self.spaces.V_S,self.problem.structure_velocity_dirichlet_boundaries(),\
self.problem.structure_velocity_dirichlet_values(),"Structure Velocity")
return bcU + bcP
def create_mesh_bc(self):
        #If no mesh BC is specified, assume the domain boundary and fixed
if self.problem.mesh_dirichlet_boundaries() is None:
#The value will be set to zero in self.create_bc
            return self.create_bc(self.spaces.C_F,["on_boundary"],None,"Mesh Displacement")
#Allow the user to explicitly create no mesh bc whatsoever.
elif self.problem.mesh_dirichlet_boundaries() == "NoBC":
return []
else:
return self.create_bc(self.spaces.C_F,self.problem.mesh_dirichlet_boundaries(),\
self.problem.mesh_dirichlet_values(),"Mesh Displacement")
| gpl-3.0 | -6,300,717,971,602,277,000 | 43.131868 | 101 | 0.608317 | false | 3.960552 | false | false | false |
timopulkkinen/BubbleFish | tools/telemetry/telemetry/core/chrome/win_platform_backend.py | 1 | 3723 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import ctypes
import subprocess
try:
import win32api # pylint: disable=F0401
import win32con # pylint: disable=F0401
import win32process # pylint: disable=F0401
except ImportError:
win32api = None
win32con = None
win32process = None
from telemetry.core.chrome import platform_backend
class WinPlatformBackend(platform_backend.PlatformBackend):
def _GetProcessHandle(self, pid):
mask = (win32con.PROCESS_QUERY_INFORMATION |
win32con.PROCESS_VM_READ)
return win32api.OpenProcess(mask, False, pid)
# pylint: disable=W0613
def StartRawDisplayFrameRateMeasurement(self):
raise NotImplementedError()
def StopRawDisplayFrameRateMeasurement(self):
raise NotImplementedError()
def GetRawDisplayFrameRateMeasurements(self):
raise NotImplementedError()
def IsThermallyThrottled(self):
raise NotImplementedError()
def HasBeenThermallyThrottled(self):
raise NotImplementedError()
def GetSystemCommitCharge(self):
class PerformanceInfo(ctypes.Structure):
"""Struct for GetPerformanceInfo() call
http://msdn.microsoft.com/en-us/library/ms683210
"""
_fields_ = [('size', ctypes.c_ulong),
('CommitTotal', ctypes.c_size_t),
('CommitLimit', ctypes.c_size_t),
('CommitPeak', ctypes.c_size_t),
('PhysicalTotal', ctypes.c_size_t),
('PhysicalAvailable', ctypes.c_size_t),
('SystemCache', ctypes.c_size_t),
('KernelTotal', ctypes.c_size_t),
('KernelPaged', ctypes.c_size_t),
('KernelNonpaged', ctypes.c_size_t),
('PageSize', ctypes.c_size_t),
('HandleCount', ctypes.c_ulong),
('ProcessCount', ctypes.c_ulong),
('ThreadCount', ctypes.c_ulong)]
def __init__(self):
self.size = ctypes.sizeof(self)
super(PerformanceInfo, self).__init__()
performance_info = PerformanceInfo()
ctypes.windll.psapi.GetPerformanceInfo(
ctypes.byref(performance_info), performance_info.size)
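    # CommitTotal is reported in pages; multiply by the page size and divide
    # by 1024 to return the commit charge in kilobytes.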
return performance_info.CommitTotal * performance_info.PageSize / 1024
def GetMemoryStats(self, pid):
memory_info = win32process.GetProcessMemoryInfo(
self._GetProcessHandle(pid))
return {'VM': memory_info['PagefileUsage'],
'VMPeak': memory_info['PeakPagefileUsage'],
'WorkingSetSize': memory_info['WorkingSetSize'],
'WorkingSetSizePeak': memory_info['PeakWorkingSetSize']}
def GetIOStats(self, pid):
io_stats = win32process.GetProcessIoCounters(
self._GetProcessHandle(pid))
return {'ReadOperationCount': io_stats['ReadOperationCount'],
'WriteOperationCount': io_stats['WriteOperationCount'],
'ReadTransferCount': io_stats['ReadTransferCount'],
'WriteTransferCount': io_stats['WriteTransferCount']}
def GetChildPids(self, pid):
"""Retunds a list of child pids of |pid|."""
child_pids = []
pid_ppid_list = subprocess.Popen(['wmic', 'process', 'get',
'ParentProcessId,ProcessId'],
stdout=subprocess.PIPE).communicate()[0]
for pid_ppid in pid_ppid_list.splitlines()[1:]: #skip header
if not pid_ppid:
continue
curr_ppid, curr_pid = pid_ppid.split()
if int(curr_ppid) == pid:
child_pids.append(int(curr_pid))
child_pids.extend(self.GetChildPids(int(curr_pid)))
return child_pids
| bsd-3-clause | 2,244,057,969,717,524,000 | 36.606061 | 77 | 0.640612 | false | 3.964856 | false | false | false |
Jajcus/pyxmpp2 | pyxmpp2/mainloop/wait.py | 1 | 2125 | #
# (C) Copyright 2011 Jacek Konieczny <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# pylint: disable-msg=W0201
"""Utility functions to wait until a socket (or object implementing .fileno()
in POSIX) is ready for input or output."""
from __future__ import absolute_import, division
__docformat__ = "restructuredtext en"
import select
if hasattr(select, "poll"):
def wait_for_read(socket, timeout = None):
"""Wait up to `timeout` seconds until `socket` is ready for reading.
"""
if timeout is not None:
timeout *= 1000
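            # poll() takes its timeout in milliseconds, while this function's argument is in seconds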
poll = select.poll()
poll.register(socket, select.POLLIN)
events = poll.poll(timeout)
return bool(events)
def wait_for_write(socket, timeout = None):
"""Wait up to `timeout` seconds until `socket` is ready for writing.
"""
if timeout is not None:
timeout *= 1000
poll = select.poll()
poll.register(socket, select.POLLOUT)
events = poll.poll(timeout)
return bool(events)
else:
def wait_for_read(socket, timeout = None):
"""Wait up to `timeout` seconds until `socket` is ready for reading.
"""
readable = select.select([socket], [], [], timeout)[0]
return bool(readable)
def wait_for_write(socket, timeout = None):
"""Wait up to `timeout` seconds until `socket` is ready for writing.
"""
writable = select.select([], [socket], [], timeout)[1]
return bool(writable)
| lgpl-2.1 | -7,421,785,645,248,673,000 | 35.637931 | 77 | 0.660235 | false | 3.994361 | false | false | false |
kamitchell/py2app | py2app/filters.py | 1 | 1048 | from pkg_resources import require
require("macholib")
import os
import sys
from macholib.util import has_filename_filter, in_system_path
def not_stdlib_filter(module, prefix=None):
"""
Return False if the module is located in the standard library
"""
if prefix is None:
prefix = sys.prefix
prefix = os.path.join(os.path.realpath(prefix), '')
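    # ensure the prefix ends with a path separator so startswith() only matches whole path components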
rp = os.path.realpath(module.filename)
if rp.startswith(prefix):
rest = rp[len(prefix):]
if '/site-python/' in rest:
return True
elif '/site-packages/' in rest:
return True
else:
return False
return True
def not_system_filter(module):
"""
Return False if the module is located in a system directory
"""
return not in_system_path(module.filename)
def bundle_or_dylib_filter(module):
"""
Return False if the module does not have a filetype attribute
corresponding to a Mach-O bundle or dylib
"""
return getattr(module, 'filetype', None) in ('bundle', 'dylib')
| mit | 183,278,461,600,789,730 | 27.324324 | 67 | 0.645992 | false | 3.969697 | false | false | false |
tuukka/sonata-svn-test | sonata/plugins/test.py | 1 | 2113 |
# this is the magic interpreted by Sonata, referring to on_enable etc. below:
### BEGIN PLUGIN INFO
# [plugin]
# plugin_format: 0, 0
# name: Test plugin
# version: 0, 0, 1
# description: A simple test plugin.
# author: Tuukka Hastrup
# author_email: [email protected]
# url: http://sonata.berlios.de
# license: GPL v3 or later
# [capabilities]
# enablables: on_enable
# tabs: construct_tab
# playing_song_observers: on_song_change
# lyrics_fetching: on_lyrics_fetch
### END PLUGIN INFO
# nothing magical from here on
import gobject, gtk, pango
from sonata.misc import escape_html
songlabel = None
lyricslabel = None
# this gets called when the plugin is loaded, enabled, or disabled:
def on_enable(state):
global songlabel, lyricslabel
if state:
songlabel = gtk.Label("No song info received yet.")
songlabel.props.ellipsize = pango.ELLIPSIZE_END
lyricslabel = gtk.Label("No lyrics requests yet.")
lyricslabel.props.ellipsize = pango.ELLIPSIZE_END
else:
songlabel = None
lyricslabel = None
# this constructs the parts of the tab when called:
def construct_tab():
vbox = gtk.VBox()
vbox.pack_start(gtk.Label("Hello world!"))
vbox.pack_start(songlabel)
vbox.pack_start(lyricslabel)
vbox.pack_start(gtk.Label("(You can modify me at %s)" %
__file__.rstrip("c")))
vbox.show_all()
# the return value goes off to Base.new_tab(page, stock, text, focus):
# (tab content, icon name, tab name, the widget to focus on tab switch)
return (vbox, None, "Test plugin", None)
# this gets called when a new song is playing:
def on_song_change(songinfo):
if songinfo:
songlabel.set_markup("<b>Info for currently playing song:</b>"+
"\n%s" % escape_html(repr(songinfo)))
else:
songlabel.set_text("Currently not playing any song.")
songlabel.show()
# this gets requests for lyrics:
def on_lyrics_fetch(callback, artist, title):
lyricslabel.set_markup(
"Got request for lyrics for artist %r title %r." %
(artist, title))
# callback(lyrics, error)
gobject.timeout_add(0, callback, None,
"%s doesn't have lyrics for %r." %
(__name__, (artist, title)))
| gpl-3.0 | 4,461,166,768,552,095,000 | 27.554054 | 77 | 0.705632 | false | 2.984463 | false | false | false |
datacommonsorg/data | scripts/us_bjs/nps/preprocess_data.py | 1 | 19122 | import pandas as pd
from absl import flags
from absl import app
FLAGS = flags.FLAGS
flags.DEFINE_string('preprocess_file',
'NPS_1978-2018_Data.tsv',
'file path to tsv file with data to proess',
short_name='p')
def convert_nan_for_calculation(value):
if pd.isna(value):
return 0
else:
return value
def total_jurisdiction_columns_helper(df):
"""calculation to include private facility numbers"""
df["PVINF_Temp"] = df["PVINF"].apply(convert_nan_for_calculation)
df["PVOTHF_Temp"] = df["PVOTHF"].apply(convert_nan_for_calculation)
df["PVINM_Temp"] = df["PVINM"].apply(convert_nan_for_calculation)
df["PVOTHM_Temp"] = df["PVOTHM"].apply(convert_nan_for_calculation)
df["Female_Total_Temp"] = df[["JURTOTF", "PVINF_Temp", "PVOTHF_Temp"
]].sum(axis=1).where(df["PVINCLF"] == 2,
df["JURTOTF"])
df["Male_Total_Temp"] = df[["JURTOTM", "PVINM_Temp", "PVOTHM_Temp"
]].sum(axis=1).where(df["PVINCLM"] == 2,
df["JURTOTM"])
"""calculation to include local facility numbers"""
df["LFF_Temp"] = df["LFF"].apply(convert_nan_for_calculation)
df["LFM_Temp"] = df["LFM"].apply(convert_nan_for_calculation)
df["Female_Total_Temp"] = df[["Female_Total_Temp", "LFF_Temp"
]].sum(axis=1).where(df["LFINCLF"] == 2,
df["Female_Total_Temp"])
df["Male_Total_Temp"] = df[["Male_Total_Temp", "LFM_Temp"
]].sum(axis=1).where(df["LFINCLM"] == 2,
df["Male_Total_Temp"])
"""calculation to include numbers from local facilities solely to ease crowding"""
df["LFCRSTF_Temp"] = df["LFCRSTF"].apply(convert_nan_for_calculation)
df["LFCRSTM_Temp"] = df["LFCRSTM"].apply(convert_nan_for_calculation)
df["Female_Total_Temp"] = df[["Female_Total_Temp", "LFCRSTF_Temp"
]].sum(axis=1).where(df["LFCRINCF"] == 2,
df["Female_Total_Temp"])
df["Male_Total_Temp"] = df[["Male_Total_Temp", "LFCRSTM_Temp"
]].sum(axis=1).where(df["LFCRINCM"] == 2,
df["Male_Total_Temp"])
"""calculation to include federal and other state facility numbers"""
df["FEDF_Temp"] = df["FEDF"].apply(convert_nan_for_calculation)
df["OTHSTF_Temp"] = df["OTHSTF"].apply(convert_nan_for_calculation)
df["FEDM_Temp"] = df["FEDM"].apply(convert_nan_for_calculation)
df["OTHSTM_Temp"] = df["OTHSTM"].apply(convert_nan_for_calculation)
df["Female_Total_Temp"] = df[[
"Female_Total_Temp", "FEDF_Temp", "OTHSTF_Temp"
]].sum(axis=1).where(df["FACINCLF"] == 2, df["Female_Total_Temp"])
df["Male_Total_Temp"] = df[["Male_Total_Temp", "FEDM_Temp", "OTHSTM_Temp"
]].sum(axis=1).where(df["FACINCLM"] == 2,
df["Male_Total_Temp"])
def get_columns(df):
df_out = {}
total_jurisdiction_columns_helper(df)
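    # map raw NPS column codes to statistical variable names (trailing F = female, M = male columns)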
df_out["GeoId"] = df["GeoId"]
df_out["YEAR"] = df["YEAR"]
df_out["Count_Person_Female_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"Female_Total_Temp"]
df_out[
"Count_Person_Female_Incarcerated_WhiteAlone_MeasuredBasedOnJurisdiction"] = df[
"WHITEF"]
df_out[
"Count_Person_BlackOrAfricanAmericanAlone_Female_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"BLACKF"]
df_out[
"Count_Person_Female_HispanicOrLatino_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"HISPF"]
df_out[
"Count_Person_AmericanIndianOrAlaskaNativeAlone_Female_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"AIANF"]
df_out[
"Count_Person_AsianAlone_Female_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"ASIANF"]
df_out[
"Count_Person_Female_Incarcerated_NativeHawaiianOrOtherPacificIslanderAlone_MeasuredBasedOnJurisdiction"] = df[
"NHPIF"]
df_out[
"Count_Person_Female_Incarcerated_TwoOrMoreRaces_MeasuredBasedOnJurisdiction"] = df[
"TWORACEF"]
df_out[
"Count_MortalityEvent_Female_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"DTHTOTF"]
df_out[
"Count_MortalityEvent_Female_Incarcerated_JudicialExecution_MeasuredBasedOnJurisdiction"] = df[
"DTHEXECF"]
df_out[
"Count_MortalityEvent_Female_IllnessOrNaturalCause_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"DTHILLNF"]
df_out[
"Count_MortalityEvent_AIDS_Female_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"DTHAIDSF"]
df_out[
"Count_MortalityEvent_Female_Incarcerated_IntentionalSelf-Harm(Suicide)_MeasuredBasedOnJurisdiction"] = df[
"DTHSUICF"]
df_out[
"Count_MortalityEvent_Accidents(UnintentionalInjuries)_Female_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"DTHACCF"]
df_out[
"Count_MortalityEvent_DeathDueToAnotherPerson_Female_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"DTHPERSF"]
df_out[
"Count_MortalityEvent_Assault(Homicide)_Female_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"DTHHOMIF"]
df_out[
"Count_MortalityEvent_Female_Incarcerated_NPSOtherCauseOfDeath_MeasuredBasedOnJurisdiction"] = df[
"DTHOTHF"]
df_out[
"Count_IncarcerationEvent_AdmittedToPrison_Female_Incarcerated_MaxSentenceGreaterThan1Year_Sentenced_MeasuredBasedOnJurisdiction"] = df[
"ADTOTF"]
df_out[
"Count_IncarcerationEvent_Female_Incarcerated_MaxSentenceGreaterThan1Year_ReleasedFromPrison_Sentenced_MeasuredBasedOnJurisdiction"] = df[
"RLTOTF"]
df_out[
"Count_Person_Female_Incarcerated_MaxSentenceGreaterThan1Year_Sentenced_MeasuredBasedOnJurisdiction"] = df[
"JURGT1F"]
df_out[
"Count_Person_Female_Incarcerated_MaxSentence1YearOrLess_Sentenced_MeasuredBasedOnJurisdiction"] = df[
"JURLT1F"]
df_out[
"Count_Person_Female_Incarcerated_Unsentenced_MeasuredBasedOnJurisdiction"] = df[
"JURUNSF"]
df_out[
"Count_Person_Female_Incarcerated_InState_PrivatelyOperated_MeasuredBasedOnJurisdiction"] = df[
"PVINF"]
df_out[
"Count_Person_Female_Incarcerated_OutOfState_PrivatelyOperated_MeasuredBasedOnJurisdiction"] = df[
"PVOTHF"]
df_out[
"Count_Person_Female_Incarcerated_Local_LocallyOperated_MeasuredBasedOnJurisdiction"] = df[
"LFF"]
df_out[
"Count_Person_FederallyOperated_Female_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"FEDF"]
df_out[
"Count_Person_Female_Incarcerated_OutOfState_StateOperated_MeasuredBasedOnJurisdiction"] = df[
"OTHSTF"]
df_out[
"Count_Person_Female_Incarcerated_NotAUSCitizen_StateOperated&FederallyOperated&PrivatelyOperated_MeasuredBasedOnCustody"] = df[
"NCITZTOTF"]
df_out[
"Count_Person_Female_Incarcerated_MaxSentenceGreaterThan1Year_NotAUSCitizen_Sentenced_StateOperated&FederallyOperated&PrivatelyOperated_MeasuredBasedOnCustody"] = df[
"NCITZGT1F"]
df_out[
"Count_Person_Female_Incarcerated_MaxSentence1YearOrLess_NotAUSCitizen_Sentenced_StateOperated&FederallyOperated&PrivatelyOperated_MeasuredBasedOnCustody"] = df[
"NCITZLE1F"]
df_out[
"Count_Person_Female_Incarcerated_NotAUSCitizen_StateOperated&FederallyOperated&PrivatelyOperated_Unsentenced_MeasuredBasedOnCustody"] = df[
"NCITZUNSF"]
df_out[
"Count_Person_Female_Incarcerated_Under18_MeasuredBasedOnCustody"] = df[
"CUSLT18F"]
df_out["Count_Person_Incarcerated_Male_MeasuredBasedOnJurisdiction"] = df[
"Male_Total_Temp"]
df_out[
"Count_Person_Incarcerated_Male_WhiteAlone_MeasuredBasedOnJurisdiction"] = df[
"WHITEM"]
df_out[
"Count_Person_BlackOrAfricanAmericanAlone_Incarcerated_Male_MeasuredBasedOnJurisdiction"] = df[
"BLACKM"]
df_out[
"Count_Person_HispanicOrLatino_Incarcerated_Male_MeasuredBasedOnJurisdiction"] = df[
"HISPM"]
df_out[
"Count_Person_AmericanIndianOrAlaskaNativeAlone_Incarcerated_Male_MeasuredBasedOnJurisdiction"] = df[
"AIANM"]
df_out[
"Count_Person_AsianAlone_Incarcerated_Male_MeasuredBasedOnJurisdiction"] = df[
"ASIANM"]
df_out[
"Count_Person_Incarcerated_Male_NativeHawaiianOrOtherPacificIslanderAlone_MeasuredBasedOnJurisdiction"] = df[
"NHPIM"]
df_out[
"Count_Person_Incarcerated_Male_TwoOrMoreRaces_MeasuredBasedOnJurisdiction"] = df[
"TWORACEM"]
df_out[
"Count_MortalityEvent_Incarcerated_Male_MeasuredBasedOnJurisdiction"] = df[
"DTHTOTM"]
df_out[
"Count_MortalityEvent_Incarcerated_JudicialExecution_Male_MeasuredBasedOnJurisdiction"] = df[
"DTHEXECM"]
df_out[
"Count_MortalityEvent_IllnessOrNaturalCause_Incarcerated_Male_MeasuredBasedOnJurisdiction"] = df[
"DTHILLNM"]
df_out[
"Count_MortalityEvent_AIDS_Incarcerated_Male_MeasuredBasedOnJurisdiction"] = df[
"DTHAIDSM"]
df_out[
"Count_MortalityEvent_Incarcerated_IntentionalSelf-Harm(Suicide)_Male_MeasuredBasedOnJurisdiction"] = df[
"DTHSUICM"]
df_out[
"Count_MortalityEvent_Accidents(UnintentionalInjuries)_Incarcerated_Male_MeasuredBasedOnJurisdiction"] = df[
"DTHACCM"]
df_out[
"Count_MortalityEvent_DeathDueToAnotherPerson_Incarcerated_Male_MeasuredBasedOnJurisdiction"] = df[
"DTHPERSM"]
df_out[
"Count_MortalityEvent_Assault(Homicide)_Incarcerated_Male_MeasuredBasedOnJurisdiction"] = df[
"DTHHOMIM"]
df_out[
"Count_MortalityEvent_Incarcerated_Male_NPSOtherCauseOfDeath_MeasuredBasedOnJurisdiction"] = df[
"DTHOTHM"]
df_out[
"Count_IncarcerationEvent_AdmittedToPrison_Incarcerated_Male_MaxSentenceGreaterThan1Year_Sentenced_MeasuredBasedOnJurisdiction"] = df[
"ADTOTM"]
df_out[
"Count_IncarcerationEvent_Incarcerated_Male_MaxSentenceGreaterThan1Year_ReleasedFromPrison_Sentenced_MeasuredBasedOnJurisdiction"] = df[
"RLTOTM"]
df_out[
"Count_Person_Incarcerated_Male_MaxSentenceGreaterThan1Year_Sentenced_MeasuredBasedOnJurisdiction"] = df[
"JURGT1M"]
df_out[
"Count_Person_Incarcerated_Male_MaxSentence1YearOrLess_Sentenced_MeasuredBasedOnJurisdiction"] = df[
"JURLT1M"]
df_out[
"Count_Person_Incarcerated_Male_Unsentenced_MeasuredBasedOnJurisdiction"] = df[
"JURUNSM"]
df_out[
"Count_Person_Incarcerated_InState_Male_PrivatelyOperated_MeasuredBasedOnJurisdiction"] = df[
"PVINM"]
df_out[
"Count_Person_Incarcerated_Male_OutOfState_PrivatelyOperated_MeasuredBasedOnJurisdiction"] = df[
"PVOTHM"]
df_out[
"Count_Person_Incarcerated_Local_LocallyOperated_Male_MeasuredBasedOnJurisdiction"] = df[
"LFM"]
df_out[
"Count_Person_FederallyOperated_Incarcerated_Male_MeasuredBasedOnJurisdiction"] = df[
"FEDM"]
df_out[
"Count_Person_Incarcerated_Male_OutOfState_StateOperated_MeasuredBasedOnJurisdiction"] = df[
"OTHSTM"]
df_out[
"Count_Person_Incarcerated_Male_NotAUSCitizen_StateOperated&FederallyOperated&PrivatelyOperated_MeasuredBasedOnCustody"] = df[
"NCITZTOTM"]
df_out[
"Count_Person_Incarcerated_Male_MaxSentenceGreaterThan1Year_NotAUSCitizen_Sentenced_StateOperated&FederallyOperated&PrivatelyOperated_MeasuredBasedOnCustody"] = df[
"NCITZGT1M"]
df_out[
"Count_Person_Incarcerated_Male_MaxSentence1YearOrLess_NotAUSCitizen_Sentenced_StateOperated&FederallyOperated&PrivatelyOperated_MeasuredBasedOnCustody"] = df[
"NCITZLE1M"]
df_out[
"Count_Person_Incarcerated_Male_NotAUSCitizen_StateOperated&FederallyOperated&PrivatelyOperated_Unsentenced_MeasuredBasedOnCustody"] = df[
"NCITZUNSM"]
df_out[
"Count_Person_Incarcerated_Male_Under18_MeasuredBasedOnCustody"] = df[
"CUSLT18M"]
df_out["Count_Person_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"Female_Total_Temp"] + df["Male_Total_Temp"]
df_out[
"Count_Person_Incarcerated_WhiteAlone_MeasuredBasedOnJurisdiction"] = df[
"WHITEF"] + df["WHITEM"]
df_out[
"Count_Person_BlackOrAfricanAmericanAlone_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"BLACKF"] + df["BLACKM"]
df_out[
"Count_Person_HispanicOrLatino_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"HISPF"] + df["HISPM"]
df_out[
"Count_Person_AmericanIndianOrAlaskaNativeAlone_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"AIANF"] + df["AIANM"]
df_out[
"Count_Person_AsianAlone_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"ASIANF"] + df["ASIANM"]
df_out[
"Count_Person_Incarcerated_NativeHawaiianOrOtherPacificIslanderAlone_MeasuredBasedOnJurisdiction"] = df[
"NHPIF"] + df["NHPIM"]
df_out[
"Count_Person_Incarcerated_TwoOrMoreRaces_MeasuredBasedOnJurisdiction"] = df[
"TWORACEF"] + df["TWORACEM"]
df_out[
"Count_MortalityEvent_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"DTHTOTF"] + df["DTHTOTM"]
df_out[
"Count_MortalityEvent_Incarcerated_JudicialExecution_MeasuredBasedOnJurisdiction"] = df[
"DTHEXECF"] + df["DTHEXECM"]
df_out[
"Count_MortalityEvent_IllnessOrNaturalCause_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"DTHILLNF"] + df["DTHILLNM"]
df_out[
"Count_MortalityEvent_AIDS_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"DTHAIDSF"] + df["DTHAIDSM"]
df_out[
"Count_MortalityEvent_Incarcerated_IntentionalSelf-Harm(Suicide)_MeasuredBasedOnJurisdiction"] = df[
"DTHSUICF"] + df["DTHSUICM"]
df_out[
"Count_MortalityEvent_Accidents(UnintentionalInjuries)_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"DTHACCF"] + df["DTHACCM"]
df_out[
"Count_MortalityEvent_DeathDueToAnotherPerson_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"DTHPERSF"] + df["DTHPERSM"]
df_out[
"Count_MortalityEvent_Assault(Homicide)_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"DTHHOMIF"] + df["DTHHOMIM"]
df_out[
"Count_MortalityEvent_Incarcerated_NPSOtherCauseOfDeath_MeasuredBasedOnJurisdiction"] = df[
"DTHOTHF"] + df["DTHOTHM"]
df_out[
"Count_IncarcerationEvent_AdmittedToPrison_Incarcerated_MaxSentenceGreaterThan1Year_Sentenced_MeasuredBasedOnJurisdiction"] = df[
"ADTOTF"] + df["ADTOTM"]
df_out[
"Count_IncarcerationEvent_Incarcerated_MaxSentenceGreaterThan1Year_ReleasedFromPrison_Sentenced_MeasuredBasedOnJurisdiction"] = df[
"RLTOTF"] + df["RLTOTM"]
df_out[
"Count_Person_Incarcerated_MaxSentenceGreaterThan1Year_Sentenced_MeasuredBasedOnJurisdiction"] = df[
"JURGT1F"] + df["JURGT1M"]
df_out[
"Count_Person_Incarcerated_MaxSentence1YearOrLess_Sentenced_MeasuredBasedOnJurisdiction"] = df[
"JURLT1F"] + df["JURLT1M"]
df_out[
"Count_Person_Incarcerated_Unsentenced_MeasuredBasedOnJurisdiction"] = df[
"JURUNSF"] + df["JURUNSM"]
df_out[
"Count_Person_Incarcerated_InState_PrivatelyOperated_MeasuredBasedOnJurisdiction"] = df[
"PVINF"] + df["PVINM"]
df_out[
"Count_Person_Incarcerated_OutOfState_PrivatelyOperated_MeasuredBasedOnJurisdiction"] = df[
"PVOTHF"] + df["PVOTHM"]
df_out[
"Count_Person_Incarcerated_Local_LocallyOperated_MeasuredBasedOnJurisdiction"] = df[
"LFF"] + df["LFM"]
df_out[
"Count_Person_FederallyOperated_Incarcerated_MeasuredBasedOnJurisdiction"] = df[
"FEDF"] + df["FEDM"]
df_out[
"Count_Person_Incarcerated_OutOfState_StateOperated_MeasuredBasedOnJurisdiction"] = df[
"OTHSTF"] + df["OTHSTM"]
df_out[
"Count_Person_Incarcerated_NotAUSCitizen_StateOperated&FederallyOperated&PrivatelyOperated_MeasuredBasedOnCustody"] = df[
"NCITZTOTF"] + df["NCITZTOTM"]
df_out[
"Count_Person_Incarcerated_MaxSentenceGreaterThan1Year_NotAUSCitizen_Sentenced_StateOperated&FederallyOperated&PrivatelyOperated_MeasuredBasedOnCustody"] = df[
"NCITZGT1F"] + df["NCITZGT1M"]
df_out[
"Count_Person_Incarcerated_MaxSentence1YearOrLess_NotAUSCitizen_Sentenced_StateOperated&FederallyOperated&PrivatelyOperated_MeasuredBasedOnCustody"] = df[
"NCITZLE1F"] + df["NCITZLE1M"]
df_out[
"Count_Person_Incarcerated_NotAUSCitizen_StateOperated&FederallyOperated&PrivatelyOperated_Unsentenced_MeasuredBasedOnCustody"] = df[
"NCITZUNSF"] + df["NCITZUNSM"]
df_out["Count_Person_Incarcerated_Under18_MeasuredBasedOnCustody"] = df[
"CUSLT18F"] + df["CUSLT18M"]
return df_out
def convert_geoId(fips_code):
"""Creates geoId column"""
return 'geoId/' + str(fips_code).zfill(2)
def convert_missing_value_to_nan(value):
"""codes for missing values are always negative and actual data is always >= 0"""
if isinstance(value, int) and value < 0:
return float("nan")
else:
return value
def convert_nan_to_empty_cell(value):
if pd.isna(value):
return ''
else:
return value
def preprocess_df(raw_df):
"""cleans raw_df
Args:
        raw_df: raw data frame to be used as starting point for cleaning
"""
df = raw_df.copy()
df['GeoId'] = df['STATEID'].apply(convert_geoId)
# convert missing values to NaN for aggregation
for column_name in list(df.columns):
df[column_name] = df[column_name].apply(convert_missing_value_to_nan)
#get columns matching stat var names and add aggregate columns
df_out = pd.DataFrame(get_columns(df))
#convert NaN to empty cell
for column_name in list(df_out.columns):
df_out[column_name] = df_out[column_name].apply(
convert_nan_to_empty_cell)
return df_out
def main(args):
filename = FLAGS.preprocess_file
print('Processing {0}'.format(filename))
df = pd.read_csv(filename, delimiter='\t')
processed_df = preprocess_df(df)
processed_df.to_csv(filename.replace('.tsv', '_processed.csv'), index=False)
print('Done processing {0}'.format(filename))
if __name__ == '__main__':
app.run(main)
| apache-2.0 | 8,942,910,421,515,007,000 | 45.867647 | 174 | 0.653488 | false | 2.993425 | false | false | false |
scotwk/cloud-custodian | tools/zerodark/zerodark/ipdb.py | 1 | 24394 | # Copyright 2017-2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Kapil Thangavelu <[email protected]>"
import boto3
import click
from c7n.credentials import SessionFactory
from c7n.sqsexec import MessageIterator
from collections import Counter
from concurrent.futures import ProcessPoolExecutor, as_completed
from datetime import timedelta
from dateutil.parser import parse as date_parse
import gzip
import json
import logging
import multiprocessing
import os
import sqlite3
import time
import yaml
from .constants import RESOURCE_KEY, REGION_KEY
from .metrics import Resource
from .utils import human_size, unwrap, get_dates
log = logging.getLogger('zerodark.ipdb')
APP_TAG = os.environ.get('APP_TAG', 'app')
ENV_TAG = os.environ.get('ENV_TAG', 'env')
CONTACT_TAG = os.environ.get('CONTACT_TAG', 'contact')
def download_config(
client, bucket, prefix, account_id, region, day, store, rtypes=()):
config_prefix = "%sAWSLogs/%s/Config/%s/%s/ConfigHistory/" % (
prefix,
account_id,
region,
day.strftime('%Y/%-m/%-d'))
results = client.list_objects_v2(
Bucket=bucket,
Prefix=config_prefix)
if not os.path.exists(store):
os.makedirs(store)
files = []
downloads = Counter()
for k in results.get('Contents', ()):
found = False
for rt in rtypes:
if rt in k['Key']:
found = True
if not found:
continue
fname = k['Key'].rsplit('/', 1)[-1]
fpath = os.path.join(store, fname)
files.append(fpath)
if os.path.exists(fpath):
downloads['Cached'] += 1
downloads['CacheSize'] += k['Size']
continue
downloads['Downloads'] += 1
downloads['DownloadSize'] += k['Size']
client.download_file(bucket, k['Key'], fpath)
log.debug(
"Downloaded:%d Size:%d Cached:%d Size:%s Prefix:%s",
downloads['Downloads'],
downloads['DownloadSize'],
downloads['Cached'],
downloads['CacheSize'],
config_prefix)
return files, downloads
def process_account_resources(
account_id, bucket, prefix, region,
store, start, end, resource='NetworkInterface'):
client = boto3.client('s3')
files = []
t = time.time()
period_stats = Counter()
period = (end - start).days
resource = RESOURCE_MAPPING[resource]
for i in range(period):
day = start + timedelta(i)
d_files, stats = download_config(
client, bucket, prefix, account_id, region, day, store,
rtypes=(resource,))
files.extend(d_files)
period_stats.update(stats)
period_stats['FetchTime'] = int(time.time() - t)
return files, period_stats
def resource_info(eni_cfg):
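    """Classify an ENI configuration record into a (resource type key, resource id) pair
    based on its description and attachment fields."""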
desc = eni_cfg.get('description')
instance_id = eni_cfg['attachment'].get('instanceId', '')
if instance_id:
rtype = RESOURCE_KEY['ec2']
rid = instance_id
elif desc.startswith('ELB app/'):
rtype = RESOURCE_KEY["alb"]
rid = desc.split('/')[1]
elif desc.startswith('ELB net/'):
rtype = RESOURCE_KEY["nlb"]
rid = desc.split('/')[1]
elif desc.startswith('ELB '):
rtype = RESOURCE_KEY['elb']
rid = desc.split(' ', 1)[1]
elif desc.startswith('AWS ElasticMapReduce'):
rtype = RESOURCE_KEY['emr']
rid = desc.rsplit(' ', 1)[1]
elif desc.startswith('AWS created network interface for directory'):
rtype = RESOURCE_KEY['dir']
rid = desc.rsplit(' ', 1)[1]
elif desc.startswith('AWS Lambda VPC ENI:'):
rtype = RESOURCE_KEY['lambda']
rid = eni_cfg['requesterId'].split(':', 1)[1]
elif desc == 'RDSNetworkInterface':
rtype = RESOURCE_KEY['rds']
rid = ''
elif desc == 'RedshiftNetworkInterface':
rtype = RESOURCE_KEY['redshift']
rid = ''
elif desc.startswith('ElastiCache '):
rtype = RESOURCE_KEY['elasticache']
rid = desc.split(' ', 1)[1]
elif desc.startswith('ElastiCache+'):
rtype = RESOURCE_KEY['elasticache']
rid = desc.split('+', 1)[1]
elif desc.startswith('Interface for NAT Gateway '):
rtype = RESOURCE_KEY['nat']
rid = desc.rsplit(' ', 1)[1]
elif desc.startswith('EFS mount target'):
rtype = RESOURCE_KEY['efs-mount']
fsid, fsmd = desc.rsplit(' ', 2)[1:]
rid = "%s:%s" % (fsid, fsmd[1:-1])
elif desc.startswith('CloudHSM Managed Interface'):
rtype = RESOURCE_KEY['hsm']
rid = ''
elif desc.startswith('CloudHsm ENI '):
rtype = RESOURCE_KEY['hsmv2']
rid = desc.rsplit(' ', 1)[1]
elif desc == 'DMSNetworkInterface':
rtype = RESOURCE_KEY['dms']
rid = ''
elif desc.startswith('DAX '):
rtype = RESOURCE_KEY['dax']
rid = desc.rsplit(' ', 1)[1]
elif desc.startswith('arn:aws:ecs:'):
# a running task with attached net
# 'arn:aws:ecs:us-east-1:0111111111110:attachment/37a927f2-a8d1-46d7-8f96-d6aef13cc5b0'
# also has public ip.
rtype = RESOURCE_KEY['ecs']
rid = desc.rsplit('/', 1)[1]
elif desc.startswith('VPC Endpoint Interface'):
# instanceOwnerId: amazon-aws
# interfaceType: 'vpc_endpoint'
rtype = RESOURCE_KEY['vpce']
rid = desc.rsplit(' ', 1)[1]
elif eni_cfg['attachment']['instanceOwnerId'] == 'aws-lambda':
rtype = RESOURCE_KEY['lambda']
rid = eni_cfg['requesterId'].split(':', 1)[1]
else:
rtype = RESOURCE_KEY['unknown']
rid = json.dumps(eni_cfg)
return rtype, rid
def resource_config_iter(files, batch_size=10000):
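    """Yield batches of configuration items parsed from gzipped AWS Config history files."""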
for f in files:
with gzip.open(f) as fh:
data = json.load(fh)
for config_set in chunks(data['configurationItems'], batch_size):
yield config_set
def record_stream_filter(record_stream, record_filter, batch_size=5000):
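    """Yield batches of records from record_stream that pass record_filter."""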
batch = []
for record_set in record_stream:
for r in record_set:
if record_filter(r):
batch.append(r)
if len(batch) % batch_size == 0:
yield batch
batch = []
if batch:
yield batch
EBS_SCHEMA = """
create table if not exists ebs (
volume_id text primary key,
instance_id text,
account_id text,
region text,
app text,
env text,
contact text,
start text,
end text
)
"""
def index_ebs_files(db, record_stream):
stats = Counter()
t = time.time()
with sqlite3.connect(db) as conn:
cursor = conn.cursor()
cursor.execute(EBS_SCHEMA)
rows = []
deletes = {}
skipped = 0
for record_set in record_stream:
for cfg in record_set:
stats['Records'] += 1
stats['Record%s' % cfg['configurationItemStatus']] += 1
if cfg['configurationItemStatus'] in ('ResourceDeleted',):
deletes[cfg['resourceId']] = cfg['configurationItemCaptureTime']
continue
if not cfg['configuration'].get('attachments'):
skipped += 1
continue
rows.append((
cfg['resourceId'],
cfg['configuration']['attachments'][0]['instanceId'],
cfg['awsAccountId'],
cfg['awsRegion'],
cfg['tags'].get(APP_TAG),
cfg['tags'].get(ENV_TAG),
cfg['tags'].get(CONTACT_TAG),
cfg['resourceCreationTime'],
None))
if rows:
for idx, r in enumerate(rows):
if r[0] in deletes:
rows[idx] = list(r)
rows[idx][-1] = deletes[r[0]]
cursor.executemany(
'''insert or replace into ebs values (?, ?, ?, ?, ?, ?, ?, ?, ?)''', rows)
stats['RowCount'] += len(rows)
log.debug("ebs stored:%d", len(rows))
stats['IndexTime'] = int(time.time() - t)
return stats
EC2_SCHEMA = """
create table if not exists ec2 (
instance_id text primary key,
account_id text,
region text,
ip_address text,
app text,
env text,
contact text,
asg text,
start datetime,
end datetime
"""
def index_ec2_files(db, record_stream):
stats = Counter()
t = time.time()
with sqlite3.connect(db) as conn:
cursor = conn.cursor()
cursor.execute(EC2_SCHEMA)
rows = []
deletes = []
for record_set in record_stream:
for cfg in record_set:
stats['Records'] += 1
stats['Record%s' % cfg['configurationItemStatus']] += 1
if cfg['configurationItemStatus'] in ('ResourceDeleted',):
deletes.append(((
cfg['configurationItemCaptureTime'], cfg['resourceId'])))
continue
if not cfg.get('tags'):
continue
rows.append((
cfg['resourceId'],
cfg['awsAccountId'],
cfg['awsRegion'],
cfg['configuration'].get('privateIpAddress', ''),
cfg['tags'].get(APP_TAG),
cfg['tags'].get(ENV_TAG),
cfg['tags'].get(CONTACT_TAG),
cfg['tags'].get('aws:autoscaling:groupName', ''),
cfg['resourceCreationTime'],
None))
if len(rows) % 1000 == 0:
stats['RowCount'] += len(rows)
cursor.executemany(
'''insert or replace into ec2 values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''',
rows)
rows = []
if deletes:
log.info("Delete count %d", len(deletes))
stmt = 'update ec2 set end = ? where instance_id = ?'
for p in deletes:
cursor.execute(stmt, p)
if rows:
cursor.executemany(
'''insert or replace into ec2 values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''', rows)
log.debug("ec2s stored:%d", len(rows))
stats['RowCount'] += len(rows)
stats['IndexTime'] = int(time.time() - t)
return stats
S3_SCHEMA = """
create table if not exists buckets (
name text,
account_id text,
region text,
app text,
env text,
contact text,
start datetime,
end datetime,
resource text
)"""
def index_s3_files(db, record_stream):
stats = Counter()
t = time.time()
with sqlite3.connect(db) as conn:
cursor = conn.cursor()
cursor.execute(S3_SCHEMA)
deletes = {}
rows = []
for record_set in record_stream:
for cfg in record_set:
stats['Records'] += 1
stats['Record%s' % cfg['configurationItemStatus']] += 1
if cfg['configurationItemStatus'] == 'ResourceNotRecorded':
continue
if cfg['configurationItemStatus'] in ('ResourceDeleted'):
deletes[cfg['resourceId']] = cfg['configurationItemCaptureTime']
rows.append((
cfg['resourceId'], None, None, None, None, None, None,
cfg['configurationItemCaptureTime'], None))
continue
rows.append((
cfg['resourceId'],
cfg['awsAccountId'],
cfg['awsRegion'],
cfg['tags'].get(APP_TAG),
cfg['tags'].get(ENV_TAG),
cfg['tags'].get(CONTACT_TAG),
cfg['resourceCreationTime'],
None,
json.dumps(cfg)))
                # flush accumulated rows in batches of 10000
                if len(rows) % 10000 == 0:
                    cursor.executemany(
                        '''insert or replace into buckets values (?, ?, ?, ?, ?, ?, ?, ?, ?)''', rows)
                    stats['RowCount'] += len(rows)
                    rows = []
if rows:
cursor.executemany(
'''insert or replace into buckets values (?, ?, ?, ?, ?, ?, ?, ?, ?)''', rows)
stats['RowCount'] += len(rows)
stats['IndexTime'] = int(time.time() - t)
return stats
ELB_SCHEMA = """
create table if not exists elbs (
name text primary key,
account_id text,
region text,
app text,
env text,
contact text,
start datetime,
end datetime
)"""
def index_elb_files(db, record_stream):
stats = Counter()
t = time.time()
with sqlite3.connect(db) as conn:
cursor = conn.cursor()
cursor.execute(ELB_SCHEMA)
rows = []
deletes = {}
for record_set in record_stream:
for cfg in record_set:
stats['Records'] += 1
stats['Record%s' % cfg['configurationItemStatus']] += 1
if cfg['configurationItemStatus'] in ('ResourceDeleted',):
deletes[cfg['resourceId']] = cfg['configurationItemCaptureTime']
continue
rows.append((
cfg['resourceName'],
cfg['awsAccountId'],
cfg['awsRegion'],
cfg['tags'].get(APP_TAG),
cfg['tags'].get(ENV_TAG),
cfg['tags'].get(CONTACT_TAG),
cfg['resourceCreationTime'],
None))
if rows:
for idx, r in enumerate(rows):
if r[0] in deletes:
rows[idx] = list(r)
rows[idx][-1] = deletes[r[0]]
cursor.executemany(
'''insert or replace into elbs values (?, ?, ?, ?, ?, ?, ?, ?)''', rows)
stats['RowCount'] += len(rows)
log.debug("elbs stored:%d", len(rows))
stats['IndexTime'] = int(time.time() - t)
return stats
ENI_SCHEMA = """
create table if not exists enis (
eni_id text primary key,
ip_address text,
account_id text,
resource_id text,
resource_type integer,
subnet_id text,
region integer,
start datetime,
end datetime
)"""
def index_eni_files(db, record_stream):
stats = Counter()
t = time.time()
with sqlite3.connect(db) as conn:
cursor = conn.cursor()
cursor.execute(ENI_SCHEMA)
cursor.execute('create index if not exists eni_idx on enis(ip_address)')
rows = []
skipped = 0
deletes = {}
rids = set()
for record_set in record_stream:
for cfg in record_set:
stats['Records'] += 1
stats['Record%s' % cfg['configurationItemStatus']] += 1
if cfg['configurationItemStatus'] not in (
'ResourceDeleted', 'ResourceDiscovered', 'OK'):
raise ValueError(cfg)
if cfg['configurationItemStatus'] in ('ResourceDeleted',):
deletes[cfg['resourceId']] = cfg['configurationItemCaptureTime']
continue
eni = cfg['configuration']
if 'attachment' not in eni or cfg['resourceId'] in rids:
skipped += 1
continue
rids.add(cfg['resourceId'])
rtype, rid = resource_info(eni)
rows.append((
eni['networkInterfaceId'],
eni['privateIpAddress'],
cfg['awsAccountId'],
rid,
rtype,
eni['subnetId'],
REGION_KEY[cfg['awsRegion']],
eni['attachment'].get('attachTime') or cfg['configurationItemCaptureTime'],
None))
log.debug(
"Records:%d Insert:%d Deletes:%d Skipped:%d Discovered:%d Deleted:%d Ok:%d",
stats['Records'], len(rows), len(deletes), skipped,
stats['RecordResourceDiscovered'], stats['RecordResourceDeleted'],
stats['RecordOK'])
if rows:
for idx, r in enumerate(rows):
if r[0] in deletes:
rows[idx] = list(r)
rows[idx][-1] = deletes[r[0]]
del deletes[r[0]]
try:
cursor.executemany(
'''insert into enis values (?, ?, ?, ?, ?, ?, ?, ?, ?)''', rows)
except Exception:
log.error("Error inserting enis account:%s rows:%d",
cfg['awsAccountId'], len(rows))
stats['RowCount'] += len(rows)
# result = cursor.execute('select count(distinct ip_address) from enis').fetchone()
stats['SkipCount'] = skipped
stats['IndexTime'] = int(time.time() - t)
return stats
def chunks(iterable, size=50):
"""Break an iterable into lists of size"""
batch = []
for n in iterable:
batch.append(n)
if len(batch) % size == 0:
yield batch
batch = []
if batch:
yield batch
RESOURCE_MAPPING = {
'Instance': 'AWS::EC2::Instance',
'LoadBalancer': 'AWS::ElasticLoadBalancing',
'NetworkInterface': 'AWS::EC2::NetworkInterface',
'Volume': 'AWS::EC2::Volume',
'Bucket': 'AWS::S3::Bucket'
}
RESOURCE_FILE_INDEXERS = {
'Instance': index_ec2_files,
'NetworkInterface': index_eni_files,
'LoadBalancer': index_elb_files,
'Volume': index_ebs_files,
'Bucket': index_s3_files
}
@click.group()
def cli():
"""AWS Network Resource Database"""
@cli.command('worker')
@click.option('--queue')
@click.option('--s3-key')
@click.option('--period', default=60, type=click.INT)
@click.option('--verbose', default=False, is_flag=True)
def worker_config(queue, s3_key, period, verbose):
"""daemon queue worker for config notifications"""
logging.basicConfig(level=(verbose and logging.DEBUG or logging.INFO))
logging.getLogger('botocore').setLevel(logging.WARNING)
logging.getLogger('s3transfer').setLevel(logging.WARNING)
queue, region = get_queue(queue)
factory = SessionFactory(region)
session = factory()
client = session.client('sqs')
messages = MessageIterator(client, queue, timeout=20)
for m in messages:
msg = unwrap(m)
if 'configurationItemSummary' in msg:
rtype = msg['configurationItemSummary']['resourceType']
else:
rtype = msg['configurationItem']['resourceType']
if rtype not in RESOURCE_MAPPING.values():
log.info("skipping %s" % rtype)
messages.ack(m)
log.info("message received %s", m)
def get_queue(queue):
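    """Normalize a queue URL or ARN into a (queue_url, region) tuple."""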
if queue.startswith('https://queue.amazonaws.com'):
region = 'us-east-1'
queue_url = queue
elif queue.startswith('https://sqs.'):
region = queue.split('.', 2)[1]
queue_url = queue
elif queue.startswith('arn:sqs'):
queue_arn_split = queue.split(':', 5)
region = queue_arn_split[3]
owner_id = queue_arn_split[4]
queue_name = queue_arn_split[5]
queue_url = "https://sqs.%s.amazonaws.com/%s/%s" % (
region, owner_id, queue_name)
return queue_url, region
@cli.command('list-app-resources')
@click.option('--app')
@click.option('--env')
@click.option('--cmdb')
@click.option('--start')
@click.option('--end')
@click.option('--tz')
@click.option(
'-r', '--resources', multiple=True,
type=click.Choice(['Instance', 'LoadBalancer', 'Volume']))
def list_app_resources(
app, env, resources, cmdb, start, end, tz):
"""Analyze flow log records for application and generate metrics per period"""
logging.basicConfig(level=logging.INFO)
start, end = get_dates(start, end, tz)
all_resources = []
for rtype_name in resources:
rtype = Resource.get_type(rtype_name)
resources = rtype.get_resources(cmdb, start, end, app, env)
all_resources.extend(resources)
print(json.dumps(all_resources, indent=2))
@cli.command('load-resources')
@click.option('--bucket', required=True, help="Config Bucket")
@click.option('--prefix', required=True, help="Config Bucket Prefix")
@click.option('--region', required=True, help="Load Config for Region")
@click.option('--account-config', type=click.File('rb'), required=True)
@click.option('-a', '--accounts', multiple=True)
@click.option('--assume', help="Assume role")
@click.option('--start')
@click.option('--end')
@click.option('-r', '--resources', multiple=True,
type=click.Choice(list(RESOURCE_FILE_INDEXERS.keys())))
@click.option('--store', type=click.Path())
@click.option('-f', '--db')
@click.option('-v', '--verbose', is_flag=True)
@click.option('--debug', is_flag=True)
def load_resources(bucket, prefix, region, account_config, accounts,
assume, start, end, resources, store, db, verbose, debug):
"""load resources into resource database."""
logging.basicConfig(level=(verbose and logging.DEBUG or logging.INFO))
logging.getLogger('botocore').setLevel(logging.WARNING)
logging.getLogger('s3transfer').setLevel(logging.WARNING)
start = date_parse(start)
end = date_parse(end)
if not resources:
resources = ['NetworkInterface', 'Instance', 'LoadBalancer']
account_map = {}
data = yaml.safe_load(account_config.read())
for a in data.get('accounts', ()):
if accounts and (a['name'] in accounts or a['account_id'] in accounts):
account_map[a['account_id']] = a
elif not accounts:
account_map[a['account_id']] = a
account_ids = list(account_map)
executor = ProcessPoolExecutor
if debug:
from c7n.executor import MainThreadExecutor
MainThreadExecutor.async = False
executor = MainThreadExecutor
stats = Counter()
t = time.time()
with executor(max_workers=multiprocessing.cpu_count()) as w:
futures = {}
for a in account_ids:
for r in resources:
futures[w.submit(
process_account_resources, a, bucket, prefix,
region, store, start, end, r)] = (a, r)
for f in as_completed(futures):
a, r = futures[f]
if f.exception():
log.error("account:%s error:%s", a, f.exception())
continue
files, dl_stats = f.result()
            # use the indexer that matches this future's resource type
            idx_stats = RESOURCE_FILE_INDEXERS[r](db, resource_config_iter(files))
log.info(
"loaded account:%s files:%d bytes:%s events:%d resources:%d idx-time:%d dl-time:%d",
account_map[a]['name'], len(files),
human_size(dl_stats['DownloadSize'] + dl_stats['CacheSize']),
idx_stats['Records'],
idx_stats['RowCount'],
idx_stats['IndexTime'],
dl_stats['FetchTime'])
stats.update(dl_stats)
stats.update(idx_stats)
log.info("Loaded %d resources across %d accounts in %0.2f",
stats['RowCount'], len(account_ids), time.time() - t)
if __name__ == '__main__':
try:
cli()
except Exception:
import pdb, traceback, sys
traceback.print_exc()
pdb.post_mortem(sys.exc_info()[-1])
| apache-2.0 | -6,660,086,870,212,036,000 | 33.309423 | 100 | 0.540051 | false | 4.058902 | true | false | false |
jjdmol/LOFAR | LTA/LTAIngest/dav/webdav/acp/Acl.py | 1 | 10978 | # pylint: disable-msg=W0622
#
# Copyright 2008 German Aerospace Center (DLR)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
ACL object handling according to WebDAV ACP specification.
"""
from webdav.acp.Ace import ACE
from webdav import Constants
from webdav.Connection import WebdavError
from davlib import XML_DOC_HEADER
__version__ = "$LastChangedRevision$"
class ACL(object):
"""
    This class provides access to Access Control List functionality
as specified in the WebDAV ACP.
@ivar aces: ACEs in ACL
@type aces: C{list} of L{ACE} objects
@ivar withInherited: Flag indicating whether ACL contains inherited ACEs.
@type withInherited: C{bool}
"""
# restrict instance variables
__slots__ = ('aces', 'withInherited')
def __init__(self, domroot=None, aces=None):
"""
Constructor should be called with either no parameters (create blank ACE),
or one parameter (a DOM tree or ACE list).
@param domroot: A DOM tree (default: None).
@type domroot: L{webdav.WebdavResponse.Element} object
@param aces: ACE objects (default: None)
@type aces: C{list} of L{ACE} objects
@raise WebdavError: When non-valid parameters are passed a L{WebdavError} is raised.
"""
self.withInherited = None
self.aces = []
if domroot:
for child in domroot.children:
if child.name == Constants.TAG_ACE and child.ns == Constants.NS_DAV:
self.addAce(ACE(child))
else:
# This shouldn't happen, someone screwed up with the params ...
raise WebdavError('Non-ACE tag handed to ACL constructor: ' + child.ns + child.name)
elif isinstance(aces, list) or isinstance(aces, tuple):
self.addAces(aces)
elif domroot == None and aces == None:
# no param ==> blank object
pass
else:
# This shouldn't happen, someone screwed up with the params ...
raise WebdavError('non-valid parameters handed to ACL constructor')
def __cmp__(self, other):
if not isinstance(other, ACL):
return 1
if self.withInherited == other.withInherited:
equal = 1
for ace in self.aces:
inList = 0
for otherAce in other.aces:
if ace == otherAce:
inList = 1
if inList == 0:
equal = 0
return not equal
else:
return 1
def __repr__(self):
repr = '<class ACL: '
if self.withInherited:
repr += 'with inherited, '
first = 1
repr += 'aces: ['
for ace in self.aces:
if first:
repr += '%s' % ace
first = 0
else:
repr += ', %s' % ace
return '%s]>' % (repr)
def copy(self, other):
'''Copy an ACL object.
@param other: Another ACL to copy.
@type other: L{ACL} object
@raise WebdavError: When an object that is not an L{ACL} is passed
a L{WebdavError} is raised.
'''
if not isinstance(other, ACL):
raise WebdavError('Non-ACL object passed to copy method: %s' % other.__class__)
self.withInherited = other.withInherited
if other.aces:
self.addAces(other.aces)
def toXML(self):
"""
Returns ACL content as a string of valid XML as described in WebDAV ACP.
"""
aclTag = 'D:' + Constants.TAG_ACL
return XML_DOC_HEADER +\
'<' + aclTag + ' xmlns:D="DAV:">' + reduce(lambda xml, ace: xml + ace.toXML() + '\n', [''] + self.aces) +\
'</' + aclTag + '>'
def addAce(self, ace):
'''
Adds the passed ACE object to list if it's not in it, yet.
@param ace: An ACE.
@type ace: L{ACE} object
'''
newAce = ACE()
newAce.copy(ace)
# only add it if it's not in the list, yet ...
inList = 0
for element in self.aces:
if element == ace:
inList = 1
if not inList:
self.aces.append(newAce)
def addAces(self, aces):
'''Adds the list of passed ACE objects to list.
@param aces: ACEs
@type aces: sequence of L{ACE} objects
'''
for ace in aces:
self.addAce(ace)
def delAce(self, ace):
'''Deletes the passed ACE object from list.
@param ace: An ACE.
@type ace: L{ACE} object
@raise WebdavError: When the ACE to be deleted is not within the ACL
a L{WebdavError} is raised.
'''
# find where it is and delete it ...
count = 0
index = 0
for element in self.aces:
count += 1
if element == ace:
index = count
if index:
self.aces.pop(index - 1)
else:
raise WebdavError('ACE to be deleted not in list: %s.' % ace)
def delAces(self, aces):
'''Deletes the list of passed ACE objects from list.
@param aces: ACEs
@type aces: sequence of L{ACE} objects
'''
for ace in aces:
self.delAce(ace)
def delPrincipalsAces(self, principal):
"""
Deletes all ACEs in ACL by given principal.
@param principal: A principal.
@type principal: L{Principal} object
"""
# find where it is and delete it ...
index = 0
while index < len(self.aces):
if self.aces[index].principal.principalURL == principal.principalURL:
self.aces.pop(index)
else:
index += 1
def joinGrantDeny(self):
"""
Returns a "refined" ACL of the ACL for ease of use in the UI.
The purpose is to post the user an ACE that can contain both, granted
and denied, privileges. So possible pairs of grant and deny ACEs are joined
to return them in one ACE. This resulting ACE then of course IS NOT valid
for setting ACLs anymore. They will have to be reconverted to yield valid
ACLs for the ACL method.
@return: A (non-valid) ACL that contains both grant and deny clauses in an ACE.
@rtype: L{ACL} object
"""
joinedAces = {}
for ace in self.aces:
if not ace.principal.principalURL is None:
principalKey = ace.principal.principalURL
elif not ace.principal.property is None:
principalKey = ace.principal.property
else:
principalKey = None
if ace.inherited:
principalKey = ace.inherited + ":" + principalKey
if principalKey in joinedAces:
joinedAces[principalKey].addGrantDenies(ace.grantDenies)
else:
joinedAces[principalKey] = ACE()
joinedAces[principalKey].copy(ace)
newAcl = ACL()
newAcl.addAces(joinedAces.values())
return newAcl
def splitGrantDeny(self):
"""
Returns a "refined" ACL of the ACL for ease of use in the UI.
The purpose is to post the user an ACE that can contain both, granted
and denied, privileges. So possible joined grant and deny clauses in ACEs
splitted to return them in separate ACEs. This resulting ACE then is valid
for setting ACLs again. This method is to be seen in conjunction with the
method joinGrantDeny as it reverts its effect.
@return: A valid ACL that contains only ACEs with either grant or deny clauses.
@rtype: L{ACL} object
"""
acesGrant = {}
acesDeny = {}
for ace in self.aces:
for grantDeny in ace.grantDenies:
if grantDeny.isGrant():
if ace.principal.principalURL in acesGrant:
ace.addGrantDeny(grantDeny)
else:
acesGrant[ace.principal.principalURL] = ACE()
acesGrant[ace.principal.principalURL].copy(ace)
acesGrant[ace.principal.principalURL].grantDenies = []
acesGrant[ace.principal.principalURL].addGrantDeny(grantDeny)
else:
if ace.principal.principalURL in acesDeny:
ace.addGrantDeny(grantDeny)
else:
acesDeny[ace.principal.principalURL] = ACE()
acesDeny[ace.principal.principalURL].copy(ace)
acesDeny[ace.principal.principalURL].grantDenies = []
acesDeny[ace.principal.principalURL].addGrantDeny(grantDeny)
newAcl = ACL()
newAcl.addAces(acesGrant.values())
newAcl.addAces(acesDeny.values())
return newAcl
def isValid(self):
"""
Returns true (1) if all contained ACE objects are valid,
otherwise false (0) is returned.
@return: Validity of ACL.
@rtype: C{bool}
"""
valid = 1
if len(self.aces):
for ace in self.aces:
if not ace.isValid():
valid = 0
return valid
def stripAces(self, inherited=True, protected=True):
"""
Returns an ACL object with all ACEs stripped that are inherited
and/or protected.
@param inherited: Flag to indicate whether inherited ACEs should
be stripped (default: True).
@type inherited: C{bool}
@param protected: Flag to indicate whether protected ACEs should
be stripped (default: True).
@type protected: C{bool}
@return: An ACL without the stripped ACEs.
@rtype: L{ACL} object
"""
newAcl = ACL()
if len(self.aces):
for ace in self.aces:
keep = 1
if inherited and ace.inherited:
keep = 0
elif protected and ace.protected:
keep = 0
if keep:
newAcl.addAce(ace)
return newAcl
| gpl-3.0 | 5,891,104,285,362,545,000 | 34.299035 | 118 | 0.55092 | false | 4.081041 | false | false | false |
thundernet8/WRGameVideos-API | venv/lib/python2.7/site-packages/flask_jsonpify.py | 1 | 2543 | from flask import current_app, json, request
def __pad(strdata):
""" Pads `strdata` with a Request's callback argument, if specified, or does
nothing.
"""
if request.args.get('callback'):
return "%s(%s);" % (request.args.get('callback'), strdata)
else:
return strdata
def __mimetype():
if request.args.get('callback'):
return 'application/javascript'
else:
return 'application/json'
def __dumps(*args, **kwargs):
""" Serializes `args` and `kwargs` as JSON. Supports serializing an array
as the top-level object, if it is the only argument.
"""
indent = None
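    # pretty-print JSON for regular (non-XHR) requests when JSONIFY_PRETTYPRINT_REGULAR is set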
if (current_app.config.get('JSONIFY_PRETTYPRINT_REGULAR', False)
and not request.is_xhr):
indent = 2
    return json.dumps(args[0] if len(args) == 1 else dict(*args, **kwargs),
indent=indent)
def jsonpify(*args, **kwargs):
"""Creates a :class:`~flask.Response` with the JSON or JSON-P
representation of the given arguments with an `application/json`
or `application/javascript` mimetype, respectively. The arguments
to this function are the same as to the :class:`dict` constructor,
but also accept an array. If a `callback` is specified in the
request arguments, the response is JSON-Padded.
Example usage::
@app.route('/_get_current_user')
def get_current_user():
return jsonify(username=g.user.username,
email=g.user.email,
id=g.user.id)
GET /_get_current_user:
This will send a JSON response like this to the browser::
{
"username": "admin",
"email": "admin@localhost",
"id": 42
}
or, if a callback is specified,
GET /_get_current_user?callback=displayUsers
Will result in a JSON response like this to the browser::
displayUsers({
"username": "admin",
"email": "admin@localhost",
"id": 42
});
This requires Python 2.6 or an installed version of simplejson. For
security reasons only objects are supported toplevel. For more
information about this, have a look at :ref:`json-security`.
.. versionadded:: 0.2
"""
return current_app.response_class(__pad(__dumps(*args, **kwargs)),
mimetype=__mimetype())
jsonify = jsonpify # allow override of Flask's jsonify.
| gpl-2.0 | -717,886,847,209,968,100 | 29.395062 | 80 | 0.583956 | false | 4.231281 | false | false | false |
taoliu/taolib | Scripts/ce_histone_matrix.py | 1 | 17749 | #!/usr/bin/env python
# Time-stamp: <2010-09-08 02:38:38 Tao Liu>
"""Module Description
Copyright (c) 2008 Tao Liu <[email protected]>
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD License (see the file COPYING included with
the distribution).
@status: experimental
@version: $Revision$
@author: Tao Liu
@contact: [email protected]
"""
# ------------------------------------
# python modules
# ------------------------------------
import os
import sys
import re
import csv
import logging
from optparse import OptionParser
import reportlab
import Bio
from taolib.CoreLib.FeatIO import WigTrackI
from taolib.CoreLib.BasicStat.Func import mean,median,std
# ------------------------------------
# constants
# ------------------------------------
logging.basicConfig(level=20,
format='%(levelname)-5s @ %(asctime)s: %(message)s ',
datefmt='%a, %d %b %Y %H:%M:%S',
stream=sys.stderr,
filemode="w"
)
error = logging.critical # function alias
warn = logging.warning
debug = logging.debug
info = logging.info
# ------------------------------------
# Misc functions
# ------------------------------------
def andfilter ( cvsfile, write_func, *args ):
"""
"""
argv = args[0]
if len(argv) < 2:
sys.stderr.write("Need two extra arguments for 'organize', e.g. command: <1,2,3> <4,5,6> means the first 1,2,3 will be used as dependent variables/response, and 4,5,6 will be used as independent variables/terms/predictors.\n")
sys.exit()
responses_num = map(int,argv[0].split(","))
predictors_num = map(int,argv[1].split(","))
fields = cvsfile.fieldnames
responses_label = map(lambda x:"res."+fields[x],responses_num)
predictors_label = map(lambda x:"pre."+fields[x],predictors_num)
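    # note: these "res."/"pre." prefixed labels are built but not used below; only the raw field names are written out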
responses_name = map(lambda x:fields[x],responses_num)
predictors_name = map(lambda x:fields[x],predictors_num)
#write_func( "#%s\t%s\n" \
# % ( ",".join(map( lambda x:str(x[0])+":"+str(x[1]) , zip(responses_num,responses_name) )),
# ",".join(map( lambda x:str(x[0])+":"+str(x[1]) , zip(predictors_num,predictors_name) ))) )
write_func( "%s\t%s\n" \
% ( ",".join(map( lambda x:str(x) , responses_name )),
",".join(map( lambda x:str(x) , predictors_name ))) )
for l in cvsfile:
# for responses
t_str_list = []
for t in responses_name:
t_str_list.append(l.setdefault(t,"NA"))
# for predictors
v_str_list = []
for v in predictors_name:
v_str_list.append(l.setdefault(v,"NA"))
write_func( "\t".join( (",".join(t_str_list),",".join(v_str_list)) ) )
write_func( "\n" )
def combcall2draw ( cvsfile, write_func, *args ):
"""User specifies several columns to consider, this tool will call
regions where either of the column is above its threshold.
"""
argv = args[0]
if len(argv) < 6:
sys.stderr.write("Need 6 extra arguments for 'combcall2draw', options <loc column> <score column1[,score column2,...]> <cutoff1[,cutoff2,cutoff3]> <min length> <max gap> <pdf filename>\ne.g. command: <0> <1,2,3> <0.5,0.6,0.7> <10000> <2000> <a.pdf>, means to use the first column as genome coordinations to call enriched regions from the combinition of #1, #2 and #3, the thresholds to call enriched region are 0.5 for column 1, 0.6 for column 2 and 0.7 for column 3, the minimum length of region is 10k, and the maximum gap to link two nearby regions is 2k. Then the figure will be saved in a.pdf.\n")
sys.exit()
cor_column = cvsfile.fieldnames[int(argv[0])]
var_columns = map(lambda x:cvsfile.fieldnames[int(x)],argv[1].split(","))
cutoffs = map(float,argv[2].split(","))
min_len = int(argv[3])
max_gap = int(argv[4])
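    # cutoffs are matched to score columns by position, so both comma-separated lists must have the same length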
    wtrack = WigTrackI() # combined track: a bin gets 1.1 if any selected column is above its cutoff
add_func = wtrack.add_loc
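    # coordinate fields look like "chrom.start.end"; a bin is marked with 1.1 when at
    # least one selected column exceeds its cutoff, so call_peaks(cutoff=1.0) keeps it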
for l in cvsfile:
cor = l.setdefault(cor_column,None)
if not cor or cor =="NA":
continue
for i in range(len(var_columns)):
var_column = var_columns[i]
cutoff = cutoffs[i]
var = l.setdefault(var_column,None)
if var and var != "NA" and float(var) > cutoff:
(chrom,start,end) = cor.split(".")
add_func(chrom,int(start),1.1)
break
wtrack.span = int(end)-int(start)
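    # span is taken from the last parsed bin, assuming fixed-width bins and that at least one bin passed a cutoff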
bpeaks = wtrack.call_peaks(cutoff=1.0,min_length=min_len,max_gap=max_gap)
#f = argv[5]
fhd = open(argv[5].replace("pdf","bed"),"w")
fhd.write(bpeaks.tobed())
from Bio.Graphics import BasicChromosome
from reportlab.lib.colors import gray, black, white
entries = [("chrI", 15072419),
("chrII", 15279316),
("chrIII", 13783681),
("chrIV", 17493784),
("chrV", 20919398),
("chrX", 17718852)]
max_length = max([x[1] for x in entries])
chr_diagram = BasicChromosome.Organism()
for name, length in entries:
cur_chromosome = BasicChromosome.Chromosome(name)
#Set the length, adding and extra 20 percent for the tolomeres:
cur_chromosome.scale_num = max_length * 1.1
# Add an opening telomere
start = BasicChromosome.TelomereSegment()
start.scale = 0.05 * max_length
start.fill_color=gray
cur_chromosome.add(start)
#Add a body - using bp as the scale length here.
try:
cpeaks = bpeaks.peaks[name]
except:
cpeaks = []
body_regions = []
last_pos = 0
for p in cpeaks:
body_regions.append( (p[0]-last_pos,white) ) # outside regions
body_regions.append( (p[1]-p[0],black) ) # enriched regions
last_pos = p[1]
assert p[1] < length
body_regions.append( (length-last_pos,white) ) # last part
for b,c in body_regions:
body = BasicChromosome.ChromosomeSegment()
body.fill_color= c
body.scale = b
cur_chromosome.add(body)
#Add a closing telomere
end = BasicChromosome.TelomereSegment(inverted=True)
end.scale = 0.05 * max_length
end.fill_color=gray
cur_chromosome.add(end)
#This chromosome is done
chr_diagram.add(cur_chromosome)
chr_diagram.draw(argv[5], "Highlight regions in Caenorhabditis elegans" )
def call1draw ( cvsfile, write_func, *args ):
"""Call regions, then plot it in chromosome figure.
A combination of drawchrom and call1
"""
argv = args[0]
if len(argv) < 6:
sys.stderr.write("Need 6 extra arguments for 'call1draw', options <loc column> <score column> <cutoff> <min length> <max gap> <pdf filename>\ne.g. command: <0> <1> <0.5> <10000> <2000> <a.pdf>, means to use the first column as genome coordinations to call enriched regions from the second column, the threshold to call enriched region is 0.5, the minimum length of region is 10k, and the maximum gap to link two nearby regions is 2k. Then the figure will be saved in a.pdf.\n")
sys.exit()
cor_column = cvsfile.fieldnames[int(argv[0])]
var_column = cvsfile.fieldnames[int(argv[1])]
cutoff = float(argv[2])
min_len = int(argv[3])
max_gap = int(argv[4])
wtrack = WigTrackI()
add_func = wtrack.add_loc
for l in cvsfile:
cor = l.setdefault(cor_column,None)
var = l.setdefault(var_column,None)
if cor and var and cor != "NA" and var != "NA":
(chrom,start,end) = cor.split(".")
add_func(chrom,int(start),float(var))
wtrack.span = int(end)-int(start)
bpeaks = wtrack.call_peaks(cutoff=cutoff,min_length=min_len,max_gap=max_gap)
    fhd = open(argv[5].replace("pdf","bed"),"w")
    fhd.write(bpeaks.tobed())
    fhd.close()
from Bio.Graphics import BasicChromosome
from reportlab.lib.colors import gray, black, white
entries = [("chrI", 15072419),
("chrII", 15279316),
("chrIII", 13783681),
("chrIV", 17493784),
("chrV", 20919398),
("chrX", 17718852)]
max_length = max([x[1] for x in entries])
chr_diagram = BasicChromosome.Organism()
for name, length in entries:
cur_chromosome = BasicChromosome.Chromosome(name)
        #Set the length, adding an extra 20 percent for the telomeres:
cur_chromosome.scale_num = max_length * 1.1
# Add an opening telomere
start = BasicChromosome.TelomereSegment()
start.scale = 0.05 * max_length
start.fill_color=gray
cur_chromosome.add(start)
#Add a body - using bp as the scale length here.
        try:
            cpeaks = bpeaks.peaks[name]
        except KeyError:
            # no called peaks on this chromosome
            cpeaks = []
body_regions = []
last_pos = 0
for p in cpeaks:
body_regions.append( (p[0]-last_pos,white) ) # outside regions
body_regions.append( (p[1]-p[0],black) ) # enriched regions
last_pos = p[1]
assert p[1] < length
body_regions.append( (length-last_pos,white) ) # last part
for b,c in body_regions:
body = BasicChromosome.ChromosomeSegment()
body.fill_color= c
body.scale = b
cur_chromosome.add(body)
#Add a closing telomere
end = BasicChromosome.TelomereSegment(inverted=True)
end.scale = 0.05 * max_length
end.fill_color=gray
cur_chromosome.add(end)
#This chromosome is done
chr_diagram.add(cur_chromosome)
chr_diagram.draw(argv[5], "%s regions in Caenorhabditis elegans" % (var_column) )
def drawchrom ( cvsfile, write_func, *args ):
"""Draw CE chromosome tool.
Doesn't need any parameters.
"""
from Bio.Graphics import BasicChromosome
from reportlab.lib.colors import gray, black
entries = [("chrI", 15072419),
("chrII", 15279316),
("chrIII", 13783681),
("chrIV", 17493784),
("chrV", 20919398),
("chrX", 17718852)]
max_length = max([x[1] for x in entries])
chr_diagram = BasicChromosome.Organism()
for name, length in entries:
cur_chromosome = BasicChromosome.Chromosome(name)
        #Set the length, adding an extra 20 percent for the telomeres:
cur_chromosome.scale_num = max_length * 1.1
# Add an opening telomere
start = BasicChromosome.TelomereSegment()
start.scale = 0.05 * max_length
start.fill_color=black
cur_chromosome.add(start)
#Add a body - using bp as the scale length here.
body = BasicChromosome.ChromosomeSegment()
body.fill_color=gray
body.scale = length
cur_chromosome.add(body)
#Add a closing telomere
end = BasicChromosome.TelomereSegment(inverted=True)
end.scale = 0.05 * max_length
end.fill_color=black
cur_chromosome.add(end)
#This chromosome is done
chr_diagram.add(cur_chromosome)
chr_diagram.draw("simple_chrom.pdf", "Caenorhabditis elegans" )
def summary ( cvsfile, write_func, *args ):
"""Show the column names.
"""
fsnames = cvsfile.fieldnames
data_dict = {}
for f in fsnames:
data_dict[f]=[]
#print "\n".join(map( lambda x:":".join(map(str,x)) ,enumerate(fsnames)) )
for l in cvsfile:
for f in fsnames:
v = l.setdefault(f,None)
if v and v!="NA":
data_dict[f].append(v)
write_func( "colnum:colname\tsum,mean,median,std,cutoff\n" )
for (i,f) in enumerate(fsnames):
try:
v_array = map(float,data_dict[f])
v_sum = "%.2f" % sum(v_array)
v_mean = "%.2f" % mean(v_array)
v_median = "%.2f" % median(v_array)
v_std = "%.2f" % std(v_array, float(v_mean))
v_cutoff = "%.2f" % (float(v_mean)+float(v_std))
except ValueError:
(v_sum,v_mean,v_median,v_std,v_cutoff)=["NA"]*5
write_func( "%d:%s\t%s,%s,%s,%s,%s\n" % (i,f,v_sum,v_mean,v_median,v_std,v_cutoff ))
def organize ( cvsfile, write_func, *args ):
"""Re-organize the columns for data-mining.
"""
argv = args[0]
if len(argv) < 2:
sys.stderr.write("Need two extra arguments for 'organize', e.g. command: <1,2,3> <4,5,6> means the first 1,2,3 will be used as dependent variables/response, and 4,5,6 will be used as independent variables/terms/predictors.\n")
sys.exit()
responses_num = map(int,argv[0].split(","))
predictors_num = map(int,argv[1].split(","))
fields = cvsfile.fieldnames
responses_label = map(lambda x:"res."+fields[x],responses_num)
predictors_label = map(lambda x:"pre."+fields[x],predictors_num)
responses_name = map(lambda x:fields[x],responses_num)
predictors_name = map(lambda x:fields[x],predictors_num)
#write_func( "#%s\t%s\n" \
# % ( ",".join(map( lambda x:str(x[0])+":"+str(x[1]) , zip(responses_num,responses_name) )),
# ",".join(map( lambda x:str(x[0])+":"+str(x[1]) , zip(predictors_num,predictors_name) ))) )
write_func( "%s\t%s\n" \
% ( ",".join(map( lambda x:str(x) , responses_name )),
",".join(map( lambda x:str(x) , predictors_name ))) )
for l in cvsfile:
# for responses
t_str_list = []
for t in responses_name:
t_str_list.append(l.setdefault(t,"NA"))
# for predictors
v_str_list = []
for v in predictors_name:
v_str_list.append(l.setdefault(v,"NA"))
write_func( "\t".join( (",".join(t_str_list),",".join(v_str_list)) ) )
write_func( "\n" )
def call1 (cvsfile, write_func, *args ):
"""Call enrich regions from certain column
"""
argv = args[0]
if len(argv) < 5:
sys.stderr.write("Need 5 extra arguments for 'call', options <loc column> <score column> <cutoff> <min length> <max gap>\ne.g. command: <0> <1> <0.5> <10000> <2000>, means to use the first column as genome coordinations to call enriched regions from the second column, the threshold to call enriched region is 0.5, the minimum length of region is 10k, and the maximum gap to link two nearby regions is 2k.\n")
sys.exit()
cor_column = cvsfile.fieldnames[int(argv[0])]
var_column = cvsfile.fieldnames[int(argv[1])]
cutoff = float(argv[2])
min_len = int(argv[3])
max_gap = int(argv[4])
wtrack = WigTrackI()
add_func = wtrack.add_loc
for l in cvsfile:
cor = l.setdefault(cor_column,None)
var = l.setdefault(var_column,None)
if cor and var and cor != "NA" and var != "NA":
(chrom,start,end) = cor.split(".")
add_func(chrom,int(start),float(var))
wtrack.span = int(end)-int(start)
write_func( "# regions called from %s:%s\n" % (argv[1],var_column) )
bpeaks = wtrack.call_peaks(cutoff=cutoff,min_length=min_len,max_gap=max_gap)
write_func( bpeaks.tobed() )
# ------------------------------------
# Classes
# ------------------------------------
# ------------------------------------
# Main function
# ------------------------------------
def main():
usage = "usage: %prog [options]"
description = "Script to analyze C. elegans histone marks data matrix."
optparser = OptionParser(version="%prog 0.1",description=description,usage=usage,add_help_option=False)
optparser.add_option("-h","--help",action="help",help="Show this help message and exit.")
optparser.add_option("-i","--ifile",dest="ifile",type="string",
help="input file")
optparser.add_option("-o","--ofile",dest="ofile",
help="output file, default: stdout")
(options,args) = optparser.parse_args()
command_list = {"summary":summary,
"organize":organize,
"call1":call1,
"drawchrom":drawchrom,
"call1draw":call1draw,
"combcall2draw":combcall2draw,
}
command_des = {"summary":"Show the column names.",
"organize":"Re-organize the file for data-mining.",
"call1":"Call enriched regions for certain column.",
"drawchrom":"Draw ce chromosomes.",
"call1draw":"Call enriched regions and then draw chromosome figures.",
"combcall2draw":"Call enriched regions where any of the tracks is above threshold and draw them on chromosome figures.",
}
if not options.ifile or not args:
optparser.print_help()
sys.exit()
if options.ofile:
write_func = open(options.ofile,"w").write
else:
write_func = sys.stdout.write
    if args[0] in command_list:
com = command_list[args[0]]
com_args = args[1:]
else:
optparser.print_help()
sys.stderr.write("Avialable Commands:\n\n")
for c in command_list.keys():
sys.stderr.write(c+": "+command_des[c]+"\n")
sys.exit()
cvsfilereader = csv.DictReader(open(options.ifile,"r"),delimiter="\t")
# run commands
com(cvsfilereader,write_func,com_args)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
sys.stderr.write("User interrupt me! ;-) See you!\n")
sys.exit(0)
| bsd-3-clause | -3,209,012,394,225,596,000 | 38.267699 | 610 | 0.57423 | false | 3.391745 | false | false | false |
antechrestos/cf-python-client | main/cloudfoundry_client/v2/service_brokers.py | 1 | 1120 | from cloudfoundry_client.v2.entities import EntityManager
class ServiceBrokerManager(EntityManager):
def __init__(self, target_endpoint, client):
super(ServiceBrokerManager, self).__init__(target_endpoint, client, '/v2/service_brokers')
def create(self, broker_url, broker_name, auth_username, auth_password, space_guid=None):
request = self._request(broker_url=broker_url, name=broker_name,
auth_username=auth_username, auth_password=auth_password)
request['space_guid'] = space_guid
return super(ServiceBrokerManager, self)._create(request)
def update(self, broker_guid, broker_url=None, broker_name=None, auth_username=None, auth_password=None):
request = self._request()
request['broker_url'] = broker_url
request['name'] = broker_name
request['auth_username'] = auth_username
request['auth_password'] = auth_password
return super(ServiceBrokerManager, self)._update(broker_guid, request)
def remove(self, broker_guid):
super(ServiceBrokerManager, self)._remove(broker_guid)
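# --- Editor's usage sketch (illustrative, not part of the original module) ---
# Assuming `client` is an initialised cloudfoundry_client instance whose
# `service_brokers` attribute is a ServiceBrokerManager, a broker could be
# registered, renamed and removed like this (endpoint and credentials are
# hypothetical):
#
#   client.service_brokers.create('https://broker.example.com', 'my-broker',
#                                 'admin', 'secret')
#   client.service_brokers.update(broker_guid, broker_name='renamed-broker')
#   client.service_brokers.remove(broker_guid)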
| apache-2.0 | -4,444,524,997,635,367,400 | 47.695652 | 109 | 0.68125 | false | 3.971631 | false | false | false |
Comunitea/CMNT_004_15 | project-addons/custom_account/models/payment.py | 1 | 3019 | # Copyright 2019 Omar Castiñeira, Comunitea Servicios Tecnológicos S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import models, fields, api
class PaymentOrderLine(models.Model):
_inherit = 'account.payment.line'
_order = 'partner_name'
partner_name = fields.Char(related='partner_id.name', store=True)
@api.model
def create(self, vals):
partner_bank_id = vals.get('partner_bank_id')
move_line_id = vals.get('move_line_id')
partner_id = vals.get('partner_id')
order_id = vals.get('order_id')
if order_id:
order = self.env['account.payment.order'].browse(order_id)
if order.payment_method_id.mandate_required and not \
vals.get('mandate_id'):
if move_line_id:
line = self.env['account.move.line'].browse(move_line_id)
if line.invoice_id and \
line.invoice_id.type == 'out_invoice' and \
line.invoice_id.mandate_id:
if line.invoice_id.mandate_id.state == 'valid':
vals.update({
'mandate_id': line.invoice_id.mandate_id.id,
'partner_bank_id':
line.invoice_id.mandate_id.partner_bank_id.id})
if partner_bank_id and not vals.get('mandate_id'):
mandates = self.env['account.banking.mandate'].search_read(
[('partner_bank_id', '=', partner_bank_id),
('state', '=', 'valid')], ['id'])
if mandates:
vals['mandate_id'] = mandates[0]['id']
else:
banking_mandate_valid = \
self.env['account.banking.mandate'].\
search_read([('partner_id', '=', partner_id),
('state', '=', 'valid')],
['id', 'partner_bank_id'])
if banking_mandate_valid:
vals.update({
'mandate_id': banking_mandate_valid[0]['id'],
'partner_bank_id':
banking_mandate_valid[0]['partner_bank_id'][0],
})
return super().create(vals)
class BankPaymentLine(models.Model):
_inherit = "bank.payment.line"
mandate_id = fields.Many2one("account.banking.mandate", "Mandate",
related="payment_line_ids.mandate_id",
readonly=True)
mandate_scheme = fields.Selection([('CORE', 'Basic (CORE)'),
('B2B', 'Enterprise (B2B)')],
string='Scheme', readonly=True,
related="mandate_id.scheme")
| agpl-3.0 | -3,272,857,600,221,413,000 | 44.712121 | 79 | 0.46006 | false | 4.196106 | false | false | false |
project-owner/Peppy | ui/container.py | 1 | 6297 | # Copyright 2016-2021 Peppy Player [email protected]
#
# This file is part of Peppy Player.
#
# Peppy Player is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Peppy Player is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Peppy Player. If not, see <http://www.gnu.org/licenses/>.
from ui.component import Component
class Container(Component):
""" This container class keeps the list of components and executes group methods on them """
def __init__(self, util, bounding_box=None, background=None, visible=True, content=None, image_filename=None):
""" Initializer
:param util: utility object
:param bounding_box: container bounding box
:param background: container background color
:param visible: visibility flag, True - visible, False - invisible
"""
if content:
cnt = content
else:
cnt = bounding_box
Component.__init__(self, util, c=cnt, bb=bounding_box, bgr=background, v=visible)
self.components = list()
if image_filename:
self.image_filename = image_filename
self.exit_top_y = self.exit_bottom_y = self.exit_left_x = self.exit_right_x = None
def add_component(self, component):
""" Add component to the container
:param component: component to add
"""
self.components.append(component)
def set_parent_screen(self, scr):
""" Add parent screen
:param scr: parent screen
"""
if self.is_empty(): return
self.parent_screen = scr
for c in self.components:
if c:
c.parent_screen = scr
def draw(self):
""" Draw all components in container. Doesn't draw invisible container. """
if not self.visible: return
Component.draw(self)
if self.is_empty(): return
for comp in self.components:
if comp: comp.draw()
def draw_area(self, bb):
if not self.visible: return
Component.draw(self, bb)
def is_empty(self):
""" Check if container has components
:return: True - container doesn't have components, False - container has components
"""
return not hasattr(self, "components")
def clean_draw_update(self):
""" Clean, draw and update container """
self.clean()
self.draw()
self.update()
def handle_event(self, event):
""" Handle container event. Don't handle event if container is invisible.
:param event: the event to handle
"""
if not self.visible or len(self.components) == 0: return
for i in range(len(self.components) - 1, -1, -1):
try:
comp = self.components[i]
if not hasattr(comp, "handle_event"):
continue
if getattr(comp, "popup", None) == True:
if comp.visible == True:
comp.handle_event(event)
break
else:
comp.handle_event(event)
except:
pass
def set_current(self, state=None):
""" Set container as current. Used by screens
:param state: button state (if any)
"""
pass
def set_visible(self, flag):
""" Set container visible/invisible. Set all components in container visible/invisible.
:param flag: True - visible, False - invisible
"""
Component.set_visible(self, flag)
if self.is_empty(): return
for comp in self.components:
if not comp: continue
if getattr(comp, "popup", None) == True:
if not comp.visible:
continue
else:
comp.set_visible(flag)
def refresh(self):
""" Refresh container. Used for periodical updates for example for animation.
This method will be called from the main event loop.
"""
if not self.visible: return
for comp in self.components:
try:
comp.refresh()
except AttributeError:
pass
def is_selected(self):
""" Check if conatiner has selected component
:return: True - container has selected component, False - doesn't have
"""
s = False
for c in self.components:
if c and getattr(c, "selected", False):
s = True
break
return s
def items_per_line(self, width):
""" Return the number of items in line for specified screen width
:param width: screen width
:return: number of items per line
"""
if width <= 102:
return 1
elif width <= 203:
return 2
elif width <= 304:
return 3
elif width <= 405:
return 4
elif width <= 506:
return 5
else:
return 6
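    # Editor's note, worked example: a 250 px wide screen falls in the
    # `width <= 304` branch above and gets 3 items per line; anything wider
    # than 506 px gets the maximum of 6 items per line.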
def add_button_observers(self, button, update_observer, redraw_observer=None, press=True, release=True):
""" Add button observers
:param button: button to observer
:param update_observer: observer for updating the button
:param redraw_observer: observer to redraw the whole screen
"""
if press and update_observer: button.add_press_listener(update_observer)
if release and update_observer: button.add_release_listener(update_observer)
        if redraw_observer:
button.add_release_listener(redraw_observer)
button.redraw_observer = redraw_observer
| gpl-3.0 | -7,975,863,792,035,716,000 | 31.626943 | 114 | 0.570907 | false | 4.623348 | false | false | false |
ooici/marine-integrations | mi/dataset/driver/hypm/ctd/driver.py | 1 | 1597 | """
@package mi.dataset.driver.hypm.ctd.driver
@file marine-integrations/mi/dataset/driver/hypm/ctd/driver.py
@author Bill French
@brief Driver for the hypm/ctd
Release notes:
initial release
"""
__author__ = 'Bill French'
__license__ = 'Apache 2.0'
from mi.core.log import get_logger ; log = get_logger()
from mi.dataset.dataset_driver import SimpleDataSetDriver
from mi.dataset.parser.ctdpf import CtdpfParser
from mi.dataset.parser.ctdpf import CtdpfParserDataParticle
from mi.dataset.harvester import SingleDirectoryHarvester
class HypmCTDPFDataSetDriver(SimpleDataSetDriver):
@classmethod
def stream_config(cls):
return [CtdpfParserDataParticle.type()]
def _build_parser(self, parser_state, infile):
config = self._parser_config
config.update({
'particle_module': 'mi.dataset.parser.ctdpf',
'particle_class': 'CtdpfParserDataParticle'
})
log.debug("MYCONFIG: %s", config)
self._parser = CtdpfParser(
config,
parser_state,
infile,
self._save_parser_state,
self._data_callback,
self._sample_exception_callback
)
return self._parser
def _build_harvester(self, driver_state):
"""
Build and return the harvester
"""
self._harvester = SingleDirectoryHarvester(
self._harvester_config,
driver_state,
self._new_file_callback,
self._modified_file_callback,
self._exception_callback
)
return self._harvester
| bsd-2-clause | 1,017,401,072,070,529,700 | 27.017544 | 62 | 0.639324 | false | 3.802381 | true | false | false |
cmunk/protwis | api/views.py | 1 | 32974 | from django.shortcuts import render
from rest_framework import views, generics, viewsets
from rest_framework.response import Response
from rest_framework.parsers import MultiPartParser, FormParser, FileUploadParser
from rest_framework.renderers import JSONRenderer
from django.template.loader import render_to_string
from django.db.models import Q
from django.conf import settings
from interaction.models import ResidueFragmentInteraction
from mutation.models import MutationRaw
from protein.models import Protein, ProteinConformation, ProteinFamily, Species, ProteinSegment
from residue.models import Residue, ResidueGenericNumber, ResidueNumberingScheme, ResidueGenericNumberEquivalent
from structure.models import Structure
from structure.assign_generic_numbers_gpcr import GenericNumbering
# The two imports below are needed by StructureSequenceParser further down;
# the module paths are assumed from the names used there.
from structure.sequence_parser import SequenceParser
from Bio.PDB.parse_pdb_header import parse_pdb_header
from api.serializers import (ProteinSerializer, ProteinFamilySerializer, SpeciesSerializer, ResidueSerializer,
ResidueExtendedSerializer, StructureSerializer,
StructureLigandInteractionSerializer,
MutationSerializer)
from api.renderers import PDBRenderer
from common.alignment import Alignment
from common.definitions import *
from drugs.models import Drugs
import json, os
from io import StringIO
from Bio.PDB import PDBIO
from collections import OrderedDict
# FIXME add
# getMutations
# numberPDBfile
import coreapi
from urllib.parse import urlparse
from urllib.parse import urljoin
from rest_framework import renderers, response, schemas
from rest_framework.decorators import api_view, renderer_classes
from rest_framework import response, schemas
from rest_framework_swagger.views import get_swagger_view
schema_view = get_swagger_view(title='GPCRdb API')
class ProteinDetail(generics.RetrieveAPIView):
"""
Get a single protein instance by entry name
\n/protein/{entry_name}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
"""
queryset = Protein.objects.filter(sequence_type__slug="wt").prefetch_related('family', 'species', 'source', 'residue_numbering_scheme', 'genes')
serializer_class = ProteinSerializer
lookup_field = 'entry_name'
class ProteinByAccessionDetail(ProteinDetail):
"""
Get a single protein instance by accession
\n/protein/accession/{accession}/
\n{accession} is a protein identifier from Uniprot, e.g. P07550
"""
lookup_field = 'accession'
class ProteinFamilyList(generics.ListAPIView):
"""
Get a list of protein families
\n/proteinfamily/
"""
queryset = ProteinFamily.objects.all().prefetch_related('parent')
serializer_class = ProteinFamilySerializer
class ProteinFamilyDetail(generics.RetrieveAPIView):
"""
Get a single protein family instance
\n/proteinfamily/{slug}/
\n{slug} is a protein family identifier, e.g. 001_001_001
"""
queryset = ProteinFamily.objects.all().prefetch_related("parent")
serializer_class = ProteinFamilySerializer
lookup_field = 'slug'
class ProteinFamilyChildrenList(generics.ListAPIView):
"""
Get a list of child families of a protein family
\n/proteinfamily/children/{slug}/
\n{slug} is a protein family identifier, e.g. 001_001_001
"""
serializer_class = ProteinFamilySerializer
def get_queryset(self):
family = self.kwargs.get('slug')
queryset = ProteinFamily.objects.all().prefetch_related("parent")
return queryset.filter(parent__slug=family)
class ProteinFamilyDescendantList(generics.ListAPIView):
"""
Get a list of descendant families of a protein family
\n/proteinfamily/descendants/{slug}/
\n{slug} is a protein family identifier, e.g. 001_001_001
"""
serializer_class = ProteinFamilySerializer
def get_queryset(self):
family = self.kwargs.get('slug')
queryset = ProteinFamily.objects.all().prefetch_related("parent")
return queryset.filter(Q(slug__startswith=family) & ~Q(slug=family))
class ProteinsInFamilyList(generics.ListAPIView):
"""
Get a list of proteins in a protein family
\n/proteinfamily/proteins/{slug}/
\n{slug} is a protein family identifier, e.g. 001_001_001
"""
serializer_class = ProteinSerializer
def get_queryset(self):
queryset = Protein.objects.all()
family = self.kwargs.get('slug')
return queryset.filter(sequence_type__slug='wt', family__slug__startswith=family)\
.prefetch_related('family', 'species', 'source', 'residue_numbering_scheme', 'genes')
class ProteinsInFamilySpeciesList(generics.ListAPIView):
"""
Get a list of proteins in a protein family
\n/proteinfamily/proteins/{slug}/{species}
\n{slug} is a protein family identifier, e.g. 001_001_001
\n{latin_name} is a species identifier from Uniprot, e.g. Homo sapiens
"""
serializer_class = ProteinSerializer
def get_queryset(self):
queryset = Protein.objects.all()
family = self.kwargs.get('slug')
species = self.kwargs.get('latin_name')
return queryset.filter(sequence_type__slug='wt', family__slug__startswith=family,
species__latin_name=species).prefetch_related('family',
'species', 'source', 'residue_numbering_scheme', 'genes')
class ResiduesList(generics.ListAPIView):
"""
Get a list of residues of a protein
\n/residues/{entry_name}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
"""
serializer_class = ResidueSerializer
def get_queryset(self):
queryset = Residue.objects.all()
#protein_conformation__protein__sequence_type__slug='wt',
return queryset.filter(
protein_conformation__protein__entry_name=self.kwargs.get('entry_name')).prefetch_related('display_generic_number','protein_segment','alternative_generic_numbers')
class ResiduesExtendedList(ResiduesList):
"""
Get a list of residues of a protein, including alternative generic numbers
\n/residues/extended/{entry_name}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
"""
serializer_class = ResidueExtendedSerializer
class SpeciesList(generics.ListAPIView):
"""
Get a list of species
\n/species/
"""
queryset = Species.objects.all()
serializer_class = SpeciesSerializer
class SpeciesDetail(generics.RetrieveAPIView):
"""
Get a single species instance
\n/species/{latin_name}/
\n{latin_name} is a species identifier from Uniprot, e.g. Homo sapiens
"""
queryset = Species.objects.all()
serializer_class = SpeciesSerializer
lookup_field = 'latin_name'
class NumberPDBStructureView(views.APIView):
"""
WRITEME
"""
pass
class StructureList(views.APIView):
"""
Get a list of structures
\n/structure/
"""
def get(self, request, pdb_code=None, entry_name=None, representative=None):
if pdb_code:
structures = Structure.objects.filter(pdb_code__index=pdb_code)
elif entry_name and representative:
structures = Structure.objects.filter(protein_conformation__protein__parent__entry_name=entry_name,
representative=True)
elif entry_name:
structures = Structure.objects.filter(protein_conformation__protein__parent__entry_name=entry_name)
elif representative:
structures = Structure.objects.filter(representative=True)
else:
structures = Structure.objects.all()
structures = structures.exclude(refined=True).prefetch_related('protein_conformation__protein__parent__species', 'pdb_code',
'protein_conformation__protein__parent__family', 'protein_conformation__protein__parent__species',
'publication__web_link', 'structureligandinteraction_set__ligand__properities', 'structure_type',
'structureligandinteraction_set__ligand__properities__ligand_type',
'structureligandinteraction_set__ligand_role')
# structures = self.get_structures(pdb_code, entry_name, representative)
# convert objects to a list of dictionaries
# normal serializers can not be used because of abstraction of tables (e.g. protein_conformation)
s = []
for structure in structures:
# essential fields
structure_data = {
'pdb_code': structure.pdb_code.index,
'protein': structure.protein_conformation.protein.parent.entry_name,
'family': structure.protein_conformation.protein.parent.family.slug,
'species': structure.protein_conformation.protein.parent.species.latin_name,
'preferred_chain': structure.preferred_chain,
'resolution': structure.resolution,
'publication_date': structure.publication_date,
'type': structure.structure_type.name,
'state': structure.state.name,
'distance': structure.distance,
}
# publication
if structure.publication:
structure_data['publication'] = structure.publication.web_link.__str__()
else:
structure_data['publication'] = None
# ligand
ligands = []
for interaction in structure.structureligandinteraction_set.filter(annotated=True):
ligand = {}
if interaction.ligand.name:
ligand['name'] = interaction.ligand.name
if interaction.ligand.properities.ligand_type and interaction.ligand.properities.ligand_type.name:
ligand['type'] = interaction.ligand.properities.ligand_type.name
if interaction.ligand_role and interaction.ligand_role.name:
ligand['function'] = interaction.ligand_role.name
if ligand:
ligands.append(ligand)
structure_data['ligands'] = ligands
s.append(structure_data)
        # if a structure is selected, return a single dict rather than a list of dicts
if len(s) == 1:
s = s[0]
return Response(s)
def get_structures(self, pdb_code=None, representative=None):
return Structure.objects.all()
class RepresentativeStructureList(StructureList):
"""
Get a list of representative structures (one for each protein and activation state)
\n/structure/representative/
"""
class StructureListProtein(StructureList):
"""
Get a list of structures of a protein
\n/structure/protein/{entry_name}
"""
class RepresentativeStructureListProtein(StructureList):
"""
Get a list of representative structures of a protein (one for each activation state)
\n/structure/protein/{entry_name}/representative/
"""
class StructureDetail(StructureList):
"""
Get a single structure instance
\n/structure/{pdb_code}/
\n{pdb_code} is a structure identifier from the Protein Data Bank, e.g. 2RH1
"""
def get_structures(self, pdb_code=None, representative=None):
return Structure.objects.filter(pdb_code__index=pdb_code)
class FamilyAlignment(views.APIView):
"""
Get a full sequence alignment of a protein family including a consensus sequence
\n/alignment/family/{slug}/
\n{slug} is a protein family identifier, e.g. 001_001_001
"""
def get(self, request, slug=None, segments=None, latin_name=None, statistics=False):
if slug is not None:
# Check for specific species
if latin_name is not None:
ps = Protein.objects.filter(sequence_type__slug='wt', source__id=1, family__slug__startswith=slug,
species__latin_name=latin_name)
else:
ps = Protein.objects.filter(sequence_type__slug='wt', source__id=1, family__slug__startswith=slug)
# take the numbering scheme from the first protein
#s_slug = Protein.objects.get(entry_name=ps[0]).residue_numbering_scheme_id
s_slug = ps[0].residue_numbering_scheme_id
protein_family = ps[0].family.slug[:3]
gen_list = []
segment_list = []
if segments is not None:
input_list = segments.split(",")
# fetch a list of all segments
protein_segments = ProteinSegment.objects.filter(partial=False).values_list('slug', flat=True)
for s in input_list:
# add to segment list
if s in protein_segments:
segment_list.append(s)
# get generic numbering object for generic positions
else:
# make sure the query works for all positions
gen_object = ResidueGenericNumberEquivalent.objects.get(label=s, scheme__id=s_slug)
gen_object.properties = {}
gen_list.append(gen_object)
# fetch all complete protein_segments
ss = ProteinSegment.objects.filter(slug__in=segment_list, partial=False)
else:
ss = ProteinSegment.objects.filter(partial=False)
if int(protein_family) < 100:
ss = [ s for s in ss if s.proteinfamily == 'GPCR']
elif protein_family == "100":
ss = [ s for s in ss if s.proteinfamily == 'Gprotein']
elif protein_family == "200":
ss = [ s for s in ss if s.proteinfamily == 'Arrestin']
# create an alignment object
a = Alignment()
a.show_padding = False
# load data from selection into the alignment
a.load_proteins(ps)
        # load generic numbers and TMs separately
if gen_list:
a.load_segments(gen_list)
a.load_segments(ss)
# build the alignment data matrix
a.build_alignment()
a.calculate_statistics()
residue_list = []
for aa in a.full_consensus:
residue_list.append(aa.amino_acid)
# render the fasta template as string
response = render_to_string('alignment/alignment_fasta.html', {'a': a}).split("\n")
# convert the list to a dict
ali_dict = OrderedDict({})
for row in response:
if row.startswith(">"):
k = row[1:]
else:
ali_dict[k] = row
k = False
ali_dict['CONSENSUS'] = ''.join(residue_list)
# render statistics for output
if statistics == True:
feat = {}
for i, feature in enumerate(AMINO_ACID_GROUPS):
feature_stats = a.feature_stats[i]
feature_stats_clean = []
for d in feature_stats:
sub_list = [x[0] for x in d]
feature_stats_clean.append(sub_list) # remove feature frequencies
# print(feature_stats_clean)
feat[feature] = [item for sublist in feature_stats_clean for item in sublist]
for i, AA in enumerate(AMINO_ACIDS):
feature_stats = a.amino_acid_stats[i]
feature_stats_clean = []
for d in feature_stats:
sub_list = [x[0] for x in d]
feature_stats_clean.append(sub_list) # remove feature frequencies
# print(feature_stats_clean)
feat[AA] = [item for sublist in feature_stats_clean for item in sublist]
ali_dict["statistics"] = feat
return Response(ali_dict)
class FamilyAlignmentPartial(FamilyAlignment):
"""
Get a partial sequence alignment of a protein family
\n/alignment/family/{slug}/{segments}/
\n{slug} is a protein family identifier, e.g. 001_001_001
    \n{segments} is a comma separated list of protein segment identifiers and/or
generic GPCRdb numbers, e.g. TM2,TM3,ECL2,4x50
"""
class FamilyAlignmentSpecies(FamilyAlignment):
"""
Get a full sequence alignment of a protein family
    \n/alignment/family/{slug}/{species}
\n{slug} is a protein family identifier, e.g. 001_001_001
\n{species} is a species identifier from Uniprot, e.g. Homo sapiens
"""
class FamilyAlignmentPartialSpecies(FamilyAlignment):
"""
Get a partial sequence alignment of a protein family
\n/alignment/family/{slug}/{segments}/{species}
\n{slug} is a protein family identifier, e.g. 001_001_001
    \n{segments} is a comma separated list of protein segment identifiers and/or
generic GPCRdb numbers, e.g. TM2,TM3,ECL2,4x50
\n{species} is a species identifier from Uniprot, e.g. Homo sapiens
"""
class ProteinSimilaritySearchAlignment(views.APIView):
"""
Get a segment sequence alignment of two or more proteins ranked by similarity
\n/alignment/similarity/{proteins}/{segments}/
\n{proteins} is a comma separated list of protein identifiers, e.g. adrb2_human,5ht2a_human,cxcr4_human,
where the first protein is the query protein and the following the proteins to compare it to
    \n{segments} is a comma separated list of protein segment identifiers and/or
generic GPCRdb numbers, e.g. TM2,TM3,ECL2,4x50
"""
def get(self, request, proteins=None, segments=None):
if proteins is not None:
protein_list = proteins.split(",")
# first in API should be reference
ps = Protein.objects.filter(sequence_type__slug='wt', entry_name__in=protein_list[1:])
reference = Protein.objects.filter(sequence_type__slug='wt', entry_name__in=[protein_list[0]])
# take the numbering scheme from the first protein
s_slug = Protein.objects.get(entry_name=protein_list[0]).residue_numbering_scheme_id
protein_family = ps[0].family.slug[:3]
gen_list = []
segment_list = []
if segments is not None:
input_list = segments.split(",")
# fetch a list of all segments
protein_segments = ProteinSegment.objects.filter(partial=False).values_list('slug', flat=True)
for s in input_list:
# add to segment list
if s in protein_segments:
segment_list.append(s)
# get generic numbering object for generic positions
else:
# make sure the query works for all positions
gen_object = ResidueGenericNumberEquivalent.objects.get(label=s, scheme__id=s_slug)
gen_object.properties = {}
gen_list.append(gen_object)
# fetch all complete protein_segments
ss = ProteinSegment.objects.filter(slug__in=segment_list, partial=False)
else:
ss = ProteinSegment.objects.filter(partial=False)
if int(protein_family) < 100:
ss = [ s for s in ss if s.proteinfamily == 'GPCR']
elif protein_family == "100":
ss = [ s for s in ss if s.proteinfamily == 'Gprotein']
elif protein_family == "200":
ss = [ s for s in ss if s.proteinfamily == 'Arrestin']
# create an alignment object
a = Alignment()
a.show_padding = False
# load data from API into the alignment
a.load_reference_protein(reference[0])
a.load_proteins(ps)
        # load generic numbers and TMs separately
if gen_list:
a.load_segments(gen_list)
a.load_segments(ss)
# build the alignment data matrix
a.build_alignment()
# calculate identity and similarity of each row compared to the reference
a.calculate_similarity()
# render the fasta template as string
response = render_to_string('alignment/alignment_fasta.html', {'a': a}).split("\n")
# convert the list to a dict
ali_dict = {}
k = False
num = 0
for i, row in enumerate(response):
if row.startswith(">"):
k = row[1:]
elif k:
# add the query as 100 identical/similar to the beginning (like on the website)
if num == 0:
a.proteins[num].identity = 100
a.proteins[num].similarity = 100
# order dict after custom list
keyorder = ["similarity","identity","AA"]
ali_dict[k] = {"AA": row, "identity": int(str(a.proteins[num].identity).replace(" ","")),
"similarity": int(str(a.proteins[num].similarity).replace(" ",""))}
ali_dict[k] = OrderedDict(sorted(ali_dict[k].items(), key=lambda t: keyorder.index(t[0])))
num+=1
k = False
ali_dict_ordered = OrderedDict(sorted(ali_dict.items(), key=lambda x: x[1]['similarity'], reverse=True))
return Response(ali_dict_ordered)
class ProteinAlignment(views.APIView):
"""
Get a full sequence alignment of two or more proteins
\n/alignment/protein/{proteins}/
\n{proteins} is a comma separated list of protein identifiers, e.g. adrb2_human,5ht2a_human
"""
def get(self, request, proteins=None, segments=None, statistics=False):
if proteins is not None:
protein_list = proteins.split(",")
ps = Protein.objects.filter(sequence_type__slug='wt', entry_name__in=protein_list)
# take the numbering scheme from the first protein
#s_slug = Protein.objects.get(entry_name=protein_list[0]).residue_numbering_scheme_id
s_slug = ps[0].residue_numbering_scheme_id
protein_family = ps[0].family.slug[:3]
gen_list = []
segment_list = []
if segments is not None:
input_list = segments.split(",")
# fetch a list of all segments
protein_segments = ProteinSegment.objects.filter(partial=False).values_list('slug', flat=True)
for s in input_list:
# add to segment list
if s in protein_segments:
segment_list.append(s)
# get generic numbering object for generic positions
else:
gen_object = ResidueGenericNumberEquivalent.objects.get(label=s, scheme__id=s_slug)
gen_object.properties = {}
gen_list.append(gen_object)
# fetch all complete protein_segments
ss = ProteinSegment.objects.filter(slug__in=segment_list, partial=False)
else:
ss = ProteinSegment.objects.filter(partial=False)
if int(protein_family) < 100:
ss = [ s for s in ss if s.proteinfamily == 'GPCR']
elif protein_family == "100":
ss = [ s for s in ss if s.proteinfamily == 'Gprotein']
elif protein_family == "200":
ss = [ s for s in ss if s.proteinfamily == 'Arrestin']
# create an alignment object
a = Alignment()
a.show_padding = False
# load data from selection into the alignment
a.load_proteins(ps)
        # load generic numbers and TMs separately
if gen_list:
a.load_segments(gen_list)
a.load_segments(ss)
# build the alignment data matrix
a.build_alignment()
# calculate statistics
if statistics == True:
a.calculate_statistics()
# render the fasta template as string
response = render_to_string('alignment/alignment_fasta.html', {'a': a}).split("\n")
# convert the list to a dict
ali_dict = {}
k = False
for row in response:
if row.startswith(">"):
k = row[1:]
elif k:
ali_dict[k] = row
k = False
# render statistics for output
if statistics == True:
feat = {}
for i, feature in enumerate(AMINO_ACID_GROUPS):
feature_stats = a.feature_stats[i]
feature_stats_clean = []
for d in feature_stats:
sub_list = [x[0] for x in d]
feature_stats_clean.append(sub_list) # remove feature frequencies
# print(feature_stats_clean)
feat[feature] = [item for sublist in feature_stats_clean for item in sublist]
for i, AA in enumerate(AMINO_ACIDS):
feature_stats = a.amino_acid_stats[i]
feature_stats_clean = []
for d in feature_stats:
sub_list = [x[0] for x in d]
feature_stats_clean.append(sub_list) # remove feature frequencies
# print(feature_stats_clean)
feat[AA] = [item for sublist in feature_stats_clean for item in sublist]
ali_dict["statistics"] = feat
return Response(ali_dict)
class ProteinAlignmentStatistics(ProteinAlignment):
"""
Add a /statics at the end of an alignment in order to
receive an additional residue property statistics output e.g.:
\n/alignment/protein/{proteins}/{segments}/statistics
\n{proteins} is a comma separated list of protein identifiers, e.g. adrb2_human,5ht2a_human
    \n{segments} is a comma separated list of protein segment identifiers and/or
generic GPCRdb numbers, e.g. TM2,TM3,ECL2,4x50
"""
class ProteinAlignmentPartial(ProteinAlignment):
"""
Get a partial sequence alignment of two or more proteins
\n/alignment/protein/{proteins}/{segments}/
\n{proteins} is a comma separated list of protein identifiers, e.g. adrb2_human,5ht2a_human
    \n{segments} is a comma separated list of protein segment identifiers and/or
generic GPCRdb numbers, e.g. TM2,TM3,ECL2,4x50
"""
class StructureTemplate(views.APIView):
"""
Get the most similar structure template for a protein using a 7TM alignment
\n/structure/template/{entry_name}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
"""
def get(self, request, entry_name=None, segments=None):
if entry_name is not None:
ref = Protein.objects.get(sequence_type__slug='wt', entry_name=entry_name)
structures = Structure.objects.order_by('protein_conformation__protein__parent', 'state',
'resolution').distinct('protein_conformation__protein__parent', 'state')
ps = []
for structure in structures:
ps.append(structure.protein_conformation.protein.parent)
if segments is not None:
input_list = segments.split(",")
ss = ProteinSegment.objects.filter(slug__in=input_list, partial=False)
else:
ss = ProteinSegment.objects.filter(partial=False, category='helix')
# create an alignment object
a = Alignment()
a.show_padding = False
# load data from selection into the alignment
a.load_reference_protein(ref)
a.load_proteins(ps)
a.load_segments(ss)
# build the alignment data matrix
a.build_alignment()
# calculate identity and similarity of each row compared to the reference
a.calculate_similarity()
# return the entry_name of the closest template
return Response(a.proteins[1].protein.entry_name)
class StructureTemplatePartial(StructureTemplate):
"""
Get the most similar structure template for a protein using a partial alignment
\n/structure/template/{entry_name}/{segments}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
\n{segments} is a comma separated list of protein segment identifiers, e.g. TM3,TM5,TM6
"""
class StructureAssignGenericNumbers(views.APIView):
"""
Assign generic residue numbers (Ballesteros-Weinstein and GPCRdb schemes) to an uploaded pdb file.
\n/structure/assign_generic_numbers\n
e.g.
curl -X POST -F "[email protected]" http://gpcrdb.org/services/structure/assign_generic_numbers
"""
parser_classes = (FileUploadParser,)
renderer_classes = (PDBRenderer, )
def post(self, request):
root, ext = os.path.splitext(request.FILES['pdb_file'].name)
generic_numbering = GenericNumbering(StringIO(request.FILES['pdb_file'].file.read().decode('UTF-8',"ignore")))
out_struct = generic_numbering.assign_generic_numbers()
out_stream = StringIO()
io = PDBIO()
io.set_structure(out_struct)
io.save(out_stream)
print(len(out_stream.getvalue()))
# filename="{}_GPCRdb.pdb".format(root)
return Response(out_stream.getvalue())
class StructureSequenceParser(views.APIView):
"""
Analyze the uploaded pdb structure listing auxiliary proteins, mutations, deletions and insertions.
\n/structure/structure/parse_pdb\n
e.g.
curl -X POST -F "[email protected]" http://gpcrdb.org/services/structure/parse_pdb
"""
parser_classes = (FileUploadParser,)
    renderer_classes = (JSONRenderer,)
def post(self, request):
root, ext = os.path.splitext(request.FILES['pdb_file'].name)
header = parse_pdb_header(request.FILES['pdb_file'])
parser = SequenceParser(request.FILES['pdb_file'])
json_data = OrderedDict()
json_data["header"] = header
json_data.update(parser.get_fusions())
json_data.update(parser.get_mutations())
json_data.update(parser.get_deletions())
return Response(json_data)
class StructureLigandInteractions(generics.ListAPIView):
"""
Get a list of interactions between structure and ligand
\n/structure/{pdb_code}/interaction/
\n{pdb_code} is a structure identifier from the Protein Data Bank, e.g. 2RH1
"""
serializer_class = StructureLigandInteractionSerializer
def get_queryset(self):
queryset = ResidueFragmentInteraction.objects.all()
queryset = queryset.prefetch_related('structure_ligand_pair__structure__pdb_code',
'interaction_type',
'fragment__residue__generic_number',
'fragment__residue__display_generic_number',
)
queryset = queryset.exclude(interaction_type__type='hidden').order_by('fragment__residue__sequence_number')
slug = self.kwargs.get('pdb_code')
return queryset.filter(structure_ligand_pair__structure__pdb_code__index=slug,
structure_ligand_pair__annotated=True)
class MutantList(generics.ListAPIView):
"""
Get a list of mutants of single protein instance by entry name
\n/mutant/{entry_name}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
"""
serializer_class = MutationSerializer
def get_queryset(self):
queryset = MutationRaw.objects.all()
return queryset.filter(protein=self.kwargs.get('entry_name'))
class DrugList(views.APIView):
"""
Get a list of drugs for a single protein instance by entry name
    \n/drugs/{entry_name}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
"""
def get(self, request, entry_name=None):
drugs = Drugs.objects.filter(target__entry_name=entry_name).distinct()
druglist = []
for drug in drugs:
drugname = drug.name
drugtype = drug.drugtype
clinical = drug.clinicalstatus
phasedate = drug.phasedate
if clinical != '-':
status = drug.status + ' (' + drug.clinicalstatus + ', ' + phasedate + ')'
else:
status = drug.status
approval = drug.approval
indication = drug.indication
moa = drug.moa
novelty = drug.novelty
druglist.append({'name':drugname, 'approval': approval, 'indication': indication, 'status':status, 'drugtype':drugtype, 'moa':moa, 'novelty': novelty})
return Response(druglist)
| apache-2.0 | -7,819,170,572,501,628,000 | 38.48982 | 175 | 0.611724 | false | 4.041922 | false | false | false |
Djabx/mgd | mgdpck/writters/cbz.py | 1 | 1116 | #! /usr/bin/python
# -*- coding: utf-8 -*-
'''
A cbz (comic book ZIP) writer
'''
from mgdpck import actions
import os
import mimetypes
import zipfile
class CbzWritter(actions.AbsWritter):
@classmethod
def get_name(cls):
return 'cbz'
def __init__(self, outdir):
self.outdir = outdir
self.out = None
def done(self):
if self.out:
self.out.close()
def export_book(self, lsb, chapter_min, chapter_max):
self.out_file = os.path.join(self.outdir, "{0.book.short_name}_{1.num:>03}_{2.num:>03}.cbz".format(lsb, chapter_min, chapter_max))
self.out = zipfile.ZipFile(self.out_file, "w", compression=zipfile.ZIP_DEFLATED)
def export_cover(self, lsb):
cv_path = "{0:>03}_{0:>03}_{1}{2}".format(0, 'cover',
mimetypes.guess_extension(lsb.image.mimetype))
self.out.writestr(cv_path, lsb.image.content)
def export_chapter(self, ch):
pass
def export_page(self, pa):
pa_path = "{0.chapter.num:>03}_{0.num:>03}{1}".format(pa,
mimetypes.guess_extension(pa.image.mimetype))
self.out.writestr(pa_path, pa.image.content)
actions.register_writter(CbzWritter)
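# --- Editor's note (illustrative, independent of mgdpck) ---
# A .cbz file is just a ZIP archive whose entries are page images read in
# filename order, which is why CbzWritter only needs zipfile.writestr().
# Standard-library-only sketch with hypothetical file names and byte strings:
#
#   import zipfile
#   with zipfile.ZipFile("book_001_003.cbz", "w", zipfile.ZIP_DEFLATED) as cbz:
#       cbz.writestr("000_000_cover.jpg", cover_bytes)
#       cbz.writestr("001_001.jpg", page_bytes)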
| apache-2.0 | 603,414,583,288,958,700 | 21.32 | 134 | 0.654122 | false | 2.883721 | false | false | false |
RicardoJohann/frappe | frappe/utils/goal.py | 1 | 4641 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from six.moves import xrange
def get_monthly_results(goal_doctype, goal_field, date_col, filter_str, aggregation = 'sum'):
'''Get monthly aggregation values for given field of doctype'''
# TODO: move to ORM?
if(frappe.conf.db_type == 'postgres'):
month_year_format_query = '''to_char("{}", 'MM-YYYY')'''.format(date_col)
else:
month_year_format_query = 'date_format(`{}`, "%m-%Y")'.format(date_col)
conditions = ('where ' + filter_str) if filter_str else ''
results = frappe.db.sql('''SELECT {aggregation}(`{goal_field}`) AS {goal_field},
{month_year_format_query} AS month_year
FROM `{table_name}` {conditions}
GROUP BY month_year'''
.format(
aggregation=aggregation,
goal_field=goal_field,
month_year_format_query=month_year_format_query,
table_name="tab" + goal_doctype,
conditions=conditions
), as_dict=True)
month_to_value_dict = {}
for d in results:
month_to_value_dict[d['month_year']] = d[goal_field]
return month_to_value_dict
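# Editor's note: get_monthly_results() returns a plain dict keyed by "MM-YYYY",
# one aggregated value per month found in the goal doctype table, e.g.
# (hypothetical values):
#   {"01-2021": 12000.0, "02-2021": 9500.0}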
@frappe.whitelist()
def get_monthly_goal_graph_data(title, doctype, docname, goal_value_field, goal_total_field, goal_history_field,
goal_doctype, goal_doctype_link, goal_field, date_field, filter_str, aggregation="sum"):
'''
Get month-wise graph data for a doctype based on aggregation values of a field in the goal doctype
:param title: Graph title
:param doctype: doctype of graph doc
:param docname: of the doc to set the graph in
:param goal_value_field: goal field of doctype
:param goal_total_field: current month value field of doctype
:param goal_history_field: cached history field
:param goal_doctype: doctype the goal is based on
:param goal_doctype_link: doctype link field in goal_doctype
:param goal_field: field from which the goal is calculated
:param filter_str: where clause condition
:param aggregation: a value like 'count', 'sum', 'avg'
:return: dict of graph data
'''
from frappe.utils.formatters import format_value
import json
meta = frappe.get_meta(doctype)
doc = frappe.get_doc(doctype, docname)
goal = doc.get(goal_value_field)
formatted_goal = format_value(goal, meta.get_field(goal_value_field), doc)
current_month_value = doc.get(goal_total_field)
formatted_value = format_value(current_month_value, meta.get_field(goal_total_field), doc)
from frappe.utils import today, getdate, formatdate, add_months
current_month_year = formatdate(today(), "MM-yyyy")
history = doc.get(goal_history_field)
try:
month_to_value_dict = json.loads(history) if history and '{' in history else None
except ValueError:
month_to_value_dict = None
if month_to_value_dict is None:
doc_filter = (goal_doctype_link + " = '" + docname + "'") if doctype != goal_doctype else ''
if filter_str:
doc_filter += ' and ' + filter_str if doc_filter else filter_str
month_to_value_dict = get_monthly_results(goal_doctype, goal_field, date_field, doc_filter, aggregation)
frappe.db.set_value(doctype, docname, goal_history_field, json.dumps(month_to_value_dict))
month_to_value_dict[current_month_year] = current_month_value
months = []
months_formatted = []
values = []
values_formatted = []
for i in range(0, 12):
date_value = add_months(today(), -i)
month_value = formatdate(date_value, "MM-yyyy")
month_word = getdate(date_value).strftime('%b')
month_year = getdate(date_value).strftime('%B') + ', ' + getdate(date_value).strftime('%Y')
months.insert(0, month_word)
months_formatted.insert(0, month_year)
if month_value in month_to_value_dict:
val = month_to_value_dict[month_value]
else:
val = 0
values.insert(0, val)
values_formatted.insert(0, format_value(val, meta.get_field(goal_total_field), doc))
y_markers = []
summary_values = [
{
'title': _("This month"),
'color': '#ffa00a',
'value': formatted_value
}
]
if float(goal) > 0:
y_markers = [
{
'label': _("Goal"),
'lineType': "dashed",
'value': goal
},
]
summary_values += [
{
'title': _("Goal"),
'color': '#5e64ff',
'value': formatted_goal
},
{
'title': _("Completed"),
'color': '#28a745',
'value': str(int(round(float(current_month_value)/float(goal)*100))) + "%"
}
]
data = {
'title': title,
# 'subtitle':
'data': {
'datasets': [
{
'values': values,
'formatted': values_formatted
}
],
'labels': months,
'yMarkers': y_markers
},
'summary': summary_values,
}
return data
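# --- Editor's sketch (added for illustration; independent of the Frappe API) ---
# The function above keys its cached history by "MM-YYYY"; the standalone
# helper below reproduces that keying with the standard library only.
def _last_twelve_month_keys(today=None):
    """Return ["MM-YYYY", ...] for the current month and the 11 before it."""
    import datetime
    today = today or datetime.date.today()
    keys = []
    year, month = today.year, today.month
    for _ in range(12):
        keys.append("%02d-%d" % (month, year))
        month -= 1
        if month == 0:
            month, year = 12, year - 1
    return keys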
| mit | 6,556,498,908,767,743,000 | 29.333333 | 112 | 0.676578 | false | 3.023453 | false | false | false |
nmarincic/numbasom | numbasom/numbasom.py | 1 | 8667 | from numba import jit
import numpy as np
import math
import collections
from timeit import default_timer as timer
@jit(nopython=True)
def normalize(data, min_val=0, max_val=1):
no_vectors, dim = data.shape
D = np.empty((no_vectors,dim), dtype=np.float64)
inf = 1.7976931348623157e+308
min_arr = np.empty(dim, dtype=np.float64)
min_arr[:] = inf
max_arr = np.empty(dim, dtype=np.float64)
max_arr[:] = -inf
diff = np.empty(dim, dtype=np.float64)
for vec in range(no_vectors):
for d in range(dim):
val = data[vec,d]
if val < min_arr[d]:
min_arr[d] = val
if val > max_arr[d]:
max_arr[d] = val
for d in range(dim):
diff[d] = max_arr[d] - min_arr[d]
for i in range(no_vectors):
for j in range(dim):
if diff[j] != 0:
D[i,j] = (data[i, j] - min_arr[j]) / diff[j]
else:
D[i,j] = 0
return D
@jit(nopython=True)
def normalize_with_mutate(data, min_val=0, max_val=1):
no_vectors, dim = data.shape
#D = np.empty((no_vectors,dim), dtype=np.float64)
inf = 1.7976931348623157e+308
min_arr = np.empty(dim, dtype=np.float64)
min_arr[:] = inf
max_arr = np.empty(dim, dtype=np.float64)
max_arr[:] = -inf
diff = np.empty(dim, dtype=np.float64)
for vec in range(no_vectors):
for d in range(dim):
val = data[vec,d]
if val < min_arr[d]:
min_arr[d] = val
if val > max_arr[d]:
max_arr[d] = val
for d in range(dim):
diff[d] = max_arr[d] - min_arr[d]
for i in range(no_vectors):
for j in range(dim):
data[i,j] = (data[i, j] - min_arr[j]) / diff[j]
def pairwise(X):
M = X.shape[0]
N = X.shape[1]
D = np.empty((M, M), dtype=np.float64)
for i in range(M):
for j in range(M):
d = 0.0
for k in range(N):
tmp = X[i, k] - X[j, k]
d += tmp * tmp
D[i, j] = np.sqrt(d)
return D
def pairwise_squared(X):
M = X.shape[0]
N = X.shape[1]
# type will depend on the size of the matrix
D = np.empty((M, M), dtype=np.uint32)
for i in range(M):
for j in range(M):
d = 0.0
for k in range(N):
tmp = X[i, k] - X[j, k]
d += tmp * tmp
D[i, j] = d
return D
@jit(nopython=True)
def random_lattice(som_size, dimensionality):
X, Y, Z = som_size[0], som_size[1], dimensionality
D = np.empty((X,Y,Z), dtype=np.float64)
for x in range(X):
for y in range(Y):
for z in range(Z):
D[x,y,z] = np.random.random()
return D
@jit
def get_all_BMU_indexes(BMU, X, Y):
BMUx, BMUy = BMU[0], BMU[1]
BMU2x, BMU3x, BMU4x = BMU[0], BMU[0], BMU[0]
BMU2y, BMU3y, BMU4y = BMU[1], BMU[1], BMU[1]
if BMUx > X / 2:
BMU2x = BMUx - X
else:
BMU2x = BMUx + X
if BMUy > Y / 2:
BMU3y = BMUy - Y
else:
BMU3y = BMUy + Y
BMU4x = BMU2x
BMU4y = BMU3y
return BMU, (BMU2x, BMU2y), (BMU3x, BMU3y), (BMU4x, BMU4y)
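# Editor's note, worked example: on a 10x10 torus lattice a BMU at (8, 1) also
# yields the wrapped images (-2, 1), (8, 11) and (-2, 11), so neighbourhoods
# that cross the lattice edge still update the correct cells during adaptation.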
@jit(nopython=True)
def som_calc(som_size, num_iterations, data_scaled, is_torus=False):
#data_scaled = normalize(data)
initial_radius = (max(som_size[0],som_size[1])/2)**2
time_constant = num_iterations/math.log(initial_radius)
start_lrate = 0.1
lattice = random_lattice(som_size, data_scaled.shape[1])
datalen = len(data_scaled)
X, Y, Z = lattice.shape
for current_iteration in range(num_iterations):
current_radius = initial_radius * math.exp(-current_iteration/time_constant)
current_lrate = start_lrate * math.exp(-current_iteration/num_iterations)
rand_input = np.random.randint(datalen)
rand_vector = data_scaled[rand_input]
BMU_dist = 1.7976931348623157e+308
BMU = (0,0)
for x in range(X):
for y in range(Y):
d = 0.0
for z in range(Z):
val = lattice[x,y,z]-rand_vector[z]
valsqr = val * val
d += valsqr
if d < BMU_dist:
BMU_dist = d
BMU = (x,y)
if is_torus:
BMUs = get_all_BMU_indexes(BMU, X, Y)
for BMU in BMUs:
adapt(lattice, rand_vector, BMU, current_radius, current_lrate)
else:
adapt(lattice, rand_vector, BMU, current_radius, current_lrate)
return lattice
@jit(nopython=True)
def adapt(lattice, rand_vector, BMU, current_radius, current_lrate):
X, Y, Z = lattice.shape
for x in range(X):
for y in range(Y):
a = x-BMU[0]
b = y-BMU[1]
d = a*a + b*b
if d < current_radius:
up = d * d
down = current_radius * current_radius
res = -up / (2 * down)
influence = math.exp(res)
for z in range(Z):
diff = (rand_vector[z] - lattice[x,y,z]) * influence * current_lrate
lattice[x,y,z] += diff
@jit(nopython=True)
def euclidean(vec1, vec2):
L = vec1.shape[0]
dist = 0
for l in range(L):
val = vec2[l] - vec1[l]
valsqr = val * val
dist += valsqr
return math.sqrt(dist)
@jit(nopython=True)
def euclidean_squared(vec1, vec2):
L = vec1.shape[0]
dist = 0
for l in range(L):
val = vec2[l] - vec1[l]
valsqr = val * val
dist += valsqr
return dist
@jit(nopython=True)
def u_matrix(lattice):
X, Y, Z = lattice.shape
u_values = np.empty((X,Y), dtype=np.float64)
for y in range(Y):
for x in range(X):
current = lattice[x,y]
dist = 0
num_neigh = 0
# left
if x-1 >= 0:
#middle
vec = lattice[x-1,y]
dist += euclidean(current, vec)
num_neigh += 1
if y - 1 >= 0:
#sup
vec = lattice[x-1, y-1]
dist += euclidean(current, vec)
num_neigh += 1
if y + 1 < Y:
# down
vec = lattice[x-1,y+1]
dist += euclidean(current, vec)
num_neigh += 1
# middle
if y - 1 >= 0:
# up
vec = lattice[x,y-1]
dist += euclidean(current, vec)
num_neigh += 1
# down
if y + 1 < Y:
vec = lattice[x,y+1]
dist += euclidean(current, vec)
num_neigh += 1
# right
if x + 1 < X:
# middle
vec = lattice[x+1,y]
dist += euclidean(current, vec)
num_neigh += 1
if y - 1 >= 0:
#up
vec = lattice[x+1,y-1]
dist += euclidean(current, vec)
num_neigh += 1
if y + 1 < lattice.shape[1]:
# down
vec = lattice[x+1,y+1]
dist += euclidean(current, vec)
num_neigh += 1
u_values[x,y] = dist / num_neigh
return u_values
def project_on_som(data, lattice, additional_list=None, data_scaled=False):
start = timer()
if data_scaled:
data_scaled = data
else:
data_scaled = normalize(data)
#create all keys
projected = collections.defaultdict(list)
X, Y, Z = lattice.shape
for x in range(X):
for y in range(Y):
projected[(x,y)]
# fill keys
for index, vec in enumerate(data_scaled):
winning_cell, wi = find_closest(index, vec, lattice)
projected[winning_cell].append(wi)
if additional_list:
final = {key: [additional_list[v] for v in value] for key, value in projected.items()}
else:
final = {key: [data[v] for v in value] for key, value in projected.items()}
end = timer()
print("Projecting on SOM took: %f seconds." %(end - start))
return final
@jit(nopython=True)
def find_closest_data_index(lattice_vec, data):
min_val = 1.7976931348623157e+308
winning_index = -1
data_len = len(data)
for i in range(data_len):
data_point = data[i]
dist = euclidean_squared(lattice_vec,data_point)
if dist < min_val:
min_val = dist
winning_index = i
return winning_index
def lattice_closest_vectors(data, lattice, additional_list=None, data_scaled=False):
start = timer()
if data_scaled:
data_scaled = data
else:
data_scaled = normalize(data)
X, Y, Z = lattice.shape
# create dictionary
projected = {}
# fill keys
for x in range(X):
for y in range(Y):
lattice_vec = lattice[x,y]
winning_index = find_closest_data_index(lattice_vec, data_scaled)
if additional_list:
projected[(x,y)] = [additional_list[winning_index]]
else:
projected[(x,y)] = data[winning_index]
end = timer()
print("Finding closest data points took: %f seconds." %(end - start))
return projected
@jit(nopython=True)
def find_closest(index, vec, lattice):
X, Y, Z = lattice.shape
min_val = 1.7976931348623157e+308
win_index = -1
win_cell = (-1,-1)
for x in range(X):
for y in range(Y):
dist = euclidean_squared(vec, lattice[x,y])
if dist < min_val:
min_val = dist
win_index = index
win_cell = (x,y)
return win_cell, win_index
def som(som_size, num_iterations, data, is_torus=False, is_scaled=False):
data_scaled = data
if not is_scaled:
start = timer()
data_scaled = normalize(data)
end = timer()
print("Data scaling took: %f seconds." %(end - start))
start = timer()
lattice = som_calc(som_size, num_iterations, data_scaled, is_torus)
end = timer()
print("SOM training took: %f seconds." %(end - start))
return lattice
def save_lattice(lattice, filename):
np.save(filename, lattice)
print ("SOM lattice saved at %s" %filename)
def load_lattice(filename):
lattice = np.load(filename)
print ("SOM lattice loaded from %s" %filename)
return lattice
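# --- Illustrative usage sketch (not part of the original module) ---
# Shapes, iteration count and the output filename are arbitrary choices;
# `normalize` is the scaling helper referenced earlier in this file.
if __name__ == '__main__':
    demo_data = np.random.random((500, 8))         # hypothetical samples x features
    demo_lattice = som((20, 20), 5000, demo_data)  # train a 20x20 map
    demo_u = u_matrix(demo_lattice)                # neighbour-distance (U-matrix) view
    demo_projection = project_on_som(demo_data, demo_lattice)
    save_lattice(demo_lattice, 'demo_som_lattice.npy')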
| mit | 1,146,145,510,830,641,200 | 22.81044 | 88 | 0.621207 | false | 2.410178 | false | false | false |
Buchhold/QLever | misc/move_language_into_relation.py | 1 | 1046 | import argparse
import sys
__author__ = 'buchholb'
parser = argparse.ArgumentParser()
parser.add_argument('--nt',
type=str,
help='n-triple file.',
required=True)
def writeNtFileToStdout(nt):
for line in open(nt):
cols = line.strip('\n').split('\t')
if len(cols) != 4 or cols[3] != '.':
print('Ignoring malformed line: ' + line, file=sys.stderr)
else:
lang_start = cols[2].rfind('"@');
if lang_start > 0 and cols[2].rfind('"', lang_start + 1) == -1:
lang = cols[2][lang_start + 2:]
if cols[1][-1] == '>':
cols[1] = cols[1][:-1] + '.' + lang + '>'
else:
cols[1] += ('.' + lang)
cols[2] = cols[2][:lang_start + 1]
print('\t'.join([cols[0], cols[1], cols[2], '.']))
def main():
args = vars(parser.parse_args())
nt = args['nt']
writeNtFileToStdout(nt)
if __name__ == '__main__':
main()
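# Illustrative effect of writeNtFileToStdout on one tab-separated input line:
#   <s> <p> "label"@en .   becomes   <s> <p.en> "label" .
# i.e. the language tag is moved from the literal into the relation name.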
| apache-2.0 | 8,025,750,356,437,480,000 | 26.526316 | 75 | 0.448375 | false | 3.521886 | false | false | false |
google-research/language | language/xsp/model/constants.py | 1 | 1574 | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains constants required for the model."""
# TODO(alanesuhr): These are used in convert_to_tf_examples.py.
# Use these constants instead of strings there.
# These constants define the keys used for the TFRecords.
COPIABLE_INPUT_KEY = 'copiable_input'
ALIGNED_KEY = 'utterance_schema_alignment'
SEGMENT_ID_KEY = 'segment_ids'
FOREIGN_KEY_KEY = 'indicates_foreign_key'
SOURCE_WORDPIECES_KEY = 'source_wordpieces'
SOURCE_LEN_KEY = 'source_len'
LANGUAGE_KEY = 'language'
REGION_KEY = 'region'
TAG_KEY = 'tag'
OUTPUT_TYPE_KEY = 'type'
WEIGHT_KEY = 'weight'
TARGET_ACTION_TYPES_KEY = 'target_action_types'
TARGET_ACTION_IDS_KEY = 'target_action_ids'
TARGET_LEN_KEY = 'target_len'
SCORES_KEY = 'scores'
# Symbol IDs.
TARGET_START_SYMBOL_ID = 2
TARGET_END_SYMBOL_ID = 1
PAD_SYMBOL_ID = 0
GENERATE_ACTION = 1
COPY_ACTION = 2
NUM_RESERVED_OUTPUT_SYMBOLS = 3
PREDICTED_ACTION_TYPES = 'predicted_action_types'
PREDICTED_ACTION_IDS = 'predicted_action_ids'
| apache-2.0 | -5,873,433,957,893,586,000 | 30.48 | 74 | 0.747776 | false | 3.238683 | false | false | false |
cangermueller/deepcpg | deepcpg/data/dna.py | 1 | 2502 | """Functions for representing DNA sequences."""
from __future__ import division
from __future__ import print_function
from collections import OrderedDict
import numpy as np
from six.moves import range
# Mapping of nucleotides to integers
CHAR_TO_INT = OrderedDict([('A', 0), ('T', 1), ('G', 2), ('C', 3), ('N', 4)])
# Mapping of integers to nucleotides
INT_TO_CHAR = {v: k for k, v in CHAR_TO_INT.items()}
def get_alphabet(special=False, reverse=False):
"""Return char->int alphabet.
Parameters
----------
special: bool
If `True`, remove special 'N' character.
reverse: bool
If `True`, return int->char instead of char->int alphabet.
Returns
-------
OrderedDict
DNA alphabet.
"""
alpha = OrderedDict(CHAR_TO_INT)
if not special:
del alpha['N']
if reverse:
alpha = {v: k for k, v in alpha.items()}
return alpha
def char_to_int(seq):
"""Translate chars of single sequence `seq` to ints.
Parameters
----------
seq: str
DNA sequence.
Returns
-------
list
Integer-encoded `seq`.
"""
return [CHAR_TO_INT[x] for x in seq.upper()]
def int_to_char(seq, join=True):
"""Translate ints of single sequence `seq` to chars.
Parameters
----------
seq: list
Integers of sequences
join: bool
If `True` joint characters to `str`.
Returns
-------
If `join=True`, `str`, otherwise list of chars.
"""
t = [INT_TO_CHAR[x] for x in seq]
if join:
t = ''.join(t)
return t
def int_to_onehot(seqs, dim=4):
"""One-hot encodes array of integer sequences.
    Takes array [nb_seq, seq_len] of integer sequences and encodes them one-hot.
    Special nucleotides (int >= 4) will be encoded as [0, 0, 0, 0].
    Parameters
    ----------
seqs: :class:`numpy.ndarray`
[nb_seq, seq_len] :class:`numpy.ndarray` of integer sequences.
dim: int
Number of nucleotides
Returns
-------
:class:`numpy.ndarray`
[nb_seq, seq_len, dim] :class:`numpy.ndarray` of one-hot encoded
sequences.
"""
seqs = np.atleast_2d(np.asarray(seqs))
n = seqs.shape[0]
l = seqs.shape[1]
enc_seqs = np.zeros((n, l, dim), dtype='int8')
for i in range(dim):
t = seqs == i
enc_seqs[t, i] = 1
return enc_seqs
def onehot_to_int(seqs, axis=-1):
"""Translates one-hot sequences to integer sequences."""
return seqs.argmax(axis=axis)
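# --- Illustrative round trip (not part of the original module) ---
# 'N' has no one-hot channel, so it decodes back to index 0 ('A') after argmax.
if __name__ == '__main__':
    demo_seq = 'ACGTN'
    demo_ints = char_to_int(demo_seq)          # [0, 3, 2, 1, 4]
    demo_onehot = int_to_onehot([demo_ints])   # shape (1, 5, 4); the 'N' row is all zeros
    demo_back = int_to_char(onehot_to_int(demo_onehot)[0])  # 'ACGTA'
    print(demo_seq, demo_ints, demo_onehot.shape, demo_back)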
| mit | -3,484,228,904,838,836,700 | 22.383178 | 79 | 0.583933 | false | 3.460581 | false | false | false |
pynamodb/PynamoDB | tests/test_discriminator.py | 1 | 6828 | import pytest
from pynamodb.attributes import DiscriminatorAttribute
from pynamodb.attributes import DynamicMapAttribute
from pynamodb.attributes import ListAttribute
from pynamodb.attributes import MapAttribute
from pynamodb.attributes import NumberAttribute
from pynamodb.attributes import UnicodeAttribute
from pynamodb.models import Model
class_name = lambda cls: cls.__name__
class TypedValue(MapAttribute):
_cls = DiscriminatorAttribute(attr_name = 'cls')
name = UnicodeAttribute()
class NumberValue(TypedValue, discriminator=class_name):
value = NumberAttribute()
class StringValue(TypedValue, discriminator=class_name):
value = UnicodeAttribute()
class RenamedValue(TypedValue, discriminator='custom_name'):
value = UnicodeAttribute()
class DiscriminatorTestModel(Model, discriminator='Parent'):
class Meta:
host = 'http://localhost:8000'
table_name = 'test'
hash_key = UnicodeAttribute(hash_key=True)
value = TypedValue()
values = ListAttribute(of=TypedValue)
type = DiscriminatorAttribute()
class ChildModel(DiscriminatorTestModel, discriminator='Child'):
value = UnicodeAttribute()
class DynamicSubclassedMapAttribute(DynamicMapAttribute):
string_attr = UnicodeAttribute()
class DynamicMapDiscriminatorTestModel(Model, discriminator='Parent'):
class Meta:
host = 'http://localhost:8000'
table_name = 'test'
hash_key = UnicodeAttribute(hash_key=True)
value = DynamicSubclassedMapAttribute(default=dict)
type = DiscriminatorAttribute()
class DynamicMapDiscriminatorChildTestModel(DynamicMapDiscriminatorTestModel, discriminator='Child'):
value = UnicodeAttribute()
class TestDiscriminatorAttribute:
def test_serialize(self):
dtm = DiscriminatorTestModel()
dtm.hash_key = 'foo'
dtm.value = StringValue(name='foo', value='Hello')
dtm.values = [NumberValue(name='bar', value=5), RenamedValue(name='baz', value='World')]
assert dtm.serialize() == {
'hash_key': {'S': 'foo'},
'type': {'S': 'Parent'},
'value': {'M': {'cls': {'S': 'StringValue'}, 'name': {'S': 'foo'}, 'value': {'S': 'Hello'}}},
'values': {'L': [
{'M': {'cls': {'S': 'NumberValue'}, 'name': {'S': 'bar'}, 'value': {'N': '5'}}},
{'M': {'cls': {'S': 'custom_name'}, 'name': {'S': 'baz'}, 'value': {'S': 'World'}}}
]}
}
def test_deserialize(self):
item = {
'hash_key': {'S': 'foo'},
'type': {'S': 'Parent'},
'value': {'M': {'cls': {'S': 'StringValue'}, 'name': {'S': 'foo'}, 'value': {'S': 'Hello'}}},
'values': {'L': [
{'M': {'cls': {'S': 'NumberValue'}, 'name': {'S': 'bar'}, 'value': {'N': '5'}}},
{'M': {'cls': {'S': 'custom_name'}, 'name': {'S': 'baz'}, 'value': {'S': 'World'}}}
]}
}
dtm = DiscriminatorTestModel.from_raw_data(item)
assert dtm.hash_key == 'foo'
assert dtm.value.value == 'Hello'
assert dtm.values[0].value == 5
assert dtm.values[1].value == 'World'
def test_condition_expression(self):
condition = DiscriminatorTestModel.value._cls == RenamedValue
placeholder_names, expression_attribute_values = {}, {}
expression = condition.serialize(placeholder_names, expression_attribute_values)
assert expression == "#0.#1 = :0"
assert placeholder_names == {'value': '#0', 'cls': '#1'}
assert expression_attribute_values == {':0': {'S': 'custom_name'}}
def test_multiple_discriminator_values(self):
class TestAttribute(MapAttribute, discriminator='new_value'):
cls = DiscriminatorAttribute()
TestAttribute.cls.register_class(TestAttribute, 'old_value')
# ensure the first registered value is used during serialization
assert TestAttribute.cls.get_discriminator(TestAttribute) == 'new_value'
assert TestAttribute.cls.serialize(TestAttribute) == 'new_value'
# ensure the second registered value can be used to deserialize
assert TestAttribute.cls.deserialize('old_value') == TestAttribute
assert TestAttribute.cls.deserialize('new_value') == TestAttribute
def test_multiple_discriminator_classes(self):
with pytest.raises(ValueError):
# fail when attempting to register a class with an existing discriminator value
class RenamedValue2(TypedValue, discriminator='custom_name'):
pass
class TestDiscriminatorModel:
def test_serialize(self):
cm = ChildModel()
cm.hash_key = 'foo'
cm.value = 'bar'
cm.values = []
assert cm.serialize() == {
'hash_key': {'S': 'foo'},
'type': {'S': 'Child'},
'value': {'S': 'bar'},
'values': {'L': []}
}
def test_deserialize(self):
item = {
'hash_key': {'S': 'foo'},
'type': {'S': 'Child'},
'value': {'S': 'bar'},
'values': {'L': []}
}
cm = DiscriminatorTestModel.from_raw_data(item)
assert isinstance(cm, ChildModel)
assert cm.hash_key == 'foo'
assert cm.value == 'bar'
class TestDynamicDiscriminatorModel:
def test_serialize_parent(self):
m = DynamicMapDiscriminatorTestModel()
m.hash_key = 'foo'
m.value.string_attr = 'foostr'
m.value.bar_attribute = 3
assert m.serialize() == {
'hash_key': {'S': 'foo'},
'type': {'S': 'Parent'},
'value': {'M': {'string_attr': {'S': 'foostr'}, 'bar_attribute': {'N': '3'}}},
}
def test_deserialize_parent(self):
item = {
'hash_key': {'S': 'foo'},
'type': {'S': 'Parent'},
'value': {
'M': {'string_attr': {'S': 'foostr'}, 'bar_attribute': {'N': '3'}}
}
}
m = DynamicMapDiscriminatorTestModel.from_raw_data(item)
assert m.hash_key == 'foo'
assert m.value
assert m.value.string_attr == 'foostr'
assert m.value.bar_attribute == 3
def test_serialize_child(self):
m = DynamicMapDiscriminatorChildTestModel()
m.hash_key = 'foo'
m.value = 'string val'
assert m.serialize() == {
'hash_key': {'S': 'foo'},
'type': {'S': 'Child'},
'value': {'S': 'string val'}
}
def test_deserialize_child(self):
item = {
'hash_key': {'S': 'foo'},
'type': {'S': 'Child'},
'value': {'S': 'string val'}
}
m = DynamicMapDiscriminatorChildTestModel.from_raw_data(item)
assert m.hash_key == 'foo'
assert m.value == 'string val'
| mit | -3,047,368,666,517,998,000 | 33.836735 | 105 | 0.57645 | false | 3.776549 | true | false | false |
samuelcolvin/django-importexport | views.py | 1 | 5063 | from django import forms
import Imex.models as m
import Imex.tasks as tasks
import HotDjango.views_base as viewb
from django.core.urlresolvers import reverse
from django.db import models
import settings
from django.shortcuts import redirect
import Imex
import_groups, export_groups = Imex.get_imex_groups()
actions = {'imex_import':import_groups, 'imex_export': export_groups}
class Export(viewb.TemplateBase):
template_name = 'export.html'
menu_active = 'imex_export'
side_menu = False
show_crums = False
def get_context_data(self, **kw):
self._context['title'] = 'Export'
self._context['page_menu'] = self.set_links()
return self._context
def set_links(self):
links= []
for group, label in actions['imex_export']:
links.append({'url': reverse('imex_process', kwargs={'command': 'imex_export', 'group': group}), 'name': label})
return links
class ExcelUploadForm(forms.Form):
xlfile = forms.FileField(
label='Select Excel (xlsx) File to Upload',
help_text='should be in standard format for this system'
)
import_group = forms.ChoiceField(widget=forms.RadioSelect, choices=import_groups, label='Import Type', initial=import_groups[0][0])
class Import(viewb.TemplateBase):
template_name = 'import.html'
menu_active = 'imex_import'
side_menu = False
show_crums = False
def get_context_data(self, **kw):
self._context['title'] = 'Import'
self._context['process_url'] = reverse('imex_process', kwargs={'command': 'imex_import'})
self._context['upload_form'] = ExcelUploadForm()
if 'errors' in self.request.session:
self._context['errors'] = self.request.session['errors']
return self._context
class Process(viewb.TemplateBase):
template_name = 'process.html'
side_menu = False
show_crums = False
_redirect = None
def get(self, request, *args, **kw):
if 'menu_active' in request.session:
self.menu_active = request.session['menu_active']
return super(Process, self).get(request, *args, **kw)
def post(self, request, *args, **kw):
page = self.get(request, *args, **kw)
if self._redirect:
return self._redirect
return page
_act_map = {'imex_export': 'EX', 'imex_import':'IM'}
def get_context_data(self, **kw):
self._context['expected_ms'] = 0
act = self._act_map[kw['command']]
self._context['act'] = act
prev_successful = m.Process.objects.filter(complete=True, successful=True, action=act)
if prev_successful.exists():
# print 'average_of %s' % ','.join([ '%0.3f' % p.time_taken for p in prev_successful])
expected_time = prev_successful.aggregate(expected_time = models.Avg('time_taken'))['expected_time']
self._context['expected_ms'] = '%0.0f' % (expected_time * 1000)
success = self.choose_func(kw)
if not success:
return self._context
self._context['media_url'] = settings.MEDIA_URL
self._context['json_url'] = '%s/%d.json' % (reverse('rest-Imex-Process-list'), self._pid)
return self._context
def choose_func(self, kw):
if 'command' in kw:
command = kw['command']
if command in [func_name for func_name, _ in self._act_map.items()]:
return getattr(self, command)(kw)
else:
self._context['errors'] = ['No function called %s' % command]
def imex_export(self, kw):
group = kw['group']
assert group in [g for g, _ in export_groups], \
'group %s not found in export_groups: %r' % (group, export_groups)
processor = m.Process.objects.create(action='EX', group=group)
self._pid = processor.id
tasks.perform_export(self._pid)
return True
def imex_import(self, kw):
error = None
if self.request.method != 'POST':
error = "No post data"
else:
form = ExcelUploadForm(self.request.POST, self.request.FILES)
import_group = form['import_group'].value()
if not form.is_valid():
error = "Form not valid"
elif not str(self.request.FILES['xlfile']).endswith('.xlsx'):
error = 'File must be xlsx, not xls or any other format.'
elif import_group not in [g for g, _ in import_groups]:
error = 'Group %s is not one of the import groups: %r' % (import_group, import_groups)
if error:
            print('refused')
self.request.session['errors'] = [error]
self._redirect = redirect(reverse('imex_import'))
return
p = m.Process.objects.create(action='IM', imex_file = self.request.FILES['xlfile'], group=import_group)
msg = tasks.perform_import(p.id)
if msg:
self._context['errors'].append(msg)
self._pid = p.id
return True
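# Illustrative URL wiring for these views (the real patterns live in the
# project's urls.py; the names below only have to match the reverse() calls
# used above, and the regexes are assumptions):
#
# urlpatterns = [
#     url(r'^import/$', Import.as_view(), name='imex_import'),
#     url(r'^export/$', Export.as_view(), name='imex_export'),
#     url(r'^process/(?P<command>\w+)/$', Process.as_view(), name='imex_process'),
#     url(r'^process/(?P<command>\w+)/(?P<group>\w+)/$', Process.as_view(), name='imex_process'),
# ]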
| gpl-2.0 | 4,235,616,666,778,978,300 | 37.648855 | 135 | 0.596089 | false | 3.7146 | false | false | false |
ghostop14/sparrow-wifi | sparrowwifiagent.py | 1 | 114857 | #!/usr/bin/python3
#
# Copyright 2017 ghostop14
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import os
import sys
import datetime
import json
import re
import argparse
import configparser
# import subprocess
from socket import *
from time import sleep
from threading import Thread, Lock
from dateutil import parser
from http import server as HTTPServer
from socketserver import ThreadingMixIn
from wirelessengine import WirelessEngine
from sparrowgps import GPSEngine, GPSEngineStatic, GPSStatus, SparrowGPS
try:
from sparrowdrone import SparrowDroneMavlink
hasDroneKit = True
except:
hasDroneKit = False
from sparrowrpi import SparrowRPi
from sparrowbluetooth import SparrowBluetooth, BluetoothDevice
from sparrowhackrf import SparrowHackrf
from sparrowcommon import gzipCompress
try:
from manuf import manuf
hasOUILookup = True
except:
hasOUILookup = False
# ------ Global setup ------------
gpsEngine = None
curTime = datetime.datetime.now()
useMavlink = False
vehicle = None
mavlinkGPSThread = None
hasFalcon = False
hasBluetooth = False
hasUbertooth = False
falconWiFiRemoteAgent = None
bluetooth = None
hackrf = SparrowHackrf()
debugHTTP = False
allowCors = False
# Lock list is a dictionary of thread locks for scanning interfaces
lockList = {}
allowedIPs = []
useRPILeds = False
# runningcfg is created in main
runningcfg = None
recordThread = None
announceThread = None
# ------ Global functions ------------
def stringtobool(instr):
if (instr == 'True' or instr == 'true'):
return True
else:
return False
def TwoDigits(instr):
# Fill in a leading zero for single-digit numbers
while len(instr) < 2:
instr = '0' + instr
return instr
def deleteRecordingFiles(filelist):
dirname, filename = os.path.split(os.path.abspath(__file__))
recordingsDir = dirname + '/recordings'
retVal = ''
for curFilename in filelist:
# This split is simply a safety check to prevent path traversal attacks
dirname, filename = os.path.split(curFilename)
if len(filename) > 0:
fullpath = recordingsDir + '/' + filename
try:
os.remove(fullpath)
except:
if len(retVal) == 0:
retVal = filename
else:
retVal += ',' + filename
return retVal
def getRecordingFiles():
dirname, filename = os.path.split(os.path.abspath(__file__))
recordingsDir = dirname + '/recordings'
if not os.path.exists(recordingsDir):
os.makedirs(recordingsDir)
retVal = []
try:
for filename in os.listdir(recordingsDir):
fullPath = recordingsDir + '/' + filename
if not os.path.isdir(fullPath):
curFile = FileSystemFile()
curFile.filename = filename
curFile.size = os.path.getsize(fullPath)
try:
curFile.timestamp = datetime.datetime.fromtimestamp(os.path.getmtime(fullPath))
except:
curFile.timestamp = None
retVal.append(curFile.toJsondict())
except:
pass
return retVal
def restartAgent():
global bluetooth
if mavlinkGPSThread:
mavlinkGPSThread.signalStop = True
print('Waiting for mavlink GPS thread to terminate...')
while (mavlinkGPSThread.threadRunning):
sleep(0.2)
stopRecord()
stopAnnounceThread()
if bluetooth:
bluetooth.stopScanning()
if runningcfg.useRPiLEDs:
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
if hasFalcon:
falconWiFiRemoteAgent.cleanup()
if os.path.isfile('/usr/local/bin/python3.5') or os.path.isfile('/usr/bin/python3.5'):
exefile = 'python3.5'
else:
exefile = 'python3'
# params = [exefile, __file__, '--delaystart=2']
newCommand = exefile + ' ' + __file__ + ' --delaystart=2 &'
os.system(newCommand)
# subprocess.Popen(params, stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
# result = subprocess.run(params, stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
# restartResult = result.stdout.decode('UTF-8')
os.kill(os.getpid(), 9)
def updateRunningConfig(newCfg):
global runningcfg
if runningcfg.ipAllowedList != newCfg.ipAllowedList:
buildAllowedIPs(newCfg.ipAllowedList)
# port we ignore since we're already running
# useRPiLEDs will just update
# Announce
if runningcfg.announce != newCfg.announce:
if not newCfg.announce:
stopAnnounceThread()
else:
# start will check if it's already running
startAnnounceThread()
# mavlinkGPS
# Need to restart to update mavlinkGPS
# So just copy forward
newCfg.mavlinkGPS = runningcfg.mavlinkGPS
# recordInterface
if runningcfg.recordInterface != newCfg.recordInterface:
if len(newCfg.recordInterface) == 0:
stopRecord()
else:
# start will check if it's already running
startRecord(newCfg.recordInterface)
# Finally swap out the config
runningcfg = newCfg
def startRecord(interface):
global recordThread
if recordThread:
return
if len(interface) > 0:
interfaces = WirelessEngine.getInterfaces()
if interface in interfaces:
recordThread = AutoAgentScanThread(interface)
recordThread.start()
else:
print('ERROR: Record was requested on ' + interface + ' but that interface was not found.')
else:
recordThread = None
def stopRecord():
global recordThread
if recordThread:
recordThread.signalStop = True
print('Waiting for record thread to terminate...')
i=0
maxCycles = 2 /0.2
while (recordThread.threadRunning) and (i<maxCycles):
sleep(0.2)
i += 1
def stopAnnounceThread():
global announceThread
if announceThread:
announceThread.signalStop = True
print('Waiting for announce thread to terminate...')
sleep(0.2)
# i=0
# maxCycles = 5 # int(2.0 /0.2)
# while (announceThread.threadRunning) and (i<maxCycles):
# sleep(0.2)
# i += 1
announceThread = None
def startAnnounceThread():
global runningcfg
global announceThread
# Start announce if needed
if announceThread:
# It's already running
return
print('Sending agent announcements on port ' + str(runningcfg.port) + '.')
announceThread = AnnounceThread(runningcfg.port)
announceThread.start()
def buildAllowedIPs(allowedIPstr):
global allowedIPs
allowedIPs = []
if len(allowedIPstr) > 0:
        ippattern = re.compile(r'([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})')
if ',' in allowedIPstr:
tmpList = allowedIPstr.split(',')
for curItem in tmpList:
ipStr = curItem.replace(' ', '')
try:
ipValue = ippattern.search(ipStr).group(1)
except:
ipValue = ""
print('ERROR: Unknown IP pattern: ' + ipStr)
exit(3)
if len(ipValue) > 0:
allowedIPs.append(ipValue)
else:
ipStr = allowedIPstr.replace(' ', '')
try:
ipValue = ippattern.search(ipStr).group(1)
except:
ipValue = ""
print('ERROR: Unknown IP pattern: ' + ipStr)
return False
if len(ipValue) > 0:
allowedIPs.append(ipValue)
return True
# ------ OUI lookup functions ------------
def getOUIDB():
ouidb = None
if hasOUILookup:
if os.path.isfile('manuf'):
# We have the file but let's not update it every time we run the app.
# every 90 days should be plenty
last_modified_date = datetime.datetime.fromtimestamp(os.path.getmtime('manuf'))
now = datetime.datetime.now()
age = now - last_modified_date
if age.days > 90:
updateflag = True
else:
updateflag = False
else:
# We don't have the file, let's get it
updateflag = True
try:
ouidb = manuf.MacParser(update=updateflag)
except:
ouidb = None
else:
ouidb = None
return ouidb
# ------------------ File ------------------------------
class FileSystemFile(object):
def __init__(self):
self.filename = ""
self.size = 0
self.timestamp = None
def __str__(self):
retVal = self.filename
return retVal
def toJsondict(self):
jsondict = {}
jsondict['filename'] = self.filename
jsondict['size'] = self.size
jsondict['timestamp'] = str(self.timestamp)
return jsondict
def fromJsondict(self, jsondict):
self.filename = jsondict['filename']
self.size = jsondict['size']
if jsondict['timestamp'] == 'None':
self.timestamp = None
else:
self.timestamp = parser.parse(jsondict['timestamp'])
# ------------------ Config Settings ------------------------------
class AgentConfigSettings(object):
def __init__(self):
self.cancelStart = False
self.port = 8020
self.announce = False
self.useRPiLEDs = False
self.recordInterface=""
self.recordRunning = False
self.mavlinkGPS = ""
self.ipAllowedList = ""
self.allowCors = False
def __str__(self):
retVal = "Cancel Start: " + str(self.cancelStart) + "\n"
retVal += "Port: " + str(self.port) + "\n"
retVal += "Announce Agent: " + str(self.announce) + "\n"
retVal += "Use RPi LEDs: " + str(self.useRPiLEDs) + "\n"
retVal += "Record Interface: " + self.recordInterface + "\n"
retVal += "Record Running (for running configs): " + str(self.recordRunning) + "\n"
retVal += "Mavlink GPS: " + self.mavlinkGPS + "\n"
retVal += "IP Allowed List: " + self.ipAllowedList + "\n"
retVal += "Allow CORS: " + str(self.allowCors) + "\n"
return retVal
def __eq__(self, obj):
# This is equivance.... ==
if not isinstance(obj, AgentConfigSettings):
return False
if self.cancelStart != obj.cancelStart:
return False
if self.port != obj.port:
return False
if self.announce != obj.announce:
return False
if self.useRPiLEDs != obj.useRPiLEDs:
return False
if self.recordInterface != obj.recordInterface:
return False
if self.mavlinkGPS != obj.mavlinkGPS:
return False
if self.ipAllowedList != obj.ipAllowedList:
return False
if self.allowCors != obj.allowCors:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def toJsondict(self):
dictjson = {}
dictjson['cancelstart'] = str(self.cancelStart)
dictjson['port'] = self.port
dictjson['announce'] = str(self.announce)
dictjson['recordrunning'] = str(self.recordRunning)
dictjson['userpileds'] = str(self.useRPiLEDs)
dictjson['recordinterface'] = self.recordInterface
dictjson['mavlinkgps'] = self.mavlinkGPS
dictjson['allowedips'] = self.ipAllowedList
dictjson['allowcors'] = str(self.allowCors)
return dictjson
def toJson(self):
dictjson = self.toJsondict()
return json.dumps(dictjson)
def fromJsondict(self, dictjson):
try:
self.cancelStart = stringtobool(dictjson['cancelstart'])
self.port = int(dictjson['port'])
self.announce = stringtobool(dictjson['announce'])
self.recordRunning = stringtobool(dictjson['recordrunning'])
self.useRPiLEDs = stringtobool(dictjson['userpileds'])
self.recordInterface = dictjson['recordinterface']
self.mavlinkGPS = dictjson['mavlinkgps']
self.ipAllowedList = dictjson['allowedips']
# if 'allowcors' in dictjson.keys():
self.allowCors = stringtobool(dictjson['allowcors'])
# else:
# print("allowCors not set in dictjson!")
except Exception as e:
print(e)
def fromJson(self, jsonstr):
dictjson = json.loads(jsonstr)
self.fromJsondict(dictjson)
def toConfigFile(self, cfgFile):
config = configparser.ConfigParser()
config['agent'] = self.toJsondict()
try:
with open(cfgFile, 'w') as configfile:
config.write(configfile)
return True
except:
return False
def fromConfigFile(self, cfgFile):
if os.path.isfile(cfgFile):
cfgParser = configparser.ConfigParser()
try:
cfgParser.read(cfgFile)
section="agent"
options = cfgParser.options(section)
for option in options:
try:
if option =='cancelstart':
self.cancelStart = stringtobool(cfgParser.get(section, option))
                        elif option == 'sendannounce' or option == 'announce':
self.announce = stringtobool(cfgParser.get(section, option))
elif option == 'userpileds':
self.useRPiLEDs = stringtobool(cfgParser.get(section, option))
elif option == 'port':
self.port=int(cfgParser.get(section, option))
elif option == 'recordinterface':
self.recordInterface=cfgParser.get(section, option)
elif option == 'mavlinkgps':
self.mavlinkGPS=cfgParser.get(section, option)
elif option == 'allowedips':
self.ipAllowedList = cfgParser.get(section, option)
elif option == 'allowcors':
self.allowCors = stringtobool(cfgParser.get(section, option))
except:
print("exception on %s!" % option)
except:
print("ERROR: Unable to read config file: ", cfgFile)
return False
else:
return False
return True
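# Illustrative sparrowwifiagent.cfg consumed by fromConfigFile() above
# (values are examples only; option names are the ones parsed in that method):
#
# [agent]
# cancelstart=False
# port=8020
# sendannounce=True
# userpileds=False
# recordinterface=wlan0
# mavlinkgps=
# allowedips=192.168.1.10,192.168.1.11
# allowcors=False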
# ------------------ Agent auto scan thread ------------------------------
class AutoAgentScanThread(Thread):
def __init__(self, interface):
global lockList
global hasBluetooth
super(AutoAgentScanThread, self).__init__()
self.interface = interface
self.signalStop = False
self.scanDelay = 0.5 # seconds
self.threadRunning = False
self.discoveredNetworks = {}
self.discoveredBluetoothDevices = {}
self.daemon = True
try:
self.hostname = os.uname()[1]
except:
self.hostname = 'unknown'
if len(self.hostname) == 0:
self.hostname = 'unknown'
self.ouiLookupEngine = getOUIDB()
if interface not in lockList.keys():
lockList[interface] = Lock()
if not os.path.exists('./recordings'):
os.makedirs('./recordings')
now = datetime.datetime.now()
self.filename = './recordings/' + self.hostname + '_wifi_' + str(now.year) + "-" + TwoDigits(str(now.month)) + "-" + TwoDigits(str(now.day))
self.filename += "_" + TwoDigits(str(now.hour)) + "_" + TwoDigits(str(now.minute)) + "_" + TwoDigits(str(now.second)) + ".csv"
self.btfilename = './recordings/' + self.hostname + '_bt_' + str(now.year) + "-" + TwoDigits(str(now.month)) + "-" + TwoDigits(str(now.day))
self.btfilename += "_" + TwoDigits(str(now.hour)) + "_" + TwoDigits(str(now.minute)) + "_" + TwoDigits(str(now.second)) + ".csv"
if hasBluetooth:
print('Capturing on ' + interface + ' and writing wifi to ' + self.filename)
print('and writing bluetooth to ' + self.btfilename)
else:
print('Capturing on ' + interface + ' and writing wifi to ' + self.filename)
def run(self):
global lockList
global hasBluetooth
self.threadRunning = True
if self.interface not in lockList.keys():
lockList[self.interface] = Lock()
curLock = lockList[self.interface]
if hasBluetooth:
# Start normal discovery
bluetooth.startDiscovery(False)
lastState = -1
while (not self.signalStop):
# Scan all / normal mode
if (curLock):
curLock.acquire()
retCode, errString, wirelessNetworks = WirelessEngine.scanForNetworks(self.interface)
if (curLock):
curLock.release()
if (retCode == 0):
if useMavlink:
gpsCoord = GPSStatus()
gpsCoord.gpsInstalled = True
gpsCoord.gpsRunning = True
gpsCoord.isValid = mavlinkGPSThread.synchronized
gpsCoord.latitude = mavlinkGPSThread.latitude
gpsCoord.longitude = mavlinkGPSThread.longitude
gpsCoord.altitude = mavlinkGPSThread.altitude
gpsCoord.speed = mavlinkGPSThread.vehicle.getAirSpeed()
elif gpsEngine.gpsValid():
gpsCoord = gpsEngine.lastCoord
if useRPILeds and (lastState !=SparrowRPi.LIGHT_STATE_ON):
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
lastState = SparrowRPi.LIGHT_STATE_ON
else:
gpsCoord = GPSStatus()
if useRPILeds and (lastState !=SparrowRPi.LIGHT_STATE_HEARTBEAT) :
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
lastState = SparrowRPi.LIGHT_STATE_HEARTBEAT
# self.statusBar().showMessage('Scan complete. Found ' + str(len(wirelessNetworks)) + ' networks')
if wirelessNetworks and (len(wirelessNetworks) > 0) and (not self.signalStop):
for netKey in wirelessNetworks.keys():
curNet = wirelessNetworks[netKey]
curNet.gps.copy(gpsCoord)
curNet.strongestgps.copy(gpsCoord)
curKey = curNet.getKey()
if curKey not in self.discoveredNetworks.keys():
self.discoveredNetworks[curKey] = curNet
else:
# Network exists, need to update it.
pastNet = self.discoveredNetworks[curKey]
# Need to save strongest gps and first seen. Everything else can be updated.
# Carry forward firstSeen
curNet.firstSeen = pastNet.firstSeen # This is one field to carry forward
# Check strongest signal
if pastNet.strongestsignal > curNet.signal:
curNet.strongestsignal = pastNet.strongestsignal
curNet.strongestgps.latitude = pastNet.strongestgps.latitude
curNet.strongestgps.longitude = pastNet.strongestgps.longitude
curNet.strongestgps.altitude = pastNet.strongestgps.altitude
curNet.strongestgps.speed = pastNet.strongestgps.speed
curNet.strongestgps.isValid = pastNet.strongestgps.isValid
self.discoveredNetworks[curKey] = curNet
if not self.signalStop:
self.exportNetworks()
# Now if we have bluetooth running export these:
if hasBluetooth and bluetooth.discoveryRunning():
bluetooth.deviceLock.acquire()
# Update GPS
now = datetime.datetime.now()
for curKey in bluetooth.devices.keys():
curDevice = bluetooth.devices[curKey]
elapsedTime = now - curDevice.lastSeen
# This is a little bit of a hack for the BlueHydra side since it can take a while to see devices or have
# them show up in the db. For LE discovery scans this will always be pretty quick.
if elapsedTime.total_seconds() < 120:
curDevice.gps.copy(gpsCoord)
if curDevice.rssi >= curDevice.strongestRssi:
curDevice.strongestRssi = curDevice.rssi
curDevice.strongestgps.copy(gpsCoord)
# export
self.exportBluetoothDevices(bluetooth.devices)
bluetooth.deviceLock.release()
sleep(self.scanDelay)
if hasBluetooth:
# Start normal discovery
bluetooth.stopDiscovery()
self.threadRunning = False
def ouiLookup(self, macAddr):
clientVendor = ""
if hasOUILookup:
try:
if self.ouiLookupEngine:
clientVendor = self.ouiLookupEngine.get_manuf(macAddr)
except:
clientVendor = ""
return clientVendor
def exportBluetoothDevices(self, devices):
try:
btOutputFile = open(self.btfilename, 'w')
except:
print('ERROR: Unable to write to bluetooth file ' + self.filename)
return
btOutputFile.write('uuid,Address,Name,Company,Manufacturer,Type,RSSI,TX Power,Strongest RSSI,Est Range (m),Last Seen,GPS Valid,Latitude,Longitude,Altitude,Speed,Strongest GPS Valid,Strongest Latitude,Strongest Longitude,Strongest Altitude,Strongest Speed\n')
for curKey in devices.keys():
curData = devices[curKey]
btType = ""
if curData.btType == BluetoothDevice.BT_LE:
btType = "BTLE"
else:
btType = "Classic"
if curData.txPowerValid:
txPower = str(curData.txPower)
else:
txPower = 'Unknown'
btOutputFile.write(curData.uuid + ',' + curData.macAddress + ',"' + curData.name + '","' + curData.company + '","' + curData.manufacturer)
btOutputFile.write('","' + btType + '",' + str(curData.rssi) + ',' + str(curData.strongestRssi) + ',' + txPower + ',' + str(curData.iBeaconRange) + ',' +
curData.lastSeen.strftime("%m/%d/%Y %H:%M:%S") + ',' +
str(curData.gps.isValid) + ',' + str(curData.gps.latitude) + ',' + str(curData.gps.longitude) + ',' + str(curData.gps.altitude) + ',' + str(curData.gps.speed) + ',' +
str(curData.strongestgps.isValid) + ',' + str(curData.strongestgps.latitude) + ',' + str(curData.strongestgps.longitude) + ',' + str(curData.strongestgps.altitude) + ',' + str(curData.strongestgps.speed) + '\n')
btOutputFile.close()
def exportNetworks(self):
try:
self.outputFile = open(self.filename, 'w')
except:
print('ERROR: Unable to write to wifi file ' + self.filename)
return
self.outputFile.write('macAddr,vendor,SSID,Security,Privacy,Channel,Frequency,Signal Strength,Strongest Signal Strength,Bandwidth,Last Seen,First Seen,GPS Valid,Latitude,Longitude,Altitude,Speed,Strongest GPS Valid,Strongest Latitude,Strongest Longitude,Strongest Altitude,Strongest Speed\n')
for netKey in self.discoveredNetworks.keys():
curData = self.discoveredNetworks[netKey]
vendor = self.ouiLookup(curData.macAddr)
if vendor is None:
vendor = ''
self.outputFile.write(curData.macAddr + ',' + vendor + ',"' + curData.ssid + '",' + curData.security + ',' + curData.privacy)
self.outputFile.write(',' + curData.getChannelString() + ',' + str(curData.frequency) + ',' + str(curData.signal) + ',' + str(curData.strongestsignal) + ',' + str(curData.bandwidth) + ',' +
curData.lastSeen.strftime("%m/%d/%Y %H:%M:%S") + ',' + curData.firstSeen.strftime("%m/%d/%Y %H:%M:%S") + ',' +
str(curData.gps.isValid) + ',' + str(curData.gps.latitude) + ',' + str(curData.gps.longitude) + ',' + str(curData.gps.altitude) + ',' + str(curData.gps.speed) + ',' +
str(curData.strongestgps.isValid) + ',' + str(curData.strongestgps.latitude) + ',' + str(curData.strongestgps.longitude) + ',' + str(curData.strongestgps.altitude) + ',' + str(curData.strongestgps.speed) + '\n')
self.outputFile.close()
# ------------------ Announce thread ------------------------------
class AnnounceThread(Thread):
def __init__(self, port):
super(AnnounceThread, self).__init__()
self.signalStop = False
self.sendDelay = 4.0 # seconds
self.threadRunning = False
self.daemon = True
self.broadcastSocket = socket(AF_INET, SOCK_DGRAM)
self.broadcastSocket.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
self.broadcastSocket.setsockopt(SOL_SOCKET, SO_BROADCAST, 1)
self.broadcastPort = port
self.broadcastAddr=('255.255.255.255', self.broadcastPort)
def sendAnnounce(self):
try:
self.broadcastSocket.sendto(bytes('sparrowwifiagent', "utf-8"),self.broadcastAddr)
except:
pass
def run(self):
self.threadRunning = True
while (not self.signalStop):
self.sendAnnounce()
# 4 second delay, but check every second for termination signal
i=0
while i<4 and not self.signalStop:
sleep(1.0)
i += 1
self.threadRunning = False
# ------------------ Local network scan thread ------------------------------
class MavlinkGPSThread(Thread):
def __init__(self, vehicle):
super(MavlinkGPSThread, self).__init__()
self.signalStop = False
self.scanDelay = 0.5 # seconds
self.threadRunning = False
self.vehicle = vehicle
self.synchronized = False
self.latitude = 0.0
self.longitude = 0.0
self.altitude = 0.0
self.daemon = True
def run(self):
self.threadRunning = True
lastState = -1
while (not self.signalStop):
self.synchronized, self.latitude, self.longitude, self.altitude = self.vehicle.getGlobalGPS()
if self.synchronized:
# Solid on synchronized
if useRPILeds and (lastState != SparrowRPi.LIGHT_STATE_ON):
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
lastState = SparrowRPi.LIGHT_STATE_ON
else:
# heartbeat on unsynchronized
if useRPILeds and (lastState != SparrowRPi.LIGHT_STATE_HEARTBEAT):
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
lastState = SparrowRPi.LIGHT_STATE_HEARTBEAT
sleep(self.scanDelay)
self.threadRunning = False
class SparrowWiFiAgent(object):
# See https://docs.python.org/3/library/http.server.html
# For HTTP Server info
def run(self, port):
global useRPILeds
global hackrf
global bluetooth
global falconWiFiRemoteAgent
server_address = ('', port)
try: # httpd = HTTPServer.HTTPServer(server_address, SparrowWiFiAgentRequestHandler)
httpd = MultithreadHTTPServer(server_address, SparrowWiFiAgentRequestHandler)
except OSError as e:
curTime = datetime.datetime.now()
print('[' +curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Unable to bind to port " + str(port) + ". " + e.strerror)
if runningcfg.useRPiLEDs:
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
exit(1)
curTime = datetime.datetime.now()
print('[' +curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Starting Sparrow-wifi agent on port " + str(port))
if useRPILeds:
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
try:
httpd.serve_forever()
except KeyboardInterrupt:
pass
httpd.server_close()
if useRPILeds:
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
if hasFalcon:
falconWiFiRemoteAgent.cleanup()
if bluetooth:
bluetooth.stopScanning()
if hackrf.scanRunning():
hackrf.stopScanning()
curTime = datetime.datetime.now()
print('[' +curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Sparrow-wifi agent stopped.")
# --------------- Multithreaded HTTP Server ------------------------------------
class MultithreadHTTPServer(ThreadingMixIn, HTTPServer.HTTPServer):
pass
# --------------- HTTP Request Handler --------------------
# Sample handler: https://wiki.python.org/moin/BaseHttpServer
class SparrowWiFiAgentRequestHandler(HTTPServer.BaseHTTPRequestHandler):
def log_message(self, format, *args):
global debugHTTP
if not debugHTTP:
return
else:
HTTPServer.BaseHTTPRequestHandler(format, *args)
def do_HEAD(s):
global allowCors
s.send_response(200)
s.send_header("Content-type", "text/html")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
def do_POST(s):
global runningcfg
global falconWiFiRemoteAgent
if len(s.client_address) == 0:
# This should have the connecting client IP. If this isn't at least 1, something is wrong
return
if len(allowedIPs) > 0:
if s.client_address[0] not in allowedIPs:
try:
s.send_response(403)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<html><body><p>Connections not authorized from your IP address</p>".encode("utf-8"))
s.wfile.write("</body></html>".encode("UTF-8"))
except:
pass
return
if (not s.isValidPostURL()):
try:
s.send_response(404)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<html><body><p>Page not found.</p>".encode("utf-8"))
s.wfile.write("</body></html>".encode("UTF-8"))
except:
pass
return
# Get the size of the posted data
try:
length = int(s.headers['Content-Length'])
except:
length = 0
if length <= 0:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Agent received a zero-length request.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
# get the POSTed payload
jsonstr_data = s.rfile.read(length).decode('utf-8')
# Try to convert it to JSON
try:
jsondata = json.loads(jsonstr_data)
except:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'bad posted data.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
if s.path == '/system/config':
# ------------- Update startup config ------------------
try:
scfg = jsondata['startup']
startupCfg = AgentConfigSettings()
startupCfg.fromJsondict(scfg)
dirname, filename = os.path.split(os.path.abspath(__file__))
cfgFile = dirname + '/sparrowwifiagent.cfg'
retVal = startupCfg.toConfigFile(cfgFile)
if not retVal:
# HTML 400 = Bad request
s.send_response(400)
responsedict = {}
responsedict['errcode'] = 2
responsedict['errmsg'] = 'An error occurred saving the startup config.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
responsedict = {}
responsedict['errcode'] = 3
responsedict['errmsg'] = 'Bad startup config.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
# ------------- Check if we should reboot ------------------
if 'rebootagent' in jsondata:
rebootFlag = jsondata['rebootagent']
if rebootFlag:
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = 'Restarting agent.'
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
restartAgent()
# If we're restarting, we'll never get to running config.
# ------------- Update Running config ------------------
try:
rcfg = jsondata['running']
tmpcfg = AgentConfigSettings()
tmpcfg.fromJsondict(rcfg)
updateRunningConfig(tmpcfg)
try:
s.send_response(200)
s.send_header("Content-Length", 0)
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
except:
pass
except Exception as e:
print(e)
responsedict = {}
responsedict['errcode'] = 4
responsedict['errmsg'] = 'Bad running config.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
# ------------- Done updating config ------------------
elif s.path == '/system/deleterecordings':
try:
filelist = jsondata['files']
problemfiles=deleteRecordingFiles(filelist)
responsedict = {}
if len(problemfiles) == 0:
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = problemfiles
jsonstr = json.dumps(responsedict)
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing json"
except:
pass
elif s.path == '/falcon/stopdeauth':
if not hasFalcon:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Should get a FalconDeauth object
# This is in jsondata
try:
apMacAddr = jsondata['apmacaddr']
clientMacAddr = jsondata['stationmacaddr']
channel = jsondata['channel']
curInterface = jsondata['interface']
falconWiFiRemoteAgent.stopDeauth(apMacAddr, clientMacAddr, curInterface, channel)
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing json"
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/falcon/deauth':
if not hasFalcon:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Should get a FalconDeauth object
# This is in jsondata
try:
apMacAddr = jsondata['apmacaddr']
clientMacAddr = jsondata['stationmacaddr']
channel = jsondata['channel']
curInterface = jsondata['interface']
continuous = jsondata['continuous']
if len(clientMacAddr) == 0:
newDeauth = falconWiFiRemoteAgent.deauthAccessPoint(apMacAddr, curInterface, channel, continuous)
else:
newDeauth = falconWiFiRemoteAgent.deauthAccessPointAndClient(apMacAddr, clientMacAddr, curInterface, channel, continuous)
if not continuous:
# There's nothing to check. Just return
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
if newDeauth:
# Deauth was started
try:
s.send_response(200)
#s.send_header("Content-type", "text/html")
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Something went wrong with the start
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = "An error occurred starting the deauth process."
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing json"
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/falcon/startcrack':
if not hasFalcon:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Extract necessary info for cracking
try:
crackType = jsondata['cracktype'] # This will be wep or wpapsk
curInterface = jsondata['interface']
channel = jsondata['channel']
ssid = jsondata['ssid']
apMacAddr=jsondata['apmacaddr']
hasClient = jsondata['hasclient']
# For now you can only run 1 crack globally due to tmp flie naming.
# At some point I'll scale it out
if crackType == 'wep':
if curInterface in falconWiFiRemoteAgent.WEPCrackList:
wepCrack = falconWiFiRemoteAgent.WEPCrackList[curInterface]
# Stop one if it was already running
wepCrack.stopCrack()
else:
wepCrack = WEPCrack()
falconWiFiRemoteAgent.WEPCrackList[curInterface] = wepCrack
wepCrack.cleanupTempFiles()
retVal, errMsg = wepCrack.startCrack(curInterface, channel, ssid, apMacAddr, hasClient)
else:
if curInterface in falconWiFiRemoteAgent.WPAPSKCrackList:
wpaPSKCrack = falconWiFiRemoteAgent.WPAPSKCrackList[curInterface]
# Stop one if it was already running
wpaPSKCrack.stopCrack()
else:
wpaPSKCrack = WPAPSKCrack()
falconWiFiRemoteAgent.WPAPSKCrackList[curInterface] = wpaPSKCrack
wpaPSKCrack.cleanupTempFiles()
retVal, errMsg = wpaPSKCrack.startCrack(curInterface, channel, ssid, apMacAddr, hasClient)
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
responsedict = {}
# For start, retVal is True/False
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing json"
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
try:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = 'Bad request.'
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
def isValidPostURL(s):
allowedfullurls = ['/system/config',
'/falcon/startcrack',
'/falcon/deauth',
'/falcon/stopdeauth',
'/system/deleterecordings']
allowedstarturls=[]
if s.path in allowedfullurls:
return True
else:
for curURL in allowedstarturls:
if s.path.startswith(curURL):
return True
return False
def isValidGetURL(s):
# Full urls
allowedfullurls = ['/wireless/interfaces',
'/wireless/moninterfaces',
'/falcon/getscanresults',
'/falcon/getalldeauths',
'/system/getrecordings',
'/bluetooth/present',
'/bluetooth/scanstart',
'/bluetooth/scanstop',
'/bluetooth/scanstatus',
'/bluetooth/running',
'/bluetooth/beaconstart',
'/bluetooth/beaconstop',
'/bluetooth/discoverystartp',
'/bluetooth/discoverystarta',
'/bluetooth/discoverystop',
'/bluetooth/discoveryclear',
'/bluetooth/discoverystatus',
'/spectrum/scanstart24',
'/spectrum/scanstart5',
'/spectrum/scanstop',
'/spectrum/scanstatus',
'/spectrum/hackrfstatus',
'/gps/status']
# partials that have more in the URL
allowedstarturls=['/wireless/networks/',
'/falcon/startmonmode/',
'/falcon/stopmonmode/',
'/falcon/scanrunning/',
'/falcon/startscan/',
'/falcon/stopscan/',
'/falcon/stopalldeauths',
'/falcon/crackstatuswpapsk',
'/falcon/crackstatuswep',
'/falcon/stopcrack',
'/system/config',
'/system/startrecord',
'/system/stoprecord',
'/system/getrecording']
if s.path in allowedfullurls:
return True
else:
for curURL in allowedstarturls:
if s.path.startswith(curURL):
return True
return False
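    # Illustrative client requests against the whitelisted GET endpoints above
    # (host, port and interface names are placeholders; the default port is 8020):
    #   curl http://agent-host:8020/wireless/interfaces
    #   curl http://agent-host:8020/gps/status
    #   curl "http://agent-host:8020/wireless/networks/wlan0?frequencies=2412,2437"
    #   curl http://agent-host:8020/system/getrecordings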
def sendFile(s, passedfilename):
# Directory traversal safety check
dirname, runfilename = os.path.split(os.path.abspath(__file__))
tmpdirname, filename = os.path.split(passedfilename)
recordingsDir = dirname + '/recordings'
fullPath = recordingsDir + '/' + filename
if not os.path.isfile(fullPath):
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'File not found.'
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
return
try:
f = open(fullPath, 'rb')
except:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 2
responsedict['errmsg'] = 'Unable to open file.'
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
return
fileExtension = filename.split(".")[-1]
if fileExtension in ['txt', 'csv', 'json', 'xml']:
contentType = 'text/plain'
elif fileExtension == 'html':
contentType = 'text/html'
else:
contentType = 'application/octet-stream'
s.send_response(200)
#s.send_header("Content-type", "text/html")
s.send_header("Content-type", contentType)
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
try:
s.wfile.write(f.read())
except:
pass
f.close()
return
def do_GET(s):
global gpsEngine
global useMavlink
global mavlinkGPSThread
global lockList
global allowedIPs
global runningcfg
global falconWiFiRemoteAgent
global hasBluetooth
global hasUbertooth
global bluetooth
global allowCors
# For RPi LED's, using it during each get request wasn't completely working. Short transactions like
# status and interface list were so quick the light would get "confused" and stay off. So
# the LED is only used for long calls like scan
if len(s.client_address) == 0:
# This should have the connecting client IP. If this isn't at least 1, something is wrong
return
try:
# If the pipe gets broken mid-stream it'll throw an exception
if len(allowedIPs) > 0:
if s.client_address[0] not in allowedIPs:
try:
s.send_response(403)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<html><body><p>Connections not authorized from your IP address</p>".encode("utf-8"))
s.wfile.write("</body></html>".encode("UTF-8"))
except:
pass
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
return
if not s.isValidGetURL():
try:
s.send_response(404)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<html><body><p>Bad Request</p>".encode("utf-8"))
s.wfile.write("</body></html>".encode("UTF-8"))
except:
pass
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
return
"""Respond to a GET request."""
if (not s.path.startswith('/system/getrecording/') and (not s.path == ('/bluetooth/scanstatus')) and
(not s.path == ('/spectrum/scanstatus'))):
# In getrecording we may adjust the content type header based on file extension
# Spectrum we'll gzip
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
except:
pass
# NOTE: In python 3, string is a bit different. Examples write strings directly for Python2,
# In python3 you have to convert it to UTF-8 bytes
# s.wfile.write("<html><head><title>Sparrow-wifi agent</title></head><body>".encode("utf-8"))
if s.path == '/wireless/interfaces':
wirelessInterfaces = WirelessEngine.getInterfaces()
jsondict={}
jsondict['interfaces']=wirelessInterfaces
jsonstr = json.dumps(jsondict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/wireless/networks/' in s.path:
# THIS IS THE NORMAL SCAN
inputstr = s.path.replace('/wireless/networks/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                        SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
if '?' in inputstr:
splitlist = inputstr.split('?')
curInterface = splitlist[0]
else:
curInterface = inputstr
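                # An optional channel hunt list can be appended as a query string, e.g. .../networks/wlan0?Frequencies=1,6,11 (wlan0 is just an example interface name).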
p = re.compile('.*Frequencies=(.*)', re.IGNORECASE)
try:
channelStr = p.search(inputstr).group(1)
except:
channelStr = ""
huntChannelList = []
if ',' in channelStr:
tmpList = channelStr.split(',')
else:
tmpList = []
if len(tmpList) > 0:
for curItem in tmpList:
try:
if len(curItem) > 0:
huntChannelList.append(int(curItem))
# Get results for the specified interface
# Need to iterate through the channels and aggregate the results
except:
pass
if useRPILeds:
# Green will heartbeat when servicing requests
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
sleep(0.1)
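                # One lock per interface: concurrent scan requests against the same interface are serialized.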
if curInterface not in lockList.keys():
lockList[curInterface] = Lock()
curLock = lockList[curInterface]
if (curLock):
curLock.acquire()
if useMavlink:
gpsCoord = GPSStatus()
gpsCoord.gpsInstalled = True
gpsCoord.gpsRunning = True
gpsCoord.isValid = mavlinkGPSThread.synchronized
gpsCoord.latitude = mavlinkGPSThread.latitude
gpsCoord.longitude = mavlinkGPSThread.longitude
gpsCoord.altitude = mavlinkGPSThread.altitude
gpsCoord.speed = mavlinkGPSThread.vehicle.getAirSpeed()
retCode, errString, jsonstr=WirelessEngine.getNetworksAsJson(fieldValue, gpsCoord, huntChannelList)
elif gpsEngine.gpsValid():
retCode, errString, jsonstr=WirelessEngine.getNetworksAsJson(fieldValue, gpsEngine.lastCoord, huntChannelList)
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
retCode, errString, jsonstr=WirelessEngine.getNetworksAsJson(fieldValue, None, huntChannelList)
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
if (curLock):
curLock.release()
s.wfile.write(jsonstr.encode("UTF-8"))
elif s.path == '/gps/status':
jsondict={}
if not useMavlink:
jsondict['gpsinstalled'] = str(GPSEngine.GPSDInstalled())
jsondict['gpsrunning'] = str(GPSEngine.GPSDRunning())
jsondict['gpssynch'] = str(gpsEngine.gpsValid())
if gpsEngine.gpsValid():
gpsPos = {}
gpsPos['latitude'] = gpsEngine.lastCoord.latitude
gpsPos['longitude'] = gpsEngine.lastCoord.longitude
gpsPos['altitude'] = gpsEngine.lastCoord.altitude
gpsPos['speed'] = gpsEngine.lastCoord.speed
jsondict['gpspos'] = gpsPos
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
else:
jsondict['gpsinstalled'] = 'True'
jsondict['gpsrunning'] = 'True'
jsondict['gpssynch'] = str(mavlinkGPSThread.synchronized)
gpsPos = {}
gpsPos['latitude'] = mavlinkGPSThread.latitude
gpsPos['longitude'] = mavlinkGPSThread.longitude
gpsPos['altitude'] = mavlinkGPSThread.altitude
gpsPos['speed'] = mavlinkGPSThread.vehicle.getAirSpeed()
jsondict['gpspos'] = gpsPos
jsonstr = json.dumps(jsondict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/wireless/moninterfaces':
wirelessInterfaces = WirelessEngine.getMonitoringModeInterfaces()
jsondict={}
jsondict['interfaces']=wirelessInterfaces
jsonstr = json.dumps(jsondict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/system/getrecordings':
filelist = getRecordingFiles()
responsedict = {}
responsedict['files'] = filelist
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/system/getrecording/'):
filename = s.path.replace('/system/getrecording/', '')
s.sendFile(filename)
elif s.path == '/bluetooth/present':
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
responsedict['hasbluetooth'] = hasBluetooth
if hasBluetooth:
                    responsedict['scanrunning'] = bluetooth.scanRunning()
else:
responsedict['scanrunning'] = False
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/bluetooth/beacon'):
if not hasBluetooth:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Bluetooth not supported on this agent'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
function=s.path.replace('/bluetooth/beacon', '')
function = function.replace('/', '')
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
if function=='start':
if bluetooth.discoveryRunning():
bluetooth.stopDiscovery()
retVal = bluetooth.startBeacon()
if not retVal:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unable to start beacon.'
elif function == 'stop':
bluetooth.stopBeacon()
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unknown command'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/bluetooth/scan'):
if not hasBluetooth:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Bluetooth not supported on this agent'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
function=s.path.replace('/bluetooth/scan', '')
function = function.replace('/', '')
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
if function=='start':
bluetooth.startScanning()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif function == 'stop':
bluetooth.stopScanning()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif function == 'status':
channelData = bluetooth.spectrumToChannels()
responsedict['channeldata'] = channelData
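                        # Spectrum status payloads can be large, so this response is gzip-compressed (note the Content-Encoding header below).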
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
s.send_header("Content-Encoding", "gzip")
s.end_headers()
except:
pass
jsonstr = json.dumps(responsedict)
gzipBytes = gzipCompress(jsonstr)
# s.wfile.write(jsonstr.encode("UTF-8"))
try:
s.wfile.write(gzipBytes)
except:
pass
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unknown command'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/bluetooth/discovery'):
if not hasBluetooth:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Bluetooth not supported on this agent'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
function=s.path.replace('/bluetooth/discovery', '')
function = function.replace('/', '')
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
if function=='startp':
# Promiscuous with ubertooth
if hasUbertooth:
bluetooth.startDiscovery(True)
else:
responsedict['errcode'] = 2
responsedict['errmsg'] = 'Ubertooth not supported on this agent'
elif function == 'starta':
# Normal with Bluetooth
bluetooth.startDiscovery(False)
elif function == 'stop':
bluetooth.stopDiscovery()
elif function == 'clear':
# Device list accumulates in the bluetooth class over time
# If you want a fresh list every time, you need to clear the old list.
bluetooth.clearDeviceList()
# Add in successful response
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif function == 'status':
# have to get the GPS:
gpsCoord = SparrowGPS()
if useMavlink:
gpsCoord.gpsInstalled = True
gpsCoord.gpsRunning = True
gpsCoord.isValid = mavlinkGPSThread.synchronized
gpsCoord.latitude = mavlinkGPSThread.latitude
gpsCoord.longitude = mavlinkGPSThread.longitude
gpsCoord.altitude = mavlinkGPSThread.altitude
gpsCoord.speed = mavlinkGPSThread.vehicle.getAirSpeed()
elif gpsEngine.gpsValid():
gpsCoord.copy(gpsEngine.lastCoord)
# errcode, devices = bluetooth.getDiscoveredDevices()
bluetooth.updateDeviceList()
bluetooth.deviceLock.acquire()
devdict = []
now = datetime.datetime.now()
for curKey in bluetooth.devices.keys():
curDevice = bluetooth.devices[curKey]
elapsedTime = now - curDevice.lastSeen
# This is a little bit of a hack for the BlueHydra side since it can take a while to see devices or have
# them show up in the db. For LE discovery scans this will always be pretty quick.
if elapsedTime.total_seconds() < 120:
curDevice.gps.copy(gpsCoord)
if curDevice.rssi >= curDevice.strongestRssi:
curDevice.strongestRssi = curDevice.rssi
curDevice.strongestgps.copy(gpsCoord)
entryDict = curDevice.toJsondict()
devdict.append(entryDict)
bluetooth.deviceLock.release()
responsedict['devices'] = devdict
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unknown command'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/bluetooth/running':
if not hasBluetooth:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Bluetooth not supported on this agent'
responsedict['hasbluetooth'] = hasBluetooth
responsedict['hasubertooth'] = hasUbertooth
responsedict['spectrumscanrunning'] = False
responsedict['discoveryscanrunning'] = False
responsedict['beaconrunning'] = False
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
responsedict['hasbluetooth'] = hasBluetooth
responsedict['hasubertooth'] = hasUbertooth
responsedict['spectrumscanrunning'] = bluetooth.scanRunning()
responsedict['discoveryscanrunning'] = bluetooth.discoveryRunning()
responsedict['beaconrunning'] = bluetooth.beaconRunning()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/spectrum/hackrfstatus':
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
responsedict['hashackrf'] = hackrf.hasHackrf
responsedict['scan24running'] = hackrf.scanRunning24()
responsedict['scan5running'] = hackrf.scanRunning5()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/spectrum/scan'):
if not hackrf.hasHackrf:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'HackRF is not supported on this agent'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
function=s.path.replace('/spectrum/scan', '')
function = function.replace('/', '')
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
if function=='start24':
hackrf.startScanning24()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
elif function == 'start5':
hackrf.startScanning5()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
elif function == 'stop':
hackrf.stopScanning()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
elif function == 'status':
if hackrf.scanRunning24():
channelData = hackrf.spectrum24ToChannels()
responsedict['scanrunning'] = hackrf.scanRunning24()
elif hackrf.scanRunning5():
channelData = hackrf.spectrum5ToChannels()
                            responsedict['scanrunning'] = hackrf.scanRunning5()
else:
channelData = {} # Shouldn't be here but just in case.
responsedict['scanrunning'] = False
responsedict['channeldata'] = channelData
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
s.send_header("Content-Encoding", "gzip")
s.end_headers()
jsonstr = json.dumps(responsedict)
gzipBytes = gzipCompress(jsonstr)
# s.wfile.write(jsonstr.encode("UTF-8"))
s.wfile.write(gzipBytes)
except:
pass
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unknown command'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/system/config':
cfgSettings = AgentConfigSettings()
cfgSettings.fromConfigFile('sparrowwifiagent.cfg')
responsedict = {}
responsedict['startup'] = cfgSettings.toJsondict()
if recordThread:
runningcfg.recordRunning = True
runningcfg.recordInterface = recordThread.interface
responsedict['running'] = runningcfg.toJsondict()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/system/startrecord'):
recordinterface = s.path.replace('/system/startrecord/', '')
# Check that the specified interface is valid:
interfaces = WirelessEngine.getInterfaces()
if recordinterface in interfaces:
startRecord(recordinterface)
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
jsonstr = json.dumps(responsedict)
else:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'The requested interface was not found on the system.'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/system/stoprecord':
stopRecord()
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/startmonmode' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/startmonmode/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
retVal, errMsg = falconWiFiRemoteAgent.startMonitoringInterface(fieldValue)
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/stopmonmode' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/stopmonmode/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
retVal, errMsg = falconWiFiRemoteAgent.stopMonitoringInterface(fieldValue)
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/scanrunning' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/scanrunning/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
scanrunning = falconWiFiRemoteAgent.isScanRunning(fieldValue)
if scanrunning:
retVal = 0
errMsg = "scan for " + fieldValue + " is running"
else:
retVal = 1
errMsg = "scan for " + fieldValue + " is not running"
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/startscan' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/startscan/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
scanProc = falconWiFiRemoteAgent.startCapture(fieldValue)
if scanProc is not None:
retVal = 0
errMsg = ""
else:
retVal = -1
errMsg = "Unable to start scanning process."
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/stopscan' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/stopscan/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
retVal = falconWiFiRemoteAgent.stopCapture(fieldValue)
if retVal == 0:
errMsg = ""
else:
errMsg = "Unable to stop scanning process."
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/stopcrack' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/stopcrack/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
curInterface = p.search(inputstr).group(1)
except:
curInterface = ""
if len(curInterface) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
try:
if curInterface in falconWiFiRemoteAgent.WEPCrackList:
falconWiFiRemoteAgent.WEPCrackList[curInterface].stopCrack()
falconWiFiRemoteAgent.WEPCrackList[curInterface].cleanupTempFiles()
del falconWiFiRemoteAgent.WEPCrackList[curInterface]
if curInterface in falconWiFiRemoteAgent.WPAPSKCrackList:
falconWiFiRemoteAgent.WPAPSKCrackList[curInterface].stopCrack()
falconWiFiRemoteAgent.WPAPSKCrackList[curInterface].cleanupTempFiles()
del falconWiFiRemoteAgent.WPAPSKCrackList[curInterface]
except:
pass
retVal = 0
errMsg = ""
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/crackstatus' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
if 'crackstatuswep' in s.path:
type='wep'
else:
type = 'wpapsk'
inputstr = s.path.replace('/falcon/crackstatus'+type+'/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
curInterface = p.search(inputstr).group(1)
except:
curInterface = ""
if len(curInterface) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + curInterface
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
responsedict = {}
retVal = -1
errMsg = "Unable to find running crack."
try:
if type == 'wep':
if curInterface in falconWiFiRemoteAgent.WEPCrackList:
wepCrack = falconWiFiRemoteAgent.WEPCrackList[curInterface]
retVal = 0
errMsg = ""
responsedict['isrunning'] = wepCrack.isRunning()
responsedict['ivcount'] = wepCrack.getIVCount()
responsedict['ssid'] = wepCrack.SSID
responsedict['crackedpasswords'] = wepCrack.getCrackedPasswords()
else:
if curInterface in falconWiFiRemoteAgent.WPAPSKCrackList:
wpaPSKCrack = falconWiFiRemoteAgent.WPAPSKCrackList[curInterface]
retVal = 0
errMsg = ""
responsedict['isrunning'] = wpaPSKCrack.isRunning()
hasHandshake = wpaPSKCrack.hasHandshake()
responsedict['hashandshake'] = hasHandshake
if hasHandshake:
# For WPAPSK, lets copy the capture file to our recording directory for recovery
dirname, filename = os.path.split(os.path.abspath(__file__))
fullpath, filename=wpaPSKCrack.copyCaptureFile(dirname + '/recordings')
responsedict['capturefile'] = filename
else:
responsedict['capturefile'] = ""
except:
pass
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/falcon/getscanresults':
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
if useMavlink:
gpsCoord = GPSStatus()
gpsCoord.gpsInstalled = True
gpsCoord.gpsRunning = True
gpsCoord.isValid = mavlinkGPSThread.synchronized
gpsCoord.latitude = mavlinkGPSThread.latitude
gpsCoord.longitude = mavlinkGPSThread.longitude
gpsCoord.altitude = mavlinkGPSThread.altitude
gpsCoord.speed = mavlinkGPSThread.vehicle.getAirSpeed()
retCode, errString, jsonstr=falconWiFiRemoteAgent.getNetworksAsJson(gpsCoord)
elif gpsEngine.gpsValid():
retCode, errString, jsonstr=falconWiFiRemoteAgent.getNetworksAsJson(gpsEngine.lastCoord)
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
retCode, errString, jsonstr=falconWiFiRemoteAgent.getNetworksAsJson(None)
if useRPILeds:
# This just signals that the GPS isn't synced
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/stopalldeauths' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/stopalldeauths/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
return
falconWiFiRemoteAgent.stopAllDeauths(fieldValue)
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/getalldeauths' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
responsedict = falconWiFiRemoteAgent.getAllDeauthsAsJsonDict()
# Add in successful response
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Catch-all. Should never be here
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
pass
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
# ----------------- Bluetooth check -----------------------------
def checkForBluetooth():
global hasBluetooth
global hasUbertooth
global bluetooth
numBtAdapters = len(SparrowBluetooth.getBluetoothInterfaces())
if numBtAdapters > 0:
hasBluetooth = True
if SparrowBluetooth.getNumUbertoothDevices() > 0:
#SparrowBluetooth.ubertoothStopSpecan()
errcode, errmsg = SparrowBluetooth.hasUbertoothTools()
# errcode, errmsg = SparrowBluetooth.ubertoothOnline()
if errcode == 0:
hasUbertooth = True
bluetooth = SparrowBluetooth()
if hasBluetooth:
print("Found bluetooth hardware. Bluetooth capabilities enabled.")
else:
print("Bluetooth hardware not found. Bluetooth capabilities disabled.")
if hasUbertooth:
print("Found ubertooth hardware and software. Ubertooth capabilities enabled.")
else:
print("Ubertooth hardware and/or software not found. Ubertooth capabilities disabled.")
# ----------------- Main -----------------------------
if __name__ == '__main__':
argparser = argparse.ArgumentParser(description='Sparrow-wifi agent')
argparser.add_argument('--port', help='Port for HTTP server to listen on. Default is 8020.', default=8020, required=False)
argparser.add_argument('--allowedips', help="IP addresses allowed to connect to this agent. Default is any. This can be a comma-separated list for multiple IP addresses", default='', required=False)
argparser.add_argument('--staticcoord', help="Use user-defined lat,long,altitude(m) rather than GPS. Ex: 40.1,-75.3,150", default='', required=False)
argparser.add_argument('--mavlinkgps', help="Use Mavlink (drone) for GPS. Options are: '3dr' for a Solo, 'sitl' for local simulator, or full connection string ('udp/tcp:<ip>:<port>' such as: 'udp:10.1.1.10:14550')", default='', required=False)
argparser.add_argument('--sendannounce', help="Send a UDP broadcast packet on the specified port to announce presence", action='store_true', default=False, required=False)
argparser.add_argument('--userpileds', help="Use RPi LEDs to signal state. Red=GPS [off=None,blinking=Unsynchronized,solid=synchronized], Green=Agent Running [On=Running, blinking=servicing HTTP request]", action='store_true', default=False, required=False)
argparser.add_argument('--recordinterface', help="Automatically start recording locally with the given wireless interface (headless mode) in a recordings directory", default='', required=False)
argparser.add_argument('--ignorecfg', help="Don't load any config files (useful for overriding and/or testing)", action='store_true', default=False, required=False)
argparser.add_argument('--cfgfile', help="Use the specified config file rather than the default sparrowwifiagent.cfg file", default='', required=False)
argparser.add_argument('--allowcors', help="Allow Cross Domain Resource Sharing", action='store_true', default=False, required=False)
argparser.add_argument('--delaystart', help="Wait <delaystart> seconds before initializing", default=0, required=False)
argparser.add_argument('--debughttp', help="Print each URL request", action='store_true', default=False, required=False)
args = argparser.parse_args()
if len(args.staticcoord) > 0:
coord_array = args.staticcoord.split(",")
if len(coord_array) < 3:
print("ERROR: Provided static coordinates are not in the format latitude,longitude,altitude.")
exit(1)
usingStaticGPS = True
gpsEngine = GPSEngineStatic(float(coord_array[0]), float(coord_array[1]), float(coord_array[2]))
else:
usingStaticGPS = False
gpsEngine = GPSEngine()
debugHTTP = args.debughttp
if os.geteuid() != 0:
print("ERROR: You need to have root privileges to run this script. Please try again, this time using 'sudo'. Exiting.\n")
exit(2)
# Code to add paths
dirname, filename = os.path.split(os.path.abspath(__file__))
if dirname not in sys.path:
sys.path.insert(0, dirname)
# Check for Falcon offensive plugin
pluginsdir = dirname+'/plugins'
if os.path.exists(pluginsdir):
if pluginsdir not in sys.path:
sys.path.insert(0,pluginsdir)
if os.path.isfile(pluginsdir + '/falconwifi.py'):
from falconwifi import FalconWiFiRemoteAgent, WPAPSKCrack, WEPCrack
hasFalcon = True
falconWiFiRemoteAgent = FalconWiFiRemoteAgent()
if not falconWiFiRemoteAgent.toolsInstalled():
print("ERROR: aircrack suite of tools does not appear to be installed. Please install it.")
exit(4)
checkForBluetooth()
# See if we have a config file:
dirname, filename = os.path.split(os.path.abspath(__file__))
settings = {}
runningcfg=AgentConfigSettings()
if len(args.cfgfile) == 0:
cfgFile = dirname + '/sparrowwifiagent.cfg'
else:
cfgFile = args.cfgfile
# Since it's user-specified, let's see if it exists.
if not os.path.isfile(cfgFile):
print("ERROR: Unable to find the specified config file.")
exit(3)
if os.path.isfile(cfgFile) and (not args.ignorecfg):
cfgParser = configparser.ConfigParser()
try:
cfgParser.read(cfgFile)
section="agent"
options = cfgParser.options(section)
for option in options:
try:
if (option == 'sendannounce' or option == 'userpileds' or
option == 'cancelstart' or option == 'allowcors'):
settings[option] = stringtobool(cfgParser.get(section, option))
else:
settings[option] = cfgParser.get(section, option)
except:
print("exception on %s!" % option)
settings[option] = None
except:
print("ERROR: Unable to read config file: ", cfgFile)
exit(1)
# Set up parameters
if 'cancelstart' in settings.keys():
if settings['cancelstart']:
exit(0)
delayStart = int(args.delaystart)
if delayStart > 0:
sleep(delayStart)
runningcfg.cancelStart = False
if 'port' not in settings.keys():
port = args.port
else:
port = int(settings['port'])
runningcfg.port = port
if 'sendannounce' not in settings.keys():
sendannounce = args.sendannounce
else:
sendannounce = settings['sendannounce']
runningcfg.announce = sendannounce
if 'userpileds' not in settings.keys():
useRPILeds = args.userpileds
else:
useRPILeds = settings['userpileds']
runningcfg.useRPiLEDs = useRPILeds
if 'allowedips' not in settings.keys():
allowedIPstr = args.allowedips
else:
allowedIPstr = settings['allowedips']
runningcfg.ipAllowedList = allowedIPstr
if 'mavlinkgps' not in settings.keys():
mavlinksetting = args.mavlinkgps
else:
mavlinksetting = settings['mavlinkgps']
runningcfg.mavlinkGPS = mavlinksetting
if 'recordinterface' not in settings.keys():
recordinterface = args.recordinterface
else:
recordinterface = settings['recordinterface']
runningcfg.recordInterface = recordinterface
if 'allowcors' not in settings.keys():
allowCors = args.allowcors
else:
allowCors = settings['allowcors']
runningcfg.allowCors = allowCors
print("Allow CORS: " + str(runningcfg.allowCors))
# Now start logic
if runningcfg.useRPiLEDs:
    # One extra check that the LEDs are really present
runningcfg.useRPiLEDs = SparrowRPi.hasLights()
if not runningcfg.useRPiLEDs:
# we changed state. Print warning
print('WARNING: RPi LEDs were requested but were not found on this platform.')
# Now check again:
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_OFF)
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
buildAllowedIPs(allowedIPstr)
if len(runningcfg.mavlinkGPS) > 0 and hasDroneKit:
vehicle = SparrowDroneMavlink()
print('Connecting to ' + runningcfg.mavlinkGPS)
connected = False
synchronized = False
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_OFF)
# If we're in drone gps mode, wait for the drone to be up and gps synchronized before starting.
while (not connected) or (not synchronized):
if not connected:
if runningcfg.mavlinkGPS == '3dr':
retVal = vehicle.connectToSolo()
elif (runningcfg.mavlinkGPS == 'sitl'):
retVal = vehicle.connectToSimulator()
else:
retVal = vehicle.connect(runningcfg.mavlinkGPS)
connected = retVal
if connected:
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
print('Mavlink connected.')
print('Current GPS Info:')
# get synchronized flag and position
synchronized, latitude, longitude, altitude = vehicle.getGlobalGPS()
print('Synchronized: ' + str(synchronized))
print('Latitude: ' + str(latitude))
print('Longitude: ' + str(longitude))
print('Altitude (m): ' + str(altitude))
print('Heading: ' + str(vehicle.getHeading()))
if synchronized:
useMavlink = True
mavlinkGPSThread = MavlinkGPSThread(vehicle)
mavlinkGPSThread.start()
print('Mavlink GPS synchronized. Continuing.')
else:
print('Mavlink GPS not synchronized yet. Waiting...')
sleep(2)
else:
print("ERROR: Unable to connect to " + mavlinksetting + '. Retrying...')
sleep(2)
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
# No mavlink specified. Check the local GPS.
if GPSEngine.GPSDRunning():
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
gpsEngine.start()
if usingStaticGPS:
print('[' +curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Using static lat/long/altitude(m): " + args.staticcoord)
else:
print('[' +curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Local gpsd Found. Providing GPS coordinates when synchronized.")
if useRPILeds:
sleep(1)
if gpsEngine.gpsValid():
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
print('[' +curTime.strftime("%m/%d/%Y %H:%M:%S") + "] No local gpsd running. No GPS data will be provided.")
if runningcfg.announce:
startAnnounceThread()
if len(runningcfg.recordInterface) > 0:
startRecord(runningcfg.recordInterface)
# -------------- Run HTTP Server / Main Loop--------------
server = SparrowWiFiAgent()
server.run(runningcfg.port)
# -------------- This is the shutdown process --------------
if mavlinkGPSThread:
mavlinkGPSThread.signalStop = True
print('Waiting for mavlink GPS thread to terminate...')
while (mavlinkGPSThread.threadRunning):
sleep(0.2)
stopRecord()
if hasDroneKit and useMavlink and vehicle:
vehicle.close()
stopAnnounceThread()
if runningcfg.useRPiLEDs:
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
#for curKey in lockList.keys():
# curLock = lockList[curKey]
# try:
# curLock.release()
# except:
# pass
# os._exit(0)
exit(0)
| gpl-3.0 | 6,750,538,930,191,065,000 | 39.385724 | 300 | 0.488999 | false | 4.707833 | false | false | false |
roxxup/PartialTuring | WebcamSound.py | 1 | 3038 | import threading
from threading import Thread
import cv2
import sys
#import wikipedia
#from chatterbot import ChatBot
import shlex, subprocess
import speech_recognition as sr
import pyvona
from googlesearch import GoogleSearch
import xml.etree.ElementTree as ET
import requests
cascPath = sys.argv[1]
def wikileaks(string):
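    # Note: the wikipedia import above is commented out, so this helper will fail with a NameError unless it is re-enabled.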
string=wikipedia.summary(string,sentences=1)
chatvoice(string)
def speak():
# obtain audio from the microphone
r = sr.Recognizer()
with sr.Microphone() as source:
r.adjust_for_ambient_noise(source) # listen for 1 second to calibrate the energy threshold for ambient noise levels
print("Say something!")
audio = r.listen(source)
# recognize speech using Google Speech Recognition
try:
# for testing purposes, we're just using the default API key
# to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY")`
# instead of `r.recognize_google(audio)`
string = r.recognize_google(audio)
print "you said "+string
return string
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
def Google1(string):
gs = GoogleSearch(string)
for hit in gs.top_results():
#send(hit[u'content'])
chatvoice(hit[u'content'])
break
def chatvoice(string):
v = pyvona.create_voice('username','password')
#v.region('en-IN')
#print v.list_voices()
v.speak(string)
#v.speak(a)
def intelbot(string):
payload = {'input':string,'botid':'9fa364f2fe345a10'}
r = requests.get("http://fiddle.pandorabots.com/pandora/talk-xml", params=payload)
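    # The talk-xml endpoint returns XML; the bot's reply text is carried in the <that> element.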
for child in ET.fromstring(r.text):
if child.tag == "that":
chatvoice(child.text)
def Camera():
faceCascade = cv2.CascadeClassifier(cascPath)
video_capture = cv2.VideoCapture(1)
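    # Camera index 1 is typically an external webcam; index 0 is usually the built-in/default device.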
while True:
# Capture frame-by-frame
ret, frame = video_capture.read()
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(
gray,
scaleFactor=1.1,
minNeighbors=5,
minSize=(30, 30),
#flags=cv2.cv.CV_HAAR_SCALE_IMAGE
flags = 0
)
# Draw a rectangle around the faces
for (x, y, w, h) in faces:
cv2.rectangle(gray, (x, y), (x+w, y+h), (0, 255, 0), 2)
# Display the resulting frame
cv2.imshow('Video', gray)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# When everything is done, release the capture
video_capture.release()
cv2.destroyAllWindows()
def Sound():
while True:
takeString = speak()
intelbot(takeString)
if __name__ == '__main__':
Thread(target = Camera).start()
Thread(target = Sound).start()
| gpl-3.0 | 5,042,716,747,227,126,000 | 26.369369 | 123 | 0.628045 | false | 3.544924 | false | false | false |
yohanyee/simple-neural-net | classes/example_pipelines.py | 1 | 1478 | import numpy as np
from data import *
from construct import *
from train import *
from hippocampi_to_patches import *
class DigitsPipeline(object):
def __init__(self):
self.D = Data()
self.D.load_digits_data()
self.D.reshape([16,16],[1])
self.N = FeedForwardNetwork()
self.N.auto_add_layer_fullyconnected(NeuronLayer(self.D.input_shape, neuron_activation='identity'))
self.N.auto_add_layer_fullyconnected(NeuronLayer(10, neuron_activation='logistic', neuron_bias=True))
self.N.auto_add_layer_fullyconnected(NeuronLayer(self.D.output_shape, neuron_activation='logistic', neuron_bias=True))
self.Trainer = BackpropagationTrainer(self.D, self.N)
class DigitsConvolutionPipeline(object):
def __init__(self):
self.D = Data()
self.D.load_digits_data()
self.D.reshape([16,16],[1])
self.N = FeedForwardNetwork()
self.N.auto_add_layer_fullyconnected(NeuronLayer(self.D.input_shape, neuron_activation='identity'))
self.N.auto_add_layer_convolution(NeuronLayer([4,8,8], neuron_activation='logistic', neuron_bias=True))
self.N.auto_add_layer_fullyconnected(NeuronLayer(10, neuron_activation='logistic', neuron_bias=True))
self.N.auto_add_layer_fullyconnected(NeuronLayer(self.D.output_shape, neuron_activation='logistic', neuron_bias=True))
self.Trainer = BackpropagationTrainer(self.D, self.N)
| mit | -7,236,642,412,433,993,000 | 42.470588 | 126 | 0.673207 | false | 3.485849 | false | false | false |
anurag03/integration_tests | cfme/tests/services/test_provision_stack.py | 1 | 9381 | import fauxfactory
import pytest
from widgetastic_patternfly import DropdownItemDisabled
from cfme import test_requirements
from cfme.cloud.provider import CloudProvider
from cfme.cloud.provider.azure import AzureProvider
from cfme.cloud.provider.ec2 import EC2Provider
from cfme.cloud.provider.openstack import OpenStackProvider
from cfme.services.myservice import MyService
from cfme.services.service_catalogs import ServiceCatalogs
from cfme.utils.blockers import BZ
from cfme.utils.conf import credentials
from cfme.utils.datafile import load_data_file
from cfme.utils.path import orchestration_path
pytestmark = [
pytest.mark.meta(server_roles='+automate'),
pytest.mark.ignore_stream('upstream'),
test_requirements.stack,
pytest.mark.tier(2),
pytest.mark.usefixtures("setup_provider_modscope"),
pytest.mark.provider([CloudProvider],
required_fields=[['provisioning', 'stack_provisioning']],
scope='module'),
]
@pytest.fixture
def stack_data(appliance, provider, provisioning):
random_base = fauxfactory.gen_alphanumeric()
stackname = 'test{}'.format(random_base)
vm_name = 'test-{}'.format(random_base)
stack_timeout = '20'
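    # Stack parameters differ per provider: Azure needs full VM settings, OpenStack just a key pair and flavor, and the remaining (EC2/CloudFormation) providers use the template parameters below.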
if provider.one_of(AzureProvider):
try:
template = provider.data.templates.small_template
vm_user = credentials[template.creds].username
vm_password = credentials[template.creds].password
except AttributeError:
pytest.skip('Could not find small_template or credentials for {}'.format(provider.name))
stack_data = {
'stack_name': stackname,
'resource_group': provisioning.get('resource_group'),
'deploy_mode': provisioning.get('mode'),
'location': provisioning.get('region_api'),
'vmname': vm_name,
'vmuser': vm_user,
'vmpassword': vm_password,
'vmsize': provisioning.get('vm_size'),
'cloudnetwork': provisioning.get('cloud_network').split()[0],
'cloudsubnet': provisioning.get('cloud_subnet').split()[0]
}
elif provider.one_of(OpenStackProvider):
stack_prov = provisioning['stack_provisioning']
stack_data = {
'stack_name': stackname,
'key': stack_prov['key_name'],
'flavor': stack_prov['instance_type'],
}
else:
stack_prov = provisioning['stack_provisioning']
if appliance.version < '5.9':
stack_data = {
'stack_name': stackname,
'stack_timeout': stack_timeout,
'virtualMachineName': vm_name,
'KeyName': stack_prov['key_name'],
'InstanceType': stack_prov['instance_type'],
'SSHLocation': provisioning['ssh_location']
}
else:
stack_data = {
'stack_name': stackname,
'stack_timeout': stack_timeout,
'param_virtualMachineName': vm_name,
'param_KeyName': stack_prov['key_name']
}
return stack_data
@pytest.fixture
def dialog_name():
return 'dialog_{}'.format(fauxfactory.gen_alphanumeric())
@pytest.fixture
def template(appliance, provider, provisioning, dialog_name, stack):
template_group = provisioning['stack_provisioning']['template_type']
template_type = provisioning['stack_provisioning']['template_type_dd']
template_name = fauxfactory.gen_alphanumeric()
file = provisioning['stack_provisioning']['data_file']
data_file = load_data_file(str(orchestration_path.join(file)))
content = data_file.read().replace('CFMETemplateName', template_name)
collection = appliance.collections.orchestration_templates
template = collection.create(template_group=template_group, template_name=template_name,
template_type=template_type, description="my template",
content=content)
template.create_service_dialog_from_template(dialog_name)
yield template
if stack.exists:
stack.retire_stack()
if template.exists:
template.delete()
@pytest.fixture
def catalog(appliance):
cat_name = "cat_{}".format(fauxfactory.gen_alphanumeric())
catalog = appliance.collections.catalogs.create(name=cat_name, description="my catalog")
yield catalog
if catalog.exists:
catalog.delete()
@pytest.fixture
def catalog_item(appliance, dialog, catalog, template, provider, dialog_name):
item_name = fauxfactory.gen_alphanumeric()
catalog_item = appliance.collections.catalog_items.create(
appliance.collections.catalog_items.ORCHESTRATION,
name=item_name,
description="my catalog",
display_in=True,
catalog=catalog,
dialog=dialog_name,
orch_template=template,
provider_name=provider.name,
)
yield catalog_item
if catalog_item.exists:
catalog_item.delete()
@pytest.fixture
def service_catalogs(appliance, catalog_item, stack_data):
return ServiceCatalogs(appliance, catalog_item.catalog, catalog_item.name, stack_data)
@pytest.fixture
def stack(appliance, provider, stack_data):
return appliance.collections.cloud_stacks.instantiate(stack_data['stack_name'],
provider=provider)
@pytest.fixture
def order_stack(appliance, request, service_catalogs, stack):
"""Fixture which prepares provisioned stack"""
provision_request = service_catalogs.order()
provision_request.wait_for_request(method='ui')
request.addfinalizer(lambda: _cleanup(appliance, provision_request))
assert provision_request.is_succeeded()
stack.wait_for_exists()
return provision_request, stack
def _cleanup(appliance=None, provision_request=None, service=None):
if not service:
last_message = provision_request.get_request_row_from_ui()['Last Message'].text
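        # The service name arrives embedded in the request's last message as a bracketed token (third word), so parse it back out.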
service_name = last_message.split()[2].strip('[]')
myservice = MyService(appliance, service_name)
else:
myservice = service
if myservice.exists:
myservice.delete()
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_provision_stack(order_stack):
"""Tests stack provisioning
Metadata:
test_flag: provision
"""
provision_request, stack = order_stack
assert provision_request.is_succeeded()
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_reconfigure_service(appliance, service_catalogs, request):
"""Tests service reconfiguring
Metadata:
test_flag: provision
"""
provision_request = service_catalogs.order()
provision_request.wait_for_request(method='ui')
last_message = provision_request.get_request_row_from_ui()['Last Message'].text
service_name = last_message.split()[2].strip('[]')
myservice = MyService(appliance, service_name)
request.addfinalizer(lambda: _cleanup(service=myservice))
assert provision_request.is_succeeded()
myservice.reconfigure_service()
@pytest.mark.uncollectif(lambda provider: provider.one_of(EC2Provider),
reason='EC2 locks template between Stack order and template removal')
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_remove_non_read_only_orch_template(appliance, provider, template, service_catalogs,
request):
"""
Steps:
1. Order Service which uses Orchestration template
2. Try to remove this Orchestration template
Metadata:
test_flag: provision
"""
provision_request = service_catalogs.order()
request.addfinalizer(lambda: _cleanup(appliance, provision_request))
template.delete()
assert (provision_request.rest.message == 'Service_Template_Provisioning failed' or
provision_request.status == 'Error')
assert not template.exists
@pytest.mark.uncollectif(lambda provider: not provider.one_of(EC2Provider),
reason='Only EC2 locks orchestration template')
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_remove_read_only_orch_template_neg(appliance, provider, template, service_catalogs,
request):
"""
For RHOS/Azure the original template will remain stand-alone while the stack links
to a new template read from the RHOS/Azure provider. Hence we can delete used orchestration
template for RHOS/Azure.
Steps:
1. Order Service which uses Orchestration template
2. Try to remove this Orchestration template
Metadata:
test_flag: provision
"""
provision_request = service_catalogs.order()
request.addfinalizer(lambda: _cleanup(appliance, provision_request))
provision_request.wait_for_request(method='ui')
with pytest.raises(DropdownItemDisabled):
template.delete()
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_retire_stack(order_stack):
"""Tests stack retirement.
Steps:
1. Retire Orchestration stack
2. Verify it doesn't exist in UI
Metadata:
test_flag: provision
"""
_, stack = order_stack
stack.retire_stack()
assert not stack.exists, "Stack still visible in UI"
| gpl-2.0 | -7,078,827,523,268,368,000 | 35.933071 | 100 | 0.663895 | false | 4.057526 | true | false | false |
Telestream/telestream-cloud-python-sdk | telestream_cloud_flip_sdk/telestream_cloud_flip/models/extra_file.py | 1 | 4957 | # coding: utf-8
"""
Flip API
Flip # noqa: E501
The version of the OpenAPI document: 3.1
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from telestream_cloud_flip.configuration import Configuration
class ExtraFile(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'tag': 'str',
'file_size': 'int',
'file_name': 'str'
}
attribute_map = {
'tag': 'tag',
'file_size': 'file_size',
'file_name': 'file_name'
}
def __init__(self, tag=None, file_size=None, file_name=None, local_vars_configuration=None): # noqa: E501
"""ExtraFile - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._tag = None
self._file_size = None
self._file_name = None
self.discriminator = None
self.tag = tag
self.file_size = file_size
self.file_name = file_name
@property
def tag(self):
"""Gets the tag of this ExtraFile. # noqa: E501
:return: The tag of this ExtraFile. # noqa: E501
:rtype: str
"""
return self._tag
@tag.setter
def tag(self, tag):
"""Sets the tag of this ExtraFile.
:param tag: The tag of this ExtraFile. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and tag is None: # noqa: E501
raise ValueError("Invalid value for `tag`, must not be `None`") # noqa: E501
self._tag = tag
@property
def file_size(self):
"""Gets the file_size of this ExtraFile. # noqa: E501
:return: The file_size of this ExtraFile. # noqa: E501
:rtype: int
"""
return self._file_size
@file_size.setter
def file_size(self, file_size):
"""Sets the file_size of this ExtraFile.
:param file_size: The file_size of this ExtraFile. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and file_size is None: # noqa: E501
raise ValueError("Invalid value for `file_size`, must not be `None`") # noqa: E501
self._file_size = file_size
@property
def file_name(self):
"""Gets the file_name of this ExtraFile. # noqa: E501
:return: The file_name of this ExtraFile. # noqa: E501
:rtype: str
"""
return self._file_name
@file_name.setter
def file_name(self, file_name):
"""Sets the file_name of this ExtraFile.
:param file_name: The file_name of this ExtraFile. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and file_name is None: # noqa: E501
raise ValueError("Invalid value for `file_name`, must not be `None`") # noqa: E501
self._file_name = file_name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ExtraFile):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ExtraFile):
return True
return self.to_dict() != other.to_dict()
| mit | 4,721,568,431,818,250,000 | 27.164773 | 110 | 0.556788 | false | 3.934127 | true | false | false |
pombreda/https-gitorious.org-appstream-software-center | softwarecenter/ui/gtk3/widgets/buttons.py | 1 | 21538 | # Copyright (C) 2011 Canonical
#
# Authors:
# Matthew McGowan
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import cairo
from gi.repository import Gtk, Gdk, Pango, GObject, GdkPixbuf
from gettext import gettext as _
from softwarecenter.backend import get_install_backend
from softwarecenter.db.application import AppDetails
from softwarecenter.enums import Icons
from softwarecenter.ui.gtk3.em import StockEms, em
from softwarecenter.ui.gtk3.drawing import darken
from softwarecenter.ui.gtk3.widgets.stars import Star, StarSize
_HAND = Gdk.Cursor.new(Gdk.CursorType.HAND2)
def _update_icon(image, icon, icon_size):
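    # Accepts a GdkPixbuf, a Gtk.Image or an icon-name string; anything else raises the TypeError below.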
if isinstance(icon, GdkPixbuf.Pixbuf):
image = image.set_from_pixbuf(icon)
elif isinstance(icon, Gtk.Image):
image = image.set_from_pixbuf(icon.get_pixbuf())
elif isinstance(icon, str):
image = image.set_from_icon_name(icon, icon_size)
else:
msg = "Acceptable icon values: None, GdkPixbuf, GtkImage or str"
raise TypeError(msg)
return image
class _Tile(object):
MIN_WIDTH = em(7)
def __init__(self):
self.set_focus_on_click(False)
self.set_relief(Gtk.ReliefStyle.NONE)
self.box = Gtk.Box.new(Gtk.Orientation.VERTICAL, 0)
self.box.set_size_request(self.MIN_WIDTH, -1)
self.add(self.box)
def build_default(self, label, icon, icon_size):
if icon is not None:
if isinstance(icon, Gtk.Image):
self.image = icon
else:
self.image = Gtk.Image()
_update_icon(self.image, icon, icon_size)
self.box.pack_start(self.image, True, True, 0)
self.label = Gtk.Label.new(label)
self.box.pack_start(self.label, True, True, 0)
class TileButton(Gtk.Button, _Tile):
def __init__(self):
Gtk.Button.__init__(self)
_Tile.__init__(self)
class TileToggleButton(Gtk.RadioButton, _Tile):
def __init__(self):
Gtk.RadioButton.__init__(self)
self.set_mode(False)
_Tile.__init__(self)
class LabelTile(TileButton):
MIN_WIDTH = -1
def __init__(self, label, icon, icon_size=Gtk.IconSize.MENU):
TileButton.__init__(self)
self.build_default(label, icon, icon_size)
self.label.set_line_wrap(True)
context = self.label.get_style_context()
context.add_class("label-tile")
self.connect("enter-notify-event", self.on_enter)
self.connect("leave-notify-event", self.on_leave)
def do_draw(self, cr):
cr.save()
A = self.get_allocation()
if self.has_focus():
Gtk.render_focus(self.get_style_context(),
cr,
3, 3,
A.width - 6, A.height - 6)
for child in self:
self.propagate_draw(child, cr)
cr.restore()
def on_enter(self, widget, event):
window = self.get_window()
window.set_cursor(_HAND)
def on_leave(self, widget, event):
window = self.get_window()
window.set_cursor(None)
class CategoryTile(TileButton):
def __init__(self, label, icon, icon_size=Gtk.IconSize.DIALOG):
TileButton.__init__(self)
self.set_size_request(em(8), -1)
self.build_default(label, icon, icon_size)
self.label.set_justify(Gtk.Justification.CENTER)
self.label.set_alignment(0.5, 0.0)
self.label.set_line_wrap(True)
self.box.set_border_width(StockEms.SMALL)
context = self.label.get_style_context()
context.add_class("category-tile")
self.connect("enter-notify-event", self.on_enter)
self.connect("leave-notify-event", self.on_leave)
def do_draw(self, cr):
cr.save()
A = self.get_allocation()
if self.has_focus():
Gtk.render_focus(self.get_style_context(),
cr,
3, 3,
A.width - 6, A.height - 6)
for child in self:
self.propagate_draw(child, cr)
cr.restore()
def on_enter(self, widget, event):
window = self.get_window()
window.set_cursor(_HAND)
def on_leave(self, widget, event):
window = self.get_window()
window.set_cursor(None)
_global_featured_tile_width = em(11)
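# (_global_featured_tile_width is module-level so every FeaturedTile shares one width; it grows to fit the widest tile created.)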
class FeaturedTile(TileButton):
INSTALLED_OVERLAY_SIZE = 22
_MARKUP = '<b><small>%s</small></b>'
def __init__(self, helper, doc, icon_size=48):
TileButton.__init__(self)
self._pressed = False
label = helper.get_appname(doc)
icon = helper.get_icon_at_size(doc, icon_size, icon_size)
stats = helper.get_review_stats(doc)
helper.update_availability(doc)
helper.connect("needs-refresh", self._on_needs_refresh, doc, icon_size)
self.is_installed = helper.is_installed(doc)
self._overlay = helper.icons.load_icon(Icons.INSTALLED_OVERLAY,
self.INSTALLED_OVERLAY_SIZE,
0) # flags
self.box.set_orientation(Gtk.Orientation.HORIZONTAL)
self.box.set_spacing(StockEms.SMALL)
self.content_left = Gtk.Box.new(Gtk.Orientation.VERTICAL,
StockEms.MEDIUM)
self.content_right = Gtk.Box.new(Gtk.Orientation.VERTICAL, 1)
self.box.pack_start(self.content_left, False, False, 0)
self.box.pack_start(self.content_right, False, False, 0)
self.image = Gtk.Image()
_update_icon(self.image, icon, icon_size)
self.content_left.pack_start(self.image, False, False, 0)
self.title = Gtk.Label.new(self._MARKUP %
GObject.markup_escape_text(label))
self.title.set_alignment(0.0, 0.5)
self.title.set_use_markup(True)
self.title.set_ellipsize(Pango.EllipsizeMode.END)
self.content_right.pack_start(self.title, False, False, 0)
categories = helper.get_categories(doc)
if categories is not None:
self.category = Gtk.Label.new('<span font_desc="%i">%s</span>' %
(em(0.6), GObject.markup_escape_text(categories)))
self.category.set_use_markup(True)
self.category.set_alignment(0.0, 0.5)
self.category.set_ellipsize(Pango.EllipsizeMode.END)
self.content_right.pack_start(self.category, False, False, 4)
stats_a11y = None
if stats is not None:
self.stars = Star(size=StarSize.SMALL)
self.stars.render_outline = True
self.stars.set_rating(stats.ratings_average)
self.rating_box = Gtk.Box.new(Gtk.Orientation.HORIZONTAL,
StockEms.SMALL)
self.rating_box.pack_start(self.stars, False, False, 0)
self.n_ratings = Gtk.Label.new(
'<span font_desc="%i"> (%i)</span>' % (
em(0.45), stats.ratings_total))
self.n_ratings.set_use_markup(True)
self.n_ratings.set_name("subtle-label")
self.n_ratings.set_alignment(0.0, 0.5)
self.rating_box.pack_start(self.n_ratings, False, False, 0)
self.content_right.pack_start(self.rating_box, False, False, 0)
# TRANSLATORS: this is an accessibility description for eg orca and
# is not visible in the ui
stats_a11y = _('%(stars)d stars - %(reviews)d reviews') % {
'stars': stats.ratings_average, 'reviews': stats.ratings_total}
# work out width tile needs to be to ensure ratings text is all
# visible
req_width = (self.stars.size_request().width +
self.image.size_request().width +
self.n_ratings.size_request().width +
StockEms.MEDIUM * 3
)
global _global_featured_tile_width
_global_featured_tile_width = max(_global_featured_tile_width,
req_width)
details = AppDetails(db=helper.db, doc=doc)
# TRANSLATORS: Free here means Gratis
price = details.price or _("Free")
if price == '0.00':
# TRANSLATORS: Free here means Gratis
price = _("Free")
# TRANSLATORS: Free here means Gratis
if price != _("Free"):
price = 'US$ ' + price
self.price = Gtk.Label.new(
'<span font_desc="%i">%s</span>' % (em(0.6), price))
self.price.set_use_markup(True)
self.price.set_name("subtle-label")
self.price.set_alignment(0.0, 0.5)
self.content_right.pack_start(self.price, False, False, 0)
self.set_name("featured-tile")
a11y_name = '. '.join([t
for t in [label, categories, stats_a11y, price] if t])
self.get_accessible().set_name(a11y_name)
backend = get_install_backend()
backend.connect("transaction-finished",
self.on_transaction_finished,
helper, doc)
self.connect("enter-notify-event", self.on_enter)
self.connect("leave-notify-event", self.on_leave)
self.connect("button-press-event", self.on_press)
self.connect("button-release-event", self.on_release)
def _on_needs_refresh(self, helper, pkgname, doc, icon_size):
icon = helper.get_icon_at_size(doc, icon_size, icon_size)
_update_icon(self.image, icon, icon_size)
def do_get_preferred_width(self):
w = _global_featured_tile_width
return w, w
def do_draw(self, cr):
cr.save()
A = self.get_allocation()
if self._pressed:
cr.translate(1, 1)
if self.has_focus():
Gtk.render_focus(self.get_style_context(),
cr,
3, 3,
A.width - 6, A.height - 6)
for child in self:
self.propagate_draw(child, cr)
if self.is_installed:
# paint installed tick overlay
if self.get_direction() != Gtk.TextDirection.RTL:
x = y = 36
else:
x = A.width - 56
y = 36
Gdk.cairo_set_source_pixbuf(cr, self._overlay, x, y)
cr.paint()
cr.restore()
def on_transaction_finished(self, backend, result, helper, doc):
trans_pkgname = str(result.pkgname)
pkgname = helper.get_pkgname(doc)
if trans_pkgname != pkgname:
return
# update installed state
helper.update_availability(doc)
self.is_installed = helper.is_installed(doc)
self.queue_draw()
def on_enter(self, widget, event):
window = self.get_window()
window.set_cursor(_HAND)
return True
def on_leave(self, widget, event):
window = self.get_window()
window.set_cursor(None)
self._pressed = False
return True
def on_press(self, widget, event):
self._pressed = True
def on_release(self, widget, event):
if not self._pressed:
return
self.emit("clicked")
self._pressed = False
class ChannelSelector(Gtk.Button):
PADDING = 0
def __init__(self, section_button):
Gtk.Button.__init__(self)
alignment = Gtk.Alignment.new(0.5, 0.5, 0.0, 1.0)
alignment.set_padding(self.PADDING, self.PADDING,
self.PADDING, self.PADDING)
self.add(alignment)
self.arrow = Gtk.Arrow.new(Gtk.ArrowType.DOWN, Gtk.ShadowType.IN)
alignment.add(self.arrow)
# vars
self.parent_style_type = Gtk.Toolbar
self.section_button = section_button
self.popup = None
self.connect("button-press-event", self.on_button_press)
def do_draw(self, cr):
cr.save()
parent_style = self.get_ancestor(self.parent_style_type)
context = parent_style.get_style_context()
color = darken(context.get_border_color(Gtk.StateFlags.ACTIVE), 0.2)
cr.set_line_width(1)
a = self.get_allocation()
lin = cairo.LinearGradient(0, 0, 0, a.height)
lin.add_color_stop_rgba(0.1,
color.red,
color.green,
color.blue,
0.0) # alpha
lin.add_color_stop_rgba(0.5,
color.red,
color.green,
color.blue,
1.0) # alpha
lin.add_color_stop_rgba(1.0,
color.red,
color.green,
color.blue,
0.1) # alpha
cr.set_source(lin)
cr.move_to(0.5, 0.5)
cr.rel_line_to(0, a.height)
cr.stroke()
cr.move_to(a.width - 0.5, 0.5)
cr.rel_line_to(0, a.height)
cr.stroke()
cr.restore()
for child in self:
self.propagate_draw(child, cr)
def on_button_press(self, button, event):
if self.popup is None:
self.build_channel_selector()
self.show_channel_sel_popup(self, event)
#~
#~ def on_style_updated(self, widget):
#~ context = widget.get_style_context()
#~ context.save()
#~ context.add_class("menu")
#~ bgcolor = context.get_background_color(Gtk.StateFlags.NORMAL)
#~ context.restore()
#~
#~ self._dark_color = darken(bgcolor, 0.5)
def show_channel_sel_popup(self, widget, event):
def position_func(menu, (window, a)):
if self.get_direction() != Gtk.TextDirection.RTL:
tmpx = a.x
else:
tmpx = a.x + a.width - self.popup.get_allocation().width
x, y = window.get_root_coords(tmpx,
a.y + a.height)
return (x, y, False)
a = self.section_button.get_allocation()
window = self.section_button.get_window()
self.popup.popup(None, None, position_func, (window, a),
event.button, event.time)
def set_build_func(self, build_func):
self.build_func = build_func
def build_channel_selector(self):
self.popup = Gtk.Menu()
self.popup.set_name('toolbar-popup') # to set 'padding: 0;'
self.popup.get_style_context().add_class('primary-toolbar')
self.build_func(self.popup)
class SectionSelector(TileToggleButton):
MIN_WIDTH = em(5)
_MARKUP = '<small>%s</small>'
def __init__(self, label, icon, icon_size=Gtk.IconSize.DIALOG):
TileToggleButton.__init__(self)
markup = self._MARKUP % label
self.build_default(markup, icon, icon_size)
self.label.set_use_markup(True)
self.label.set_justify(Gtk.Justification.CENTER)
context = self.get_style_context()
context.add_class("section-sel-bg")
context = self.label.get_style_context()
context.add_class("section-sel")
self.draw_hint_has_channel_selector = False
self._alloc = None
self._bg_cache = {}
self.connect('size-allocate', self.on_size_allocate)
self.connect('style-updated', self.on_style_updated)
def on_size_allocate(self, *args):
alloc = self.get_allocation()
if (self._alloc is None or
self._alloc.width != alloc.width or
self._alloc.height != alloc.height):
self._alloc = alloc
# reset the bg cache
self._bg_cache = {}
def on_style_updated(self, *args):
# also reset the bg cache
self._bg_cache = {}
def _cache_bg_for_state(self, state):
a = self.get_allocation()
# tmp surface on which we render the button bg as per the gtk
# theme engine
_surf = cairo.ImageSurface(cairo.FORMAT_ARGB32,
a.width, a.height)
cr = cairo.Context(_surf)
context = self.get_style_context()
context.save()
context.set_state(state)
Gtk.render_background(context, cr,
-5, -5, a.width + 10, a.height + 10)
Gtk.render_frame(context, cr,
-5, -5, a.width + 10, a.height + 10)
del cr
# new surface which will be cached which
surf = cairo.ImageSurface(cairo.FORMAT_ARGB32,
a.width, a.height)
cr = cairo.Context(surf)
# gradient for masking
lin = cairo.LinearGradient(0, 0, 0, a.height)
lin.add_color_stop_rgba(0.0, 1, 1, 1, 0.1)
lin.add_color_stop_rgba(0.25, 1, 1, 1, 0.7)
lin.add_color_stop_rgba(0.5, 1, 1, 1, 1.0)
lin.add_color_stop_rgba(0.75, 1, 1, 1, 0.7)
lin.add_color_stop_rgba(1.0, 1, 1, 1, 0.1)
cr.set_source_surface(_surf, 0, 0)
cr.mask(lin)
del cr
# cache the resulting surf...
self._bg_cache[state] = surf
def do_draw(self, cr):
state = self.get_state_flags()
if self.get_active():
if state not in self._bg_cache:
self._cache_bg_for_state(state)
cr.set_source_surface(self._bg_cache[state], 0, 0)
cr.paint()
for child in self:
self.propagate_draw(child, cr)
class Link(Gtk.Label):
__gsignals__ = {
"clicked": (GObject.SignalFlags.RUN_LAST,
None,
(),)
}
def __init__(self, markup="", uri="none"):
Gtk.Label.__init__(self)
self._handler = 0
self.set_markup(markup, uri)
def set_markup(self, markup="", uri="none"):
markup = '<a href="%s">%s</a>' % (uri, markup)
Gtk.Label.set_markup(self, markup)
if self._handler == 0:
self._handler = self.connect("activate-link",
self.on_activate_link)
# synonyms for set_markup
def set_label(self, label):
return self.set_markup(label)
def set_text(self, text):
return self.set_markup(text)
def on_activate_link(self, uri, data):
self.emit("clicked")
def disable(self):
self.set_sensitive(False)
self.set_name("subtle-label")
def enable(self):
self.set_sensitive(True)
self.set_name("label")
class MoreLink(Gtk.Button):
_MARKUP = '<b>%s</b>'
_MORE = _("More")
def __init__(self):
Gtk.Button.__init__(self)
self.label = Gtk.Label()
self.label.set_padding(StockEms.SMALL, 0)
self.label.set_markup(self._MARKUP % _(self._MORE))
self.add(self.label)
self._init_event_handling()
context = self.get_style_context()
context.add_class("more-link")
def _init_event_handling(self):
self.connect("enter-notify-event", self.on_enter)
self.connect("leave-notify-event", self.on_leave)
def do_draw(self, cr):
if self.has_focus():
layout = self.label.get_layout()
a = self.get_allocation()
e = layout.get_pixel_extents()[1]
xo, yo = self.label.get_layout_offsets()
Gtk.render_focus(self.get_style_context(), cr,
xo - a.x - 3, yo - a.y - 1,
e.width + 6, e.height + 2)
for child in self:
self.propagate_draw(child, cr)
def on_enter(self, widget, event):
window = self.get_window()
window.set_cursor(_HAND)
def on_leave(self, widget, event):
window = self.get_window()
window.set_cursor(None)
def _build_channels_list(popup):
for i in range(3):
item = Gtk.MenuItem.new()
label = Gtk.Label.new("channel_name %s" % i)
box = Gtk.Box.new(Gtk.Orientation.HORIZONTAL, StockEms.MEDIUM)
box.pack_start(label, False, False, 0)
item.add(box)
item.show_all()
popup.attach(item, 0, 1, i, i + 1)
def get_test_buttons_window():
win = Gtk.Window()
win.set_size_request(200, 200)
vb = Gtk.VBox(spacing=12)
win.add(vb)
link = Link("<small>test link</small>", uri="www.google.co.nz")
vb.pack_start(link, False, False, 0)
button = Gtk.Button()
button.set_label("channels")
channels_button = ChannelSelector(button)
channels_button.parent_style_type = Gtk.Window
channels_button.set_build_func(_build_channels_list)
hb = Gtk.HBox()
hb.pack_start(button, False, False, 0)
hb.pack_start(channels_button, False, False, 0)
vb.pack_start(hb, False, False, 0)
win.show_all()
win.connect("destroy", Gtk.main_quit)
return win
if __name__ == "__main__":
win = get_test_buttons_window()
Gtk.main()
| gpl-3.0 | -1,711,772,411,216,868,400 | 31.782344 | 79 | 0.562308 | false | 3.589068 | false | false | false |
lucperkins/heron | integration_test/src/python/integration_test/topology/one_spout_bolt_multi_tasks/one_spout_bolt_multi_tasks.py | 1 | 1368 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# copyright 2016 twitter. all rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=missing-docstring
from heronpy.api.stream import Grouping
from integration_test.src.python.integration_test.core import TestTopologyBuilder
from integration_test.src.python.integration_test.common.bolt import IdentityBolt
from integration_test.src.python.integration_test.common.spout import ABSpout
def one_spout_bolt_multi_tasks_builder(topology_name, http_server_url):
builder = TestTopologyBuilder(topology_name, http_server_url)
ab_spout = builder.add_spout("ab-spout", ABSpout, 3)
builder.add_bolt("identity-bolt", IdentityBolt,
inputs={ab_spout: Grouping.SHUFFLE},
par=3,
optional_outputs=['word'])
return builder.create_topology()
| apache-2.0 | 9,151,532,406,057,182,000 | 39.235294 | 81 | 0.741228 | false | 3.768595 | true | false | false |
burtgulash/PaTrie | patrie.py | 1 | 2824 | #!/usr/bin/python3
class TNode:
def __init__(self, children):
self.children = children
def is_leaf(self):
return self.children is None
def __repr__(self):
return repr(self.children)
class PaTrie:
def __init__(self):
self.root = None
def __contains__(self, word):
cur = self.root
if cur is None:
return False
i = 0
while cur is not None and not cur.is_leaf():
for label, child in cur.children.items():
if len(label) == 0 and i < len(word):
continue
if word[i:i + len(label)] == label:
cur = child
i += len(label)
break
else:
return False
return i == len(word)
def insert(self, word):
cur = self.root
if cur is None:
self.root = TNode({ word: None })
return
i = 0
while not cur.is_leaf():
for label, child in cur.children.items():
cl = self.common_prefix_len(word[i:], label)
if cl:
if cl == len(label):
cur = child
i += len(label)
break
del cur.children[label]
cur.children[label[:cl]] = TNode({
label[cl:]: child,
word[i + cl:]: TNode(None),
})
return
else:
cur.children[word[i:]] = TNode(None)
return
cur.children = {
"": TNode(None),
word[i:]: TNode(None)
}
def __str__(self):
s = []
def _str(tnode, sofar, label, prepend):
if tnode is None:
return
if tnode.is_leaf():
if label:
s.append(prepend + "+ " + label)
s.append(prepend + " {"+sofar+"}")
else:
s.append(prepend + "+ " + label)
for label, child in tnode.children.items():
_str(child, sofar + label, label, prepend + " ")
if self.root is not None:
_str(self.root, "", "", "")
return "\n".join(s)
def common_prefix_len(self, a, b):
i = 0
for x, y in zip(a, b):
if x == y:
i += 1
else:
break
return i
if __name__ == "__main__":
t = PaTrie()
words = "autobus", "auto", "abraka", "dabra", "abrakadabra", "honza", "honirna", "honicka", "hony", "ho", "h"
for w in words:
t.insert(w)
print("AFTER INSERTING", w)
print(t.root)
print(t)
print()
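        # Illustrative checks (added): every word inserted so far is found via
        # __contains__, while a bare prefix such as "aut" is only an internal
        # path in the trie, not a stored word.
        assert w in t
        assert "aut" not in t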
| mit | -5,929,119,131,326,029,000 | 24.672727 | 113 | 0.410765 | false | 4.092754 | false | false | false |
stringertheory/names | update_meter.py | 1 | 4118 | import sys
import string
import termcolor
import pymongo
import pronouncing
import unidecode
import distance
# a decorator that caches functions but stores results with a db backend would be nice.
def mongo_collection():
collection = pymongo.MongoClient().poetry.poems
return collection
def word_tokenize(sentence):
ascii_version = unidecode.unidecode(sentence.lower())
word_list = []
for word in ascii_version.split():
stripped = word.strip(string.punctuation).strip()
if stripped:
word_list.append(stripped)
return word_list
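# Example (added): word_tokenize("Hello, world!") returns ['hello', 'world'];
# unidecode strips accents before surrounding punctuation is trimmed.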
def phones_for_sentence(word_list):
approximate_words = []
phones_list = []
for word in word_list:
replacement, phones = distance.phones_for_word(word)
approximate_words.append(replacement)
# for now, just pick first alternative from list
phones_list.append(phones[0])
return approximate_words, phones_list
def stress_pattern(phones):
return pronouncing.stresses(''.join(p for p in phones))
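# Illustrative sketch (added; not called by the batch loop below). It shows how
# the three helpers above chain together for one line of text. The exact phones
# and stress string depend on the local `distance` module and the CMU data, so
# the values hinted at in the comments are assumptions only.
def _demo_stress_pipeline(sentence="Shall I compare thee to a summer's day?"):
    words = word_tokenize(sentence)              # e.g. ['shall', 'i', 'compare', ...]
    approximate, phones = phones_for_sentence(words)
    return stress_pattern(phones)                # a digit string such as '10101...'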
collection = mongo_collection()
for index, document in enumerate(collection.find(no_cursor_timeout=True).sort("_id", pymongo.DESCENDING).batch_size(5), 1):
if 'analyzed' in document or not 'lines' in document:
print('skipping %s' % document['_id'], file=sys.stderr)
continue
else:
print('analyzing %s' % document['_id'], file=sys.stderr)
normalized = [word_tokenize(sentence) for sentence in document['lines']]
approximate = []
phones = []
for sentence in normalized:
a, p = phones_for_sentence(sentence)
approximate.append(a)
phones.append(p)
stresses = [stress_pattern(sentence) for sentence in phones]
# zip up for easier storage
analyzed = []
for n, a, p in zip(normalized, approximate, phones):
sentence = []
for n_, a_, p_ in zip(n, a, p):
word = {
'ascii': n_,
'closest': a_,
'phones': p_,
}
sentence.append(word)
analyzed.append(sentence)
document['analyzed'] = analyzed
document['stresses'] = stresses
collection.update_one(
{'_id': document.get('_id')},
{'$set': {'analyzed': analyzed, 'stresses': stresses}},
)
print(index, 'inserted', document['_id'])
row_list = []
for signal, line in zip(stresses, document['lines']):
terminal = []
block_list = []
for i in signal:
if int(i):
block_list.append('<div class="diagram stressed"></div>')
terminal.append(termcolor.colored(' ', 'green', 'on_blue'))
else:
terminal.append(termcolor.colored(' ', 'green', 'on_yellow'))
block_list.append('<div class="diagram unstressed"></div>')
row = '<div class="diagram sentence">%s</div>' % ''.join(block_list)
row_list.append(row)
print(''.join(terminal), file=sys.stderr)
diagram = '<div class="diagram container">%s</div>' % ''.join(row_list)
with open('formatted/%s.html' % document['_id'], 'w') as outfile:
outfile.write('<html>')
outfile.write('<head>')
outfile.write('<link rel="stylesheet" type="text/css" href="diagram.css">')
outfile.write('</head>')
outfile.write('<body>')
outfile.write(document['html'])
outfile.write('\n')
outfile.write(diagram)
outfile.write('\n')
outfile.write('</body>')
outfile.write('</html>')
# c_a, c_d = pywt.dwt(signal, 'haar')
# for i, j in enumerate(signal):
# print i, j
# print ''
# # for i in c_a:
# # print i
# # print ''
# # for i in c_d:
# # print i
# # print ''
# ps = np.abs(np.fft.fft(signal))**2
# # for i, j in enumerate(ps):
# # print i, j
# time_step = 1
# freqs = np.fft.fftfreq(len(signal), time_step)
# print >> sys.stderr, freqs
# idx = np.argsort(freqs)
# for x, y in zip(freqs[idx], ps[idx]):
# print x, y
| mit | -8,512,225,107,606,674,000 | 29.503704 | 123 | 0.575765 | false | 3.706571 | false | false | false |
chromatic-universe/imap2017 | src/imap-python-gadget/cci_imap_gadget/core_on_login.py | 1 | 4756 | # core_on_login.py chromatic universe william k. johnson 2018
from time import sleep
#cci
from cci_imap_gadget.imap_gadget_base import cci_chilkat , \
cci_ecosys , \
cci_mini_imap_mail
from cci_imap_gadget.core_on_logout import on_logout
import cci_utils.cci_io_tools as io
# ----------------------------------------------------------------------------------------
class on_login( object ) :
"""
on_login
"""
def __init__( self ,
cci_chilkat = None ,
cci_mail = None ,
cci_ico = None ) :
"""
:param cci_chilkat:
:param cci_mail:
:param cci_ico:
"""
# logging
self._logger = io.init_logging( self.__class__.__name__ )
self._logger.info( self.__class__.__name__ + '...' )
self._cci = cci_chilkat
self._mail = cci_mail
self._ecosys = cci_ico
self._imap_states = set()
self._imap_states.add( 'non-authenticated' )
@property
def cci( self ) :
return self._cci
@cci.setter
def cci( self , cc ) :
self._cci = cc
@property
def mail( self ) :
return self._mail
@mail.setter
def mail( self , m ) :
self._mail = m
@property
def eco( self ) :
return self._ecosys
@eco.setter
def eco( self , ec ) :
        self._ecosys = ec
@property
def logger( self ) :
return self._logger
@logger.setter
def logger( self , log ) :
self._logger = log
@property
def imap_states( self ) :
return self._imap_states
@imap_states.setter
def imap_states( self , states ) :
self._imap_states = states
def perform( self ) :
"""
:return:
"""
        # connect to an imap server.
self.cci.imap.put_KeepSessionLog( True )
self.cci.imap.put_VerboseLogging( True )
print( self.mail.imap_addr )
b_ret = self.cci.imap.Connect( self.mail.imap_addr )
if b_ret :
#login
b_ret = self.cci.imap.Login( self.mail.imap_mail_account ,
self.mail.imap_mail_auth )
if not b_ret :
self.logger.error( '...login failed....')
#self.logger.info( self.cci.imap.lastErrorText() )
self.logger.info( self.cci.imap.sessionLog() )
self.imap_states.remove( 'non-authenticated' )
self.imap_states.add( 'authenticated' )
sleep( 2 )
# ----------------------------------------------------------------------------------------
if __name__ == '__main__' :
try :
#imap default params
mini_mail = cci_mini_imap_mail( mail_account="wiljoh" , mail_auth="Argentina1" )
mini_mail.logger.info( repr( mini_mail) )
#cci microkernel
mini_ecosys = cci_ecosys( mta_addr='127.0.0.1' ,
mda_addr='127.0.0.1' ,
imap_mail=mini_mail )
mini_ecosys.logger.info( repr( mini_ecosys ) )
#chilkat instances
cci = cci_chilkat()
for idx in range( 0 , 9 ) :
login = on_login( cci , mini_mail , mini_ecosys )
login.perform()
#logout
logout = on_logout( cci , mini_mail , mini_ecosys )
logout.perform()
sleep( 8 )
except Exception as e :
print( str( e ) )
| mit | 2,335,134,393,039,799,300 | 34.22963 | 108 | 0.353869 | false | 4.858018 | false | false | false |
uclmr/inferbeddings | tests/inferbeddings/adversarial/closedform/test_lifted_simple_distmult_unit_cube.py | 1 | 3989 | # -*- coding: utf-8 -*-
import numpy as np
import tensorflow as tf
from inferbeddings.models import base as models
from inferbeddings.models import similarities
from inferbeddings.knowledgebase import Fact, KnowledgeBaseParser
from inferbeddings.parse import parse_clause
from inferbeddings.models.training import constraints
from inferbeddings.adversarial import Adversarial
from inferbeddings.adversarial.closedform import ClosedForm
import logging
import pytest
logger = logging.getLogger(__name__)
triples = [
('a', 'p', 'b'),
('c', 'p', 'd'),
('a', 'q', 'b')
]
facts = [Fact(predicate_name=p, argument_names=[s, o]) for s, p, o in triples]
parser = KnowledgeBaseParser(facts)
nb_entities = len(parser.entity_to_index)
nb_predicates = len(parser.predicate_to_index)
# Clauses
clause_str = 'q(X, Y) :- p(X, Y)'
clauses = [parse_clause(clause_str)]
# Instantiating the model parameters
model_class = models.get_function('DistMult')
similarity_function = similarities.get_function('dot')
model_parameters = dict(similarity_function=similarity_function)
@pytest.mark.closedform
def test_distmult_unit_cube():
for seed in range(32):
tf.reset_default_graph()
np.random.seed(seed)
tf.set_random_seed(seed)
entity_embedding_size = np.random.randint(low=1, high=5)
predicate_embedding_size = entity_embedding_size
# Instantiating entity and predicate embedding layers
entity_embedding_layer = tf.get_variable('entities',
shape=[nb_entities + 1, entity_embedding_size],
initializer=tf.contrib.layers.xavier_initializer())
predicate_embedding_layer = tf.get_variable('predicates',
shape=[nb_predicates + 1, predicate_embedding_size],
initializer=tf.contrib.layers.xavier_initializer())
# Adversary - used for computing the adversarial loss
adversarial = Adversarial(clauses=clauses, parser=parser,
entity_embedding_layer=entity_embedding_layer,
predicate_embedding_layer=predicate_embedding_layer,
model_class=model_class,
model_parameters=model_parameters,
batch_size=1)
adv_projection_steps = [constraints.unit_cube(adv_emb_layer) for adv_emb_layer in adversarial.parameters]
adversarial_loss = adversarial.loss
v_optimizer = tf.train.AdagradOptimizer(learning_rate=1e-1)
v_training_step = v_optimizer.minimize(- adversarial_loss, var_list=adversarial.parameters)
init_op = tf.global_variables_initializer()
closed_form_lifted = ClosedForm(parser=parser,
predicate_embedding_layer=predicate_embedding_layer,
model_class=model_class, model_parameters=model_parameters,
is_unit_cube=True)
opt_adversarial_loss = closed_form_lifted(clauses[0])
with tf.Session() as session:
session.run(init_op)
for finding_epoch in range(1, 100 + 1):
_ = session.run([v_training_step])
for projection_step in adv_projection_steps:
session.run([projection_step])
violation_loss_val, opt_adversarial_loss_val = session.run([adversarial_loss, opt_adversarial_loss])
if violation_loss_val + 1e-1 > opt_adversarial_loss_val:
print('{} <= {}'.format(violation_loss_val, opt_adversarial_loss_val))
assert violation_loss_val <= (opt_adversarial_loss_val + 1e-4)
tf.reset_default_graph()
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
pytest.main([__file__])
| mit | -1,489,150,325,355,351,800 | 36.632075 | 116 | 0.610679 | false | 3.922321 | false | false | false |
uskudnik/ggrc-core | src/ggrc_workflows/migrations/versions/20150514130212_1431e7094e26_add_new_notification_type.py | 1 | 1859 |
"""add new notification type
Revision ID: 1431e7094e26
Revises: 2b89912f95f1
Create Date: 2015-05-14 13:02:12.165612
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
from datetime import timedelta, date
from sqlalchemy import and_
from ggrc import db
from ggrc_workflows.models import Workflow
from ggrc_workflows.notification.notification_handler import (
get_notification_type,
add_notif,
)
# revision identifiers, used by Alembic.
revision = '1431e7094e26'
down_revision = '2b89912f95f1'
def upgrade():
notification_types_table = table(
'notification_types',
column('id', sa.Integer),
column('name', sa.String),
column('description', sa.Text),
column('template', sa.String),
column('instant', sa.Boolean),
column('advance_notice', sa.Integer),
column('advance_notice_end', sa.Integer),
column('created_at', sa.DateTime),
column('modified_by_id', sa.Integer),
column('updated_at', sa.DateTime),
column('context_id', sa.Integer),
)
notification_types = [
        # cycle created notifications
{"name": "cycle_start_failed",
"description": ("Notify workflow owners that a cycle has failed to"
"start for a recurring workflow"),
"template": "cycle_start_failed",
"advance_notice": 0,
"instant": False,
},
]
op.bulk_insert(notification_types_table, notification_types)
existing_wfs = Workflow.query.filter(and_(
Workflow.frequency.in_(["weekly", "monthly", "quarterly", "annually"]),
Workflow.next_cycle_start_date >= date.today()
))
for wf in existing_wfs:
notif_type = get_notification_type("cycle_start_failed")
add_notif(wf, notif_type, wf.next_cycle_start_date + timedelta(1))
db.session.commit()
def downgrade():
pass
| apache-2.0 | 2,134,548,577,250,535,400 | 26.338235 | 77 | 0.669715 | false | 3.568138 | false | false | false |
shimpe/frescobaldi | frescobaldi_app/matcher.py | 1 | 7388 | # This file is part of the Frescobaldi project, http://www.frescobaldi.org/
#
# Copyright (c) 2008 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
Highlights matching tokens such as { and }, << and >> etc.
"""
from __future__ import unicode_literals
import weakref
from PyQt4.QtGui import QAction
import app
import plugin
import ly.lex
import lydocument
import viewhighlighter
import actioncollection
import actioncollectionmanager
class AbstractMatcher(object):
def __init__(self, view=None):
"""Initialize with an optional View. (Does not keep a reference.)"""
self._view = lambda: None
if view:
self.setView(view)
app.settingsChanged.connect(self.updateSettings)
self.updateSettings()
def updateSettings(self):
from PyQt4.QtCore import QSettings
s = QSettings()
s.beginGroup("editor_highlighting")
self._match_duration = s.value("match", 1, int) * 1000
def setView(self, view):
"""Set the current View (to monitor for cursor position changes)."""
old = self._view()
if old:
old.cursorPositionChanged.disconnect(self.showMatches)
if view:
self._view = weakref.ref(view)
view.cursorPositionChanged.connect(self.showMatches)
else:
self._view = lambda: None
def view(self):
"""Return the current View."""
return self._view()
def highlighter(self):
"""Implement to return an ArbitraryHighlighter for the current View."""
pass
def showMatches(self):
"""Highlights matching tokens if the view's cursor is at such a token."""
cursors = matches(self.view().textCursor(), self.view())
if cursors:
self.highlighter().highlight("match", cursors, 2, self._match_duration)
else:
self.highlighter().clear("match")
class Matcher(AbstractMatcher, plugin.MainWindowPlugin):
"""One Matcher automatically handling the current View."""
def __init__(self, mainwindow):
super(Matcher, self).__init__()
ac = self.actionCollection = Actions()
actioncollectionmanager.manager(mainwindow).addActionCollection(ac)
ac.view_matching_pair.triggered.connect(self.moveto_match)
ac.view_matching_pair_select.triggered.connect(self.select_match)
mainwindow.currentViewChanged.connect(self.setView)
view = mainwindow.currentView()
if view:
self.setView(view)
def highlighter(self):
return viewhighlighter.highlighter(self.view())
def moveto_match(self):
"""Jump to the matching token."""
self.goto_match(False)
def select_match(self):
"""Select from the current to the matching token."""
self.goto_match(True)
def goto_match(self, select=False):
"""Jump to the matching token, selecting the text if select is True."""
cursor = self.view().textCursor()
cursors = matches(cursor)
if len(cursors) < 2:
return
if select:
if cursors[0] < cursors[1]:
anchor, pos = cursors[0].selectionStart(), cursors[1].selectionEnd()
else:
anchor, pos = cursors[0].selectionEnd(), cursors[1].selectionStart()
cursor.setPosition(anchor)
cursor.setPosition(pos, cursor.KeepAnchor)
else:
cursor.setPosition(cursors[1].selectionStart())
self.view().setTextCursor(cursor)
class Actions(actioncollection.ActionCollection):
name = "matchingpair"
def createActions(self, parent):
self.view_matching_pair = QAction(parent)
self.view_matching_pair_select = QAction(parent)
def translateUI(self):
self.view_matching_pair.setText(_("Matching Pai&r"))
self.view_matching_pair_select.setText(_("&Select Matching Pair"))
def matches(cursor, view=None):
"""Return a list of zero to two cursors specifying matching tokens.
If the list is empty, the cursor was not at a MatchStart/MatchEnd token,
if the list only contains one cursor the matching token could not be found,
if the list contains two cursors, the first is the token the cursor was at,
and the second is the matching token.
If view is given, only the visible part of the document is searched.
"""
block = cursor.block()
column = cursor.position() - block.position()
tokens = lydocument.Runner(lydocument.Document(cursor.document()))
tokens.move_to_block(block)
if view is not None:
first_block = view.firstVisibleBlock()
bottom = view.contentOffset().y() + view.viewport().height()
pred_forward = lambda: view.blockBoundingGeometry(tokens.block).top() <= bottom
pred_backward = lambda: tokens.block >= first_block
else:
pred_forward = lambda: True
pred_backward = lambda: True
source = None
for token in tokens.forward_line():
if token.pos <= column <= token.end:
if isinstance(token, ly.lex.MatchStart):
match, other = ly.lex.MatchStart, ly.lex.MatchEnd
def source_gen():
while pred_forward():
for t in tokens.forward_line():
yield t
if not tokens.next_block():
break
source = source_gen()
break
elif isinstance(token, ly.lex.MatchEnd):
match, other = ly.lex.MatchEnd, ly.lex.MatchStart
def source_gen():
while pred_backward():
for t in tokens.backward_line():
yield t
if not tokens.previous_block():
break
source = source_gen()
break
elif token.pos > column:
break
cursors = []
if source:
# we've found a matcher item
cursors.append(tokens.cursor())
nest = 0
for token2 in source:
if isinstance(token2, other) and token2.matchname == token.matchname:
if nest == 0:
# we've found the matching item!
cursors.append(tokens.cursor())
break
else:
nest -= 1
elif isinstance(token2, match) and token2.matchname == token.matchname:
nest += 1
return cursors
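# Illustrative sketch (added; the plugin above does not use it). It mirrors
# Matcher.goto_match() to show how matches() can be driven directly from a
# hypothetical `view` object carrying a QTextCursor.
def _example_goto_match(view):
    cursor = view.textCursor()
    cursors = matches(cursor, view)
    if len(cursors) == 2:
        # cursors[0] covers the token under the cursor, cursors[1] its match
        cursor.setPosition(cursors[1].selectionStart())
        view.setTextCursor(cursor)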
app.mainwindowCreated.connect(Matcher.instance)
| gpl-2.0 | -4,223,175,629,480,719,000 | 34.864078 | 87 | 0.611668 | false | 4.330598 | false | false | false |
TriggeredMessaging/pydotmailer | pydotmailer.py | 1 | 26995 | # pydotmailer - A lightweight wrapper for the dotMailer API, written in Python.
# Copyright (c) 2012 Triggered Messaging Ltd, released under the MIT license
# Home page:
# https://github.com/TriggeredMessaging/pydotmailer/
# See README and LICENSE files.
#
# dotMailer API docs are at http://www.dotmailer.co.uk/api/
# This class was influenced by earlier work: https://github.com/JeremyJones/dotmailer-client/blob/master/dotmailer.py
import base64
import time
from datetime import datetime, timedelta
from suds.client import Client as SOAPClient
__version__ = '0.1.2'
try:
import simplejson as json
except ImportError:
import json # fall back to traditional json module.
import logging
logger = logging.getLogger(__name__)
from dotmailersudsplugin import DotMailerSudsPlugin
class PyDotMailer(object):
version = '0.1'
class RESULT_FIELDS_ERROR_CODE:
"""
Defines for RESULT_FIELDS.ERROR_CODE error codes which we're deriving from the string the ESP
e.g dotMailer returns.
"""
ERROR_CAMPAIGN_NOT_FOUND = 'ERROR_CAMPAIGN_NOT_FOUND' # no email template
ERROR_CAMPAIGN_SENDNOTPERMITTED = 'ERROR_CAMPAIGN_SENDNOTPERMITTED'
# not paid enough? dotMailer tends to return this if you've run out of campaign credits or a similar issue.
        ERROR_CAMPAIGN_APINOTPERMITTED = 'ERROR_CAMPAIGN_APINOTPERMITTED'  # e.g. exceeded dotMailer API limits (API_USAGE_EXCEEDED)
ERROR_GENERIC = 'ERROR_UNKNOWN' # code which couldn't be parsed.
ERROR_CONTACT_NOT_FOUND = 'ERROR_CONTACT_NOT_FOUND' # no email address?
ERROR_CONTACT_UNSUBSCRIBED = 'ERROR_CONTACT_UNSUBSCRIBED' # no send permission
ERROR_CONTACT_BLACKHOLED = 'ERROR_CONTACT_BLACKHOLED' # address blackholed
ERROR_OTHER = 'ERROR_OTHER' # Etc
TIMEOUT_ERROR = 'Timeout Error' # Timeout from ESP
ERROR_UNFINISHED = "ERROR_UNFINISHED" # Load had not finished
ERROR_ESP_LOAD_FAIL = 'ERROR_ESP_LOAD_FAIL' # Data not loaded
# Cache the information on the API location on the server
api_url = ''
def __init__(self, api_username='', api_password='', secure=True):
"""
Connect to the dotMailer API at apiconnector.com, using SUDS.
        There is no API key parameter, because the dotMailer API does not use one.
@param api_username Your dotMailer user name
@param api_password Your dotMailer password
@param secure Whether or not this should use a secure connection (HTTPS).
Always True if the ESP doesn't support an insecure API.
"""
# Remember the HTTPS flag
        self.secure = secure or False  # treat any falsy value as False
# Choose the dotMailer API URL
if secure:
self.api_url = 'https://apiconnector.com/API.asmx?WSDL'
else:
self.api_url = 'http://apiconnector.com/API.asmx?WSDL'
# Connect to the API, using SUDS. Log before and after to track the time taken.
logger.debug("Connecting to web service")
self.client = SOAPClient(self.api_url,
plugins=[DotMailerSudsPlugin()]) # Plugin makes a tiny XML patch for dotMailer
logger.debug("Connected to web service")
# Change the logging level to CRITICAL to avoid logging errors for every API call which fails via suds
logging.getLogger('suds.client').setLevel(logging.CRITICAL)
# Remember the username and password. There's no API key to remember with dotMailer
self.api_username = api_username
self.api_password = api_password
if (not api_username) or (not api_password):
raise Exception('Bad username or password')
self.last_exception = None
def unpack_exception(self, e):
""" unpack the exception thrown by suds. This contains a string code in e.fault.faultstring containing text e.g.
Server was unable to process request. ---> Campaign not found ERROR_CAMPAIGN_NOT_FOUND
Use this to set a suitable value for dict_result
@param e exception
@return dict_result, e.g. {'ok':False,
'errors':[e.message],
                                   'error_code': PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_NOT_FOUND }
"""
self.last_exception = e # in case caller cares
fault_string = ''
# http://stackoverflow.com/questions/610883/how-to-know-if-an-object-has-an-attribute-in-python
if e and hasattr(e, 'fault') and hasattr(e.fault, 'faultstring'):
fault_string = e.fault.faultstring
# todo clearly a more generic way of doing this would be good.
if 'ERROR_CAMPAIGN_NOT_FOUND' in fault_string:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_NOT_FOUND
elif 'ERROR_CAMPAIGN_SENDNOTPERMITTED' in fault_string:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_SENDNOTPERMITTED
elif 'ERROR_APIUSAGE_EXCEEDED' in fault_string:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_APINOTPERMITTED
elif 'ERROR_CONTACT_NOT_FOUND' in fault_string:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CONTACT_NOT_FOUND
elif 'ERROR_CONTACT_SUPPRESSED' in fault_string:
# Server was unable to process request. ---> Contact is suppressed. ERROR_CONTACT_SUPPRESSED
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CONTACT_UNSUBSCRIBED
else:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_OTHER
dict_result = {'ok': False, 'errors': [e.message], 'error_code': error_code}
return dict_result
def add_contacts_to_address_book(self, address_book_id, s_contacts, wait_to_complete_seconds=False):
"""
Add a list of contacts to the address book
@param address_book_id the id of the address book
@param s_contacts containing the contacts to be added. You may upload either a .csv or .xls file.
It must contain one column with the heading "Email".
                          Other columns will be mapped to your custom data fields where possible.
@param wait_to_complete_seconds seconds to wait.
@return dict e.g. {'progress_id': 15edf1c4-ce5f-42e3-b182-3b20c880bcf8, 'ok': True, 'result': Finished}
http://www.dotmailer.co.uk/api/address_books/add_contacts_to_address_book_with_progress.aspx
"""
dict_result = {'ok': True}
return_code = None
base64_data = base64.b64encode(s_contacts)
try:
progress_id = self.client.service.AddContactsToAddressBookWithProgress(username=self.api_username,
password=self.api_password,
addressbookID=address_book_id,
data=base64_data,
dataType='CSV')
dict_result = {'ok': True}
if wait_to_complete_seconds:
# retry loop...
dt_wait_until = datetime.utcnow() + timedelta(seconds=wait_to_complete_seconds) # wait for max
sleep_time = 0.2 # start with short sleep between retries
while (not return_code or return_code.get('result') == 'NotFinished') and \
datetime.utcnow() < dt_wait_until:
time.sleep(sleep_time)
return_code = self.get_contact_import_progress(progress_id) # E.g: {'error_code': 'ERROR_UNFINISHED', 'ok': False, 'result': NotFinished}
# gradually backoff with longer sleep intervals up to a max of 5 seconds
sleep_time = min(sleep_time * 2, 5.0)
if return_code:
dict_result = return_code
dict_result.update({'progress_id': progress_id})
except Exception as e:
dict_result = self.unpack_exception(e)
return dict_result
def add_contact_to_address_book(self, address_book_id, email_address, d_fields, email_type="Html",
audience_type="Unknown",
opt_in_type="Unknown"):
"""
        Add a single contact to an address book (uses AddContactToAddressBook).
@param address_book_id the id of the address book
@param email_address The email address to add
@param d_fields - dict containing the data to be added. e.g. { 'firstname': 'mike', 'lastname': 'austin'}.
columns must map to standard fields in DM or will attempt to map to your custom data fields in DM.
@param email_type = "Html" - the new contact will be set to receive this format by default.
@return dict e.g. {'contact_id': 123532543, 'ok': True, 'contact': APIContact object }
"""
# Initialise the result dictionary
dict_result = {'ok': False}
# Create an APIContact object with the details of the record to load. For example:
# APIContact: (APIContact){
# ID = None, Email = None,
# AudienceType = (ContactAudienceTypes){ value = None, }
# DataFields = (ContactDataFields){ Keys = (ArrayOfString){ string[] = <empty> }
# Values = (ArrayOfAnyType){ anyType[] = <empty> }
# OptInType = (ContactOptInTypes){ value = None }
# EmailType = (ContactEmailTypes){ value = None }
# Notes = None }
contact = self.client.factory.create('APIContact')
del contact.ID
contact.Email = email_address
# Copy field data into the call
for field_name in d_fields:
if field_name != 'email' and d_fields.get(field_name):
contact.DataFields.Keys[0].append(field_name)
contact.DataFields.Values[0].append(d_fields.get(field_name))
# remove some empty values that will upset suds/dotMailer
####del contact.AudienceType
####del contact.OptInType
contact.AudienceType = audience_type
contact.OptInType = opt_in_type
contact.EmailType = email_type
#### logging.getLogger('suds.client').setLevel(logging.DEBUG)
try:
created_contact = self.client.service.AddContactToAddressBook(username=self.api_username,
password=self.api_password,
contact=contact,
addressbookId=address_book_id)
# Example dict_result contents:
# { 'contact': (APIContact){ ID = 417373614, Email = "[email protected]",
# AudienceType = "Unknown",
# DataFields = (ContactDataFields){
# Keys = (ArrayOfString){ string[] = "Postcode", }
# Values = (ArrayOfAnyType){ anyType[] = "SW1A 0AA", } }
# OptInType = "Unknown", EmailType = "Html" },
# 'ok': True, 'contact_id': 417373614}
dict_result = ({'ok': True, 'contact_id': created_contact.ID, 'contact': created_contact})
except Exception as e:
dict_result = self.unpack_exception(e)
return dict_result
def get_contact_import_progress(self, progress_id):
"""
@param progress_id the progress_id from add_contacts_to_address_book
@return dict e.g. {'ok': False, 'result': NotFinished} or dict: {'ok': True, 'result': Finished}
http://www.dotmailer.co.uk/api/contacts/get_contact_import_progress.aspx
"""
dict_result = {'ok': True}
try:
return_code = self.client.service.GetContactImportProgress(username=self.api_username,
password=self.api_password,
progressID=progress_id)
if return_code == 'Finished':
dict_result = {'ok': True, 'result': return_code, 'errors': [' Load OK. See report at https://r1-app.dotmailer.com/Contacts/Import/WatchdogReport.aspx?g=%s ' % progress_id] }
elif return_code == 'RejectedByWatchdog':
# API call AddContactsToAddressBookWithProgress has triggered "RejectedByWatchdog" for one client and (we believe) dotMailer blocked the whole upload.
# https://support.dotmailer.com/entries/44346548-Data-Watchdog-FAQs
# https://support.dotmailer.com/entries/21449156-Better-API-feedback-for-Reject...
dict_result = {'ok': False, 'result': return_code, 'error_code':PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_ESP_LOAD_FAIL,
'errors': [' Load Fail. See report at https://r1-app.dotmailer.com/Contacts/Import/WatchdogReport.aspx?g=%s ' % progress_id]}
else:
dict_result = {'ok': False, 'result': return_code, 'error_code':PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_UNFINISHED,
'errors': [' Load Unfinished. See report at https://r1-app.dotmailer.com/Contacts/Import/WatchdogReport.aspx?g=%s ' % progress_id]}
except Exception as e:
dict_result = self.unpack_exception(e)
return dict_result # E.g: {'ok': True, 'result': Finished, 'errors': [u'<a href="https://r1-app.dotmailer.com/Contacts/Import/WatchdogReport.aspx?g=d82602bb-adfb-4e2d-aabc-5fb77af2ae3d">Load OK Report</a>']}
def send_campaign_to_contact(self, campaign_id, contact_id, send_date=None):
"""
@param campaign_id
@param contact_id
@param send_date date/time in server time when the campaign should be sent.
@return dict e.g. {'ok': True} or {'ok': False,
'result': <return code if there is one>,
'errors':['sample error']}
http://www.dotmailer.co.uk/api/campaigns/send_campaign_to_contact.aspx
"""
# format the date in ISO format, e.g. "2012-03-28T19:51:00" for sending via SOAP call.
if not send_date:
send_date = datetime.utcnow()
dict_result = {'ok': True}
iso_send_date = self.dt_to_iso_date(send_date)
return_code = None
try:
return_code = self.client.service.SendCampaignToContact(username=self.api_username,
password=self.api_password,
campaignId=campaign_id,
contactid=contact_id,
sendDate=iso_send_date) # note inconsistent case
# in DM API
if return_code:
# return code, which means an error
dict_result = {'ok': False, 'result': return_code}
except Exception as e:
dict_result = self.unpack_exception(e)
return dict_result
def get_contact_by_email(self, email):
"""
@param email email address to search for.
@return dict e.g. {'ok': True,
contact_id: 32323232, # the dotMailer contact ID
email: # the email address of the returned record
d_fields: { field_name: field_value }, # dictionary with multiple fields, keyed by field name
# The result member is the raw return from dotMailer.
'result': (APIContact){
ID = 367568124
Email = "[email protected]"
AudienceType = "Unknown"
DataFields =
(ContactDataFields){
Keys =
(ArrayOfString){
string[] =
"FIRSTNAME",
"FULLNAME",
"GENDER",
"LASTNAME",
"POSTCODE",
}
Values =
(ArrayOfAnyType){
anyType[] =
None,
None,
None,
None,
}
}
OptInType = "Unknown"
EmailType = "Html"
Notes = None
}}
http://www.dotmailer.co.uk/api/contacts/get_contact_by_email.aspx
"""
dict_result = {'ok': True}
data_fields = None
try:
return_code = self.client.service.GetContactByEmail(username=self.api_username,
password=self.api_password,
email=email)
dict_result = {'ok': True, 'result': return_code}
if dict_result.get('ok'):
# create a dictionary with structure { field_name: field_value }
try:
data_fields = dict_result.get('result').DataFields
d_fields = self._clean_returned_data_fields(data_fields=data_fields)
dict_result.update({'d_fields': d_fields})
except:
logger.exception("Exception unpacking fields in GetContactByEmail for email=%s" % email)
# log additional info separately in case something bad has happened
# which'll cause this logging line to raise.
logger.error("Further info: data_fields=%s" % data_fields)
contact_id = return_code.ID
dict_result.update({'contact_id': contact_id})
returned_email_address = return_code.Email
dict_result.update({'email': returned_email_address})
except Exception as e:
dict_result = self.unpack_exception(e)
error_code = dict_result.get("error_code")
if error_code == PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CONTACT_NOT_FOUND:
pass # ignore these expected errors
elif error_code == PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_APINOTPERMITTED:
pass
else:
logger.exception("Exception in GetContactByEmail")
return dict_result
def dt_to_iso_date(self, dt):
""" convert a python datetime to an iso date, e.g. "2012-03-28T19:51:00"
ready to send via SOAP
http://www.iso.org/iso/date_and_time_format
"""
try:
iso_dt = dt.strftime('%Y-%m-%dT%H:%M:%S')
except:
logger.exception('Exception converting dt to iso')
iso_dt = None
return iso_dt
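    # Example (added): dt_to_iso_date(datetime(2012, 3, 28, 19, 51, 0))
    # returns "2012-03-28T19:51:00".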
def _clean_returned_data_fields(self, data_fields):
"""
Case 1886: If there's an empty first name/last name key, then dotMailer fails to return a value,
so the lengths don't match.
If this happens, scan through the keys and add an extra value of None just before the dodgy key(s)
len_data_fields_names = len(data_fields_keys)
len_data_fields_values = len(data_fields_values)
if len_data_fields_names > len_data_fields_values:
# Different number of keys and values, so do a copy but insert None when necessary
name_index = 0
value_index = 0
while name_index < len_data_fields_names:
field_name = data_fields_keys[name_index]
if name_index+1 < len_data_fields_names:
next_field_name = data_fields_keys[name_index+1]
else:
next_field_name = ""
if ((len_data_fields_names > len_data_fields_values)
and (next_field_name=="FIRSTNAME" or next_field_name=="LASTNAME" or next_field_name=="FULLNAME")):
d_fields.update({field_name: None }) # Insert new value Null
len_data_fields_values += 1 # Count one more value, but don't step on to next value
else:
d_fields.update({field_name: data_fields_values[value_index] }) # Copy the real value
value_index += 1 # Step on to next value
name_index += 1 # Next key
"""
d_fields = {}
data_fields_keys = data_fields.Keys[0]
data_fields_values = data_fields.Values[0]
# Case 1886: If there's an empty first name/last name key, then dotMailer fails to return a value,
# so the lengths don't match
# If this happens, scan through the keys and add an extra value of None just before the dodgy key(s)
len_data_fields_names = len(data_fields_keys)
len_data_fields_values = len(data_fields_values)
if len_data_fields_names > len_data_fields_values:
# Different number of keys and values, so do a copy but insert None when necessary
name_index = 0
value_index = 0
while name_index < len_data_fields_names:
field_name = data_fields_keys[name_index]
if name_index+1 < len_data_fields_names:
next_field_name = data_fields_keys[name_index+1]
else:
next_field_name = ""
if ((len_data_fields_names > len_data_fields_values)
and (next_field_name == "FIRSTNAME"
or next_field_name == "LASTNAME"
or next_field_name == "FULLNAME")):
d_fields.update({field_name: None}) # Insert new value Null
len_data_fields_values += 1 # Count one more value, but don't step on to next value
else:
d_fields.update({field_name: data_fields_values[value_index]}) # Copy the real value
value_index += 1 # Step on to next value
name_index += 1 # Next key
else:
# Same number of keys and values, so just do a straightforward copy
for idx, field_name in enumerate(data_fields_keys):
logger.debug(idx, field_name, data_fields_values[idx])
d_fields.update({field_name: data_fields_values[idx]})
return d_fields
def get_contact_by_id(self, contact_id):
"""
@param contact_id - id to search for
@return dict e.g. {'ok': True,
contact_id: 32323232, # the dotMailer contact ID
email: # the email address of the returned record
d_fields: { field_name: field_value }, # dictionary with multiple fields, keyed by field name
# The result member is the raw return from dotMailer.
'result': (APIContact){
ID = 367568124
Email = "[email protected]"
AudienceType = "Unknown"
DataFields =
(ContactDataFields){
Keys =
(ArrayOfString){
string[] =
"FIRSTNAME",
"FULLNAME",
"GENDER",
"LASTNAME",
"POSTCODE",
}
Values =
(ArrayOfAnyType){
anyType[] =
None,
None,
None,
None,
}
}
OptInType = "Unknown"
EmailType = "Html"
Notes = None
}}
http://www.dotmailer.co.uk/api/contacts/get_contact_by_id.aspx
"""
dict_result = {'ok': True}
data_fields = None
try:
return_code = self.client.service.GetContactById(username=self.api_username, password=self.api_password,
id=contact_id)
dict_result = {'ok': True, 'result': return_code}
if dict_result.get('ok'):
# create a dictionary with structure { field_name: field_value }
try:
d_fields = {}
data_fields = dict_result.get('result').DataFields
d_fields = self._clean_returned_data_fields(data_fields=data_fields)
dict_result.update({'d_fields': d_fields })
except:
logger.exception("Exception unpacking fields in GetContactById for id=%s" % contact_id)
# log additional info separately in case something bad has happened
# which'll cause this logging line to raise.
logger.error("Further info: data_fields=%s" % data_fields)
contact_id = return_code.ID
dict_result.update({'contact_id': contact_id})
returned_email_address = return_code.Email
dict_result.update({'email': returned_email_address})
except Exception as e:
dict_result = self.unpack_exception(e)
error_code = dict_result.get('error_code')
if error_code == PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CONTACT_NOT_FOUND:
pass # Don't log these expected errors
elif error_code == PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_APINOTPERMITTED:
pass
return dict_result
"""
might implement a command line at some point.
def main():
try:
addressbookid = sys.argv[2] #should use argparse or similar.
contactsfilename = sys.argv[3]
except IndexError:
print "Usage: dotmailer addcontactstoaddressbook addressbookid contactsfilename\n"
sys.exit(1)
initial_data = open(contactsfilename, 'r').read()
"""
| mit | 7,280,185,620,400,163,000 | 54.204499 | 215 | 0.544619 | false | 4.276097 | false | false | false |
Runscope/pysaml2 | tests/test_30_mdstore.py | 1 | 7860 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import re
from saml2.httpbase import HTTPBase
from saml2.mdstore import MetadataStore, MetaDataMDX
from saml2.mdstore import destinations
from saml2.mdstore import name
from saml2 import md
from saml2 import sigver
from saml2 import BINDING_SOAP
from saml2 import BINDING_HTTP_REDIRECT
from saml2 import BINDING_HTTP_POST
from saml2 import BINDING_HTTP_ARTIFACT
from saml2 import saml
from saml2 import config
from saml2.attribute_converter import ac_factory
from saml2.attribute_converter import d_to_local_name
from saml2.extension import mdui
from saml2.extension import idpdisc
from saml2.extension import dri
from saml2.extension import mdattr
from saml2.extension import ui
from saml2.s_utils import UnknownPrincipal
import xmldsig
import xmlenc
from pathutils import full_path
sec_config = config.Config()
#sec_config.xmlsec_binary = sigver.get_xmlsec_binary(["/opt/local/bin"])
ONTS = {
saml.NAMESPACE: saml,
mdui.NAMESPACE: mdui,
mdattr.NAMESPACE: mdattr,
dri.NAMESPACE: dri,
ui.NAMESPACE: ui,
idpdisc.NAMESPACE: idpdisc,
md.NAMESPACE: md,
xmldsig.NAMESPACE: xmldsig,
xmlenc.NAMESPACE: xmlenc
}
ATTRCONV = ac_factory(full_path("attributemaps"))
METADATACONF = {
"1": {
"local": [full_path("swamid-1.0.xml")]
},
"2": {
"local": [full_path("InCommon-metadata.xml")]
},
"3": {
"local": [full_path("extended.xml")]
},
"7": {
"local": [full_path("metadata_sp_1.xml"),
full_path("InCommon-metadata.xml")],
"remote": [
{"url": "https://kalmar2.org/simplesaml/module.php/aggregator/?id=kalmarcentral2&set=saml2",
"cert": full_path("kalmar2.pem")}]
},
"4": {
"local": [full_path("metadata_example.xml")]
},
"5": {
"local": [full_path("metadata.aaitest.xml")]
},
"8": {
"mdfile": [full_path("swamid.md")]
}
}
def _eq(l1, l2):
return set(l1) == set(l2)
def _fix_valid_until(xmlstring):
new_date = datetime.datetime.now() + datetime.timedelta(days=1)
new_date = new_date.strftime("%Y-%m-%dT%H:%M:%SZ")
return re.sub(r' validUntil=".*?"', ' validUntil="%s"' % new_date,
xmlstring)
def test_swami_1():
UMU_IDP = 'https://idp.umu.se/saml2/idp/metadata.php'
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["1"])
assert len(mds) == 1 # One source
idps = mds.with_descriptor("idpsso")
assert idps.keys()
idpsso = mds.single_sign_on_service(UMU_IDP)
assert len(idpsso) == 1
assert destinations(idpsso) == [
'https://idp.umu.se/saml2/idp/SSOService.php']
_name = name(mds[UMU_IDP])
assert _name == u'Umeå University (SAML2)'
certs = mds.certs(UMU_IDP, "idpsso", "signing")
assert len(certs) == 1
sps = mds.with_descriptor("spsso")
assert len(sps) == 108
wants = mds.attribute_requirement('https://connect8.sunet.se/shibboleth')
lnamn = [d_to_local_name(mds.attrc, attr) for attr in wants["optional"]]
assert _eq(lnamn, ['eduPersonPrincipalName', 'mail', 'givenName', 'sn',
'eduPersonScopedAffiliation'])
wants = mds.attribute_requirement('https://beta.lobber.se/shibboleth')
assert wants["required"] == []
lnamn = [d_to_local_name(mds.attrc, attr) for attr in wants["optional"]]
assert _eq(lnamn, ['eduPersonPrincipalName', 'mail', 'givenName', 'sn',
'eduPersonScopedAffiliation', 'eduPersonEntitlement'])
def test_incommon_1():
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["2"])
print mds.entities()
assert mds.entities() > 1700
idps = mds.with_descriptor("idpsso")
print idps.keys()
assert len(idps) > 300 # ~ 18%
try:
_ = mds.single_sign_on_service('urn:mace:incommon:uiuc.edu')
except UnknownPrincipal:
pass
idpsso = mds.single_sign_on_service('urn:mace:incommon:alaska.edu')
assert len(idpsso) == 1
print idpsso
assert destinations(idpsso) == [
'https://idp.alaska.edu/idp/profile/SAML2/Redirect/SSO']
sps = mds.with_descriptor("spsso")
acs_sp = []
for nam, desc in sps.items():
if "attribute_consuming_service" in desc:
acs_sp.append(nam)
assert len(acs_sp) == 0
# Look for attribute authorities
aas = mds.with_descriptor("attribute_authority")
print aas.keys()
assert len(aas) == 180
def test_ext_2():
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["3"])
# No specific binding defined
ents = mds.with_descriptor("spsso")
for binding in [BINDING_SOAP, BINDING_HTTP_POST, BINDING_HTTP_ARTIFACT,
BINDING_HTTP_REDIRECT]:
assert mds.single_logout_service(ents.keys()[0], binding, "spsso")
def test_example():
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["4"])
assert len(mds.keys()) == 1
idps = mds.with_descriptor("idpsso")
assert idps.keys() == [
'http://xenosmilus.umdc.umu.se/simplesaml/saml2/idp/metadata.php']
certs = mds.certs(
'http://xenosmilus.umdc.umu.se/simplesaml/saml2/idp/metadata.php',
"idpsso", "signing")
assert len(certs) == 1
def test_switch_1():
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["5"])
assert len(mds.keys()) > 160
idps = mds.with_descriptor("idpsso")
print idps.keys()
idpsso = mds.single_sign_on_service(
'https://aai-demo-idp.switch.ch/idp/shibboleth')
assert len(idpsso) == 1
print idpsso
assert destinations(idpsso) == [
'https://aai-demo-idp.switch.ch/idp/profile/SAML2/Redirect/SSO']
assert len(idps) > 30
aas = mds.with_descriptor("attribute_authority")
print aas.keys()
aad = aas['https://aai-demo-idp.switch.ch/idp/shibboleth']
print aad.keys()
assert len(aad["attribute_authority_descriptor"]) == 1
assert len(aad["idpsso_descriptor"]) == 1
sps = mds.with_descriptor("spsso")
dual = [eid for eid, ent in idps.items() if eid in sps]
print len(dual)
assert len(dual) == 0
def test_metadata_file():
sec_config.xmlsec_binary = sigver.get_xmlsec_binary(["/opt/local/bin"])
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["8"])
print len(mds.keys())
assert len(mds.keys()) == 560
def test_mdx_service():
sec_config.xmlsec_binary = sigver.get_xmlsec_binary(["/opt/local/bin"])
http = HTTPBase(verify=False, ca_bundle=None)
mdx = MetaDataMDX(ONTS.values(), ATTRCONV, "http://pyff-test.nordu.net",
sec_config, None, http)
foo = mdx.service("https://idp.umu.se/saml2/idp/metadata.php",
"idpsso_descriptor", "single_sign_on_service")
assert len(foo) == 1
assert foo.keys()[0] == BINDING_HTTP_REDIRECT
def test_mdx_certs():
sec_config.xmlsec_binary = sigver.get_xmlsec_binary(["/opt/local/bin"])
http = HTTPBase(verify=False, ca_bundle=None)
mdx = MetaDataMDX(ONTS.values(), ATTRCONV, "http://pyff-test.nordu.net",
sec_config, None, http)
foo = mdx.certs("https://idp.umu.se/saml2/idp/metadata.php", "idpsso")
assert len(foo) == 1
if __name__ == "__main__":
test_mdx_certs()
| bsd-2-clause | 1,031,400,579,156,348,700 | 30.063241 | 104 | 0.630233 | false | 3.077134 | true | false | false |
python-xlib/python-xlib | examples/xdamage.py | 1 | 4638 | #!/usr/bin/python
#
# examples/xdamage.py -- demonstrate damage extension
#
# Copyright (C) 2019 Mohit Garg <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
# Python 2/3 compatibility.
from __future__ import print_function
import sys
import os
# Change path so we find Xlib
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from Xlib import display, X, threaded,Xutil
import time
try:
import thread
except ModuleNotFoundError:
import _thread as thread
from Xlib.ext import damage
from PIL import Image, ImageTk
import traceback
def redraw(win, gc):
# win.clear_area()
win.fill_rectangle(gc, 0, 0, 60, 60)
def blink(display, win, gc, cols):
while 1:
time.sleep(2)
print('Changing color', cols[0])
gc.change(foreground = cols[0])
cols = (cols[1], cols[0])
redraw(win, gc)
display.flush()
def get_image_from_win(win, pt_w, pt_h, pt_x=0, pt_y=0):
try:
raw = win.get_image(pt_x, pt_y, pt_w, pt_h, X.ZPixmap, 0xffffffff)
image = Image.frombytes("RGB", (pt_w, pt_h), raw.data, "raw", "BGRX")
return image
except Exception:
traceback.print_exc()
def check_ext(disp):
# Check for extension
if not disp.has_extension('DAMAGE'):
sys.stderr.write('server does not have the DAMAGE extension\n')
sys.stderr.write("\n".join(disp.list_extensions()))
if disp.query_extension('DAMAGE') is None:
sys.exit(1)
else:
r = disp.damage_query_version()
print('DAMAGE version {}.{}'.format(r.major_version, r.minor_version))
def main():
d = display.Display()
root = d.screen().root
check_ext(d)
colormap = d.screen().default_colormap
red = colormap.alloc_named_color("red").pixel
blue = colormap.alloc_named_color("blue").pixel
background = colormap.alloc_named_color("white").pixel
window1 = root.create_window(100, 100, 250, 100, 1,
X.CopyFromParent, X.InputOutput,
X.CopyFromParent,
background_pixel = background,
event_mask = X.StructureNotifyMask | X.ExposureMask)
window1.set_wm_name('Changing Window')
window1.map()
gc = window1.create_gc(foreground = red)
thread.start_new_thread(blink, (d, window1, gc, (blue, red)))
window1.damage_create(damage.DamageReportRawRectangles)
window1.set_wm_normal_hints(
flags=(Xutil.PPosition | Xutil.PSize | Xutil.PMinSize),
min_width=50,
min_height=50
)
window2 = root.create_window(100, 250, 250, 100, 1,
X.CopyFromParent, X.InputOutput,
X.CopyFromParent,
background_pixel = background,
event_mask = X.StructureNotifyMask | X.ExposureMask)
window2.set_wm_normal_hints(
flags=(Xutil.PPosition | Xutil.PSize | Xutil.PMinSize),
min_width=50,
min_height=50
)
window2.set_wm_name('Tracking Window')
window2.map()
while 1:
event = d.next_event()
if event.type == X.Expose:
if event.count == 0:
redraw(window1, gc)
elif event.type == d.extension_event.DamageNotify:
image = get_image_from_win(window1, event.area.width, event.area.height, event.area.x, event.area.y)
bgpm = window2.create_pixmap(image.width, image.height, d.screen().root_depth)
bggc = window2.create_gc(foreground=0, background=0)
bgpm.put_pil_image(bggc, 0, 0, image)
window2.copy_area(bggc, bgpm, 0, 0, image.width, image.height, 0, 0)
# bggc.free()
elif event.type == X.DestroyNotify:
sys.exit(0)
if __name__ == "__main__":
main()
| lgpl-2.1 | -16,081,992,391,398,216 | 32.366906 | 112 | 0.609314 | false | 3.540458 | false | false | false |
lindegroup/lookback | config/models.py | 1 | 1764 | # This Python file uses the following encoding: utf-8
# Part of the Lookback project (https://github.com/lindegroup/lookback)
# Copyright 2015 The Linde Group Computer Support, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
from django.db import models
from config.crypt import CryptoHelper
class Configuration(models.Model):
"""This model stores all configuration options for accessing the external
system."""
url = models.URLField('URL')
user = models.CharField('Username', max_length=255)
password = models.CharField('Password', max_length=512)
# System type choices
JSS = 'JSS'
SYSTEM_TYPE_CHOICES = (
(JSS, 'JSS'),
)
system_type = models.CharField('System Type',
choices=SYSTEM_TYPE_CHOICES,
default=JSS,
max_length=32)
def save(self, *args, **kwargs):
"""automatically encrypt during save"""
helper = CryptoHelper()
self.password = helper.encrypt(self.password)
super(Configuration, self).save(*args, **kwargs)
def decrypt_password(self):
helper = CryptoHelper()
return helper.decrypt(self.password)
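# Example usage (a sketch only, not part of the original module; it assumes a
# configured Django environment and the key material used by
# config.crypt.CryptoHelper; all values below are placeholders):
#
#   cfg = Configuration(url="https://jss.example.com", user="admin",
#                       password="secret", system_type=Configuration.JSS)
#   cfg.save()                        # the password is encrypted on save
#   plaintext = cfg.decrypt_password()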
| apache-2.0 | 6,610,376,122,505,525,000 | 35.75 | 77 | 0.670635 | false | 4.281553 | false | false | false |
louiejtaylor/pyViKO | examples/batch.py | 1 | 1173 | if __name__ == '__main__':
#####
##temporary dev hack
import os,time
os.chdir('..')
#####
from pyviko import core, mutation, restriction
#ovr = core.readFasta('test/dem/over/2000.fasta')
#toKO = core.readFasta('test/dem/ko/2000.fasta')
'''#True batch script
t1 = time.time()
for i in range(len(toKO)):
m = mutation.Mutant(toKO[i][1],numMutations=1,regEx=True)
m.setOverGene(overSeq = ovr[i][1])
#print toKO[i][1]
print m.findMutants(ignoreRxSites = False, rSiteLength='all')[:5]
print
print m.findMutants()[:5]
print "done "+str(i)
print
print time.time()-t1
'''
#testing RC search
ovr=['ATGATTACCCGGGTTTCCCAAAGGGTTTCATCCTAA']
z=''' TTACCCGGGTTTCCCAAAGGGTTTCAT'''
toKO = ['ATGAAACCCTTTGGGAAACCCGGGTAA']
t1 = time.time()
for i in range(len(toKO))[:1]:
		m = mutation.Mutant(toKO[i],numMutations=1,regEx=True)
		m.setOverGene(overSeq = ovr[i])
#print toKO[i][1]
print m.findMutants(ignoreRxSites = False, rSiteLength='all')[:5]
print
print m.findMutants()[:5]
print "done "+str(i)
print
print time.time()-t1
#overlaps = [core.findOverlap(toKO[i][1],ovr[i][1]) for i in range(len(toKO))]
#print overlaps
| mit | 7,485,453,507,291,470,000 | 26.302326 | 79 | 0.658994 | false | 2.423554 | false | false | false |
tangledhelix/dp_pp_utils | make_project.py | 1 | 11438 | #!/usr/bin/env python3
import json
import requests
import os
import sys
import re
import shutil
from os.path import basename
from jinja2 import Template
from subprocess import call
from trello import TrelloClient
from zipfile import ZipFile
AUTH_CONFIG = "auth-config.json"
TRELLO_TEMPLATE = "TEMPLATE: PPgen workflow"
PGDP_URL = "https://www.pgdp.net"
GITHUB_REMOTE = "origin"
GITHUB_BRANCH = "main"
# Set true to assume we'll use ppgen; false otherwise
PPGEN = True
class MakeProject():
def __init__(self):
self.dp_base = f"{os.environ['HOME']}/dp"
self.projects_base = f"{self.dp_base}/pp"
self.template_dir = f"{self.dp_base}/util/templates"
self.params = {}
self.trello_template = TRELLO_TEMPLATE
with open(f"{self.dp_base}/util/{AUTH_CONFIG}") as file:
self.auth = json.loads(file.read())
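    # Rough shape of auth-config.json as this script reads it (a sketch
    # reconstructed from the keys used elsewhere in this file; every value
    # below is a placeholder, not a real credential):
    #
    #   {
    #     "pgdp":     {"username": "...", "password": "..."},
    #     "git_site": "github",             # or "gitlab"
    #     "github":   {"username": "...", "password": "..."},
    #     "gitlab":   "<private token>",
    #     "trello":   {"api_key": "...", "api_secret": "...",
    #                  "token": "...", "token_secret": "..."}
    #   }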
def get_param(self, param_name, prompt_text):
param_answer = input(f"{prompt_text}: ")
if param_name == "project_id":
param_answer = param_answer.replace("projectID", "")
self.params[param_name] = param_answer
def get_params(self):
self.get_param("project_name", 'Project name, e.g. "missfairfax"')
self.get_param("project_id", 'Project ID, e.g. "projectID5351bd1e5eca9"')
self.project_dir = f"{self.projects_base}/{self.params['project_name']}"
#self.params["kindlegen_dir"] = self.dp_base + "/kindlegen"
def pgdp_login(self):
payload = {
"destination": "/c/",
"userNM": self.auth["pgdp"]["username"],
"userPW": self.auth["pgdp"]["password"],
}
r = requests.post(f"{PGDP_URL}/c/accounts/login.php", data=payload)
if r.status_code != 200:
print("Error: unable to log into DP site")
sys.exit(1)
self.dp_cookie = r.headers["Set-Cookie"].split(";")[0]
def scrape_project_info(self):
r = requests.post(
f"{PGDP_URL}/c/project.php?id=projectID{self.params['project_id']}",
headers={"Cookie": self.dp_cookie}
)
if r.status_code != 200:
print("Error: unable to retrieve DP project info")
sys.exit(1)
html_doc = re.sub(r"\n", "", r.text)
self.params["title"] = re.sub(
#
# This version broke on cavalry. Changing " to <, see if it works
# r'.*<td[^>]+><b>Title</b></td><td[^>]+>([^"]+)</td>.*',
#
# This version broke on irishjournal, the site updated
# to use th instead of tr... updating to match site.
#r'.*<td[^>]+><b>Title</b></td><td[^>]+>([^<]+)</td>.*',
#
r'.*<th\s+class=["\']label["\']>Title</th>\s*<td[^>]+>([^<]+)</td>.*',
r"\1",
html_doc
)
self.params["author"] = re.sub(
# This version broke on irishjournal, the site updated
# to use th instead of tr... updating to match site.
#r'.*<td[^>]+><b>Author</b></td><td[^>]+>([^<]+)</td>.*',
#
r'.*<th\s+class=["\']label["\']>Author</th>\s*<td[^>]+>([^<]+)</td>.*',
r"\1",
html_doc
)
#<tr><th class='label'>Forum</th><td colspan='4'><a href='https://www.pgdp.net/phpBB3/viewtopic.php?t=63502'>Discuss this project</a> (19 replies)</td></tr>
self.params["forum_link"] = re.sub(
# This version broke on irishjournal, the site updated
# to use th instead of tr... updating to match site.
#r".*<td[^>]+><b>Forum</b></td><td[^>]+><a href='([^']+)'>.*",
#<a href='([^']+)'>
#
r".*<th\s+class=.label.>Forum</th>\s*<td[^>]+>\s*<a href='([^']+)'.*",
r"\1",
html_doc
)
def create_directories(self):
os.mkdir(self.project_dir, mode=0o755)
os.chdir(self.project_dir)
os.mkdir("images", mode=0o755)
os.mkdir("illustrations", mode=0o755)
os.mkdir("pngs", mode=0o755)
def create_git_repository(self):
call(["git", "init"])
call(["git", "add", "."])
call(["git", "commit", "-m", "Initial import from DP"])
call(["git", "remote", "add", GITHUB_REMOTE, self.git_remote_url])
call(["git", "push", "-u", GITHUB_REMOTE, GITHUB_BRANCH])
def process_template(self, src_filename, dst_filename=None):
if not dst_filename:
dst_filename = src_filename
with open(f"{self.template_dir}/{src_filename}") as file:
template = Template(file.read())
with open(f"{self.project_dir}/{dst_filename}", "w") as file:
file.write(template.render(self.params))
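    # The templates are plain Jinja2, so anything placed in self.params (title,
    # author, project_name, project_id, forum_link, trello_url) can be
    # referenced by name. A hypothetical line in such a template might read:
    #
    #   # {{ title }}, by {{ author }} (DP project {{ project_id }})
    #
    # The real templates live in util/templates and are not shown here.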
def copy_text_file(self):
project_id = self.params["project_id"]
project_name = self.params["project_name"]
project_dir = self.project_dir
input_file = f"{project_dir}/projectID{project_id}.txt"
if PPGEN:
output_file = f"{project_dir}/{project_name}-src.txt"
else:
output_file = f"{project_dir}/{project_name}-utf8.txt"
shutil.copyfile(input_file, output_file)
def make_github_repo(self):
headers = {
"Accept": "application/vnd.github.v3+json",
"Content-Type": "application/json",
}
payload = {
"name": f"DP_{self.params['project_name']}",
"description": 'DP PP project "{self.params["title"]}" ID {self.params["project_id"]}',
"private": False,
"has_issues": False,
"has_wiki": False,
"has_downloads": False,
"auto_init": False,
}
auth_data = (
self.auth["github"]["username"],
self.auth["github"]["password"],
)
r = requests.post("https://api.github.com/user/repos",
auth=auth_data, headers=headers,
data=json.dumps(payload))
if r.status_code == 201:
print("Created GitHub repository")
json_response = json.loads(r.text)
self.git_remote_url = json_response["clone_url"].replace(
"github.com",
self.auth["github"]["username"] + "@github.com"
)
else:
print(f"ERROR: GitHub response code {r.status_code} unexpected.")
def make_gitlab_repo(self):
headers = {
"Content-Type": "application/json",
"PRIVATE-TOKEN": self.auth["gitlab"],
}
payload = {
"name": f"DP_{self.params['project_name']}",
"description": f'DP PP project "{self.params["title"]}" ID {self.params["project_id"]}',
"visibility": "private",
"issues_enabled": False,
"merge_requests_enabled": False,
"jobs_enabled": False,
"wiki_enabled": False,
"snippets_enabled": False,
"container_registry_enabled": False,
"shared_runners_enabled": False,
"lfs_enabled": False,
"request_access_enabled": False,
}
r = requests.post("https://gitlab.com/api/v4/projects",
headers=headers,
data=json.dumps(payload))
if r.status_code == 201:
print("Created Gitlab repository")
json_response = json.loads(r.text)
self.git_remote_url = json_response["ssh_url_to_repo"]
else:
print(f"ERROR: Gitlab response code {r.status_code} unexpected.")
print(r.text)
def make_online_repo(self):
if self.auth["git_site"] == "github":
project.make_github_repo()
elif self.auth["git_site"] == "gitlab":
project.make_gitlab_repo()
def make_trello_board(self):
client = TrelloClient(
api_key=self.auth["trello"]["api_key"],
api_secret=self.auth["trello"]["api_secret"],
token=self.auth["trello"]["token"],
token_secret=self.auth["trello"]["token_secret"],
)
template = None
for board in client.list_boards():
if board.name == self.trello_template:
template = board
break
new_board = client.add_board(
f"DP: {self.params['title']}",
source_board=template,
permission_level="public"
)
for _list in new_board.list_lists():
if _list.name == "Notes":
for _card in _list.list_cards():
if _card.name == "Project info":
info_card = _card
break
break
new_description = info_card.desc.replace(
"{{PROJECT_NAME}}", self.params["project_name"]
).replace(
"{{PROJECT_ID}}", self.params["project_id"]
)
info_card.set_description(new_description)
self.params["trello_url"] = new_board.url
print(f"Created Trello board - {new_board.url}")
def download_text(self):
print("Downloading text from DP ...", end="", flush=True)
zipfile = f"projectID{self.params['project_id']}.zip"
url = f"{PGDP_URL}/projects/projectID{self.params['project_id']}/projectID{self.params['project_id']}.zip"
r = requests.get(url, headers={"Cookie": self.dp_cookie})
with open(zipfile, "wb") as file:
file.write(r.content)
self.unzip_file(zipfile, self.project_dir)
print(" done.")
def download_images(self):
print("Downloading images from DP ...", end="", flush=True)
zipfile = f"projectID{self.params['project_id']}images.zip"
url = f"{PGDP_URL}/c/tools/download_images.php?projectid=projectID{self.params['project_id']}"
r = requests.get(url, headers={"Cookie": self.dp_cookie})
with open(zipfile, "wb") as file:
file.write(r.content)
self.unzip_file(zipfile, f"{self.project_dir}/pngs")
print(" done.")
def unzip_file(self, filename, path):
with ZipFile(filename, "r") as zip_ref:
zip_ref.extractall(path)
os.remove(filename)
if __name__ == "__main__":
# By default, create remote resources like Trello & GitHub.
CREATE_REMOTE = True
# Process arguments, if any
if len(sys.argv) >= 2:
if sys.argv[1] == "-h" or sys.argv[1] == "--help":
print(f"Usage: {sys.argv[0]} [<option(s)>]")
print(" -h, --help: print this help")
print(" -l, --local: only create local resources (for debug)")
sys.exit(1)
elif sys.argv[1] == "-l" or sys.argv[1] == "--local":
CREATE_REMOTE = False
project = MakeProject()
project.get_params()
project.pgdp_login()
project.scrape_project_info()
project.create_directories()
project.download_text()
project.download_images()
# Make a copy of the text to work on
project.copy_text_file()
if CREATE_REMOTE:
project.make_online_repo()
project.make_trello_board()
project.process_template("Makefile")
project.process_template("README.md")
project.process_template("pp-gitignore", ".gitignore")
if CREATE_REMOTE:
# This is only done if remote, because it will try to push.
project.create_git_repository()
| mit | -6,135,142,467,199,165,000 | 34.302469 | 164 | 0.539255 | false | 3.633418 | false | false | false |
sahlinet/fastapp | fastapp/api_serializers.py | 1 | 3087 | from rest_framework import serializers
from rest_framework.reverse import reverse
from fastapp.models import Base, Apy, Setting, Counter, TransportEndpoint, Transaction, LogEntry
import logging
logger = logging.getLogger(__name__)
class CounterSerializer(serializers.ModelSerializer):
class Meta:
model = Counter
fields = ('executed', 'failed')
class LogSerializer(serializers.ModelSerializer):
class Meta:
model = LogEntry
fields = ('level', 'msg', 'created', )
class TransactionSerializer(serializers.ModelSerializer):
logs = LogSerializer(many=True, read_only=True)
class Meta:
model = Transaction
fields = ('rid', 'tin', 'tout', 'status', 'created', 'modified', 'async', 'logs', )
class ApySerializer(serializers.ModelSerializer):
counter = CounterSerializer(many=False, read_only=True)
class Meta:
model = Apy
fields = ('id', 'name', 'module', 'counter', 'description', 'public', 'schedule', 'everyone')
def save_object(self, obj, **kwargs):
logger.debug("save_and_sync")
obj.save_and_sync(**kwargs)
class PublicApySerializer(serializers.ModelSerializer):
"""
    Return all Apy objects which are made public. Enrich each entry with the
    creator's name, the base name and a detail URL.
"""
first_lastname = serializers.SerializerMethodField(method_name="creator")
base = serializers.SerializerMethodField(method_name="base_name")
url = serializers.SerializerMethodField(method_name="detail_view")
class Meta:
model = Apy
fields = ('id', 'name', 'module', 'description',
'first_lastname', 'url', 'base')
def creator(self, obj):
try:
user = obj.base.user
return user.first_name + " " + user.last_name
except Base.DoesNotExist, e:
logger.warn(e)
def base_name(self, obj):
return obj.base.name
def detail_view(self, obj):
return reverse('public-apy-detail', args=[obj.pk],
request=self.context['request'])
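# For illustration only (field values invented): an Apy serialized through
# PublicApySerializer comes out roughly as
#   {"id": 1, "name": "echo", "module": "...", "description": "...",
#    "first_lastname": "Jane Doe", "base": "mybase",
#    "url": "<absolute URL of the public-apy-detail view for this object>"}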
class SettingSerializer(serializers.ModelSerializer):
class Meta:
model = Setting
fields = ('id', 'key', 'value', 'public')
class TransportEndpointSerializer(serializers.ModelSerializer):
class Meta:
model = TransportEndpoint
fields = ('id', 'url', 'override_settings_priv',
'override_settings_pub', 'token')
class BaseSerializer(serializers.ModelSerializer):
apy = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
state = serializers.Field()
executors = serializers.Field()
foreign_apys = serializers.HyperlinkedRelatedField(
many=True,
read_only=False,
view_name='public-apy-detail'
)
class Meta:
model = Base
fields = ('id', 'name', 'state', 'uuid',
'executors', 'content', 'foreign_apys', 'public', 'static_public',)
def save_object(self, obj, **kwargs):
super(BaseSerializer, self).save_object(obj, **kwargs)
logger.debug("save_and_sync")
obj.save_and_sync(**kwargs)
| mit | -5,149,677,560,269,219,000 | 29.564356 | 101 | 0.63816 | false | 4.056505 | false | false | false |
kern3020/opportunity | opportunity/tracker/migrations/0002_auto__add_mentorship.py | 1 | 13807 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Mentorship'
db.create_table('tracker_mentorship', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('jobseeker', self.gf('django.db.models.fields.related.ForeignKey')(related_name='jobseeker', to=orm['tracker.UserProfile'])),
('mentor', self.gf('django.db.models.fields.related.ForeignKey')(related_name='mentor', to=orm['tracker.UserProfile'])),
('coach', self.gf('django.db.models.fields.related.ForeignKey')(related_name='coach', to=orm['tracker.UserProfile'])),
('startDate', self.gf('django.db.models.fields.DateField')()),
))
db.send_create_signal('tracker', ['Mentorship'])
def backwards(self, orm):
# Deleting model 'Mentorship'
db.delete_table('tracker_mentorship')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'tracker.apply': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Apply'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Position']", 'unique': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'when': ('django.db.models.fields.DateField', [], {})
},
'tracker.company': {
'Meta': {'ordering': "['name']", 'object_name': 'Company'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'division': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'state_province': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'zipCode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'})
},
'tracker.conversation': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Conversation'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Person']", 'unique': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'via': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'when': ('django.db.models.fields.DateField', [], {})
},
'tracker.gratitude': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Gratitude'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Person']", 'unique': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'when': ('django.db.models.fields.DateField', [], {})
},
'tracker.interview': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Interview'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Position']", 'unique': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'when': ('django.db.models.fields.DateField', [], {}),
'withWhom': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Person']"})
},
'tracker.lunch': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Lunch'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'venue': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'when': ('django.db.models.fields.DateField', [], {}),
'withWhom': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Person']"})
},
'tracker.mentorship': {
'Meta': {'object_name': 'Mentorship'},
'coach': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'coach'", 'to': "orm['tracker.UserProfile']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jobseeker': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'jobseeker'", 'to': "orm['tracker.UserProfile']"}),
'mentor': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'mentor'", 'to': "orm['tracker.UserProfile']"}),
'startDate': ('django.db.models.fields.DateField', [], {})
},
'tracker.networking': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Networking'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'venue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Company']", 'unique': 'True'}),
'when': ('django.db.models.fields.DateField', [], {})
},
'tracker.onlinepresence': {
'Meta': {'ordering': "['name']", 'object_name': 'OnlinePresence'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"})
},
'tracker.par': {
'Meta': {'ordering': "['question']", 'object_name': 'PAR'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'par_response': ('django.db.models.fields.TextField', [], {}),
'question': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"})
},
'tracker.person': {
'Meta': {'object_name': 'Person'},
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Company']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"})
},
'tracker.pitch': {
'Meta': {'object_name': 'Pitch'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'thePitch': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"})
},
'tracker.position': {
'Meta': {'ordering': "['title']", 'object_name': 'Position'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Company']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'tracker.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['tracker'] | mit | -6,980,998,740,839,843,000 | 71.673684 | 182 | 0.539581 | false | 3.700616 | false | false | false |
adhish20/TwitterWithCassandra | users/forms.py | 1 | 1902 | import uuid
from django import forms
import cass
class LoginForm(forms.Form):
username = forms.CharField(max_length=30)
password = forms.CharField(widget=forms.PasswordInput(render_value=False))
def clean(self):
username = self.cleaned_data['username']
password = self.cleaned_data['password']
try:
user = cass.get_user_by_username(username)
except cass.DatabaseError:
raise forms.ValidationError(u'Invalid username and/or password')
if user.get('password') != password:
raise forms.ValidationError(u'Invalid username and/or password')
return self.cleaned_data
def get_username(self):
return self.cleaned_data['username']
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^\w+$', max_length=30)
password1 = forms.CharField(widget=forms.PasswordInput(render_value=False))
password2 = forms.CharField(widget=forms.PasswordInput(render_value=False))
def clean_username(self):
username = self.cleaned_data['username']
try:
cass.get_user_by_username(username)
raise forms.ValidationError(u'Username is already taken')
except cass.DatabaseError:
pass
return username
def clean(self):
if ('password1' in self.cleaned_data and 'password2' in
self.cleaned_data):
password1 = self.cleaned_data['password1']
password2 = self.cleaned_data['password2']
if password1 != password2:
raise forms.ValidationError(
u'You must type the same password each time')
return self.cleaned_data
def save(self):
username = self.cleaned_data['username']
password = self.cleaned_data['password1']
cass.save_user(username, password)
return username
| mit | 3,853,147,340,635,723,000 | 34.222222 | 79 | 0.638275 | false | 4.303167 | false | false | false |
tobiasgehring/qudi | logic/sequence_generator_logic.py | 1 | 52322 | # -*- coding: utf-8 -*-
"""
This file contains the Qudi sequence generator logic for general sequence structure.
Qudi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Qudi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Qudi. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) the Qudi Developers. See the COPYRIGHT.txt file at the
top-level directory of this distribution and at <https://github.com/Ulm-IQO/qudi/>
"""
import numpy as np
import pickle
import os
import time
from qtpy import QtCore
from collections import OrderedDict
import inspect
import importlib
import sys
from logic.pulse_objects import PulseBlockElement
from logic.pulse_objects import PulseBlock
from logic.pulse_objects import PulseBlockEnsemble
from logic.pulse_objects import PulseSequence
from logic.generic_logic import GenericLogic
from logic.sampling_functions import SamplingFunctions
from logic.samples_write_methods import SamplesWriteMethods
class SequenceGeneratorLogic(GenericLogic, SamplingFunctions, SamplesWriteMethods):
"""unstable: Nikolas Tomek
This is the Logic class for the pulse (sequence) generation.
    The basic communication with the GUI should be done as follows:
    The logic holds all the created objects in its internal lists. The GUI is
    able to view this list and get the elements of this list.
    How the logic constructs its objects is determined by configuration dicts.
    The configuration dicts essentially contain which parameters of either the
    PulseBlockElement objects or the PulseBlock objects can be changed and
    set via the GUI.
    In the end the information transfer happens through lists (read by the GUI)
    and dicts (set by the GUI). The logic sets (creates) the objects in the list
    and reads the dict, which tells it which parameters to expect from the GUI.
"""
_modclass = 'sequencegeneratorlogic'
_modtype = 'logic'
# define signals
sigBlockDictUpdated = QtCore.Signal(dict)
sigEnsembleDictUpdated = QtCore.Signal(dict)
sigSequenceDictUpdated = QtCore.Signal(dict)
sigSampleEnsembleComplete = QtCore.Signal(str, np.ndarray, np.ndarray)
sigSampleSequenceComplete = QtCore.Signal(str, list)
sigCurrentBlockUpdated = QtCore.Signal(object)
sigCurrentEnsembleUpdated = QtCore.Signal(object)
sigCurrentSequenceUpdated = QtCore.Signal(object)
sigSettingsUpdated = QtCore.Signal(list, str, float, dict, str)
sigPredefinedSequencesUpdated = QtCore.Signal(dict)
sigPredefinedSequenceGenerated = QtCore.Signal(str)
def __init__(self, config, **kwargs):
super().__init__(config=config, **kwargs)
self.log.info('The following configuration was found.')
# checking for the right configuration
for key in config.keys():
self.log.info('{0}: {1}'.format(key, config[key]))
# Get all the attributes from the SamplingFunctions module:
SamplingFunctions.__init__(self)
# Get all the attributes from the SamplesWriteMethods module:
SamplesWriteMethods.__init__(self)
# here the currently shown data objects of the editors should be stored
self.current_block = None
self.current_ensemble = None
self.current_sequence = None
# The created PulseBlock objects are saved in this dictionary. The keys are the names.
self.saved_pulse_blocks = OrderedDict()
# The created PulseBlockEnsemble objects are saved in this dictionary.
# The keys are the names.
self.saved_pulse_block_ensembles = OrderedDict()
# The created Sequence objects are saved in this dictionary. The keys are the names.
self.saved_pulse_sequences = OrderedDict()
if 'pulsed_file_dir' in config.keys():
self.pulsed_file_dir = config['pulsed_file_dir']
if not os.path.exists(self.pulsed_file_dir):
homedir = self.get_home_dir()
self.pulsed_file_dir = os.path.join(homedir, 'pulsed_files')
                self.log.warning('The directory defined in "pulsed_file_dir" in the config for '
'SequenceGeneratorLogic class does not exist! The default home '
'directory\n{0}'
'\nwill be taken instead.'.format(self.pulsed_file_dir))
else:
homedir = self.get_home_dir()
self.pulsed_file_dir = os.path.join(homedir, 'pulsed_files')
self.log.warning('No directory with the attribute "pulsed_file_dir" is defined for the '
'SequenceGeneratorLogic! The default home directory\n{0}\nwill be '
'taken instead.'.format(self.pulsed_file_dir))
# Byte size of the max. memory usage during sampling/write-to-file process
if 'overhead_bytes' in config.keys():
self.sampling_overhead_bytes = config['overhead_bytes']
else:
self.sampling_overhead_bytes = None
self.log.warning('No max. memory overhead specified in config.\nIn order to avoid '
'memory overflow during sampling/writing of Pulse objects you must '
'set "overhead_bytes".')
# directory for additional generate methods to import
# (other than qudi/logic/predefined_methods)
if 'additional_methods_dir' in config.keys():
if os.path.exists(config['additional_methods_dir']):
self.additional_methods_dir = config['additional_methods_dir']
else:
self.additional_methods_dir = None
self.log.error('Specified path "{0}" for import of additional generate methods '
'does not exist.'.format(config['additional_methods_dir']))
else:
self.additional_methods_dir = None
self.block_dir = self._get_dir_for_name('pulse_block_objects')
self.ensemble_dir = self._get_dir_for_name('pulse_ensemble_objects')
self.sequence_dir = self._get_dir_for_name('sequence_objects')
self.waveform_dir = self._get_dir_for_name('sampled_hardware_files')
self.temp_dir = self._get_dir_for_name('temporary_files')
# Information on used channel configuration for sequence generation
# IMPORTANT: THIS CONFIG DOES NOT REPRESENT THE ACTUAL SETTINGS ON THE HARDWARE
self.analog_channels = 2
self.digital_channels = 4
self.activation_config = ['a_ch1', 'd_ch1', 'd_ch2', 'a_ch2', 'd_ch3', 'd_ch4']
self.laser_channel = 'd_ch1'
self.amplitude_dict = OrderedDict({'a_ch1': 0.5, 'a_ch2': 0.5, 'a_ch3': 0.5, 'a_ch4': 0.5})
self.sample_rate = 25e9
# The file format for the sampled hardware-compatible waveforms and sequences
self.waveform_format = 'wfmx' # can be 'wfmx', 'wfm' or 'fpga'
self.sequence_format = 'seq' # only .seq file format
# a dictionary with all predefined generator methods and measurement sequence names
self.generate_methods = None
def on_activate(self):
""" Initialisation performed during activation of the module.
"""
self._get_blocks_from_file()
self._get_ensembles_from_file()
self._get_sequences_from_file()
self._attach_predefined_methods()
if 'activation_config' in self._statusVariables:
self.activation_config = self._statusVariables['activation_config']
if 'laser_channel' in self._statusVariables:
self.laser_channel = self._statusVariables['laser_channel']
if 'amplitude_dict' in self._statusVariables:
self.amplitude_dict = self._statusVariables['amplitude_dict']
if 'sample_rate' in self._statusVariables:
self.sample_rate = self._statusVariables['sample_rate']
if 'waveform_format' in self._statusVariables:
self.waveform_format = self._statusVariables['waveform_format']
self.analog_channels = len([chnl for chnl in self.activation_config if 'a_ch' in chnl])
self.digital_channels = len([chnl for chnl in self.activation_config if 'd_ch' in chnl])
self.sigSettingsUpdated.emit(self.activation_config, self.laser_channel, self.sample_rate,
self.amplitude_dict, self.waveform_format)
def on_deactivate(self):
""" Deinitialisation performed during deactivation of the module.
"""
self._statusVariables['activation_config'] = self.activation_config
self._statusVariables['laser_channel'] = self.laser_channel
self._statusVariables['amplitude_dict'] = self.amplitude_dict
self._statusVariables['sample_rate'] = self.sample_rate
self._statusVariables['waveform_format'] = self.waveform_format
def _attach_predefined_methods(self):
"""
        Retrieve all files for predefined methods from the predefined_methods folder (and,
        if configured, the additional methods directory) and attach their methods to this class.
@return:
"""
self.generate_methods = OrderedDict()
filenames_list = []
additional_filenames_list = []
# The assumption is that in the directory predefined_methods, there are
# *.py files, which contain only methods!
path = os.path.join(self.get_main_dir(), 'logic', 'predefined_methods')
for entry in os.listdir(path):
filepath = os.path.join(path, entry)
if os.path.isfile(filepath) and entry.endswith('.py'):
filenames_list.append(entry[:-3])
# Also attach methods from the non-default additional methods directory if defined in config
if self.additional_methods_dir is not None:
# attach to path
sys.path.append(self.additional_methods_dir)
for entry in os.listdir(self.additional_methods_dir):
filepath = os.path.join(self.additional_methods_dir, entry)
if os.path.isfile(filepath) and entry.endswith('.py'):
additional_filenames_list.append(entry[:-3])
for filename in filenames_list:
mod = importlib.import_module('logic.predefined_methods.{0}'.format(filename))
# To allow changes in predefined methods during runtime by simply reloading
# sequence_generator_logic.
importlib.reload(mod)
for method in dir(mod):
try:
# Check for callable function or method:
ref = getattr(mod, method)
if callable(ref) and (inspect.ismethod(ref) or inspect.isfunction(ref)):
# Bind the method as an attribute to the Class
setattr(SequenceGeneratorLogic, method, getattr(mod, method))
# Add method to dictionary if it is a generator method
if method.startswith('generate_'):
self.generate_methods[method[9:]] = eval('self.'+method)
except:
self.log.error('It was not possible to import element {0} from {1} into '
'SequenceGenerationLogic.'.format(method, filename))
for filename in additional_filenames_list:
mod = importlib.import_module(filename)
for method in dir(mod):
try:
# Check for callable function or method:
ref = getattr(mod, method)
if callable(ref) and (inspect.ismethod(ref) or inspect.isfunction(ref)):
# Bind the method as an attribute to the Class
setattr(SequenceGeneratorLogic, method, getattr(mod, method))
# Add method to dictionary if it is a generator method
if method.startswith('generate_'):
self.generate_methods[method[9:]] = eval('self.'+method)
except:
self.log.error('It was not possible to import element {0} from {1} into '
                                   'SequenceGenerationLogic.'.format(method, filename))
self.sigPredefinedSequencesUpdated.emit(self.generate_methods)
return
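    # For orientation: a predefined-methods module simply contains functions
    # whose names start with 'generate_'. Each one is bound to this class and
    # registered in self.generate_methods under the name without that prefix,
    # so a hypothetical file providing
    #
    #     def generate_laser_on(self, name='laser_on', length=3e-6):
    #         ...  # build and save a PulseBlock/PulseBlockEnsemble here
    #
    # would afterwards be reachable as self.generate_methods['laser_on'].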
def _get_dir_for_name(self, name):
""" Get the path to the pulsed sub-directory 'name'.
@param str name: name of the folder
@return: str, absolute path to the directory with folder 'name'.
"""
path = os.path.join(self.pulsed_file_dir, name)
if not os.path.exists(path):
os.makedirs(os.path.abspath(path))
return os.path.abspath(path)
def request_init_values(self):
"""
@return:
"""
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
self.sigCurrentBlockUpdated.emit(self.current_block)
self.sigCurrentEnsembleUpdated.emit(self.current_ensemble)
self.sigCurrentSequenceUpdated.emit(self.current_sequence)
self.sigSettingsUpdated.emit(self.activation_config, self.laser_channel, self.sample_rate,
self.amplitude_dict, self.waveform_format)
self.sigPredefinedSequencesUpdated.emit(self.generate_methods)
return
def set_settings(self, activation_config, laser_channel, sample_rate, amplitude_dict, waveform_format):
"""
Sets all settings for the generator logic.
@param activation_config:
@param laser_channel:
@param sample_rate:
@param amplitude_dict:
@param waveform_format:
@return:
"""
# check if the currently chosen laser channel is part of the config and adjust if this
# is not the case. Choose first digital channel in that case.
if laser_channel not in activation_config:
laser_channel = None
for channel in activation_config:
if 'd_ch' in channel:
laser_channel = channel
break
if laser_channel is None:
self.log.warning('No digital channel present in sequence generator activation '
'config.')
self.laser_channel = laser_channel
self.activation_config = activation_config
self.analog_channels = len([chnl for chnl in activation_config if 'a_ch' in chnl])
self.digital_channels = len([chnl for chnl in activation_config if 'd_ch' in chnl])
self.amplitude_dict = amplitude_dict
self.sample_rate = sample_rate
self.waveform_format = waveform_format
self.sigSettingsUpdated.emit(activation_config, laser_channel, sample_rate, amplitude_dict,
waveform_format)
return self.activation_config, self.laser_channel, self.sample_rate, self.amplitude_dict, \
waveform_format
# -----------------------------------------------------------------------------
# BEGIN sequence/block generation
# -----------------------------------------------------------------------------
def get_saved_asset(self, name):
"""
Returns the data object for a saved Ensemble/Sequence with name "name". Searches in the
        saved assets for a Sequence object first. If no Sequence by that name could be found, search
        for Ensembles instead. If neither could be found, return None.
@param name: Name of the Sequence/Ensemble
@return: PulseSequence | PulseBlockEnsemble | None
"""
if name == '':
asset_obj = None
elif name in list(self.saved_pulse_sequences):
asset_obj = self.saved_pulse_sequences[name]
elif name in list(self.saved_pulse_block_ensembles):
asset_obj = self.saved_pulse_block_ensembles[name]
else:
asset_obj = None
self.log.warning('No PulseSequence or PulseBlockEnsemble by the name "{0}" could be '
'found in saved assets. Returning None.'.format(name))
return asset_obj
def save_block(self, name, block):
""" Serialize a PulseBlock object to a *.blk file.
@param name: string, name of the block to save
@param block: PulseBlock object which will be serialized
"""
# TODO: Overwrite handling
block.name = name
self.current_block = block
self.saved_pulse_blocks[name] = block
self._save_blocks_to_file()
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
self.sigCurrentBlockUpdated.emit(self.current_block)
return
def load_block(self, name):
"""
@param name:
@return:
"""
if name not in self.saved_pulse_blocks:
self.log.error('PulseBlock "{0}" could not be found in saved pulse blocks. Load failed.'
''.format(name))
return
block = self.saved_pulse_blocks[name]
self.current_block = block
self.sigCurrentBlockUpdated.emit(self.current_block)
return
def delete_block(self, name):
""" Remove the serialized object "name" from the block list and HDD.
@param name: string, name of the PulseBlock object to be removed.
"""
if name in list(self.saved_pulse_blocks):
del(self.saved_pulse_blocks[name])
if hasattr(self.current_block, 'name'):
if self.current_block.name == name:
self.current_block = None
self.sigCurrentBlockUpdated.emit(self.current_block)
self._save_blocks_to_file()
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
else:
self.log.warning('PulseBlock object with name "{0}" not found in saved '
'blocks.\nTherefore nothing is removed.'.format(name))
return
def _get_blocks_from_file(self):
""" Update the saved_pulse_block dict from file """
block_files = [f for f in os.listdir(self.block_dir) if 'block_dict.blk' in f]
if len(block_files) == 0:
self.log.info('No serialized block dict was found in {0}.'.format(self.block_dir))
self.saved_pulse_blocks = OrderedDict()
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
return
# raise error if more than one file is present
if len(block_files) > 1:
self.log.error('More than one serialized block dict was found in {0}.\n'
'Using {1}.'.format(self.block_dir, block_files[-1]))
block_files = block_files[-1]
try:
with open(os.path.join(self.block_dir, block_files), 'rb') as infile:
self.saved_pulse_blocks = pickle.load(infile)
except:
self.saved_pulse_blocks = OrderedDict()
            self.log.error('Failed to deserialize block dict "{0}" from "{1}".'
''.format(block_files, self.block_dir))
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
return
def _save_blocks_to_file(self):
""" Saves the saved_pulse_block dict to file """
try:
with open(os.path.join(self.block_dir, 'block_dict.blk.tmp'), 'wb') as outfile:
pickle.dump(self.saved_pulse_blocks, outfile)
except:
            self.log.error('Failed to serialize block dict in "{0}".'
''.format(os.path.join(self.block_dir, 'block_dict.blk.tmp')))
return
# remove old file and rename temp file
try:
os.rename(os.path.join(self.block_dir, 'block_dict.blk.tmp'),
os.path.join(self.block_dir, 'block_dict.blk'))
except WindowsError:
os.remove(os.path.join(self.block_dir, 'block_dict.blk'))
os.rename(os.path.join(self.block_dir, 'block_dict.blk.tmp'),
os.path.join(self.block_dir, 'block_dict.blk'))
return
def save_ensemble(self, name, ensemble):
""" Saves a PulseBlockEnsemble with name name to file.
@param str name: name of the ensemble, which will be serialized.
@param obj ensemble: a PulseBlockEnsemble object
"""
# TODO: Overwrite handling
ensemble.name = name
self.current_ensemble = ensemble
self.saved_pulse_block_ensembles[name] = ensemble
self._save_ensembles_to_file()
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
self.sigCurrentEnsembleUpdated.emit(self.current_ensemble)
return
def load_ensemble(self, name):
"""
@param name:
@return:
"""
if name not in self.saved_pulse_block_ensembles:
self.log.error('PulseBlockEnsemble "{0}" could not be found in saved pulse block '
'ensembles. Load failed.'.format(name))
return
ensemble = self.saved_pulse_block_ensembles[name]
# set generator settings if found in ensemble metadata
if ensemble.sample_rate is not None:
self.sample_rate = ensemble.sample_rate
if ensemble.amplitude_dict is not None:
self.amplitude_dict = ensemble.amplitude_dict
if ensemble.activation_config is not None:
self.activation_config = ensemble.activation_config
if ensemble.laser_channel is not None:
self.laser_channel = ensemble.laser_channel
self.sigSettingsUpdated.emit(self.activation_config, self.laser_channel, self.sample_rate,
self.amplitude_dict, self.waveform_format)
self.current_ensemble = ensemble
self.sigCurrentEnsembleUpdated.emit(ensemble)
return
def delete_ensemble(self, name):
""" Remove the ensemble with 'name' from the ensemble list and HDD. """
if name in list(self.saved_pulse_block_ensembles):
del(self.saved_pulse_block_ensembles[name])
if hasattr(self.current_ensemble, 'name'):
if self.current_ensemble.name == name:
self.current_ensemble = None
self.sigCurrentEnsembleUpdated.emit(self.current_ensemble)
self._save_ensembles_to_file()
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
else:
self.log.warning('PulseBlockEnsemble object with name "{0}" not found in saved '
'ensembles.\nTherefore nothing is removed.'.format(name))
return
def _get_ensembles_from_file(self):
""" Update the saved_pulse_block_ensembles dict from file """
ensemble_files = [f for f in os.listdir(self.ensemble_dir) if 'ensemble_dict.ens' in f]
if len(ensemble_files) == 0:
self.log.info('No serialized ensembles dict was found in {0}.'
''.format(self.ensemble_dir))
self.saved_pulse_block_ensembles = OrderedDict()
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
return
# raise error if more than one file is present
if len(ensemble_files) > 1:
self.log.error('More than one serialized ensemble dict was found in {0}.\n'
'Using {1}.'.format(self.ensemble_dir, ensemble_files[-1]))
ensemble_files = ensemble_files[-1]
try:
with open(os.path.join(self.ensemble_dir, ensemble_files), 'rb') as infile:
self.saved_pulse_block_ensembles = pickle.load(infile)
except:
self.saved_pulse_block_ensembles = OrderedDict()
self.log.error('Failed to deserialize ensemble dict "{0}" from "{1}".'
''.format(ensemble_files, self.ensemble_dir))
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
return
def _save_ensembles_to_file(self):
""" Saves the saved_pulse_block_ensembles dict to file """
try:
with open(os.path.join(self.ensemble_dir, 'ensemble_dict.ens.tmp'), 'wb') as outfile:
pickle.dump(self.saved_pulse_block_ensembles, outfile)
except:
self.log.error('Failed to serialize ensemble dict in "{0}".'
''.format(os.path.join(self.ensemble_dir, 'ensemble_dict.ens.tmp')))
return
# remove old file and rename temp file
try:
os.rename(os.path.join(self.ensemble_dir, 'ensemble_dict.ens.tmp'),
os.path.join(self.ensemble_dir, 'ensemble_dict.ens'))
except WindowsError:
os.remove(os.path.join(self.ensemble_dir, 'ensemble_dict.ens'))
os.rename(os.path.join(self.ensemble_dir, 'ensemble_dict.ens.tmp'),
os.path.join(self.ensemble_dir, 'ensemble_dict.ens'))
return
def save_sequence(self, name, sequence):
""" Serialize the PulseSequence object with name 'name' to file.
@param str name: name of the sequence object.
@param object sequence: a PulseSequence object, which is going to be
serialized to file.
@return: str: name of the serialized object, if needed.
"""
# TODO: Overwrite handling
sequence.name = name
self.current_sequence = sequence
self.saved_pulse_sequences[name] = sequence
self._save_sequences_to_file()
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
self.sigCurrentSequenceUpdated.emit(self.current_sequence)
def load_sequence(self, name):
"""
@param name:
@return:
"""
if name not in self.saved_pulse_sequences:
self.log.error('PulseSequence "{0}" could not be found in saved pulse sequences. '
'Load failed.'.format(name))
return
sequence = self.saved_pulse_sequences[name]
        # set generator settings if found in sequence metadata
if sequence.sample_rate is not None:
self.sample_rate = sequence.sample_rate
if sequence.amplitude_dict is not None:
self.amplitude_dict = sequence.amplitude_dict
if sequence.activation_config is not None:
self.activation_config = sequence.activation_config
if sequence.laser_channel is not None:
self.laser_channel = sequence.laser_channel
self.sigSettingsUpdated.emit(self.activation_config, self.laser_channel, self.sample_rate,
self.amplitude_dict, self.waveform_format)
self.current_sequence = sequence
self.sigCurrentSequenceUpdated.emit(sequence)
return
def delete_sequence(self, name):
""" Remove the sequence "name" from the sequence list and HDD.
@param str name: name of the sequence object, which should be deleted.
"""
if name in list(self.saved_pulse_sequences):
del(self.saved_pulse_sequences[name])
if hasattr(self.current_sequence, 'name'):
if self.current_sequence.name == name:
self.current_sequence = None
self.sigCurrentSequenceUpdated.emit(self.current_sequence)
self._save_sequences_to_file()
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
else:
            self.log.warning('PulseSequence object with name "{0}" not found in saved '
                             'sequences.\nTherefore nothing is removed.'.format(name))
return
def generate_predefined_sequence(self, predefined_sequence_name, args):
"""
@param predefined_sequence_name:
@param args:
@return:
"""
gen_method = self.generate_methods[predefined_sequence_name]
try:
gen_method(*args)
except:
self.log.error('Generation of predefined sequence "{0}" failed.'
''.format(predefined_sequence_name))
return
self.sigPredefinedSequenceGenerated.emit(predefined_sequence_name)
return
def _get_sequences_from_file(self):
""" Update the saved_pulse_sequences dict from file """
sequence_files = [f for f in os.listdir(self.sequence_dir) if 'sequence_dict.sequ' in f]
if len(sequence_files) == 0:
self.log.info('No serialized sequence dict was found in {0}.'.format(self.sequence_dir))
self.saved_pulse_sequences = OrderedDict()
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
return
# raise error if more than one file is present
if len(sequence_files) > 1:
self.log.error('More than one serialized sequence dict was found in {0}.\n'
'Using {1}.'.format(self.sequence_dir, sequence_files[-1]))
sequence_files = sequence_files[-1]
try:
with open(os.path.join(self.sequence_dir, sequence_files), 'rb') as infile:
self.saved_pulse_sequences = pickle.load(infile)
except:
self.saved_pulse_sequences = OrderedDict()
self.log.error('Failed to deserialize sequence dict "{0}" from "{1}".'
''.format(sequence_files, self.sequence_dir))
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
return
def _save_sequences_to_file(self):
""" Saves the saved_pulse_sequences dict to file """
try:
with open(os.path.join(self.sequence_dir, 'sequence_dict.sequ.tmp'), 'wb') as outfile:
pickle.dump(self.saved_pulse_sequences, outfile)
except:
            self.log.error('Failed to serialize sequence dict in "{0}".'
                           ''.format(os.path.join(self.sequence_dir, 'sequence_dict.sequ.tmp')))
return
# remove old file and rename temp file
try:
os.rename(os.path.join(self.sequence_dir, 'sequence_dict.sequ.tmp'),
os.path.join(self.sequence_dir, 'sequence_dict.sequ'))
except WindowsError:
os.remove(os.path.join(self.sequence_dir, 'sequence_dict.sequ'))
os.rename(os.path.join(self.sequence_dir, 'sequence_dict.sequ.tmp'),
os.path.join(self.sequence_dir, 'sequence_dict.sequ'))
return
#---------------------------------------------------------------------------
# END sequence/block generation
#---------------------------------------------------------------------------
#---------------------------------------------------------------------------
# BEGIN sequence/block sampling
#---------------------------------------------------------------------------
def _analyze_block_ensemble(self, ensemble):
"""
@param ensemble:
@return:
"""
state_length_bins_arr = np.array([], dtype=int)
number_of_elements = 0
for block, reps in ensemble.block_list:
number_of_elements += (reps+1)*len(block.element_list)
num_state_changes = (reps+1) * len(block.element_list)
tmp_length_bins = np.zeros(num_state_changes, dtype=int)
            # Iterate over all repetitions of the current block
state_index = 0
for rep_no in range(reps+1):
# Iterate over the Block_Elements inside the current block
for elem_index, block_element in enumerate(block.element_list):
init_length_s = block_element.init_length_s
increment_s = block_element.increment_s
element_length_s = init_length_s + (rep_no * increment_s)
tmp_length_bins[state_index] = int(np.rint(element_length_s * self.sample_rate))
state_index += 1
state_length_bins_arr = np.append(state_length_bins_arr, tmp_length_bins)
number_of_samples = np.sum(state_length_bins_arr)
number_of_states = len(state_length_bins_arr)
return number_of_samples, number_of_elements, number_of_states, state_length_bins_arr
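    # Worked example with hypothetical numbers: an ensemble made of one block with
    # 2 elements and reps=1 gives number_of_elements = (1+1)*2 = 4; if every element
    # lasts 100 ns at a 1 GHz sample_rate, each element occupies 100 bins, so
    # state_length_bins_arr = [100, 100, 100, 100], number_of_samples = 400 and
    # number_of_states = 4.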
def sample_pulse_block_ensemble(self, ensemble_name, write_to_file=True, offset_bin=0,
name_tag=None):
""" General sampling of a PulseBlockEnsemble object, which serves as the construction plan.
        @param str ensemble_name: Name, which should correlate with the name of one of the displayed
ensembles.
@param bool write_to_file: Write either to RAM or to File (depends on the available space
in RAM). If set to FALSE, this method will return the samples
(digital and analog) as numpy arrays
@param int offset_bin: If many pulse ensembles are samples sequentially, then the
offset_bin of the previous sampling can be passed to maintain
rotating frame across pulse_block_ensembles
@param str name_tag: a name tag, which is used to keep the sampled files together, which
where sampled from the same PulseBlockEnsemble object but where
different offset_bins were used.
@return tuple: of length 4 with
(analog_samples, digital_samples, [<created_files>], offset_bin).
analog_samples:
numpy arrays containing the sampled voltages
digital_samples:
numpy arrays containing the sampled logic levels
[<created_files>]:
list of strings, with the actual created files through the pulsing
device
offset_bin:
integer, which is used for maintaining the rotation frame.
This method is creating the actual samples (voltages and logic states) for each time step
of the analog and digital channels specified in the PulseBlockEnsemble.
Therefore it iterates through all blocks, repetitions and elements of the ensemble and
calculates the exact voltages (float64) according to the specified math_function. The
samples are later on stored inside a float32 array.
So each element is calculated with high precision (float64) and then down-converted to
float32 to be stored.
To preserve the rotating frame, an offset counter is used to indicate the absolute time
within the ensemble. All calculations are done with time bins (dtype=int) to avoid rounding
errors. Only in the last step when a single PulseBlockElement object is sampled these
integer bin values are translated into a floating point time.
The chunkwise write mode is used to save memory usage at the expense of time. Here for each
PulseBlockElement the write_to_file method in the HW module is called to avoid large
arrays inside the memory. In other words: The whole sample arrays are never created at any
time. This results in more function calls and general overhead causing the much longer time
to complete.
"""
# lock module if it's not already locked (sequence sampling in progress)
if self.getState() == 'idle':
self.lock()
sequence_sampling_in_progress = False
else:
sequence_sampling_in_progress = True
# determine if chunkwise writing is enabled (the overhead byte size is set)
chunkwise = self.sampling_overhead_bytes is not None
# Set the filename (excluding the channel naming suffix, i.e. '_ch1')
if name_tag is None:
filename = ensemble_name
else:
filename = name_tag
# check for old files associated with the new ensemble and delete them from host PC
if write_to_file:
# get sampled filenames on host PC referring to the same ensemble
            # Be careful: in contrast to Linux, the Windows file system is in general
            # case-insensitive. Therefore one needs to match and remove the old files
            # case-insensitively on Windows.
if 'win' in sys.platform:
# make it simple and make everything lowercase.
filename_list = [f for f in os.listdir(self.waveform_dir) if
f.lower().startswith(filename.lower() + '_ch')]
else:
filename_list = [f for f in os.listdir(self.waveform_dir) if
f.startswith(filename + '_ch')]
# delete all filenames in the list
for file in filename_list:
os.remove(os.path.join(self.waveform_dir, file))
if len(filename_list) != 0:
self.log.info('Found old sampled ensembles for name "{0}". Files deleted before '
'sampling: {1}'.format(filename, filename_list))
start_time = time.time()
# get ensemble
ensemble = self.saved_pulse_block_ensembles[ensemble_name]
# Ensemble parameters to determine the shape of sample arrays
ana_channels = ensemble.analog_channels
dig_channels = ensemble.digital_channels
ana_chnl_names = [chnl for chnl in self.activation_config if 'a_ch' in chnl]
dig_chnl_names = [chnl for chnl in self.activation_config if 'd_ch' in chnl]
if self.digital_channels != dig_channels or self.analog_channels != ana_channels:
self.log.error('Sampling of PulseBlockEnsemble "{0}" failed!\nMismatch in number of '
'analog and digital channels between logic ({1}, {2}) and '
'PulseBlockEnsemble ({3}, {4}).'
''.format(ensemble_name, self.analog_channels, self.digital_channels,
ana_channels, dig_channels))
return np.array([]), np.array([]), -1
number_of_samples, number_of_elements, number_of_states, state_length_bins_arr = self._analyze_block_ensemble(ensemble)
# The time bin offset for each element to be sampled to preserve rotating frame.
if chunkwise and write_to_file:
# Flags and counter for chunkwise writing
is_first_chunk = True
is_last_chunk = False
element_count = 0
else:
# Allocate huge sample arrays if chunkwise writing is disabled.
analog_samples = np.empty([ana_channels, number_of_samples], dtype = 'float32')
digital_samples = np.empty([dig_channels, number_of_samples], dtype = bool)
            # Starting index for the sample array entries
entry_ind = 0
# Iterate over all blocks within the PulseBlockEnsemble object
for block, reps in ensemble.block_list:
            # Iterate over all repetitions of the current block
for rep_no in range(reps+1):
# Iterate over the Block_Elements inside the current block
for elem_ind, block_element in enumerate(block.element_list):
parameters = block_element.parameters
init_length_s = block_element.init_length_s
increment_s = block_element.increment_s
digital_high = block_element.digital_high
pulse_function = block_element.pulse_function
element_length_s = init_length_s + (rep_no*increment_s)
element_length_bins = int(np.rint(element_length_s * self.sample_rate))
# create floating point time array for the current element inside rotating frame
time_arr = (offset_bin + np.arange(element_length_bins, dtype='float64')) / self.sample_rate
if chunkwise and write_to_file:
                    # determine if the current element is the last one to be sampled.
# Toggle the is_last_chunk flag accordingly.
element_count += 1
if element_count == number_of_elements:
is_last_chunk = True
# allocate temporary sample arrays to contain the current element
analog_samples = np.empty([ana_channels, element_length_bins], dtype='float32')
digital_samples = np.empty([dig_channels, element_length_bins], dtype=bool)
# actually fill the allocated sample arrays with values.
for i, state in enumerate(digital_high):
digital_samples[i] = np.full(element_length_bins, state, dtype=bool)
for i, func_name in enumerate(pulse_function):
analog_samples[i] = np.float32(self._math_func[func_name](time_arr, parameters[i])/self.amplitude_dict[ana_chnl_names[i]])
# write temporary sample array to file
self._write_to_file[self.waveform_format](filename, analog_samples,
digital_samples,
number_of_samples, is_first_chunk,
is_last_chunk)
# set flag to FALSE after first write
is_first_chunk = False
else:
# if the ensemble should be sampled as a whole (chunkwise = False) fill the
# entries in the huge sample arrays
for i, state in enumerate(digital_high):
digital_samples[i, entry_ind:entry_ind+element_length_bins] = np.full(element_length_bins, state, dtype=bool)
for i, func_name in enumerate(pulse_function):
analog_samples[i, entry_ind:entry_ind+element_length_bins] = np.float32(self._math_func[func_name](time_arr, parameters[i])/self.amplitude_dict[ana_chnl_names[i]])
# increment the index offset of the overall sample array for the next
# element
entry_ind += element_length_bins
# if the rotating frame should be preserved (default) increment the offset
# counter for the time array.
if ensemble.rotating_frame:
offset_bin += element_length_bins
if not write_to_file:
# return a status message with the time needed for sampling the entire ensemble as a
# whole without writing to file.
self.log.info('Time needed for sampling and writing PulseBlockEnsemble to file as a '
'whole: {0} sec.'.format(int(np.rint(time.time() - start_time))))
# return the sample arrays for write_to_file was set to FALSE
if not sequence_sampling_in_progress:
self.unlock()
self.sigSampleEnsembleComplete.emit(filename, analog_samples, digital_samples)
return analog_samples, digital_samples, offset_bin
elif chunkwise:
# return a status message with the time needed for sampling and writing the ensemble
# chunkwise.
self.log.info('Time needed for sampling and writing to file chunkwise: {0} sec'
''.format(int(np.rint(time.time()-start_time))))
if not sequence_sampling_in_progress:
self.unlock()
self.sigSampleEnsembleComplete.emit(filename, np.array([]), np.array([]))
return np.array([]), np.array([]), offset_bin
else:
# If the sampling should not be chunkwise and write to file is enabled call the
# write_to_file method only once with both flags set to TRUE
is_first_chunk = True
is_last_chunk = True
self._write_to_file[self.waveform_format](filename, analog_samples, digital_samples,
number_of_samples, is_first_chunk,
is_last_chunk)
# return a status message with the time needed for sampling and writing the ensemble as
# a whole.
self.log.info('Time needed for sampling and writing PulseBlockEnsemble to file as a '
'whole: {0} sec'.format(int(np.rint(time.time()-start_time))))
if not sequence_sampling_in_progress:
self.unlock()
self.sigSampleEnsembleComplete.emit(filename, np.array([]), np.array([]))
return np.array([]), np.array([]), offset_bin
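    # Usage sketch (ensemble names are illustrative, not taken from a real config): to
    # sample two ensembles back to back while preserving the rotating frame, chain the
    # returned offset_bin into the next call:
    #
    #     a1, d1, offset = self.sample_pulse_block_ensemble('ens_A', write_to_file=False)
    #     a2, d2, offset = self.sample_pulse_block_ensemble('ens_B', write_to_file=False,
    #                                                       offset_bin=offset)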
def sample_pulse_sequence(self, sequence_name, write_to_file=True):
""" Samples the PulseSequence object, which serves as the construction plan.
        @param str sequence_name: Name, which should correlate with the name of one of the displayed
                                  sequences.
@param bool write_to_file: Write either to RAM or to File (depends on the available space
in RAM). If set to FALSE, this method will return the samples
(digital and analog) as numpy arrays
        The sequence object is sampled by subsequently calling the sampling routine for the
        PulseBlockEnsemble objects and, if needed, passing the rotating frame option.
        Only those PulseBlockEnsemble objects that are different are sampled! These can be
        directly obtained from the internal attribute different_ensembles_dict of a PulseSequence.
        Right now two 'simple' methods of sampling are implemented, which reuse the sample
        function for the Pulse_Block_Ensembles. One samples by preserving the phase (i.e.
        staying in the rotating frame) and the other samples without keeping a phase
        relationship between the different entries of the PulseSequence object.
        More sophisticated sequence sampling methods can be implemented here.
"""
# lock module
if self.getState() == 'idle':
self.lock()
else:
self.log.error('Cannot sample sequence "{0}" because the sequence generator logic is '
'still busy (locked).\nFunction call ignored.'.format(sequence_name))
return
if write_to_file:
# get sampled filenames on host PC referring to the same ensemble
filename_list = [f for f in os.listdir(self.sequence_dir) if
f.startswith(sequence_name + '.seq')]
# delete all filenames in the list
for file in filename_list:
os.remove(os.path.join(self.sequence_dir, file))
if len(filename_list) != 0:
self.log.warning('Found old sequence for name "{0}". Files deleted before '
'sampling: {1}'.format(sequence_name, filename_list))
start_time = time.time()
ana_chnl_names = [chnl for chnl in self.activation_config if 'a_ch' in chnl]
ana_chnl_num = [int(chnl.split('ch')[-1]) for chnl in ana_chnl_names]
# get ensemble
sequence_obj = self.saved_pulse_sequences[sequence_name]
sequence_param_dict_list = []
# if all the Pulse_Block_Ensembles should be in the rotating frame, then each ensemble
# will be created in general with a different offset_bin. Therefore, in order to keep track
# of the sampled Pulse_Block_Ensembles one has to introduce a running number as an
        # additional name tag, to keep the sampled files separate.
if sequence_obj.rotating_frame:
ensemble_index = 0 # that will indicate the ensemble index
offset_bin = 0 # that will be used for phase preserving
for ensemble_obj, seq_param in sequence_obj.ensemble_param_list:
# to make something like 001
name_tag = sequence_name + '_' + str(ensemble_index).zfill(3)
dummy1, \
dummy2, \
offset_bin_return = self.sample_pulse_block_ensemble(ensemble_obj.name,
write_to_file=write_to_file,
offset_bin=offset_bin,
name_tag=name_tag)
# the temp_dict is a format how the sequence parameter will be saved
temp_dict = dict()
name_list = []
for ch_num in ana_chnl_num:
name_list.append(name_tag + '_ch' + str(ch_num) + '.' + self.waveform_format)
temp_dict['name'] = name_list
# update the sequence parameter to the temp dict:
temp_dict.update(seq_param)
# add the whole dict to the list of dicts, containing information about how to
# write the sequence properly in the hardware file:
sequence_param_dict_list.append(temp_dict)
# for the next run, the returned offset_bin will serve as starting point for
# phase preserving.
offset_bin = offset_bin_return
ensemble_index += 1
else:
            # if phase preservation between the sequence entries is not needed, then only the
# different ensembles will be sampled, since the offset_bin does not matter for them:
for ensemble_name in sequence_obj.different_ensembles_dict:
self.sample_pulse_block_ensemble(ensemble_name, write_to_file=write_to_file,
offset_bin=0, name_tag=None)
# go now through the sequence list and replace all the entries with the output of the
# sampled ensemble file:
for ensemble_obj, seq_param in sequence_obj.ensemble_param_list:
temp_dict = dict()
name_list = []
for ch_num in ana_chnl_num:
name_list.append(ensemble_obj.name + '_ch' + str(ch_num) + '.' + self.waveform_format)
temp_dict['name'] = name_list
# update the sequence parameter to the temp dict:
temp_dict.update(seq_param)
sequence_param_dict_list.append(temp_dict)
if write_to_file:
# pass the whole information to the sequence creation method:
self._write_to_file[self.sequence_format](sequence_name, sequence_param_dict_list)
self.log.info('Time needed for sampling and writing Pulse Sequence to file: {0} sec.'
''.format(int(np.rint(time.time() - start_time))))
else:
self.log.info('Time needed for sampling Pulse Sequence: {0} sec.'
''.format(int(np.rint(time.time() - start_time))))
# unlock module
self.unlock()
self.sigSampleSequenceComplete.emit(sequence_name, sequence_param_dict_list)
return
#---------------------------------------------------------------------------
# END sequence/block sampling
#---------------------------------------------------------------------------
| gpl-3.0 | -7,307,263,554,439,006,000 | 50.447394 | 191 | 0.595218 | false | 4.409405 | true | false | false |
rcgee/oq-hazardlib | openquake/hazardlib/site.py | 1 | 18827 | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2012-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Module :mod:`openquake.hazardlib.site` defines :class:`Site`.
"""
import numpy
from openquake.baselib.python3compat import range
from openquake.baselib.slots import with_slots
from openquake.baselib.general import split_in_blocks
from openquake.hazardlib.geo.mesh import Mesh
from openquake.hazardlib.geo.utils import cross_idl
@with_slots
class Site(object):
"""
Site object represents a geographical location defined by its position
as well as its soil characteristics.
:param location:
Instance of :class:`~openquake.hazardlib.geo.point.Point` representing
where the site is located.
:param vs30:
Average shear wave velocity in the top 30 m, in m/s.
:param vs30measured:
Boolean value, ``True`` if ``vs30`` was measured on that location
and ``False`` if it was inferred.
:param z1pt0:
Vertical distance from earth surface to the layer where seismic waves
start to propagate with a speed above 1.0 km/sec, in meters.
:param z2pt5:
Vertical distance from earth surface to the layer where seismic waves
start to propagate with a speed above 2.5 km/sec, in km.
    :param backarc:
Boolean value, ``True`` if the site is in the subduction backarc and
``False`` if it is in the subduction forearc or is unknown
:raises ValueError:
If any of ``vs30``, ``z1pt0`` or ``z2pt5`` is zero or negative.
.. note::
:class:`Sites <Site>` are pickleable
"""
_slots_ = 'location vs30 vs30measured z1pt0 z2pt5 backarc'.split()
def __init__(self, location, vs30, vs30measured, z1pt0, z2pt5,
backarc=False):
if not vs30 > 0:
raise ValueError('vs30 must be positive')
if not z1pt0 > 0:
raise ValueError('z1pt0 must be positive')
if not z2pt5 > 0:
raise ValueError('z2pt5 must be positive')
self.location = location
self.vs30 = vs30
self.vs30measured = vs30measured
self.z1pt0 = z1pt0
self.z2pt5 = z2pt5
self.backarc = backarc
def __str__(self):
"""
>>> import openquake.hazardlib
>>> loc = openquake.hazardlib.geo.point.Point(1, 2, 3)
>>> str(Site(loc, 760.0, True, 100.0, 5.0))
'<Location=<Latitude=2.000000, Longitude=1.000000, Depth=3.0000>, \
Vs30=760.0000, Vs30Measured=True, Depth1.0km=100.0000, Depth2.5km=5.0000, \
Backarc=False>'
"""
return (
"<Location=%s, Vs30=%.4f, Vs30Measured=%r, Depth1.0km=%.4f, "
"Depth2.5km=%.4f, Backarc=%r>") % (
self.location, self.vs30, self.vs30measured, self.z1pt0,
self.z2pt5, self.backarc)
def __hash__(self):
return hash((self.location.x, self.location.y))
def __eq__(self, other):
return (self.location.x, self.location.y) == (
other.location.x, other.location.y)
def __repr__(self):
"""
>>> import openquake.hazardlib
>>> loc = openquake.hazardlib.geo.point.Point(1, 2, 3)
>>> site = Site(loc, 760.0, True, 100.0, 5.0)
>>> str(site) == repr(site)
True
"""
return self.__str__()
def _extract(array_or_float, indices):
try: # if array
return array_or_float[indices]
except TypeError: # if float
return array_or_float
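# For example, _extract(numpy.array([1., 2., 3.]), [0, 2]) returns array([1., 3.]),
# while _extract(760.0, [0, 2]) returns 760.0 -- scalar site parameters (as produced
# by SiteCollection.from_points) are shared by every site and need no indexing.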
@with_slots
class SiteCollection(object):
"""
A collection of :class:`sites <Site>`.
Instances of this class are intended to represent a large collection
of sites in a most efficient way in terms of memory usage.
.. note::
Because calculations assume that :class:`Sites <Site>` are on the
Earth's surface, all `depth` information in a :class:`SiteCollection`
is discarded. The collection `mesh` will only contain lon and lat. So
even if a :class:`SiteCollection` is created from sites containing
`depth` in their geometry, iterating over the collection will yield
:class:`Sites <Site>` with a reference depth of 0.0.
:param sites:
A list of instances of :class:`Site` class.
"""
dtype = numpy.dtype([
('sids', numpy.uint32),
('lons', numpy.float64),
('lats', numpy.float64),
('_vs30', numpy.float64),
('_vs30measured', numpy.bool),
('_z1pt0', numpy.float64),
('_z2pt5', numpy.float64),
('_backarc', numpy.bool),
])
_slots_ = dtype.names
@classmethod
def from_points(cls, lons, lats, sitemodel):
"""
Build the site collection from
:param lons:
a sequence of longitudes
:param lats:
a sequence of latitudes
:param sitemodel:
an object containing the attributes
reference_vs30_value,
reference_vs30_type,
reference_depth_to_1pt0km_per_sec,
reference_depth_to_2pt5km_per_sec,
reference_backarc
"""
assert len(lons) == len(lats), (len(lons), len(lats))
self = cls.__new__(cls)
self.complete = self
self.total_sites = len(lons)
self.sids = numpy.arange(len(lons), dtype=numpy.uint32)
self.lons = numpy.array(lons)
self.lats = numpy.array(lats)
self._vs30 = sitemodel.reference_vs30_value
self._vs30measured = sitemodel.reference_vs30_type == 'measured'
self._z1pt0 = sitemodel.reference_depth_to_1pt0km_per_sec
self._z2pt5 = sitemodel.reference_depth_to_2pt5km_per_sec
self._backarc = sitemodel.reference_backarc
return self
def __init__(self, sites):
self.complete = self
self.total_sites = n = len(sites)
self.sids = numpy.zeros(n, dtype=int)
self.lons = numpy.zeros(n, dtype=float)
self.lats = numpy.zeros(n, dtype=float)
self._vs30 = numpy.zeros(n, dtype=float)
self._vs30measured = numpy.zeros(n, dtype=bool)
self._z1pt0 = numpy.zeros(n, dtype=float)
self._z2pt5 = numpy.zeros(n, dtype=float)
self._backarc = numpy.zeros(n, dtype=bool)
for i in range(n):
self.sids[i] = i
self.lons[i] = sites[i].location.longitude
self.lats[i] = sites[i].location.latitude
self._vs30[i] = sites[i].vs30
self._vs30measured[i] = sites[i].vs30measured
self._z1pt0[i] = sites[i].z1pt0
self._z2pt5[i] = sites[i].z2pt5
self._backarc[i] = sites[i].backarc
# protect arrays from being accidentally changed. it is useful
# because we pass these arrays directly to a GMPE through
# a SiteContext object and if a GMPE is implemented poorly it could
# modify the site values, thereby corrupting site and all the
# subsequent calculation. note that this doesn't protect arrays from
# being changed by calling itemset()
for arr in (self._vs30, self._vs30measured, self._z1pt0, self._z2pt5,
self.lons, self.lats, self._backarc, self.sids):
arr.flags.writeable = False
def __toh5__(self):
array = numpy.zeros(self.total_sites, self.dtype)
for slot in self._slots_:
array[slot] = getattr(self, slot)
attrs = dict(total_sites=self.total_sites)
return array, attrs
def __fromh5__(self, array, attrs):
for slot in self._slots_:
setattr(self, slot, array[slot])
vars(self).update(attrs)
self.complete = self
@property
def mesh(self):
"""Return a mesh with the given lons and lats"""
return Mesh(self.lons, self.lats, depths=None)
@property
def indices(self):
"""The full set of indices from 0 to total_sites - 1"""
return numpy.arange(0, self.total_sites)
def split_in_tiles(self, hint):
"""
Split a SiteCollection into a set of tiles (SiteCollection instances).
:param hint: hint for how many tiles to generate
"""
tiles = []
for seq in split_in_blocks(range(len(self)), hint or 1):
indices = numpy.array(seq, int)
sc = SiteCollection.__new__(SiteCollection)
sc.complete = sc
sc.total_sites = len(indices)
sc.sids = self.sids[indices]
sc.lons = self.lons[indices]
sc.lats = self.lats[indices]
sc._vs30 = _extract(self._vs30, indices)
sc._vs30measured = _extract(self._vs30measured, indices)
sc._z1pt0 = _extract(self._z1pt0, indices)
sc._z2pt5 = _extract(self._z2pt5, indices)
sc._backarc = _extract(self._backarc, indices)
tiles.append(sc)
return tiles
def __iter__(self):
"""
Iterate through all :class:`sites <Site>` in the collection, yielding
one at a time.
"""
if isinstance(self.vs30, float): # from points
for i, location in enumerate(self.mesh):
yield Site(location, self._vs30, self._vs30measured,
self._z1pt0, self._z2pt5, self._backarc)
else: # from sites
for i, location in enumerate(self.mesh):
yield Site(location, self.vs30[i], self.vs30measured[i],
self.z1pt0[i], self.z2pt5[i], self.backarc[i])
def filter(self, mask):
"""
Create a FilteredSiteCollection with only a subset of sites
from this one.
:param mask:
Numpy array of boolean values of the same length as this sites
collection. ``True`` values should indicate that site with that
index should be included into the filtered collection.
:returns:
A new :class:`FilteredSiteCollection` instance, unless all the
values in ``mask`` are ``True``, in which case this site collection
is returned, or if all the values in ``mask`` are ``False``,
in which case method returns ``None``. New collection has data
of only those sites that were marked for inclusion in mask.
See also :meth:`expand`.
"""
assert len(mask) == len(self), (len(mask), len(self))
if mask.all():
# all sites satisfy the filter, return
# this collection unchanged
return self
if not mask.any():
# no sites pass the filter, return None
return None
# extract indices of Trues from the mask
[indices] = mask.nonzero()
return FilteredSiteCollection(indices, self)
def expand(self, data, placeholder):
"""
For non-filtered site collections just checks that data
has the right number of elements and returns it. It is
here just for API compatibility with filtered site collections.
"""
assert len(data) == len(self), (len(data), len(self))
return data
def __len__(self):
"""
Return the number of sites in the collection.
"""
return self.total_sites
def __repr__(self):
return '<SiteCollection with %d sites>' % self.total_sites
# adding a number of properties for the site model data
for name in 'vs30 vs30measured z1pt0 z2pt5 backarc'.split():
def getarray(sc, name=name): # sc is a SiteCollection
value = getattr(sc, '_' + name)
if isinstance(value, (float, bool)):
arr = numpy.array([value] * len(sc), dtype=type(value))
arr.flags.writeable = False
return arr
else:
return value
setattr(SiteCollection, name, property(getarray, doc='%s array' % name))
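# For example, a SiteCollection built via from_points with reference_vs30_value=760.0
# stores that scalar once, and sc.vs30 materializes a read-only numpy array of length
# len(sc) filled with 760.0; a collection built from Site objects simply returns the
# per-site array stored at construction time.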
@with_slots
class FilteredSiteCollection(object):
"""
A class meant to store proper subsets of a complete collection of sites
in a memory-efficient way.
:param indices:
an array of indices referring to the complete site collection
:param complete:
the complete site collection the filtered collection was
derived from
Notice that if you filter a FilteredSiteCollection `fsc`, you will
get a different FilteredSiteCollection referring to the complete
SiteCollection `fsc.complete`, not to the filtered collection `fsc`.
"""
_slots_ = 'indices complete'.split()
def __init__(self, indices, complete):
if complete is not complete.complete:
raise ValueError(
'You should pass a full site collection, not %s' % complete)
self.indices = indices
self.complete = complete
@property
def total_sites(self):
"""The total number of the original sites, without filtering"""
return self.complete.total_sites
@property
def mesh(self):
"""Return a mesh with the given lons and lats"""
return Mesh(self.lons, self.lats, depths=None)
def filter(self, mask):
"""
Create a FilteredSiteCollection with only a subset of sites
from this one.
:param mask:
Numpy array of boolean values of the same length as this
filtered sites collection. ``True`` values should indicate
that site with that index should be included into the
filtered collection.
:returns:
A new :class:`FilteredSiteCollection` instance, unless all the
values in ``mask`` are ``True``, in which case this site collection
is returned, or if all the values in ``mask`` are ``False``,
in which case method returns ``None``. New collection has data
of only those sites that were marked for inclusion in mask.
See also :meth:`expand`.
"""
assert len(mask) == len(self), (len(mask), len(self))
if mask.all():
return self
elif not mask.any():
return None
indices = self.indices.take(mask.nonzero()[0])
return FilteredSiteCollection(indices, self.complete)
def expand(self, data, placeholder):
"""
Expand a short array `data` over a filtered site collection of the
same length and return a long array of size `total_sites` filled
with the placeholder.
The typical workflow is the following: there is a whole site
collection, the one that has an information about all the sites.
Then it gets filtered for performing some calculation on a limited
set of sites (like for instance filtering sites by their proximity
to a rupture). That filtering process can be repeated arbitrary
number of times, i.e. a collection that is already filtered can
be filtered for further limiting the set of sites to compute on.
Then the (supposedly expensive) computation is done on a limited
set of sites which still appears as just a :class:`SiteCollection`
instance, so that computation code doesn't need to worry about
filtering, it just needs to handle site collection objects. The
calculation result comes in a form of 1d or 2d numpy array (that
is, either one value per site or one 1d array per site) with length
equal to number of sites in a filtered collection. That result
needs to be expanded to an array of similar structure but the one
that holds values for all the sites in the original (unfiltered)
collection. This is what :meth:`expand` is for. It creates a result
array of ``total_sites`` length and puts values from ``data`` into
appropriate places in it remembering indices of sites that were
chosen for actual calculation and leaving ``placeholder`` value
everywhere else.
:param data:
1d or 2d numpy array with first dimension representing values
computed for site from this collection.
:param placeholder:
A scalar value to be put in result array for those sites that
were filtered out and no real calculation was performed for them.
:returns:
Array of length ``total_sites`` with values from ``data``
distributed in the appropriate places.
"""
len_data = data.shape[0]
assert len_data == len(self), (len_data, len(self))
assert len_data <= self.total_sites
assert self.indices[-1] < self.total_sites, (
self.indices[-1], self.total_sites)
if data.ndim == 1:
# single-dimensional array
result = numpy.empty(self.total_sites)
result.fill(placeholder)
result.put(self.indices, data)
return result
assert data.ndim == 2
# two-dimensional array
num_values = data.shape[1]
result = numpy.empty((self.total_sites, num_values))
result.fill(placeholder)
for i in range(num_values):
result[:, i].put(self.indices, data[:, i])
return result
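        # Worked example (hypothetical values): with total_sites == 5 and
        # indices == [0, 2, 4], expand(numpy.array([7., 8., 9.]), placeholder=0.)
        # returns [7., 0., 8., 0., 9.] -- computed values land at their original
        # site positions while filtered-out sites keep the placeholder.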
def __iter__(self):
"""
Iterate through all :class:`sites <Site>` in the collection, yielding
one at a time.
"""
for i, location in enumerate(self.mesh):
yield Site(location, self.vs30[i], self.vs30measured[i],
self.z1pt0[i], self.z2pt5[i], self.backarc[i])
def __len__(self):
"""Return the number of filtered sites"""
return len(self.indices)
def __repr__(self):
return '<FilteredSiteCollection with %d of %d sites>' % (
len(self.indices), self.total_sites)
def _extract_site_param(fsc, name):
# extract the site parameter 'name' from the filtered site collection
return getattr(fsc.complete, name).take(fsc.indices)
# attach a number of properties filtering the arrays
for name in 'vs30 vs30measured z1pt0 z2pt5 backarc lons lats sids'.split():
prop = property(
lambda fsc, name=name: _extract_site_param(fsc, name),
doc='Extract %s array from FilteredSiteCollection' % name)
setattr(FilteredSiteCollection, name, prop)
| agpl-3.0 | 877,749,385,576,351,600 | 37.738683 | 79 | 0.616296 | false | 3.922292 | false | false | false |
gsnbng/erpnext | erpnext/loan_management/doctype/loan_disbursement/test_loan_disbursement.py | 1 | 3110 | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from frappe.utils import (nowdate, add_days, get_datetime, get_first_day, get_last_day, date_diff, flt, add_to_date)
from erpnext.loan_management.doctype.loan.test_loan import (create_loan_type, create_loan_security_pledge, create_repayment_entry,
make_loan_disbursement_entry, create_loan_accounts, create_loan_security_type, create_loan_security, create_demand_loan, create_loan_security_price)
from erpnext.loan_management.doctype.process_loan_interest_accrual.process_loan_interest_accrual import process_loan_interest_accrual_for_demand_loans
from erpnext.loan_management.doctype.loan_interest_accrual.loan_interest_accrual import days_in_year
from erpnext.selling.doctype.customer.test_customer import get_customer_dict
class TestLoanDisbursement(unittest.TestCase):
def setUp(self):
create_loan_accounts()
create_loan_type("Demand Loan", 2000000, 13.5, 25, 0, 5, 'Cash', 'Payment Account - _TC', 'Loan Account - _TC',
'Interest Income Account - _TC', 'Penalty Income Account - _TC')
create_loan_security_type()
create_loan_security()
create_loan_security_price("Test Security 1", 500, "Nos", get_datetime() , get_datetime(add_to_date(nowdate(), hours=24)))
create_loan_security_price("Test Security 2", 250, "Nos", get_datetime() , get_datetime(add_to_date(nowdate(), hours=24)))
if not frappe.db.exists("Customer", "_Test Loan Customer"):
frappe.get_doc(get_customer_dict('_Test Loan Customer')).insert(ignore_permissions=True)
self.applicant = frappe.db.get_value("Customer", {'name': '_Test Loan Customer'}, 'name')
def test_loan_topup(self):
pledges = []
pledges.append({
"loan_security": "Test Security 1",
"qty": 4000.00,
"haircut": 50,
"loan_security_price": 500.00
})
loan_security_pledge = create_loan_security_pledge(self.applicant, pledges)
loan = create_demand_loan(self.applicant, "Demand Loan", loan_security_pledge.name,
posting_date=get_first_day(nowdate()))
loan.submit()
first_date = get_first_day(nowdate())
last_date = get_last_day(nowdate())
no_of_days = date_diff(last_date, first_date) + 1
accrued_interest_amount = (loan.loan_amount * loan.rate_of_interest * no_of_days) \
/ (days_in_year(get_datetime().year) * 100)
make_loan_disbursement_entry(loan.name, loan.loan_amount, disbursement_date=first_date)
process_loan_interest_accrual_for_demand_loans(posting_date=add_days(last_date, 1))
# Should not be able to create loan disbursement entry before repayment
self.assertRaises(frappe.ValidationError, make_loan_disbursement_entry, loan.name,
500000, first_date)
repayment_entry = create_repayment_entry(loan.name, self.applicant, add_days(get_last_day(nowdate()), 5),
"Regular Payment", 611095.89)
repayment_entry.submit()
loan.reload()
# After repayment loan disbursement entry should go through
make_loan_disbursement_entry(loan.name, 500000, disbursement_date=add_days(last_date, 16))
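	# Illustration of the accrual formula used above (figures are assumptions for the
	# sake of the example, not read from the fixtures): with a loan amount of 1,000,000,
	# rate_of_interest = 13.5 and a 30-day month in a 365-day year, the accrued interest
	# is 1000000 * 13.5 * 30 / (365 * 100) ~= 11095.89, which is consistent with the
	# repayment of 611095.89 covering 600000 of principal plus that interest.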
| agpl-3.0 | 3,905,159,435,813,598,700 | 41.60274 | 150 | 0.736334 | false | 2.871653 | true | false | false |
jmhal/CCAPython | framework/manage/services.py | 1 | 9027 | from CCAPython.gov.cca import Services
from CCAPython.gov.cca.ports import ConnectionEventService
from CCAPython.gov.cca.ports import EventType
from CCAPython.framework.info.connectioninfo import ConnectionEvent
from CCAPython.framework.common.typemap import TypeMapDict
from CCAPython.framework.common.exceptions import PortNotFoundException
class ServicesHandle(Services, ConnectionEventService):
def __init__(self):
# Maps strings portName to a list (CCAPython.gov.cca.Ports, CCAPython.gov.cca.TypeMap).
# (portName) -> [Port, TypeMap]
self.d_usesPort = {}
self.d_providesPorts = {}
# Maps string ports names to string ports types
# (portName) -> (portType)
self.d_portType = {}
# Maps a CCAPython.gov.cca.ports.EventType value to a list of CCAPython.gov.cca.ports.EventListener
# (EventType) -> (ConnectionEventListener [])
self.d_listeners = {}
# A CCAPython.gov.cca.Type containing the properties of the component instance
self.d_instanceProperties = TypeMapDict()
# New methods
def initialize(self, fwk, componentID, properties, is_alias):
"""
input: a CCAPython.gov.cca.AbstractFramework fwk, a CCAPython.gov.cca.ComponentID componentID and a CCAPython.gov.cca.TypeMap properties
ouput: void
"""
self.framework = fwk
self.componentID = componentID
self.properties = properties
self.d_is_alias = is_alias
   def getInstanceProperties(self):
"""
input: none
output: a CCAPython.gov.cca.TypeMap object
"""
return self.d_instanceProperties
def setInstanceProperties(self, properties):
"""
input: a CCAPython.gov.cca.TypeMap properties
output: none
"""
self.d_instanceProperties = properties
return
def setPortProperties(self, portName, properties):
"""
input: a string portName, a CCAPython.gov.cca.TypeMap properties
output: none
"""
if portName in self.d_providesPorts:
         self.d_providesPorts[portName][1] = properties
elif portName in self.d_usesPort:
self.d_usesPort[portName][1] = properties
else:
raise PortNotFoundException(portName)
def getProvidedPortNames(self):
"""
input: none
output: a list of strings
"""
return self.d_providesPorts.keys()
def getUsedPortNames(self):
"""
input: none
output: a list of strings
"""
return self.d_usesPort.keys()
def bindPort(self, portName, port):
"""
input: a string portName, a CCAPython.gov.cca.Port object
output: void
"""
if portName not in self.d_usesPort.keys():
raise PortNotFoundException(portName)
self.d_usesPort[portName] = [port, TypeMapDict()]
return
def getProvidesPort(self, name):
"""
input: string name
output: void
"""
if name not in self.d_providesPorts.keys():
raise PortNotFoundException(name)
return self.d_providesPorts[name][0]
def notifyConnectionEvent(self, portName, event):
"""
This method will notify the component from the calling Services of an event
input: string portName, a CCAPython.gov.cca.ports.EventType value event
output: void
"""
listenerList = []
for ev in self.d_listeners:
if ev == event:
listenerList += self.d_listeners[event]
tm = TypeMapDict()
tm.putString("cca.PortName", portName)
tm.putString("cca.PortType", self.d_portType[portName])
ce = ConnectionEvent(event, tm)
for listener in listenerList:
listener.connectionActivity(ce)
return
# Methods from CCAPython.gov.cca.Services
def getComponentID(self):
"""
input: none
output: a ComponentID object
"""
return self.componentID
def createTypeMap(self):
"""
input: none
output: a TypeMap object
throws CCAException
"""
return TypeMapDict()
def registerUsesPort(self, portName, _type, properties):
"""
input: string portName, string type, and TypeMap properties
output: void
throws CCAException
"""
if portName in self.d_providesPorts or portName in self.d_usesPort:
print portName + " is not unique. Not doing anything."
return
else:
self.d_usesPort[portName] = [None, properties]
self.d_portType[portName] = _type
if self.framework != None:
if self.framework.isProvidedService(_type):
            self.framework.provideRequestedServices(self.componentID, portName, _type)
def unregisterUsesPort(self, portName):
"""
input: string portName
output: void
throws CCAException
"""
self.d_usesPort.pop(portName, None)
self.d_portType.pop(portName, None)
return
def addProvidesPort(self, inPort, portName, _type, properties):
"""
input: Port inPort, string portName, string type, and TypeMap properties
output: void
throws CCAException
"""
if portName in self.d_providesPorts or portName in self.d_usesPort:
print portName + " is not unique. Not doing anything."
return
if not self.d_is_alias and not inPort.isType(_type):
print "Port instance is not an instance of specified type"
return
self.d_providesPorts[portName] = [inPort, properties]
self.d_portType[portName] = _type
return
def removeProvidesPort(self, portName):
"""
input: string portName
output: void
throws CCAException
"""
self.d_providesPorts.pop(portName, None)
self.d_portType.pop(portName, None)
return
def getPortProperties(self, portName):
"""
input: string portName
output: a TypeMap object
"""
if portName in self.d_usesPort:
return self.d_usesPort[portName][1]
elif portName in self.d_providesPorts:
return self.d_providesPorts[portName][1]
else :
return None
def getPort(self, portName):
"""
input: string portName
output: a Port object
throws CCAException
"""
if portName in self.d_usesPort:
return self.d_usesPort[portName][0]
def getPortNonblocking(self, portName):
"""
input: string portName
output: a Port object
throws CCAException
"""
return self.getPort(portName)
def releasePort(self, portName):
"""
input: string portName
output: void
throws CCAException
"""
if portName in self.d_usesPort:
self.d_usesPort[portName] = None
def registerForRelease(self, callback):
"""
input: a CCAPython.gov.cca.ComponentRelease object callback
output: void
"""
self.framework.setInstanceRelease(self.componentID, callback)
# Methods from CCAPython.gov.cca.ports.ServiceRegistry
def addService(self, serviceType, portProvider):
"""
input: a string serviceType, a CCAPython.gov.cca.ports.ServiceProvider object portProvider
output: a boolean
throws CCAException
"""
self.framework.addServiceProvider(serviceType, self.componentID, portProvider)
return True
def addSingletonService(self, serviceType, server):
"""
input: a string serviceType, a CCAPython.gov.cca.Port object server
output: a boolean
throws CCAException
"""
self.framework.addServicePort(serviceType, server)
      return True
def removeService(self, serviceType):
"""
input: a string serviceType
output: none
throws CCAException
"""
self.framework.removeFromRegistry(serviceType)
return None
# Methods from CCAPython.gov.cca.ports.ConnectionEventService
def addConnectionEventListener(self, et, cel):
"""
input: a CCAPython.gov.cca.ports.EventType et, a CCAPython.gov.cca.ports.ConnectionEventListener cel
output: void
"""
if et == EventType.Error:
return
if et == EventType.ALL:
         self.addConnectionEventListener(EventType.ConnectPending, cel)
         self.addConnectionEventListener(EventType.Connected, cel)
         self.addConnectionEventListener(EventType.DisconnectPending, cel)
         self.addConnectionEventListener(EventType.Disconnected, cel)
      elif cel not in self.d_listeners.setdefault(et, []):
         self.d_listeners[et].append(cel)
return
def removeConnectionEventListener(self, et, cel):
"""
input: a CCAPython.gov.cca.ports.EventType et, a CCAPython.gov.cca.ports.ConnectionEventListener cel
output: void
"""
if et == EventType.Error:
return
if et == EventType.ALL:
for event in self.d_listeners:
self.removeConnectionEventListener(event, cel)
return
else:
self.d_listeners[et].remove(cel)
return
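   # Usage sketch (component, port and type names below are made up for illustration):
   #
   #     services = ServicesHandle()
   #     services.initialize(fwk, componentID, services.createTypeMap(), is_alias=False)
   #     services.addProvidesPort(hello_port, "hello", "examples.HelloPort",
   #                              services.createTypeMap())
   #     services.registerUsesPort("go", "gov.cca.ports.GoPort", services.createTypeMap())
   #     go_port = services.getPort("go")   # None until the framework binds the port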
| apache-2.0 | 4,384,464,468,484,002,300 | 30.127586 | 142 | 0.643625 | false | 4.006658 | false | false | false |
JensTimmerman/radical.pilot | src/radical/pilot/utils/analysis.py | 1 | 12671 |
import os
# ------------------------------------------------------------------------------
#
def get_experiment_frames(experiments, datadir=None):
"""
read profiles for all sessions in the given 'experiments' dict. That dict
is expected to be like this:
{ 'test 1' : [ [ 'rp.session.thinkie.merzky.016609.0007', 'stampede popen sleep 1/1/1/1 (?)'] ],
'test 2' : [ [ 'rp.session.ip-10-184-31-85.merzky.016610.0112', 'stampede shell sleep 16/8/8/4' ] ],
'test 3' : [ [ 'rp.session.ip-10-184-31-85.merzky.016611.0013', 'stampede shell mdrun 16/8/8/4' ] ],
'test 4' : [ [ 'rp.session.titan-ext4.marksant1.016607.0005', 'titan shell sleep 1/1/1/1 a' ] ],
'test 5' : [ [ 'rp.session.titan-ext4.marksant1.016607.0006', 'titan shell sleep 1/1/1/1 b' ] ],
'test 6' : [ [ 'rp.session.ip-10-184-31-85.merzky.016611.0013', 'stampede - isolated', ],
[ 'rp.session.ip-10-184-31-85.merzky.016612.0012', 'stampede - integrated', ],
[ 'rp.session.titan-ext4.marksant1.016607.0006', 'blue waters - integrated' ] ]
      }

    i.e. the dict maps experiment names to lists of [session ID, label] pairs, where
    the label will later be used to label (duh) the plots.
we return a similar dict where the session IDs are data frames
"""
import pandas as pd
exp_frames = dict()
if not datadir:
datadir = os.getcwd()
print 'reading profiles in %s' % datadir
for exp in experiments:
print " - %s" % exp
exp_frames[exp] = list()
for sid, label in experiments[exp]:
print " - %s" % sid
import glob
for prof in glob.glob ("%s/%s-pilot.*.prof" % (datadir, sid)):
print " - %s" % prof
frame = get_profile_frame (prof)
exp_frames[exp].append ([frame, label])
return exp_frames
# ------------------------------------------------------------------------------
#
def get_profile_frame (prof):
import pandas as pd
return pd.read_csv(prof)
# ------------------------------------------------------------------------------
#
tmp = None
def add_concurrency (frame, tgt, spec):
"""
add a column 'tgt' which is a cumulative sum of conditionals of enother row.
The purpose is the following: if a unit enters a component, the tgt row counter is
increased by 1, if the unit leaves the component, the counter is decreases by 1.
For any time, the resulting row contains the number of units which is in the
component. Or state. Or whatever.
The arguments are:
'tgt' : name of the new column
'spec' : a set of filters to determine if a unit enters or leaves
'spec' is expected to be a dict of the following format:
spec = { 'in' : [{'col1' : 'pat1',
'col2' : 'pat2'},
...],
'out' : [{'col3' : 'pat3',
'col4' : 'pat4'},
...]
}
where:
'in' : filter set to determine the unit entering
'out' : filter set to determine the unit leaving
'col' : name of column for which filter is defined
'event' : event which correlates to entering/leaving
'msg' : qualifier on the event, if event is not unique
Example:
spec = {'in' : [{'state' :'Executing'}],
'out' : [{'state' :'Done'},
{'state' :'Failed'},
{'state' :'Cancelled'}]
}
get_concurrency (df, 'concurrently_running', spec)
"""
import numpy
# create a temporary row over which we can do the commulative sum
# --------------------------------------------------------------------------
def _conc (row, spec):
# row must match any filter dict in 'spec[in/out]'
# for any filter dict it must match all col/pat pairs
# for each in filter
for f in spec['in']:
match = 1
# for each col/val in that filter
for col, pat in f.iteritems():
if row[col] != pat:
match = 0
break
if match:
# one filter matched!
# print " + : %-20s : %.2f : %-20s : %s " % (row['uid'], row['time'], row['event'], row['message'])
return 1
# for each out filter
for f in spec['out']:
match = 1
# for each col/val in that filter
for col, pat in f.iteritems():
if row[col] != pat:
match = 0
break
if match:
# one filter matched!
# print " - : %-20s : %.2f : %-20s : %s " % (row['uid'], row['time'], row['event'], row['message'])
return -1
# no filter matched
# print " : %-20s : %.2f : %-20s : %s " % (row['uid'], row['time'], row['event'], row['message'])
return 0
# --------------------------------------------------------------------------
# we only want to later look at changes of the concurrency -- leading or trailing
# idle times are to be ignored. We thus set repeating values of the cumsum to NaN,
    # so that they can be filtered out when plotting: df.dropna().plot(...).
# That specifically will limit the plotted time range to the area of activity.
    # The full time range can still be plotted when omitting the dropna() call.
# --------------------------------------------------------------------------
def _time (x):
global tmp
if x != tmp: tmp = x
else : x = numpy.NaN
return x
# --------------------------------------------------------------------------
# sanitize concurrency: negative values indicate incorrect event ordering,
    # so we set the respective values to 0
# --------------------------------------------------------------------------
def _abs (x):
if x < 0:
return numpy.NaN
return x
# --------------------------------------------------------------------------
frame[tgt] = frame.apply(lambda row: _conc(row, spec), axis=1).cumsum()
frame[tgt] = frame.apply(lambda row: _abs (row[tgt]), axis=1)
frame[tgt] = frame.apply(lambda row: _time(row[tgt]), axis=1)
# print frame[[tgt, 'time']]
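# Given the example spec from the docstring, frame['concurrently_running'] then holds,
# for every row where the count changed, the number of units currently in the
# 'Executing' state (NaN where the count is unchanged or where out-of-order events
# produced a negative count), so
#
#     frame.dropna(subset=['concurrently_running']).plot(x='time', y='concurrently_running')
#
# plots the concurrency over the active time range only.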
# ------------------------------------------------------------------------------
#
t0 = None
def calibrate_frame(frame, spec):
"""
move the time axis of a profiling frame so that t_0 is at the first event
matching the given 'spec'. 'spec' has the same format as described in
'add_concurrency' (list of dicts with col:pat filters)
"""
# --------------------------------------------------------------------------
def _find_t0 (row, spec):
# row must match any filter dict in 'spec[in/out]'
# for any filter dict it must match all col/pat pairs
global t0
if t0 is not None:
# already found t0
return
# for each col/val in that filter
for f in spec:
match = 1
for col, pat in f.iteritems():
if row[col] != pat:
match = 0
break
if match:
# one filter matched!
t0 = row['time']
return
# --------------------------------------------------------------------------
# --------------------------------------------------------------------------
def _calibrate (row, t0):
if t0 is None:
# no t0...
return
return row['time'] - t0
# --------------------------------------------------------------------------
# we need to iterate twice over the frame: first to find t0, then to
# calibrate the time axis
global t0
t0 = None # no t0
frame.apply(lambda row: _find_t0 (row, spec), axis=1)
if t0 == None:
print "Can't recalibrate, no matching timestamp found"
return
frame['time'] = frame.apply(lambda row: _calibrate(row, t0 ), axis=1)
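# Usage sketch (column names and values are illustrative):
#
#     calibrate_frame(frame, spec=[{'event': 'advance', 'state': 'Executing'}])
#
# shifts frame['time'] so that the first row matching the filter lands at t = 0; if no
# row matches, the frame is left untouched and a warning is printed.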
# ------------------------------------------------------------------------------
#
def create_plot():
"""
create a plot object and tune its layout to our liking.
"""
import matplotlib.pyplot as plt
fig, plot = plt.subplots(figsize=(12,6))
plot.xaxis.set_tick_params(width=1, length=7)
plot.yaxis.set_tick_params(width=1, length=7)
plot.spines['right' ].set_position(('outward', 10))
plot.spines['top' ].set_position(('outward', 10))
plot.spines['bottom'].set_position(('outward', 10))
plot.spines['left' ].set_position(('outward', 10))
plt.xticks(fontsize=14)
plt.yticks(fontsize=14)
fig.tight_layout()
return fig, plot
# ------------------------------------------------------------------------------
#
def frame_plot (frames, axis, title=None, logx=False, logy=False,
legend=True, figdir=None):
"""
plot the given axis from the give data frame. We create a plot, and plot
all frames given in the list. The list is expected to contain [frame,label]
pairs
frames: list of tuples of dataframes and labels
frames = [[stampede_df_1, 'stampede - popen'],
[stampede_df_2, 'stampede - shell'],
[stampede_df_3, 'stampede - ORTE' ]]
    axis: list of two (column index, axis label) pairs, one for x and one for y
          axis = [('time', 'time (s)'), ('cc_exe', '# concurrent units')]
"""
# create figure and layout
fig, plot = create_plot()
# set plot title
if title:
plot.set_title(title, y=1.05, fontsize=18)
# plot the data frames
# NOTE: we need to set labels separately, because of
# https://github.com/pydata/pandas/issues/9542
labels = list()
for frame, label in frames:
try:
            frame.dropna().plot(ax=plot, logx=logx, logy=logy,
                                x=axis[0][0], y=axis[1][0],
                                drawstyle='steps',
                                label=label, legend=False)
            labels.append(label)
except Exception as e:
print "skipping frame '%s': '%s'" % (label, e)
if legend:
plot.legend(labels=labels, loc='upper right', fontsize=14, frameon=True)
# set axis labels
plot.set_xlabel(axis[0][1], fontsize=14)
plot.set_ylabel(axis[1][1], fontsize=14)
plot.set_frame_on(True)
# save as png and pdf. Use the title as base for names
if title: base = title
else : base = "%s_%s" % (axis[0][1], axis[1][1])
# clean up base name -- only keep alphanum and such
import re
base = re.sub('[^a-zA-Z0-9\.\-]', '_', base)
base = re.sub('_+', '_', base)
if not figdir:
figdir = os.getcwd()
print 'saving %s/%s.png' % (figdir, base)
fig.savefig('%s/%s.png' % (figdir, base), bbox_inches='tight')
print 'saving %s/%s.pdf' % (figdir, base)
fig.savefig('%s/%s.pdf' % (figdir, base), bbox_inches='tight')
return fig, plot
# ------------------------------------------------------------------------------
#
def create_analytical_frame (idx, kind, args, limits, step):
"""
create an artificial data frame, ie. a data frame which does not contain
data gathered from an experiment, but data representing an analytical
construct of some 'kind'.
idx: data frame column index to fill (a time column is always created)
    kind: construct to use (only 'rate' is supported right now)
args: construct specific parameters
limits: time range for which data are to be created
step: time steps for which data are to be created
"""
import pandas as pd
# --------------------------------------------------------------------------
def _frange(start, stop, step):
while start <= stop:
yield start
start += step
# --------------------------------------------------------------------------
if kind == 'rate' :
t_0 = args.get ('t_0', 0.0)
rate = args.get ('rate', 1.0)
data = list()
for t in _frange(limits[0], limits[1], step):
data.append ({'time': t+t_0, idx: t*rate})
return pd.DataFrame (data)
else:
raise ValueError ("No such frame kind '%s'" % kind)
# ------------------------------------------------------------------------------
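# Usage sketch for an ideal-rate reference line (column names are illustrative):
#
#     ideal = create_analytical_frame('n_units', 'rate',
#                                     {'t_0': 10.0, 'rate': 2.0}, (0, 100), 1.0)
#     frame_plot([[ideal, 'ideal rate']],
#                axis=[('time', 'time (s)'), ('n_units', '# units')])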
| mit | -4,651,960,791,394,131,000 | 34.793785 | 113 | 0.475495 | false | 4.072967 | true | false | false |
ludojmj/treelud | server/paramiko/dsskey.py | 1 | 6975 | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
DSS keys.
"""
import os
from hashlib import sha1
from Crypto.PublicKey import DSA
from paramiko import util
from paramiko.common import zero_byte
from paramiko.py3compat import long
from paramiko.ssh_exception import SSHException
from paramiko.message import Message
from paramiko.ber import BER, BERException
from paramiko.pkey import PKey
class DSSKey (PKey):
"""
    Representation of a DSS key which can be used to sign and verify SSH2
data.
"""
def __init__(self, msg=None, data=None, filename=None, password=None, vals=None, file_obj=None):
self.p = None
self.q = None
self.g = None
self.y = None
self.x = None
if file_obj is not None:
self._from_private_key(file_obj, password)
return
if filename is not None:
self._from_private_key_file(filename, password)
return
if (msg is None) and (data is not None):
msg = Message(data)
if vals is not None:
self.p, self.q, self.g, self.y = vals
else:
if msg is None:
raise SSHException('Key object may not be empty')
if msg.get_text() != 'ssh-dss':
raise SSHException('Invalid key')
self.p = msg.get_mpint()
self.q = msg.get_mpint()
self.g = msg.get_mpint()
self.y = msg.get_mpint()
self.size = util.bit_length(self.p)
def asbytes(self):
m = Message()
m.add_string('ssh-dss')
m.add_mpint(self.p)
m.add_mpint(self.q)
m.add_mpint(self.g)
m.add_mpint(self.y)
return m.asbytes()
def __str__(self):
return self.asbytes()
def __hash__(self):
h = hash(self.get_name())
h = h * 37 + hash(self.p)
h = h * 37 + hash(self.q)
h = h * 37 + hash(self.g)
h = h * 37 + hash(self.y)
# h might be a long by now...
return hash(h)
def get_name(self):
return 'ssh-dss'
def get_bits(self):
return self.size
def can_sign(self):
return self.x is not None
def sign_ssh_data(self, data):
digest = sha1(data).digest()
dss = DSA.construct((long(self.y), long(self.g), long(self.p), long(self.q), long(self.x)))
# generate a suitable k
qsize = len(util.deflate_long(self.q, 0))
while True:
k = util.inflate_long(os.urandom(qsize), 1)
if (k > 2) and (k < self.q):
break
r, s = dss.sign(util.inflate_long(digest, 1), k)
m = Message()
m.add_string('ssh-dss')
# apparently, in rare cases, r or s may be shorter than 20 bytes!
rstr = util.deflate_long(r, 0)
sstr = util.deflate_long(s, 0)
if len(rstr) < 20:
rstr = zero_byte * (20 - len(rstr)) + rstr
if len(sstr) < 20:
sstr = zero_byte * (20 - len(sstr)) + sstr
m.add_string(rstr + sstr)
return m
def verify_ssh_sig(self, data, msg):
if len(msg.asbytes()) == 40:
# spies.com bug: signature has no header
sig = msg.asbytes()
else:
kind = msg.get_text()
if kind != 'ssh-dss':
return 0
sig = msg.get_binary()
# pull out (r, s) which are NOT encoded as mpints
sigR = util.inflate_long(sig[:20], 1)
sigS = util.inflate_long(sig[20:], 1)
sigM = util.inflate_long(sha1(data).digest(), 1)
dss = DSA.construct((long(self.y), long(self.g), long(self.p), long(self.q)))
return dss.verify(sigM, (sigR, sigS))
def _encode_key(self):
if self.x is None:
raise SSHException('Not enough key information')
keylist = [0, self.p, self.q, self.g, self.y, self.x]
try:
b = BER()
b.encode(keylist)
except BERException:
raise SSHException('Unable to create ber encoding of key')
return b.asbytes()
def write_private_key_file(self, filename, password=None):
self._write_private_key_file('DSA', filename, self._encode_key(), password)
def write_private_key(self, file_obj, password=None):
self._write_private_key('DSA', file_obj, self._encode_key(), password)
def generate(bits=1024, progress_func=None):
"""
Generate a new private DSS key. This factory function can be used to
generate a new host key or authentication key.
:param int bits: number of bits the generated key should be.
:param function progress_func:
an optional function to call at key points in key generation (used
by ``pyCrypto.PublicKey``).
:return: new `.DSSKey` private key
"""
dsa = DSA.generate(bits, os.urandom, progress_func)
key = DSSKey(vals=(dsa.p, dsa.q, dsa.g, dsa.y))
key.x = dsa.x
return key
generate = staticmethod(generate)
### internals...
def _from_private_key_file(self, filename, password):
data = self._read_private_key_file('DSA', filename, password)
self._decode_key(data)
def _from_private_key(self, file_obj, password):
data = self._read_private_key('DSA', file_obj, password)
self._decode_key(data)
def _decode_key(self, data):
# private key file contains:
# DSAPrivateKey = { version = 0, p, q, g, y, x }
try:
keylist = BER(data).decode()
except BERException as e:
raise SSHException('Unable to parse key file: ' + str(e))
if (type(keylist) is not list) or (len(keylist) < 6) or (keylist[0] != 0):
raise SSHException('not a valid DSA private key file (bad ber encoding)')
self.p = keylist[1]
self.q = keylist[2]
self.g = keylist[3]
self.y = keylist[4]
self.x = keylist[5]
self.size = util.bit_length(self.p)
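# A short usage sketch (illustrative only, not part of paramiko): generate a
# throwaway key, sign a blob, and verify the signature with the same key.
def _example_sign_and_verify():
    key = DSSKey.generate(bits=1024)
    sig = key.sign_ssh_data(b'payload')
    sig.rewind()  # rewind the Message before reading it back for verification
    return key.verify_ssh_sig(b'payload', sig)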
| mit | -2,605,472,682,343,415,000 | 33.227273 | 100 | 0.567455 | false | 3.540609 | false | false | false |
snipsco/snipsskills | snipsmanager/utils/microphone_setup.py | 1 | 1997 | # -*-: coding utf-8 -*-
""" Downloader for Snips assistants. """
import os
import shutil
from .os_helpers import cmd_exists, is_raspi_os, execute_command, pipe_commands
from .. import ASOUNDCONF_DEST_PATH
# pylint: disable=too-few-public-methods
class MicrophoneSetup:
    """ Microphone setup for Snips assistants. """
ASOUNDCONF_PATH = "../config/asound.conf"
@staticmethod
def setup_asoundconf(microphone_id):
if not is_raspi_os():
return
if microphone_id == 'respeaker':
MicrophoneSetup._copy_asoundconf("asound.conf.respeaker")
elif microphone_id == 'jabra':
MicrophoneSetup._copy_asoundconf("asound.conf.jabra")
else:
MicrophoneSetup._copy_asoundconf("asound.conf.default")
@staticmethod
def _copy_asoundconf(asoundconf_file):
""" Copy asound.conf configuration to local path.
:param asoundconf_file: the name of the asound.conf configuration, as
present in the config folder.
"""
this_dir, this_filename = os.path.split(__file__)
asoundconf_path = os.path.join(this_dir, MicrophoneSetup.ASOUNDCONF_PATH, asoundconf_file)
shutil.copy2(asoundconf_path, ASOUNDCONF_DEST_PATH)
class RespeakerMicrophoneSetup:
@staticmethod
def setup(vendor_id, product_id):
if not is_raspi_os():
return
execute_command("sudo rm -f /lib/udev/rules.d/50-rspk.rules")
echo_command = ("echo ACTION==\"add\", SUBSYSTEMS==\"usb\", ATTRS{{idVendor}}==\"{}\", " +
"ATTRS{{idProduct}}==\"{}\", MODE=\"660\", GROUP=\"plugdev\"") \
.format(vendor_id, product_id)
tee_command = "sudo tee --append /lib/udev/rules.d/50-rspk.rules"
pipe_commands(echo_command, tee_command, silent=True)
execute_command("sudo adduser pi plugdev")
execute_command("sudo udevadm control --reload")
execute_command("sudo udevadm trigger")
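# Usage sketch (illustrative; the microphone id and USB ids below are
# assumptions, not values shipped with this module):
#
# MicrophoneSetup.setup_asoundconf('jabra')        # copies asound.conf.jabra
# RespeakerMicrophoneSetup.setup('2886', '0007')   # hypothetical vendor/product ids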
| mit | -8,214,272,238,034,088,000 | 33.431034 | 98 | 0.625939 | false | 3.503509 | false | false | false |
edickie/ciftify | ciftify/bin/ciftify_seed_corr.py | 1 | 8983 | #!/usr/bin/env python3
"""
Produces a correlation map of the mean time series within the seed with
every voxel in the functional file.
Usage:
ciftify_seed_corr [options] <func> <seed>
Arguments:
<func> functional data (nifti or cifti)
<seed> seed mask (nifti, cifti or gifti)
Options:
--outputname STR Specify the output filename
--output-ts Also output write the from the seed to text
--roi-label INT Specify the numeric label of the ROI you want a seedmap for
--hemi HEMI If the seed is a gifti file, specify the hemisphere (R or L) here
--mask FILE brainmask
--fisher-z Apply the fisher-z transform (arctanh) to the correlation map
--weighted compute weighted average timeseries from the seed map
--use-TRs FILE Only use the TRs listed in the file provided (TR's in file starts with 1)
-v,--verbose Verbose logging
--debug Debug logging
-h, --help Prints this message
DETAILS:
The default output filename is created from the <func> and <seed> filenames,
(i.e. func.dscalar.nii + seed.dscalar.nii --> func_seed.dscalar.nii)
and written to same folder as the <func> input. Use the '--outputname'
argument to specify a different outputname. The output datatype matches the <func>
input.
The mean timeseries is calculated using ciftify_meants; the '--roi-label', '--hemi',
'--mask', and '--weighted' arguments are passed to it. See ciftify_meants '--help' for
more info on their usage. The timeseries output (*_meants.csv) of this step can be
saved to disk using the '--output-ts' option.
If a mask is provided with the '--mask' option (such as a brainmask), it will be
applied to both the seed and functional file.
The '--use-TRs' argument allows you to calculate the correlation maps from specific
timepoints (TRs) in the timeseries. This option can be used to exclude outlier
timepoints or to limit the calculation to a subsample of the timecourse
(i.e. only the beginning or end). It expects a text file containing the integer
numbers of the TRs to keep (where the first TR=1).
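Example (hypothetical file names, for illustration only):
    ciftify_seed_corr --fisher-z --mask sub-01_brainmask.nii.gz \
        sub-01_rest_Atlas.dtseries.nii PCC_seed.dscalar.nii
This writes sub-01_rest_Atlas_PCC_seed.dscalar.nii next to the functional input.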
Written by Erin W Dickie
"""
import os
import sys
import subprocess
import tempfile
import shutil
import logging
import logging.config
import numpy as np
import scipy as sp
import nibabel as nib
from docopt import docopt
import ciftify
from ciftify.utils import run
from ciftify.meants import MeantsSettings
# Read logging.conf
logger = logging.getLogger('ciftify')
logger.setLevel(logging.DEBUG)
class UserSettings(MeantsSettings):
def __init__(self, arguments):
MeantsSettings.__init__(self, arguments)
self.fisher_z = arguments['--fisher-z']
self.output_prefix = self.get_output_prefix(arguments['--outputname'])
self.outputcsv = self.get_outputcsv(arguments['--output-ts'])
self.TR_file = self.get_TRfile(arguments['--use-TRs'])
def get_output_prefix(self, outputname):
'''
output_prefix is outputname if it was specified
if not, it is created from the func and seed input paths
'''
## determine outbase if it has not been specified
if outputname:
output_prefix = outputname.replace('.nii.gz','').replace('.dscalar.nii','')
else:
outbase = '{}_{}'.format(self.func.base, self.seed.base)
output_prefix = os.path.join(os.path.dirname(self.func.path), outbase)
        ## uses utils function to make sure the output is writable, will sys.exit with error if not the case
ciftify.utils.check_output_writable(output_prefix)
return(output_prefix)
def get_outputcsv(self, output_ts):
'''set outputcsv name if this is asked for'''
if output_ts:
outputcsv = '{}_meants.csv'.format(self.output_prefix)
else:
outputcsv = None
return(outputcsv)
def get_TRfile(self, TRfile):
if TRfile:
ciftify.utils.check_input_readable(TRfile)
return(TRfile)
def main():
arguments = docopt(__doc__)
debug = arguments['--debug']
verbose = arguments['--verbose']
ch = logging.StreamHandler()
ch.setLevel(logging.WARNING)
if verbose:
ch.setLevel(logging.INFO)
if debug:
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
## set up the top of the log
logger.info('{}{}'.format(ciftify.utils.ciftify_logo(),
ciftify.utils.section_header('Starting ciftify_seed_corr')))
ciftify.utils.log_arguments(arguments)
settings = UserSettings(arguments)
with ciftify.utils.TempDir() as tmpdir:
logger.info('Creating tempdir:{} on host:{}'.format(tmpdir,
os.uname()[1]))
ret = run_ciftify_seed_corr(settings, tmpdir)
logger.info(ciftify.utils.section_header('Done ciftify_seed_corr'))
sys.exit(ret)
def run_ciftify_seed_corr(settings, tempdir):
logger.debug('func: type: {}, base: {}'.format(settings.func.type, settings.func.base))
logger.debug('seed: type: {}, base: {}'.format(settings.seed.type, settings.seed.base))
if ".dlabel.nii" in settings.seed.path:
logger.error("Sorry this function can't handle .dlabel.nii seeds")
sys.exit(1)
seed_ts = ciftify.meants.calc_meants_with_numpy(settings)
logger.debug('seed_ts shape before reshaping {}'.format(seed_ts.shape))
if ((len(seed_ts.shape) != 2) or (seed_ts.shape[0] != 1 and seed_ts.shape[1] !=1)):
        logger.error("Incorrect shape dimensions. May have forgotten to indicate the '--weighted' or '--roi-label' option")
sys.exit(1)
seed_ts = seed_ts.reshape(seed_ts.shape[0]*seed_ts.shape[1])
logger.debug('seed_ts shape after reshaping {}'.format(seed_ts.shape))
logger.debug('Writing output with prefix: {}'.format(settings.output_prefix))
logger.debug('Writing meants: {}'.format(settings.outputcsv))
logger.info('Using numpy to calculate seed-correlation')
## convert to nifti
if settings.func.type == "cifti":
func_fnifti = os.path.join(tempdir,'func.nii.gz')
run(['wb_command','-cifti-convert','-to-nifti',settings.func.path, func_fnifti])
func_data, outA, header, dims = ciftify.niio.load_nifti(func_fnifti)
    # import template, store the output parameters
if settings.func.type == "nifti":
func_data, outA, header, dims = ciftify.niio.load_nifti(settings.func.path)
if settings.mask:
if settings.mask.type == "cifti":
mask_fnifti = os.path.join(tempdir,'mask.nii.gz')
run(['wb_command','-cifti-convert','-to-nifti', settings.mask.path, mask_fnifti])
mask_data, _, _, _ = ciftify.niio.load_nifti(mask_fnifti)
if settings.mask.type == "nifti":
mask_data, _, _, _ = ciftify.niio.load_nifti(settings.mask.path)
# decide which TRs go into the correlation
if settings.TR_file:
TR_file = np.loadtxt(settings.TR_file, int)
TRs = TR_file - 1 # shift TR-list to be zero-indexed
else:
TRs = np.arange(dims[3])
# get mean seed timeseries
## even if no mask given, mask out all zero elements..
std_array = np.std(func_data, axis=1)
std_nonzero = np.where(std_array > 0)[0]
idx_mask = std_nonzero
if settings.mask:
idx_of_mask = np.where(mask_data > 0)[0]
idx_mask = np.intersect1d(idx_mask, idx_of_mask)
# create output array
out = np.zeros([dims[0]*dims[1]*dims[2], 1])
# look through each time series, calculating r
for i in np.arange(len(idx_mask)):
out[idx_mask[i]] = np.corrcoef(seed_ts[TRs], func_data[idx_mask[i], TRs])[0][1]
# create the 3D volume and export
out = out.reshape([dims[0], dims[1], dims[2], 1])
out = nib.nifti1.Nifti1Image(out, outA)
## determine nifti filenames for the next two steps
if settings.func.type == "nifti":
if settings.fisher_z:
nifti_corr_output = os.path.join(tempdir, 'corr_out.nii.gz')
nifti_Zcorr_output = '{}.nii.gz'.format(settings.output_prefix)
else:
nifti_corr_output = '{}.nii.gz'.format(settings.output_prefix)
if settings.func.type == "cifti":
nifti_corr_output = os.path.join(tempdir, 'corr_out.nii.gz')
if settings.fisher_z:
nifti_Zcorr_output = os.path.join(tempdir, 'corrZ_out.nii.gz')
else:
nifti_Zcorr_output = nifti_corr_output
# write out nifti
out.to_filename(nifti_corr_output)
# do fisher-z transform on values
if settings.fisher_z:
run(['wb_command', "-volume-math 'atanh(x)'", nifti_Zcorr_output,
'-var', 'x', nifti_corr_output])
if settings.func.type == "cifti":
## convert back
run(['wb_command','-cifti-convert','-from-nifti',
nifti_Zcorr_output,
settings.func.path,
'{}.dscalar.nii'.format(settings.output_prefix),
'-reset-scalars'])
if __name__ == '__main__':
main()
| mit | -9,101,947,211,567,938,000 | 36.902954 | 120 | 0.649338 | false | 3.393653 | false | false | false |
Linutronix/elbe | elbepack/initvmaction.py | 1 | 23779 | # ELBE - Debian Based Embedded Rootfilesystem Builder
# Copyright (c) 2015-2017 Manuel Traut <[email protected]>
# Copyright (c) 2015-2018 Torben Hohn <[email protected]>
# Copyright (c) 2015 Silvio Fricke <[email protected]>
# Copyright (c) 2017 Philipp Arras <[email protected]>
# Copyright (c) 2017 Benedikt Spranger <[email protected]>
# Copyright (c) 2017 John Ogness <[email protected]>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import sys
import time
import os
import datetime
import libvirt
import elbepack
from elbepack.treeutils import etree
from elbepack.directories import elbe_exe
from elbepack.shellhelper import CommandError, system, command_out_stderr, \
command_out
from elbepack.filesystem import TmpdirFilesystem
from elbepack.elbexml import ElbeXML, ValidationError, ValidationMode
from elbepack.config import cfg
from elbepack.xmlpreprocess import PreprocessWrapper
def is_soap_local():
return cfg["soaphost"] in ("localhost", "127.0.0.1")
def cmd_exists(x):
return any(os.access(os.path.join(path, x), os.X_OK)
for path in os.environ["PATH"].split(os.pathsep))
# Create download directory with timestamp,
# if necessary
def ensure_outdir(opt):
if opt.outdir is None:
opt.outdir = "elbe-build-%s" % (
datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
print("Saving generated Files to %s" % opt.outdir)
class InitVMError(Exception):
def __init__(self, msg):
Exception.__init__(self, msg)
class InitVMAction:
actiondict = {}
@classmethod
def register(cls, tag):
def _register(action):
action.tag = tag
cls.actiondict[action.tag] = action
return action
return _register
@classmethod
def print_actions(cls):
print("available subcommands are:", file=sys.stderr)
for a in cls.actiondict:
print(" %s" % a, file=sys.stderr)
def __new__(cls, node):
action = cls.actiondict[node]
return object.__new__(action)
def __init__(self, node, initvmNeeded=True):
self.initvm = None
self.conn = None
self.node = node
# initvm might be running on a different host. Thus there's
# no need to talk with libvirt
if not is_soap_local():
return
# The tag initvmNeeded is required in order to be able to run `elbe
# initvm create`
try:
self.conn = libvirt.open("qemu:///system")
except libvirt.libvirtError as verr:
if not isinstance(verr.args[0], str):
raise
if verr.args[0].startswith('Failed to connect socket to'):
retries = 18
while retries > 0:
retries -= 1
time.sleep(10)
try:
self.conn = libvirt.open("qemu:///system")
except libvirt.libvirtError as verr:
if not isinstance(verr.args[0], str):
raise
if verr.args[0].startswith('Failed to connect socket to'):
pass
if self.conn:
break
if not self.conn:
print("", file=sys.stderr)
print("Accessing libvirt provider system not possible.", file=sys.stderr)
print("Even after waiting 180 seconds.", file=sys.stderr)
print("Make sure that package 'libvirt-daemon-system' is", file=sys.stderr)
print("installed, and the service is running properly", file=sys.stderr)
sys.exit(20)
elif verr.args[0].startswith('authentication unavailable'):
print("", file=sys.stderr)
print("Accessing libvirt provider system not allowed.", file=sys.stderr)
print("Users which want to use elbe need to be members of the 'libvirt' group.", file=sys.stderr)
print("'gpasswd -a <user> libvirt' and logging in again,", file=sys.stderr)
print("should fix the problem.", file=sys.stderr)
sys.exit(20)
elif verr.args[0].startswith('error from service: CheckAuthorization'):
print("", file=sys.stderr)
print("Accessing libvirt failed.", file=sys.stderr)
                print("Probably entering the password for accessing libvirt", file=sys.stderr)
                print("timed out. If this occurred after 'elbe initvm create'", file=sys.stderr)
print("it should be safe to use 'elbe initvm start' to", file=sys.stderr)
print("continue.", file=sys.stderr)
sys.exit(20)
else:
# In case we get here, the exception is unknown, and we want to see it
raise
doms = self.conn.listAllDomains()
for d in doms:
if d.name() == cfg['initvm_domain']:
self.initvm = d
if not self.initvm and initvmNeeded:
sys.exit(20)
def execute(self, _initvmdir, _opt, _args):
raise NotImplementedError('execute() not implemented')
def initvm_state(self):
return self.initvm.info()[0]
@InitVMAction.register('start')
class StartAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, _opt, _args):
if self.initvm_state() == libvirt.VIR_DOMAIN_RUNNING:
print('Initvm already running.')
sys.exit(20)
elif self.initvm_state() == libvirt.VIR_DOMAIN_SHUTOFF:
# Domain is shut off. Let's start it!
self.initvm.create()
# Wait five seconds for the initvm to boot
# TODO: Instead of waiting for five seconds
# check whether SOAP server is reachable.
for _ in range(1, 5):
sys.stdout.write("*")
sys.stdout.flush()
time.sleep(1)
print("*")
@InitVMAction.register('ensure')
class EnsureAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, _opt, _args):
# initvm might be running on a different host, thus skipping
# the check
if not is_soap_local():
return
if self.initvm_state() == libvirt.VIR_DOMAIN_SHUTOFF:
system('%s initvm start' % elbe_exe)
elif self.initvm_state() == libvirt.VIR_DOMAIN_RUNNING:
stop = time.time() + 300
while time.time() < stop:
if command_out('%s control list_projects' % elbe_exe)[0] == 0:
break
time.sleep(10)
if time.time() > stop:
print("Waited for 5 minutes and the daemon is still not active."
" Exit.")
sys.exit(20)
else:
print("Elbe initvm in bad state.")
sys.exit(20)
@InitVMAction.register('stop')
class StopAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, _opt, _args):
if self.initvm_state() != libvirt.VIR_DOMAIN_RUNNING:
print('Initvm is not running.')
sys.exit(20)
while True:
sys.stdout.write("*")
sys.stdout.flush()
time.sleep(1)
state = self.initvm_state()
if state == libvirt.VIR_DOMAIN_SHUTDOWN:
continue
if state == libvirt.VIR_DOMAIN_SHUTOFF:
break
try:
self.initvm.shutdown()
except libvirt.libvirtError as e:
raise e
print("\nInitvm shutoff")
@InitVMAction.register('attach')
class AttachAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, _opt, _args):
if self.initvm_state() != libvirt.VIR_DOMAIN_RUNNING:
print('Error: Initvm not running properly.')
sys.exit(20)
print('Attaching to initvm console.')
system('virsh --connect qemu:///system console %s' % cfg['initvm_domain'])
def submit_and_dl_result(xmlfile, cdrom, opt):
# pylint: disable=too-many-statements
# pylint: disable=too-many-branches
try:
with PreprocessWrapper(xmlfile, opt) as ppw:
xmlfile = ppw.preproc
ret, prjdir, err = command_out_stderr(
'%s control create_project' % (elbe_exe))
if ret != 0:
print("elbe control create_project failed.", file=sys.stderr)
print(err, file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
prjdir = prjdir.strip()
cmd = '%s control set_xml %s %s' % (elbe_exe, prjdir, xmlfile)
ret, _, err = command_out_stderr(cmd)
if ret != 0:
                print("elbe control set_xml failed", file=sys.stderr)
print(err, file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
except CommandError:
# this is the failure from PreprocessWrapper
# it already printed the error message from
# elbe preprocess
print("Giving up", file=sys.stderr)
sys.exit(20)
if opt.writeproject:
with open(opt.writeproject, "w") as wpf:
wpf.write(prjdir)
if cdrom is not None:
print("Uploading CDROM. This might take a while")
try:
system(
'%s control set_cdrom "%s" "%s"' %
(elbe_exe, prjdir, cdrom))
except CommandError:
print("elbe control set_cdrom Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
print("Upload finished")
build_opts = ''
if opt.build_bin:
build_opts += '--build-bin '
if opt.build_sources:
build_opts += '--build-sources '
if cdrom:
build_opts += '--skip-pbuilder '
try:
system(
'%s control build "%s" %s' %
(elbe_exe, prjdir, build_opts))
except CommandError:
print("elbe control build Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
print("Build started, waiting till it finishes")
try:
system('%s control wait_busy "%s"' % (elbe_exe, prjdir))
except CommandError:
print('elbe control wait_busy Failed', file=sys.stderr)
print('', file=sys.stderr)
print('The project will not be deleted from the initvm.',
file=sys.stderr)
print('The files, that have been built, can be downloaded using:',
file=sys.stderr)
print('%s control get_files --output "%s" "%s"' % (elbe_exe,
opt.outdir,
prjdir),
file=sys.stderr)
print("", file=sys.stderr)
print('The project can then be removed using:',
file=sys.stderr)
print('%s control del_project "%s"' % (elbe_exe, prjdir),
file=sys.stderr)
print("", file=sys.stderr)
sys.exit(10)
print("")
print("Build finished !")
print("")
if opt.build_sdk:
try:
system(
'%s control build_sdk "%s" %s' %
(elbe_exe, prjdir, build_opts))
except CommandError:
print("elbe control build_sdk Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
print("SDK Build started, waiting till it finishes")
try:
system('%s control wait_busy "%s"' % (elbe_exe, prjdir))
except CommandError:
print('elbe control wait_busy Failed, while waiting for the SDK',
file=sys.stderr)
print('', file=sys.stderr)
print('The project will not be deleted from the initvm.',
file=sys.stderr)
print('The files, that have been built, can be downloaded using:',
file=sys.stderr)
print('%s control get_files --output "%s" "%s"' % (elbe_exe,
opt.outdir,
prjdir),
file=sys.stderr)
print("", file=sys.stderr)
print('The project can then be removed using:',
file=sys.stderr)
print('%s control del_project "%s"' % (elbe_exe, prjdir),
file=sys.stderr)
print("", file=sys.stderr)
sys.exit(10)
print("")
print("SDK Build finished !")
print("")
try:
system(
'%s control dump_file "%s" validation.txt' %
(elbe_exe, prjdir))
except CommandError:
print(
"Project failed to generate validation.txt",
file=sys.stderr)
print("Getting log.txt", file=sys.stderr)
try:
system(
'%s control dump_file "%s" log.txt' %
(elbe_exe, prjdir))
except CommandError:
print("Failed to dump log.txt", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
if opt.skip_download:
print("")
print("Listing available files:")
print("")
try:
system('%s control get_files "%s"' % (elbe_exe, prjdir))
except CommandError:
print("elbe control get_files Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
print("")
print(
'Get Files with: elbe control get_file "%s" <filename>' %
prjdir)
else:
print("")
print("Getting generated Files")
print("")
ensure_outdir(opt)
try:
system('%s control get_files --output "%s" "%s"' % (
elbe_exe, opt.outdir, prjdir))
except CommandError:
print("elbe control get_files Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
if not opt.keep_files:
try:
system('%s control del_project "%s"' % (
elbe_exe, prjdir))
except CommandError:
print("remove project from initvm failed",
file=sys.stderr)
sys.exit(20)
def extract_cdrom(cdrom):
""" Extract cdrom iso image
returns a TmpdirFilesystem() object containing
the source.xml, which is also validated.
"""
tmp = TmpdirFilesystem()
system('7z x -o%s "%s" source.xml' % (tmp.path, cdrom))
print("", file=sys.stderr)
if not tmp.isfile('source.xml'):
print(
"Iso image does not contain a source.xml file",
file=sys.stderr)
print(
"This is not supported by 'elbe initvm'",
file=sys.stderr)
print("", file=sys.stderr)
print("Exiting !!!", file=sys.stderr)
sys.exit(20)
try:
exml = ElbeXML(
tmp.fname('source.xml'),
url_validation=ValidationMode.NO_CHECK)
except ValidationError as e:
print(
"Iso image does contain a source.xml file.",
file=sys.stderr)
print(
"But that xml does not validate correctly",
file=sys.stderr)
print("", file=sys.stderr)
print("Exiting !!!", file=sys.stderr)
print(e)
sys.exit(20)
print("Iso Image with valid source.xml detected !")
print(
"Image was generated using Elbe Version %s" %
exml.get_elbe_version())
return tmp
@InitVMAction.register('create')
class CreateAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node, initvmNeeded=False)
def execute(self, initvmdir, opt, args):
# pylint: disable=too-many-branches
# pylint: disable=too-many-statements
if self.initvm is not None:
print("Initvm is already defined for the libvirt domain '%s'.\n" % cfg['initvm_domain'])
print("If you want to build in your old initvm, "
"use `elbe initvm submit <xml>`.")
print("If you want to remove your old initvm from libvirt "
"run `virsh --connect qemu:///system undefine %s`.\n" % cfg['initvm_domain'])
print("You can specify another libvirt domain by setting the "
"ELBE_INITVM_DOMAIN environment variable to an unused domain name.\n")
print("Note:")
print("\t1) You can reimport your old initvm via "
"`virsh --connect qemu:///system define <file>`")
print("\t where <file> is the corresponding libvirt.xml")
print("\t2) virsh --connect qemu:///system undefine does not delete the image "
"of your old initvm.")
sys.exit(20)
# Upgrade from older versions which used tmux
try:
system("tmux has-session -t ElbeInitVMSession 2>/dev/null")
print ("ElbeInitVMSession exists in tmux. "
"It may belong to an old elbe version. "
"Please stop it to prevent interfering with this version.", file=sys.stderr)
sys.exit(20)
except CommandError:
pass
# Init cdrom to None, if we detect it, we set it
cdrom = None
if len(args) == 1:
if args[0].endswith('.xml'):
# We have an xml file, use that for elbe init
xmlfile = args[0]
try:
xml = etree(xmlfile)
except ValidationError as e:
print("XML file is invalid: %s" % str(e))
# Use default XML if no initvm was specified
if not xml.has("initvm"):
xmlfile = os.path.join(
elbepack.__path__[0], "init/default-init.xml")
elif args[0].endswith('.iso'):
# We have an iso image, extract xml from there.
tmp = extract_cdrom(args[0])
xmlfile = tmp.fname('source.xml')
cdrom = args[0]
else:
print(
"Unknown file ending (use either xml or iso)",
file=sys.stderr)
sys.exit(20)
else:
# No xml File was specified, build the default elbe-init-with-ssh
xmlfile = os.path.join(
elbepack.__path__[0],
"init/default-init.xml")
try:
init_opts = ''
if opt.devel:
init_opts += ' --devel'
if opt.nesting:
init_opts += ' --nesting'
if not opt.build_bin:
init_opts += ' --skip-build-bin'
if not opt.build_sources:
init_opts += ' --skip-build-source'
with PreprocessWrapper(xmlfile, opt) as ppw:
if cdrom:
system('%s init %s --directory "%s" --cdrom "%s" "%s"' %
(elbe_exe, init_opts, initvmdir, cdrom, ppw.preproc))
else:
system(
'%s init %s --directory "%s" "%s"' %
(elbe_exe, init_opts, initvmdir, ppw.preproc))
except CommandError:
print("'elbe init' Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
# Read xml file for libvirt
with open(os.path.join(initvmdir, 'libvirt.xml')) as f:
xml = f.read()
# Register initvm in libvirt
try:
self.conn.defineXML(xml)
except CommandError:
print('Registering initvm in libvirt failed', file=sys.stderr)
print('Try `virsh --connect qemu:///system undefine %s` to delete existing initvm' % cfg['initvm_domain'],
file=sys.stderr)
sys.exit(20)
# Build initvm
try:
system('cd "%s"; make' % (initvmdir))
except CommandError:
print("Building the initvm Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
try:
system('%s initvm start' % elbe_exe)
except CommandError:
print("Starting the initvm Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
if len(args) == 1:
# if provided xml file has no initvm section xmlfile is set to a
# default initvm XML file. But we need the original file here
if args[0].endswith('.xml'):
# stop here if no project node was specified
try:
x = etree(args[0])
except ValidationError as e:
print("XML file is invalid: %s" % str(e))
sys.exit(20)
if not x.has('project'):
print("elbe initvm ready: use 'elbe initvm submit "
"myproject.xml' to build a project")
sys.exit(0)
xmlfile = args[0]
elif cdrom is not None:
xmlfile = tmp.fname('source.xml')
submit_and_dl_result(xmlfile, cdrom, opt)
@InitVMAction.register('submit')
class SubmitAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, opt, args):
try:
system('%s initvm ensure' % elbe_exe)
except CommandError:
print("Starting the initvm Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
# Init cdrom to None, if we detect it, we set it
cdrom = None
if len(args) == 1:
if args[0].endswith('.xml'):
# We have an xml file, use that for elbe init
xmlfile = args[0]
elif args[0].endswith('.iso'):
# We have an iso image, extract xml from there.
tmp = extract_cdrom(args[0])
xmlfile = tmp.fname('source.xml')
cdrom = args[0]
else:
print(
"Unknown file ending (use either xml or iso)",
file=sys.stderr)
sys.exit(20)
submit_and_dl_result(xmlfile, cdrom, opt)
@InitVMAction.register('sync')
class SyncAction(InitVMAction):
def __init__(self, node):
super(SyncAction, self).__init__(node)
def execute(self, _initvmdir, opt, args):
top_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
try:
system("rsync --info=name1,stats1 --archive --times "
"--exclude='.git*' --exclude='*.pyc' --exclude='elbe-build*' "
"--exclude='initvm' --exclude='__pycache__' --exclude='docs' "
"--exclude='examples' "
"--rsh='ssh -p %s' --chown=root:root "
"%s/ root@localhost:/var/cache/elbe/devel" %
(cfg["sshport"], top_dir))
except CommandError as E:
print(E)
| gpl-3.0 | 8,525,162,397,998,769,000 | 33.263689 | 118 | 0.529711 | false | 4.074537 | false | false | false |
Capitains/Nautilus | capitains_nautilus/flask_ext.py | 1 | 7289 | from pkg_resources import resource_filename
import logging
from copy import deepcopy
from collections import defaultdict
from flask import Blueprint, Response
from capitains_nautilus.apis.cts import CTSApi
from capitains_nautilus.apis.dts import DTSApi
def _all_origins():
return "*"
class FlaskNautilus(object):
""" HTTP API Interfaces for MyCapytains resolvers
:param prefix: Prefix on which to install the extension
:param app: Application on which to register
:param name: Name to use for the blueprint
:param resolver: Resolver
:type resolver: Resolver
:param flask_caching: HTTP Cache should be a FlaskCaching Cache object
:type flask_caching: Cache
:cvar access_Control_Allow_Methods: Dictionary with route name and allowed methods over CORS
:cvar access_Control_Allow_Origin: Dictionary with route name and allowed host over CORS or "*"
:param logger: Logging handler.
:type logger: logging
:param apis: Set of APIs to connect to Nautilus
:type apis: set of classes
:cvar ROUTES: List of triple length tuples
:cvar Access_Control_Allow_Methods: Dictionary with route name and allowed methods over CORS
:cvar Access_Control_Allow_Origin: Dictionary with route name and allowed host over CORS or "*"
:cvar LoggingHandler: Logging handler to be set for the blueprint
:ivar logger: Logging handler
:type logger: logging.Logger
:ivar resolver: CapiTainS resolver
"""
Access_Control_Allow_Origin = "*"
LoggingHandler = logging.StreamHandler
def __init__(self, prefix="", app=None, name=None,
resolver=None,
flask_caching=None,
access_Control_Allow_Origin=None,
access_Control_Allow_Methods=None,
logger=None, apis=None
):
self._extensions = {}
self.logger = None
self.retriever = None
self.resolver = resolver
self.setLogger(logger)
self.name = name
self.prefix = prefix
self.blueprint = None
self.ROUTES = []
self.CACHED = []
self.routes = []
if apis is None:
from warnings import warn
warn(
"The parameter `apis` will need to be set-up explicitly starting 2.0.0",
DeprecationWarning
)
apis = {CTSApi(), DTSApi()}
self.Access_Control_Allow_Methods = access_Control_Allow_Methods
if not self.Access_Control_Allow_Methods:
self.Access_Control_Allow_Methods = {}
if access_Control_Allow_Origin:
self.Access_Control_Allow_Origin = defaultdict(_all_origins)
self.Access_Control_Allow_Origin.update(access_Control_Allow_Origin)
else:
self.Access_Control_Allow_Origin = FlaskNautilus.Access_Control_Allow_Origin
for api in apis:
api.init_extension(self)
self.__flask_caching__ = flask_caching
if self.name is None:
self.name = __name__
if app:
self.init_app(app=app)
def register(self, extension, extension_name):
""" Register an extension into the Nautilus Router
:param extension: Extension
:param extension_name: Name of the Extension
:return:
"""
self._extensions[extension_name] = extension
self.ROUTES.extend([
tuple(list(t) + [extension_name])
for t in extension.ROUTES
])
self.CACHED.extend([
(f_name, extension_name)
for f_name in extension.CACHED
])
# This order allows for user defaults to overwrite extension ones
self.Access_Control_Allow_Methods.update({
k: v
for k, v in extension.Access_Control_Allow_Methods.items()
if k not in self.Access_Control_Allow_Methods
})
@property
def flaskcache(self):
return self.__flask_caching__
def setLogger(self, logger):
""" Set up the Logger for the application
:param logger: logging.Logger object
:return: Logger instance
"""
self.logger = logger
if logger is None:
self.logger = logging.getLogger("capitains_nautilus")
formatter = logging.Formatter("[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s")
stream = FlaskNautilus.LoggingHandler()
stream.setLevel(logging.INFO)
stream.setFormatter(formatter)
self.logger.addHandler(stream)
if self.resolver:
self.resolver.logger = self.logger
return self.logger
def init_app(self, app):
""" Initiate the extension on the application
:param app: Flask Application
:return: Blueprint for Flask Nautilus registered in app
:rtype: Blueprint
"""
self.init_blueprint(app)
if self.flaskcache is not None:
for func, extension_name in self.CACHED:
func = getattr(self._extensions[extension_name], func)
setattr(
self._extensions[extension_name],
func.__name__,
self.flaskcache.memoize()(func)
)
return self.blueprint
def init_blueprint(self, app):
""" Properly generates the blueprint, registering routes and filters and connecting the app and the blueprint
:return: Blueprint of the extension
:rtype: Blueprint
"""
self.blueprint = Blueprint(
self.name,
self.name,
template_folder=resource_filename("capitains_nautilus", "data/templates"),
url_prefix=self.prefix
)
# Register routes
for url, name, methods, extension_name in self.ROUTES:
self.blueprint.add_url_rule(
url,
view_func=self.view(name, extension_name),
endpoint=name[2:],
methods=methods
)
app.register_blueprint(self.blueprint)
return self.blueprint
def view(self, function_name, extension_name):
""" Builds response according to a function name
:param function_name: Route name / function name
:param extension_name: Name of the extension holding the function
:return: Function
"""
if isinstance(self.Access_Control_Allow_Origin, dict):
d = {
"Access-Control-Allow-Origin": self.Access_Control_Allow_Origin[function_name],
"Access-Control-Allow-Methods": self.Access_Control_Allow_Methods[function_name]
}
else:
d = {
"Access-Control-Allow-Origin": self.Access_Control_Allow_Origin,
"Access-Control-Allow-Methods": self.Access_Control_Allow_Methods[function_name]
}
def r(*x, **y):
val = getattr(self._extensions[extension_name], function_name)(*x, **y)
if isinstance(val, Response):
val.headers.extend(d)
return val
else:
val = list(val)
val[2].update(d)
return tuple(val)
return r
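# A minimal wiring sketch (illustrative only; `my_resolver` stands for any
# MyCapytains/CapiTainS resolver instance and is not defined in this module):
#
# from flask import Flask
# app = Flask("Nautilus")
# nautilus = FlaskNautilus(
#     app=app,
#     resolver=my_resolver,
#     apis={CTSApi(), DTSApi()}
# )
# app.run()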
| mpl-2.0 | -642,994,500,773,671,200 | 32.131818 | 117 | 0.600494 | false | 4.406892 | false | false | false |
sbg2133/miscellaneous_projects | carina/ItoNH.py | 1 | 1115 | import numpy as np
import matplotlib.pyplot as plt
from astropy.io import fits
import aplpy
from astropy.wcs import WCS
import sys, os
from getIQU import IQU
from astropy import coordinates as coord
from astropy.coordinates import SkyCoord
from astropy import units as u
from scipy.interpolate import griddata
plt.ion()
root_dir = '/home/wizwit/miscellaneous_projects/carina/carinaData'
blast250_file = os.path.join(root_dir, 'smooth/3.0_arcmin/carinaneb_250_smoothed_3.0_rl.fits')
beta = 1.27
def getPsi(path_to_file):
I, Q, U, __, wcs = IQU(path_to_file)
Pvals = np.sqrt(Q**2 + U**2)
pvals = Pvals/I
# pvals /= pol_eff[band_idx]
psi = 0.5*np.arctan2(U,Q)
return I, Q, U, wcs, psi
I, __, __, wcs_250, __, = getPsi(blast250_file)
#tau_d = (nu/nu0)**beta
# See Walker pg. 71
# nu0 = frequency at which dust emission becomes optically thin
#nu0 = 0.103 * Td # 0.103 (THz/K) * Td
#Inu_dust = Bnu(Td)*(1.0 - np.exp(-1.0*tau_d))
# See Walker pg. 69
# Av = 1.086*tau_d
# N_H = 1.79e21 * Av # (atoms/cm**2 mag)
# 1) Solve tau_d for temperature
# 2) Plug into Inu_dust equation
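# A minimal sketch of the N_H estimate outlined above (assumptions: a single
# dust temperature Td in K, nu in Hz, and Inu already in SI units so it can be
# compared directly to the Planck function; calibration is not handled here).
def column_density(Inu, nu, Td=20.0):
    h = 6.626e-34   # Planck constant, J s
    k = 1.381e-23   # Boltzmann constant, J / K
    c = 2.998e8     # speed of light, m / s
    Bnu = (2.0 * h * nu**3 / c**2) / np.expm1(h * nu / (k * Td))  # Planck function
    tau_d = -np.log(1.0 - Inu / Bnu)   # invert Inu = Bnu * (1 - exp(-tau_d))
    Av = 1.086 * tau_d                 # Walker pg. 69
    return 1.79e21 * Av                # N_H in atoms / cm**2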
| gpl-3.0 | -8,403,188,222,198,603,000 | 24.340909 | 94 | 0.673543 | false | 2.488839 | false | false | false |
trasa/sprout | sprout/sprout/servicehosts.py | 1 | 1382 | import os
from fabric.api import *
def create_objects(cfg, service_hosts):
""" Turn a list of service host info into objects that can do
starting, stopping of services, or other things that
we think up.
"""
return [ServiceHost(
s['hostname'],
s['services'],
cfg.get_remote_user())
for s in service_hosts]
class ServiceHost(object):
def __init__(self, hostname, services, remote_user):
self.hostname = hostname
self.services = services
self.remote_user = remote_user
self.connected = False
def _connect(self):
if not self.connected:
# connect to self.hostname
env.user = self.remote_user
env.host_string = self.hostname
self.connected = True
def _run_service(self, service_name, state):
sudo('/sbin/service %s %s' % (service_name, state))
def _run_all_services(self, state):
for service_name in self.services:
self._run_service(service_name, state)
def start(self):
self._connect()
        self._run_all_services('start')
def stop(self):
self._connect()
self._run_all_services('stop')
def restart(self):
""" Restart the services on this host."""
self._connect()
self._run_all_services('restart')
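# Usage sketch (the cfg object and host entries below are illustrative
# assumptions, not values defined in this module):
#
# hosts = create_objects(cfg, [{'hostname': 'web01.example.com',
#                               'services': ['httpd', 'memcached']}])
# for host in hosts:
#     host.restart()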
| apache-2.0 | -7,280,064,624,537,456,000 | 26.64 | 65 | 0.575977 | false | 4.175227 | false | false | false |
hqcckes/python-scheduler | Server/message.py | 1 | 2650 | # coding=utf-8
import os
import sys
import codecs
import json
import logging
import logging.config
from rpyc import Service
from rpyc.utils.server import ThreadedServer
from ConfigParser import SafeConfigParser
class Message(Service):
@staticmethod
def exposed_send(message):
import urllib2
        logger.info(u"SMS content: " + message)
lx = u"0"
dlzh = cf.get(u"message", u"username")
dlmm = cf.get(u"message", u"password")
sjhm = cf.get(u"message", u"phone")
url = cf.get(u"message", u"url")
dxnr = urllib2.quote(message.encode(u"GB18030"))
fhls = u"0"
data = u"LX=" + lx + u"&DLZH=" + dlzh + u"&DLMM=" + dlmm + u"&SJHM=" + sjhm + u"&DXNR=" + dxnr + u"&FHLS=" + fhls
url = url + data
request = urllib2.Request(url)
response = urllib2.urlopen(request).read()
response = response.decode(u"GB18030")
if response == u"0":
            logger.info(u"Alert SMS sent successfully!")
else:
            logger.warning(u"Alert SMS failed to send, return code: " + response)
return response
def setup_logging(path=u"message.json", level=logging.INFO, env_key=u"LOG_CFG"):
"""
    Load the logging configuration.
    :param path: default path to the logging config file
    :param level: default logging level
    :param env_key: environment variable that can override the path
:return:
"""
value = os.getenv(env_key, None)
if value:
path = value
if os.path.exists(path):
with codecs.open(filename=path, mode=u"rb", encoding=u"utf8") as f:
config = json.load(f)
logging.config.dictConfig(config)
else:
logging.basicConfig(level=level)
def read_conf(path=u"message.conf"):
"""
    Load the configuration file.
    :param path: path to the configuration file
:return: ConfigParser
"""
if not os.path.exists(path):
        logger.error(u"Configuration file \"message.conf\" not found!")
sys.exit(2)
config = SafeConfigParser()
with codecs.open(path, u"rb", encoding=u"utf8") as c_file:
config.readfp(c_file)
return config
if __name__ == u'__main__':
    # OS path separator
sep = os.sep
    # Directory of this script, decoded with GB18030 to handle Chinese paths
c_path = os.path.split(os.path.realpath(__file__))[0].decode(u"GB18030")
    # Load the logging configuration
setup_logging(path=os.path.join(c_path, u"config/message.json"))
logger = logging.getLogger(__name__)
    # Load the configuration file
config_file = os.path.join(c_path, u"config/message.conf")
cf = read_conf(path=config_file)
service = ThreadedServer(Message, port=9999, auto_register=False)
service.start()
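# Client-side sketch (illustrative, not part of this module): connect to the
# ThreadedServer started above and invoke the exposed send() method via RPyC.
#
# import rpyc
# conn = rpyc.connect(u"127.0.0.1", 9999)
# print(conn.root.send(u"test message"))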
| gpl-3.0 | -404,603,665,324,014,900 | 26.066667 | 121 | 0.605911 | false | 2.673985 | true | false | false |
ContinuumIO/ashiba | enaml/enaml/qt/qt_stack_item.py | 1 | 3902 | #------------------------------------------------------------------------------
# Copyright (c) 2013, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#------------------------------------------------------------------------------
from atom.api import Typed
from enaml.widgets.stack_item import ProxyStackItem
from .QtGui import QFrame
from .q_single_widget_layout import QSingleWidgetLayout
from .qt_container import QtContainer
from .qt_widget import QtWidget
class QStackItem(QFrame):
""" A QFrame subclass which acts as an item QStack.
"""
def __init__(self, *args, **kwargs):
""" Initialize a QStackItem.
Parameters
----------
*args, **kwargs
The position and keyword arguments required to initialize
a QWidget.
"""
super(QStackItem, self).__init__(*args, **kwargs)
self._stack_widget = None
self.setLayout(QSingleWidgetLayout())
def stackWidget(self):
""" Get the stack widget for this stack item.
Returns
-------
result : QWidget or None
The stack widget being managed by this item.
"""
return self._stack_widget
def setStackWidget(self, widget):
""" Set the stack widget for this stack item.
Parameters
----------
widget : QWidget
The QWidget to use as the stack widget in this item.
"""
self._stack_widget = widget
self.layout().setWidget(widget)
class QtStackItem(QtWidget, ProxyStackItem):
""" A Qt implementation of an Enaml ProxyStackItem.
"""
#: A reference to the widget created by the proxy.
widget = Typed(QStackItem)
#--------------------------------------------------------------------------
# Initialization API
#--------------------------------------------------------------------------
def create_widget(self):
""" Create the underlying QStackItem widget.
"""
self.widget = QStackItem(self.parent_widget())
    def init_layout(self):
        """ Initialize the layout for the underlying widget.
"""
super(QtStackItem, self).init_layout()
self.widget.setStackWidget(self.stack_widget())
#--------------------------------------------------------------------------
# Utility Methods
#--------------------------------------------------------------------------
def stack_widget(self):
""" Find and return the stack widget child for this widget.
"""
d = self.declaration.stack_widget()
if d is not None:
return d.proxy.widget
#--------------------------------------------------------------------------
# Child Events
#--------------------------------------------------------------------------
def child_added(self, child):
""" Handle the child added event for a QtStackItem.
"""
super(QtStackItem, self).child_added(child)
if isinstance(child, QtContainer):
self.widget.setStackWidget(self.stack_widget())
    def child_removed(self, child):
        """ Handle the child removed event for a QtStackItem.
"""
super(QtStackItem, self).child_removed(child)
if isinstance(child, QtContainer):
self.widget.setStackWidget(self.stack_widget())
#--------------------------------------------------------------------------
# Widget Update Methods
#--------------------------------------------------------------------------
def set_visible(self, visible):
""" An overridden visibility setter.
This setter disables changing visibility on the widget since
the visibility is controlled entirely by the parent stack.
"""
pass
| bsd-3-clause | -7,036,452,119,728,805,000 | 30.467742 | 79 | 0.494106 | false | 5.330601 | false | false | false |
forgeservicelab/ansible-roles.django_saml_app | templates/settings.py | 1 | 5840 | """
Django settings for samldemo project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
import logging
logging.basicConfig(level=logging.DEBUG)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '@!poms#fy-w!ad&i945blb)arnx!(zj$37x1b$n9l_8*$2=m-0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'djangosaml2',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
#'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'samldemo.urls'
WSGI_APPLICATION = 'samldemo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'djangosaml2.backends.Saml2Backend',
)
LOGIN_URL = '/saml2/login/'
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.PickleSerializer'
import saml2
SAML_CONFIG = {
# full path to the xmlsec1 binary programm
'xmlsec_binary': '/usr/bin/xmlsec1',
# your entity id, usually your subdomain plus the url to the metadata view
'entityid': '{{ ansible_fqdn }}/saml2/metadata/',
# directory with attribute mapping
'attribute_map_dir': '/usr/local/lib/python2.7/dist-packages/saml2/attributemaps',
# this block states what services we provide
'service': {
# we are just a lonely SP
'sp' : {
'allow_unsolicited': True,
'name': 'Federated Django sample SP',
'endpoints': {
                # url and binding to the assertion consumer service view
# do not change the binding or service name
'assertion_consumer_service': [
('http://{{ ansible_fqdn }}/saml2/acs/',
saml2.BINDING_HTTP_POST),
],
# url and binding to the single logout service view
# do not change the binding or service name
'single_logout_service': [
('http://{{ ansible_fqdn }}/saml2/ls/',
saml2.BINDING_HTTP_REDIRECT),
],
},
# attributes that this project need to identify a user
'required_attributes': ['cn'],
# attributes that may be useful to have but not required
'optional_attributes': ['eduPersonAffiliation'],
# in this section the list of IdPs we talk to are defined
'idp': {
# we do not need a WAYF service since there is
# only an IdP defined here. This IdP should be
# present in our metadata
# the keys of this dictionary are entity ids
'{{ django_saml_app_idp }}/saml2/idp/metadata.php': {
'single_sign_on_service': {
saml2.BINDING_HTTP_REDIRECT: '{{ django_saml_app_idp }}/saml2/idp/SSOService.php',
},
'single_logout_service': {
saml2.BINDING_HTTP_REDIRECT: '{{ django_saml_app_idp }}/saml2/idp/SingleLogoutService.php',
},
},
},
},
},
# where the remote metadata is stored
'metadata': {
'local': [os.path.join(BASE_DIR, 'remote_metadata.xml')],
},
# set to 1 to output debugging information
'debug': 1,
# certificate
'key_file': os.path.join(BASE_DIR, 'key'),
'cert_file': os.path.join(BASE_DIR, 'cert.crt'),
# own metadata settings
'contact_person': [
{'given_name': 'Tomas',
'sur_name': 'Karasek',
'company': 'Digile',
'email_address': '[email protected]',
'contact_type': 'technical'},
{'given_name': 'Tomas',
'sur_name': 'Karasek',
'company': 'Digile',
'email_address': '[email protected]',
'contact_type': 'administrative'},
],
# you can set multilanguage information here
'organization': {
'name': [('Digile', 'en')],
'display_name': [('Digile', 'en')],
'url': [('http://forgeservicelab.fi', 'en')],
},
'valid_for': 24, # how long is our metadata valid
}
SAML_ATTRIBUTE_MAPPING = {
# cn is in the OID notation urn:oid:2.5.4.3
'cn': ('username', ),
'mail': ('email', ),
'givenName': ('first_name', ),
'sn': ('last_name', )
} | mit | 8,473,894,465,641,903,000 | 28.5 | 113 | 0.62089 | false | 3.582822 | false | false | false |
wil/pyroman | examples/example1/03_standard_chains.py | 1 | 1452 | """
Pyroman uses some standard chains, set in it's config.
These chains are used by the "allow()", "reject()" and "drop()" commandos
for nicer rule writing, and probably should do exactly that.
If you want maximal performance, you'll want to change these to ACCEPT and DROP
directly by calling 'Firewall.accept = "ACCEPT"' and removing the lines below.
The (small) benefits of using this approach is that you can easily disable
the rules (by modifying 'drop' and 'reject') without reloading your firewall
and that you get complete traffic counters in these chains.
The variables "Firewall.accept", "Firewall.drop" and "Firewall.reject" are
used here, so you can change them in one place only.
"""
Firewall.accept="accept"
add_chain(Firewall.accept)
# Kernel and iptables can do new string matches?
if Firewall.iptables_version(min="1.3.4") and \
Firewall.kernel_version(min="2.6.12"):
# Drop bittorrent traffic
iptables(Firewall.accept, '-m string --string "BitTorrent protocol" ' + \
'--algo bm --from 0 --to 100 -j DROP')
# add accept default rule to the chain
iptables(Firewall.accept, "-j ACCEPT")
# this is a silent drop
Firewall.drop="drop"
add_chain(Firewall.drop)
iptables(Firewall.drop, "-j DROP")
# .. these are clean "reject" rules (i.e. send 'connection refused' back)
Firewall.reject="reject"
add_chain(Firewall.reject)
iptables(Firewall.reject, "-p tcp -j REJECT --reject-with tcp-reset")
iptables(Firewall.reject, "-j REJECT")
| mit | 4,925,883,790,830,189,000 | 39.333333 | 79 | 0.74449 | false | 3.585185 | false | false | false |
ViderumGlobal/ckanext-requestdata | ckanext/requestdata/controllers/package.py | 1 | 3021 | from ckan.lib import base
from ckan.common import c, _
from ckan import logic
import ckan.model as model
import ckan.lib.helpers as h
from ckan.plugins import toolkit
from ckan.controllers.package import PackageController as _PackageController
import ckan.lib.navl.dictization_functions as dict_fns
from ckanext.requestdata.helpers import has_query_param
get_action = logic.get_action
NotAuthorized = logic.NotAuthorized
ValidationError = logic.ValidationError
clean_dict = logic.clean_dict
try:
# Support CKAN 2.6
redirect = base.redirect
except ImportError:
# Redirect is not redirect_to in CKAN 2.7
redirect = h.redirect_to
abort = base.abort
tuplize_dict = logic.tuplize_dict
parse_params = logic.parse_params
class PackageController(_PackageController):
def create_metadata_package(self):
# Handle metadata-only datasets
if has_query_param('metadata'):
package_type = 'requestdata-metadata-only'
form_vars = {
'errors': {},
'dataset_type': package_type,
'action': 'new',
'error_summary': {},
'data': {
'tag_string': '',
'group_id': None,
'type': package_type
},
'stage': ['active']
}
if toolkit.request.method == 'POST':
context = {'model': model, 'session': model.Session,
'user': c.user, 'auth_user_obj': c.userobj}
data_dict = clean_dict(dict_fns.unflatten(
tuplize_dict(parse_params(toolkit.request.POST))))
data_dict['type'] = package_type
try:
package = get_action('package_create')(context, data_dict)
url = h.url_for(controller='package', action='read',
id=package['name'])
redirect(url)
except NotAuthorized:
abort(403, _('Unauthorized to create a dataset.'))
except ValidationError, e:
errors = e.error_dict
error_summary = e.error_summary
form_vars = {
'errors': errors,
'dataset_type': package_type,
'action': 'new',
'error_summary': error_summary,
'stage': ['active']
}
form_vars['data'] = data_dict
extra_vars = {
'form_vars': form_vars,
'form_snippet': 'package/new_package_form.html',
'dataset_type': package_type
}
return toolkit.render('package/new.html',
extra_vars=extra_vars)
else:
return self.new()
else:
return self.new()
| agpl-3.0 | -8,142,442,439,098,392,000 | 33.329545 | 78 | 0.503807 | false | 4.619266 | false | false | false |
Motolea/pentagram | aplicatiepentagram/Pentagram/views.py | 1 | 3440 | from django.shortcuts import render
from django.contrib.auth.models import User
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework import status
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.authtoken.models import Token
from Pentagram.models import Photo
from Pentagram.models import Comment
from Pentagram.models import Like
from Pentagram.serializers import UserSerializer
from Pentagram.serializers import PhotoSerializer
from Pentagram.serializers import CommentSerializer
from rest_framework.permissions import AllowAny
from rest_framework.decorators import permission_classes
@api_view(['GET', 'POST'])
def photos(request):
if request.method == "GET":
photos = Photo.objects.all()
serializer = PhotoSerializer(photos, many=True)
return Response(status=status.HTTP_200_OK, data=serializer.data)
if request.method == "POST":
photo_serializer = PhotoSerializer(data=request.data)
if photo_serializer.is_valid():
photo_serializer.save()
return Response(status=status.HTTP_201_CREATED)
return Response(status=status.HTTP_400_BAD_REQUEST, data=photo_serializer.errors)
@api_view(['GET','POST'])
@permission_classes((AllowAny,))
def users(request):
if request.method == "GET":
users = User.objects.all()
serializer = UserSerializer(users, many=True)
return Response(status=status.HTTP_200_OK, data=serializer.data)
if request.method == "POST":
user_serializer = UserSerializer(data=request.data)
if user_serializer.is_valid():
user_serializer.save()
return Response(status=status.HTTP_201_CREATED)
return Response(status=status.HTTP_400_BAD_REQUEST, data=user_serializer.errors)
@api_view(['GET','POST'])
def comments(request, id_photo):
if request.method == "GET":
comments = Comment.objects.filter(photo_id=id_photo)
serializer = CommentSerializer(comments, many=True)
return Response(status=status.HTTP_200_OK, data=serializer.data)
if request.method == "POST":
        # request.POST is an immutable QueryDict, so copy the incoming data
        # before injecting the photo id that the serializer expects
        data = request.data.copy()
        data['photo'] = id_photo
        comment_serializer = CommentSerializer(data=data)
if comment_serializer.is_valid():
comment_serializer.save()
return Response(status=status.HTTP_201_CREATED)
return Response(status=status.HTTP_400_BAD_REQUEST, data=comment_serializer.errors)
@api_view(['GET', 'POST'])
def like(request, id_photo):
if request.method == 'GET':
counter = Like.objects.filter(photo_id=id_photo).count()
return Response(status=status.HTTP_200_OK, data=counter)
if request.method == 'POST':
if Like.objects.filter(photo=id_photo, user=request.user.id).count() == 0:
Like.objects.create(photo_id=id_photo, user=request.user).save()
return Response(status=status.HTTP_201_CREATED)
else:
Like.objects.filter(photo=id_photo, user=request.user.id).delete()
return Response(status=status.HTTP_205_RESET_CONTENT)
class CustomObtainAuthToken(ObtainAuthToken):
def post(self, request, *args, **kwargs):
response = super(CustomObtainAuthToken, self).post(request, *args, **kwargs)
token = Token.objects.get(key=response.data['token'])
return Response({'token': token.key, 'id': token.user_id})
| gpl-3.0 | 409,339,148,438,177,900 | 39.952381 | 91 | 0.701744 | false | 3.860831 | false | false | false |
hlange/LogSoCR | .waf/waflib/extras/ocaml.py | 1 | 9469 | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
"ocaml support"
import os, re
from waflib import Utils, Task
from waflib.Logs import error
from waflib.TaskGen import feature, before_method, after_method, extension
EXT_MLL = ['.mll']
EXT_MLY = ['.mly']
EXT_MLI = ['.mli']
EXT_MLC = ['.c']
EXT_ML = ['.ml']
open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
def filter_comments(txt):
meh = [0]
def repl(m):
if m.group(1): meh[0] += 1
elif m.group(2): meh[0] -= 1
elif not meh[0]: return m.group()
return ''
return foo.sub(repl, txt)
def scan(self):
node = self.inputs[0]
code = filter_comments(node.read())
global open_re
names = []
import_iterator = open_re.finditer(code)
if import_iterator:
for import_match in import_iterator:
names.append(import_match.group(1))
found_lst = []
raw_lst = []
for name in names:
nd = None
for x in self.incpaths:
nd = x.find_resource(name.lower()+'.ml')
if not nd: nd = x.find_resource(name+'.ml')
if nd:
found_lst.append(nd)
break
else:
raw_lst.append(name)
return (found_lst, raw_lst)
native_lst=['native', 'all', 'c_object']
bytecode_lst=['bytecode', 'all']
@feature('ocaml')
def init_ml(self):
Utils.def_attrs(self,
type = 'all',
incpaths_lst = [],
bld_incpaths_lst = [],
mlltasks = [],
mlytasks = [],
mlitasks = [],
native_tasks = [],
bytecode_tasks = [],
linktasks = [],
bytecode_env = None,
native_env = None,
compiled_tasks = [],
includes = '',
uselib = '',
are_deps_set = 0)
@feature('ocaml')
@after_method('init_ml')
def init_envs_ml(self):
self.islibrary = getattr(self, 'islibrary', False)
global native_lst, bytecode_lst
self.native_env = None
if self.type in native_lst:
self.native_env = self.env.derive()
if self.islibrary: self.native_env['OCALINKFLAGS'] = '-a'
self.bytecode_env = None
if self.type in bytecode_lst:
self.bytecode_env = self.env.derive()
if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'
if self.type == 'c_object':
self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
@feature('ocaml')
@before_method('apply_vars_ml')
@after_method('init_envs_ml')
def apply_incpaths_ml(self):
inc_lst = self.includes.split()
lst = self.incpaths_lst
for dir in inc_lst:
node = self.path.find_dir(dir)
if not node:
error("node not found: " + str(dir))
continue
if not node in lst:
lst.append(node)
self.bld_incpaths_lst.append(node)
# now the nodes are added to self.incpaths_lst
@feature('ocaml')
@before_method('process_source')
def apply_vars_ml(self):
for i in self.incpaths_lst:
if self.bytecode_env:
app = self.bytecode_env.append_value
app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
if self.native_env:
app = self.native_env.append_value
app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
for name in self.uselib.split():
for vname in varnames:
cnt = self.env[vname+'_'+name]
if cnt:
if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
if self.native_env: self.native_env.append_value(vname, cnt)
@feature('ocaml')
@after_method('process_source')
def apply_link_ml(self):
if self.bytecode_env:
ext = self.islibrary and '.cma' or '.run'
linktask = self.create_task('ocalink')
linktask.bytecode = 1
linktask.set_outputs(self.path.find_or_declare(self.target + ext))
linktask.env = self.bytecode_env
self.linktasks.append(linktask)
if self.native_env:
if self.type == 'c_object': ext = '.o'
elif self.islibrary: ext = '.cmxa'
else: ext = ''
linktask = self.create_task('ocalinkx')
linktask.set_outputs(self.path.find_or_declare(self.target + ext))
linktask.env = self.native_env
self.linktasks.append(linktask)
# we produce a .o file to be used by gcc
self.compiled_tasks.append(linktask)
@extension(*EXT_MLL)
def mll_hook(self, node):
mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'))
mll_task.env = self.native_env.derive()
self.mlltasks.append(mll_task)
self.source.append(mll_task.outputs[0])
@extension(*EXT_MLY)
def mly_hook(self, node):
mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')])
mly_task.env = self.native_env.derive()
self.mlytasks.append(mly_task)
self.source.append(mly_task.outputs[0])
task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'))
task.env = self.native_env.derive()
@extension(*EXT_MLI)
def mli_hook(self, node):
task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'))
task.env = self.native_env.derive()
self.mlitasks.append(task)
@extension(*EXT_MLC)
def mlc_hook(self, node):
task = self.create_task('ocamlcc', node, node.change_ext('.o'))
task.env = self.native_env.derive()
self.compiled_tasks.append(task)
@extension(*EXT_ML)
def ml_hook(self, node):
if self.native_env:
task = self.create_task('ocamlx', node, node.change_ext('.cmx'))
task.env = self.native_env.derive()
task.incpaths = self.bld_incpaths_lst
self.native_tasks.append(task)
if self.bytecode_env:
task = self.create_task('ocaml', node, node.change_ext('.cmo'))
task.env = self.bytecode_env.derive()
task.bytecode = 1
task.incpaths = self.bld_incpaths_lst
self.bytecode_tasks.append(task)
def compile_may_start(self):
if not getattr(self, 'flag_deps', ''):
self.flag_deps = 1
# the evil part is that we can only compute the dependencies after the
# source files can be read (this means actually producing the source files)
if getattr(self, 'bytecode', ''): alltasks = self.generator.bytecode_tasks
else: alltasks = self.generator.native_tasks
self.signature() # ensure that files are scanned - unfortunately
tree = self.generator.bld
for node in self.inputs:
lst = tree.node_deps[self.uid()]
for depnode in lst:
for t in alltasks:
if t == self: continue
if depnode in t.inputs:
self.set_run_after(t)
# TODO necessary to get the signature right - for now
delattr(self, 'cache_sig')
self.signature()
return Task.Task.runnable_status(self)
class ocamlx(Task.Task):
"""native caml compilation"""
color = 'GREEN'
run_str = '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
scan = scan
runnable_status = compile_may_start
class ocaml(Task.Task):
"""bytecode caml compilation"""
color = 'GREEN'
run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
scan = scan
runnable_status = compile_may_start
class ocamlcmi(Task.Task):
"""interface generator (the .i files?)"""
color = 'BLUE'
run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLINCLUDES} -o ${TGT} -c ${SRC}'
before = ['ocamlcc', 'ocaml', 'ocamlcc']
class ocamlcc(Task.Task):
"""ocaml to c interfaces"""
color = 'GREEN'
run_str = 'cd ${TGT[0].bld_dir()} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${OCAMLINCLUDES} -c ${SRC[0].abspath()}'
class ocamllex(Task.Task):
"""lexical generator"""
color = 'BLUE'
run_str = '${OCAMLLEX} ${SRC} -o ${TGT}'
before = ['ocamlcmi', 'ocaml', 'ocamlcc']
class ocamlyacc(Task.Task):
"""parser generator"""
color = 'BLUE'
run_str = '${OCAMLYACC} -b ${tsk.base()} ${SRC}'
before = ['ocamlcmi', 'ocaml', 'ocamlcc']
def base(self):
node = self.outputs[0]
s = os.path.splitext(node.name)[0]
return node.bld_dir() + os.sep + s
def link_may_start(self):
if getattr(self, 'bytecode', 0): alltasks = self.generator.bytecode_tasks
else: alltasks = self.generator.native_tasks
for x in alltasks:
if not x.hasrun:
return Task.ASK_LATER
if not getattr(self, 'order', ''):
# now reorder the inputs given the task dependencies
# this part is difficult, we do not have a total order on the tasks
# if the dependencies are wrong, this may not stop
seen = []
pendant = []+alltasks
while pendant:
task = pendant.pop(0)
if task in seen: continue
for x in task.run_after:
if not x in seen:
pendant.append(task)
break
else:
seen.append(task)
self.inputs = [x.outputs[0] for x in seen]
self.order = 1
return Task.Task.runnable_status(self)
class ocalink(Task.Task):
"""bytecode caml link"""
color = 'YELLOW'
run_str = '${OCAMLC} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS} ${SRC}'
runnable_status = link_may_start
after = ['ocaml', 'ocamlcc']
class ocalinkx(Task.Task):
"""native caml link"""
color = 'YELLOW'
run_str = '${OCAMLOPT} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS_OPT} ${SRC}'
runnable_status = link_may_start
after = ['ocamlx', 'ocamlcc']
def configure(conf):
opt = conf.find_program('ocamlopt', var='OCAMLOPT', mandatory=False)
occ = conf.find_program('ocamlc', var='OCAMLC', mandatory=False)
if (not opt) or (not occ):
conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
v = conf.env
v['OCAMLC'] = occ
v['OCAMLOPT'] = opt
v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX', mandatory=False)
v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC', mandatory=False)
v['OCAMLFLAGS'] = ''
where = conf.cmd_and_log(conf.env.OCAMLC + ['-where']).strip()+os.sep
v['OCAMLLIB'] = where
v['LIBPATH_OCAML'] = where
v['INCLUDES_OCAML'] = where
v['LIB_OCAML'] = 'camlrun'
| agpl-3.0 | 1,413,643,713,363,514,400 | 27.607251 | 117 | 0.659521 | false | 2.664322 | false | false | false |
vlegoff/tsunami | src/primaires/format/editeurs/floatedit/__init__.py | 1 | 4072 | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant l'éditeur 'floatedit'.
Si des redéfinitions de contexte-éditeur standard doivent être faites, elles
seront placées dans ce package
Note importante : ce package contient la définition d'un éditeur, mais
celui-ci peut très bien être étendu par d'autres modules. Au quel cas,
les extensions n'apparaîtront pas ici.
"""
from primaires.interpreteur.editeur.presentation import Presentation
from primaires.interpreteur.editeur.description import Description
from primaires.salle.editeurs.redit.edt_details import EdtDetails
class EdtFloatedit(Presentation):
"""Classe définissant l'éditeur de description flottante 'floatedit'."""
nom = "floatedit"
def __init__(self, personnage, flottante):
"""Constructeur de l'éditeur"""
if personnage:
instance_connexion = personnage.instance_connexion
else:
instance_connexion = None
Presentation.__init__(self, instance_connexion, flottante)
if personnage and flottante:
self.construire(flottante)
def __getnewargs__(self):
return (None, None)
def construire(self, flottante):
"""Construction de l'éditeur"""
# Description
description = self.ajouter_choix("description", "d", Description,
flottante)
description.parent = self
description.apercu = "{objet.description.paragraphes_indentes}"
description.aide_courte = \
"| |tit|" + "Description flottante '{}'".format(
flottante.cle).ljust(76) + "|ff||\n" + self.opts.separateur
        # Details
details = self.ajouter_choix("details", "e", EdtDetails, flottante,
"details")
details.parent = self
details.aide_courte = \
"Entrez le nom d'un |cmd|détail existant|ff| pour l'éditer ou " \
"un |cmd|nouveau détail|ff|\n" \
"pour le créer ; |ent|/|ff| pour revenir à la fenêtre parente.\n" \
"Options :\n" \
" - |ent|/s <détail existant> / <synonyme 1> (/ <synonyme 2> / " \
"...)|ff| : permet\n" \
" de modifier les synonymes du détail passée en paramètre. " \
"Pour chaque\n" \
" synonyme donné à l'option, s'il existe, il sera supprimé ; " \
"sinon, il sera\n" \
" ajouté à la liste.\n" \
" - |ent|/d <détail existant>|ff| : supprime le détail " \
"indiqué\n\n"
| bsd-3-clause | -354,768,347,464,347,840 | 41.505263 | 79 | 0.679297 | false | 3.539001 | false | false | false |
brunoabud/ic | plugins/ICGRAY2BGR/plugin_object.py | 1 | 1150 | # coding: utf-8
# Copyright (C) 2016 Bruno Abude Cardoso
#
# Imagem Cinemática is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Imagem Cinemática is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import cv2
class ICGRAY2BGR(object):
def __init__(self, plugin_path):
self.plugin_path = plugin_path
self.parameters = []
def parameter_changed(self, param_name, value):
return None
def apply_filter(self, frame):
colorspace, data, pos, timestamp = frame
data = cv2.cvtColor(data, cv2.COLOR_GRAY2BGR)
return ("BGR", data)
def release_plugin(self, error_level=0):
pass
| gpl-3.0 | 2,798,068,830,518,351,400 | 32.764706 | 75 | 0.705575 | false | 3.644444 | false | false | false |
meppe/ros-ort | src/frcnn/src/lib/pycocotools/mask.py | 1 | 4062 | __author__ = 'tsungyi'
import lib.pycocotools._mask as _mask
# Interface for manipulating masks stored in RLE format.
#
# RLE is a simple yet efficient format for storing binary masks. RLE
# first divides a vector (or vectorized image) into a series of piecewise
# constant regions and then for each piece simply stores the length of
# that piece. For example, given M=[0 0 1 1 1 0 1] the RLE counts would
# be [2 3 1 1], or for M=[1 1 1 1 1 1 0] the counts would be [0 6 1]
# (note that the odd counts are always the numbers of zeros). Instead of
# storing the counts directly, additional compression is achieved with a
# variable bitrate representation based on a common scheme called LEB128.
#
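# A minimal pure-Python sketch of the counts described above (illustrative
# only, not part of the toolbox): the real encoder works on column-major
# uint8 arrays and additionally compresses the counts with the LEB128-style
# scheme mentioned above.
def _rle_counts_sketch(m):
    # counts always start with the number of leading zeros (possibly 0)
    counts, prev, run = [], 0, 0
    for v in m:
        if v == prev:
            run += 1
        else:
            counts.append(run)
            prev, run = v, 1
    counts.append(run)
    return counts
# _rle_counts_sketch([0, 0, 1, 1, 1, 0, 1]) == [2, 3, 1, 1]
# _rle_counts_sketch([1, 1, 1, 1, 1, 1, 0]) == [0, 6, 1]
#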
# Compression is greatest given large piecewise constant regions.
# Specifically, the size of the RLE is proportional to the number of
# *boundaries* in M (or for an image the number of boundaries in the y
# direction). Assuming fairly simple shapes, the RLE representation is
# O(sqrt(n)) where n is number of pixels in the object. Hence space usage
# is substantially lower, especially for large simple objects (large n).
#
# Many common operations on masks can be computed directly using the RLE
# (without need for decoding). This includes computations such as area,
# union, intersection, etc. All of these operations are linear in the
# size of the RLE, in other words they are O(sqrt(n)) where n is the area
# of the object. Computing these operations on the original mask is O(n).
# Thus, using the RLE can result in substantial computational savings.
#
# The following API functions are defined:
# encode - Encode binary masks using RLE.
# decode - Decode binary masks encoded via RLE.
# merge - Compute union or intersection of encoded masks.
# iou - Compute intersection over union between masks.
# area - Compute area of encoded masks.
# toBbox - Get bounding boxes surrounding encoded masks.
# frPyObjects - Convert polygon, bbox, and uncompressed RLE to encoded RLE mask.
#
# Usage:
# Rs = encode( masks )
# masks = decode( Rs )
# R = merge( Rs, intersect=false )
# o = iou( dt, gt, iscrowd )
# a = area( Rs )
# bbs = toBbox( Rs )
# Rs = frPyObjects( [pyObjects], h, w )
#
# In the API the following formats are used:
# Rs - [dict] Run-length encoding of binary masks
# R - dict Run-length encoding of binary mask
# masks - [hxwxn] Binary mask(s) (must have type np.ndarray(dtype=uint8) in column-major order)
# iscrowd - [nx1] list of np.ndarray. 1 indicates corresponding gt image has crowd region to ignore
# bbs - [nx4] Bounding box(es) stored as [x y w h]
# poly - Polygon stored as [[x1 y1 x2 y2...],[x1 y1 ...],...] (2D list)
# dt,gt - May be either bounding boxes or encoded masks
# Both poly and bbs are 0-indexed (bbox=[0 0 1 1] encloses first pixel).
#
# Finally, a note about the intersection over union (iou) computation.
# The standard iou of a ground truth (gt) and detected (dt) object is
# iou(gt,dt) = area(intersect(gt,dt)) / area(union(gt,dt))
# For "crowd" regions, we use a modified criteria. If a gt object is
# marked as "iscrowd", we allow a dt to match any subregion of the gt.
# Choosing gt' in the crowd gt that best matches the dt can be done using
# gt'=intersect(dt,gt). Since by definition union(gt',dt)=dt, computing
# iou(gt,dt,iscrowd) = iou(gt',dt) = area(intersect(gt,dt)) / area(dt)
# For crowd gt regions we use this modified criteria above for the iou.
#
# To compile run "python setup.py build_ext --inplace"
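# A tiny sketch of the two criteria above on plain scalar areas (illustrative
# only; the real iou() bound below operates on encoded masks or boxes):
def _iou_sketch(area_dt, area_gt, area_intersect, iscrowd):
    if iscrowd:
        # crowd gt: dt may match any subregion of gt, so normalise by area(dt)
        return area_intersect / float(area_dt)
    return area_intersect / float(area_dt + area_gt - area_intersect)
#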
# Please do not contact us for help with compiling.
#
# Microsoft COCO Toolbox. version 2.0
# Data, paper, and tutorials available at: http://mscoco.org/
# Code written by Piotr Dollar and Tsung-Yi Lin, 2015.
# Licensed under the Simplified BSD License [see coco/license.txt]
encode = _mask.encode
decode = _mask.decode
iou = _mask.iou
merge = _mask.merge
area = _mask.area
toBbox = _mask.toBbox
frPyObjects = _mask.frPyObjects | gpl-3.0 | -2,878,177,237,587,652,600 | 48.54878 | 100 | 0.699655 | false | 3.334975 | false | false | false |
joshcai/utdcs | processing/views.py | 1 | 3030 | # Create your views here.
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.template import RequestContext, loader
from django.core.urlresolvers import reverse
from django.shortcuts import render, get_object_or_404
from django.db.models import Q
from website import secrets
import datetime
from processing.models import Post
#debugging
#import pdb
def render_post(current_post):
newpost = """var sketchProc=function(processingInstance){ with (processingInstance){
var xWidth=400;
var yHeight=400;
frameRate(45);
size(xWidth, yHeight);"""
newpost += current_post
newpost +="}};"
return newpost
def index(request, page_num=1):
post_entries = Post.objects.order_by('-date').exclude(deleted=True)
context ={
'post_entries': post_entries[(float(page_num)-1)*5:float(page_num)*5],
'page_num': page_num,
'request': request,
}
if float(page_num) > 1:
context['prev'] = True
if float(page_num)*5 < len(post_entries): # this can be optimized later - (code is already hitting database once)
context['next'] = True
return render(request, 'processing/index.html', context)
def submit(request):
if request.method == 'POST':
if request.POST['title'] and request.POST['content']:
d = datetime.datetime.now()
if request.POST['author']:
auth = request.POST['author']
else:
auth = "Anonymous"
p = Post(title=request.POST['title'],
content=request.POST['content'],
content_rendered=render_post(request.POST['content']),
author=auth,
date=d,
date_str=d.strftime('%B %d, %Y %I:%M%p'))
p.save()
return HttpResponseRedirect(reverse('processing:index'))
else:
context={
'title': request.POST['title'],
'content': request.POST['content'],
'error_message': "Title and content required<br />",
'url': reverse('processing:submit'),
'request': request,
}
return render(request, 'processing/newpost.html', context)
return render(request, 'processing/newpost.html', {'url': reverse('processing:submit'), 'request': request})
def login(request):
context={'request': request}
if request.method == 'POST':
if request.POST['password'] == secrets.login_password:
request.session['logged_in'] = True
return HttpResponseRedirect(reverse('blog:index'))
else:
context['error_message'] = "Invalid password<br />"
return render(request, 'blog/login.html', context)
def delete(request, post_id):
if 'logged_in' in request.session and request.session['logged_in']:
post = get_object_or_404(Post, pk=post_id)
post.deleted = True
post.save()
return HttpResponseRedirect(reverse('blog:index'))
def post(request, post_id):
post = get_object_or_404(Post, pk=post_id)
context={
'post': post,
'request': request,
}
query = Post.objects.all().exclude(deleted=True)
next = query.filter(pk__gt=post_id)
if next:
context['next'] = next[0]
prev = query.filter(pk__lt=post_id).order_by('id').reverse()
if prev:
context['prev'] = prev[0]
return render(request,'processing/post.html', context)
| mit | 2,868,276,417,460,019,000 | 30.894737 | 114 | 0.69604 | false | 3.196203 | false | false | false |
azumimuo/family-xbmc-addon | plugin.video.specto/resources/lib/sources/kissanime_tv.py | 1 | 4887 | # -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,json
from resources.lib.libraries import cleantitle
from resources.lib.libraries import client
from resources.lib.libraries import control
from resources.lib.libraries import cache
from resources.lib import resolvers
class source:
def __init__(self):
self.base_link = 'http://kissanime.io/'
#self.base_link = client.source(self.base_link, output='geturl')
self.search_link = '/wp-admin/admin-ajax.php'
self.movie_list = '/720p-1080p-bluray-movies-list/'
def get_movie(self, imdb, title, year):
try:
leter = title[0]
result = cache.get(self.filmxy_cache,9000,leter)
print "r1",result
years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
result = [i for i in result if cleantitle.movie(title) == cleantitle.movie(i[2])]
print "r2",result
result = [i[0] for i in result if any(x in i[1] for x in years)][0]
print "r3",result
url = client.replaceHTMLCodes(result)
url = url.encode('utf-8')
return url
except Exception as e:
control.log('Filmxy ERROR %s' % e)
return
def filmxy_cache(self, leter=''):
try:
url = urlparse.urljoin(self.base_link, self.search_link)
#control.log('>>>>>>>>>>>>---------- CACHE %s' % url)
headers = {'X-Requested-With':"XMLHttpRequest"}
params = {"action":"ajax_process2", "query":leter.upper()}
params = urllib.urlencode(params)
result = client.request(url, post=params, headers=headers)
result = client.parseDOM(result, 'p')
result = [(client.parseDOM(i, 'a', ret='href')[0], client.parseDOM(i, 'a')[0], client.parseDOM(i, 'a')[0]) for i in result]
result = [(re.sub('http.+?//.+?/','/', i[0]), re.findall("\(\d+\)", i[1]), i[2].split('(')[0]) for i in result]
#control.log('>>>>>>>>>>>>---------- CACHE-4 %s' % result)
result = [(i[0], i[1][0], i[2].strip()) for i in result if len(i[1]) > 0]
return result
except Exception as e:
control.log('Filmxy Cache ERROR %s' % e)
return
def get_sources(self, url, hosthdDict, hostDict, locDict):
try:
print "URL",url
sources = []
if url == None: return sources
url1 = urlparse.urljoin(self.base_link, url)
result = client.request(url1)
url1 = client.parseDOM(result, 'a', attrs = {'id': 'main-down'}, ret='href')[0]
print "LINKS1",url1
result = client.request(url1)
print "LINKS2", result
for quality in ['720p', '1080p']:
links = client.parseDOM(result, 'div', attrs = {'class': '.+?'+quality})[0]
links = client.parseDOM(links, 'li')
links = [(client.parseDOM(i, 'a', ret='href')[0]) for i in links]
if '1080p' in quality: q = '1080p'
elif '720p' in quality or 'hd' in quality: q = 'HD'
else: q = 'SD'
for j in links:
print "j",j
host = j.split('/')[2]
host = host.strip().lower()
host = client.replaceHTMLCodes(host)
if not host in hostDict: raise Exception()
host = host.encode('utf-8')
print "HOST",host, j
sources.append({'source': host, 'quality': q, 'provider': 'Filmxy', 'url': j})
print "LINKS3", links
return sources
except Exception as e:
control.log('Filmxy Source ERROR %s' % e)
return sources
def resolve(self, url):
try:
#url = client.request(url, output='geturl')
#if 'requiressl=yes' in url: url = url.replace('http://', 'https://')
#else: url = url.replace('https://', 'http://')
url = resolvers.request(url)
return url
except:
return
| gpl-2.0 | 5,392,704,476,561,538,000 | 37.179688 | 135 | 0.544506 | false | 3.845004 | false | false | false |
openmips/stbgui | lib/python/Components/Renderer/NextEpgInfo.py | 2 | 3268 | from Components.VariableText import VariableText
from Renderer import Renderer
from enigma import eLabel, eEPGCache, eServiceReference
from time import localtime, strftime
from skin import parseColor
class NextEpgInfo(Renderer, VariableText):
def __init__(self):
Renderer.__init__(self)
VariableText.__init__(self)
self.epgcache = eEPGCache.getInstance()
self.numberOfItems = 1
self.hideLabel = 0
self.timecolor = ""
self.labelcolor = ""
self.foregroundColor = "00?0?0?0"
self.numOfSpaces = 1
GUI_WIDGET = eLabel
def changed(self, what):
self.text = ""
reference = self.source.service
info = reference and self.source.info
if info:
currentEvent = self.source.getCurrentEvent()
if currentEvent:
if not self.epgcache.startTimeQuery(eServiceReference(reference.toString()), currentEvent.getBeginTime() + currentEvent.getDuration()):
spaces = " "*self.numOfSpaces
if self.numberOfItems == 1:
event = self.epgcache.getNextTimeEntry()
if event:
if self.hideLabel:
self.text = "%s%s%s%s%s" % (self.timecolor, strftime("%H:%M", localtime(event.getBeginTime())), spaces, self.foregroundColor, event.getEventName())
else:
self.text = "%s%s:%s%s%s" % (self.labelcolor, pgettext("now/next: 'next' event label", "Next"), spaces, self.foregroundColor, event.getEventName())
else:
for x in range(self.numberOfItems):
event = self.epgcache.getNextTimeEntry()
if event:
self.text += "%s%s%s%s%s\n" % (self.timecolor, strftime("%H:%M", localtime(event.getBeginTime())), spaces, self.foregroundColor, event.getEventName())
if not self.hideLabel:
self.text = self.text and "%s%s\n%s" % (self.labelcolor, pgettext("now/next: 'next' event label", "Next"), self.text) or ""
def applySkin(self, desktop, parent):
attribs = []
for (attrib, value) in self.skinAttributes:
if attrib == "NumberOfItems":
self.numberOfItems = int(value)
attribs.append((attrib, value))
if attrib == "noLabel":
self.hideLabel = int(value)
attribs.append((attrib, value))
if attrib == "numOfSpaces":
self.numOfSpaces = int(value)
attribs.append((attrib, value))
if attrib == "timeColor":
self.timecolor = self.hex2strColor(parseColor(value).argb())
attribs.append((attrib, value))
if attrib == "labelColor":
self.labelcolor = self.hex2strColor(parseColor(value).argb())
attribs.append((attrib, value))
if attrib == "foregroundColor":
self.foregroundColor = self.hex2strColor(parseColor(value).argb())
attribs.append((attrib, value))
for (attrib, value) in attribs:
self.skinAttributes.remove((attrib, value))
self.timecolor = self.formatColorString(self.timecolor)
self.labelcolor = self.formatColorString(self.labelcolor)
self.foregroundColor = self.formatColorString(self.foregroundColor)
return Renderer.applySkin(self, desktop, parent)
# hex:
# 0 1 2 3 4 5 6 7 8 9 a b c d e f
# converts to:
# 0 1 2 3 4 5 6 7 8 9 : ; < = > ?
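	# e.g. hex2strColor(0x00ff0000) -> "00??0000"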
def hex2strColor(self, rgb):
out = ""
for i in range(28,-1,-4):
out += "%s" % chr(0x30 + (rgb>>i & 0xf))
return out
def formatColorString(self, color):
if color:
return "%s%s" % ('\c', color)
return "%s%s" % ('\c', self.foregroundColor) | gpl-2.0 | 6,980,544,276,022,878,000 | 37.011628 | 158 | 0.678703 | false | 3.091769 | false | false | false |
WilliamMayor/pinscher | pinscher/Keyfile.py | 1 | 1219 | import string
import pickle
import os
import utilities
class Keyfile:
LENGTH = 32
CHARACTERS = string.digits + string.letters + string.punctuation + ' '
@staticmethod
def create(path, database_path, **kwargs):
k = Keyfile()
k.path = path
k.database_path = os.path.abspath(database_path)
k.key = kwargs.get('key', utilities.generate_key())
k.iv = kwargs.get('iv', utilities.generate_iv())
k.length = kwargs.get('length', Keyfile.LENGTH)
k.characters = kwargs.get('characters', Keyfile.CHARACTERS)
k.save()
return Keyfile.load(path)
@staticmethod
def load(path):
k = pickle.load(open(path, 'rb'))
k.path = path
return k
def __getstate__(self):
_dict = self.__dict__.copy()
del _dict['path']
return _dict
def __setstate__(self, _dict):
self.__dict__.update(_dict)
def __hash__(self):
return self.path.__hash__()
def __eq__(self, other):
return self.path == other.path
def save(self):
pickle.dump(self, open(self.path, 'wb'))
def delete(self):
os.remove(self.path)
os.remove(self.database_path)
| gpl-3.0 | -6,969,443,440,557,586,000 | 23.38 | 74 | 0.575062 | false | 3.693939 | false | false | false |
PyCQA/astroid | astroid/brain/brain_type.py | 1 | 2187 | """
Astroid hooks for type support.
Starting from python3.9, type object behaves as it had __class_getitem__ method.
However it was not possible to simply add this method inside type's body, otherwise
all types would also have this method. In this case it would have been possible
to write str[int].
Guido Van Rossum proposed a hack to handle this in the interpreter:
https://github.com/python/cpython/blob/67e394562d67cbcd0ac8114e5439494e7645b8f5/Objects/abstract.c#L181-L184
This brain follows the same logic. It is no wise to add permanently the __class_getitem__ method
to the type object. Instead we choose to add it only in the case of a subscript node
which inside name node is type.
Doing this type[int] is allowed whereas str[int] is not.
Thanks to Lukasz Langa for fruitful discussion.
"""
from astroid import extract_node, inference_tip, nodes
from astroid.const import PY39_PLUS
from astroid.exceptions import UseInferenceDefault
from astroid.manager import AstroidManager
def _looks_like_type_subscript(node):
"""
Try to figure out if a Name node is used inside a type related subscript
:param node: node to check
:type node: astroid.node_classes.NodeNG
:return: true if the node is a Name node inside a type related subscript
:rtype: bool
"""
if isinstance(node, nodes.Name) and isinstance(node.parent, nodes.Subscript):
return node.name == "type"
return False
def infer_type_sub(node, context=None):
"""
Infer a type[...] subscript
:param node: node to infer
:type node: astroid.node_classes.NodeNG
:param context: inference context
:type context: astroid.context.InferenceContext
:return: the inferred node
:rtype: nodes.NodeNG
"""
node_scope, _ = node.scope().lookup("type")
if node_scope.qname() != "builtins":
raise UseInferenceDefault()
class_src = """
class type:
def __class_getitem__(cls, key):
return cls
"""
node = extract_node(class_src)
return node.infer(context=context)
if PY39_PLUS:
AstroidManager().register_transform(
nodes.Name, inference_tip(infer_type_sub), _looks_like_type_subscript
)
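# Minimal usage sketch (illustrative; assumes Python 3.9+ so the transform
# above is actually registered):
#
#     from astroid import extract_node
#     subscript = extract_node("type[int]")
#     inferred = next(subscript.infer())   # the synthetic `type` ClassDef
#
# whereas inferring "str[int]" still fails, because _looks_like_type_subscript
# only matches a Name node that is literally `type`.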
| lgpl-2.1 | -6,311,474,582,028,243,000 | 32.646154 | 108 | 0.715592 | false | 3.663317 | false | false | false |
gwwfps/boxrps | admin.py | 1 | 15675 | #!/usr/bin/env python
import logging
import yaml
import cgi
from xml.dom import minidom as md
from datetime import datetime, timedelta
from collections import defaultdict
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.ext import db
from django.utils import simplejson
from models import *
from utils import render_to, parse_item
class AdminHandler(webapp.RequestHandler):
def get(self):
self.response.out.write('Hello world!')
class ParseHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'admin/parse.html')
def post(self):
all_members = Member.all()
parsed = md.parseString(self.request.get('log').encode('utf-8'))
members = []
for member in parsed.getElementsByTagName('member'):
try:
name = member.firstChild.firstChild
except AttributeError:
continue
if name is None:
continue
name = name.toxml().strip().capitalize()
class_ = member.childNodes[1].firstChild.toxml().upper()
m = Member.gql('WHERE name = :1', name).get()
if not m:
new_member = Member(name=name, class_=class_)
new_member.put()
else:
m.class_=class_
m.put()
members.append(name)
items = []
for item in parsed.getElementsByTagName('item'):
try:
name = item.firstChild.firstChild.toxml()
except AttributeError:
continue
time = item.childNodes[1].firstChild.toxml()
looter = item.childNodes[2].firstChild.toxml()
pt = item.childNodes[3].firstChild.toxml()
items.append(parse_item(name) + (time, looter, pt))
render_to(self.response, 'admin/parseadd.html',
members=set(members), all_members=all_members, events=Event.all().order('name'),
datetime=parsed.getElementsByTagName('start')[0].firstChild.toxml(),
items=items)
class RaidHanlder(webapp.RequestHandler):
def get(self):
pass
def post(self):
pass
class EventHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'admin/events.html', events=Event.all())
def post(self):
batch = self.request.get('batch')
batch = batch.split('\n')
for line in batch:
event, pt = line.split('\t')
Event(name=cgi.escape(event), default_pt = int(float(pt.strip()))).put()
self.get()
class AjaxHandler(webapp.RequestHandler):
def post(self):
action = self.request.get('action')
if action == 'addevent':
event = Event(name=self.request.get('name'),
default_pt=int(self.request.get('pt')))
event.put()
elif action == 'geteventpt':
event = Event.get(db.Key(self.request.get('key')))
if event:
self.response.out.write(simplejson.dumps({'pt':event.default_pt}))
elif action == 'addraid':
date = datetime.strptime(self.request.get('date'), '%Y.%m.%d %H:%M')
pt = int(self.request.get('pt'))
note = self.request.get('note')
members = self.request.get('members').split('|')[0:-1]
loot = self.request.get('loot').split('|')[0:-1]
memcache = {}
for m in Member.all():
memcache[m.name] = m
key = self.request.get('key')
if key:
encounter = Encounter.get(db.Key(key))
else:
encounter = None
if encounter:
delta = 0
oldpt = encounter.pt
if not encounter.pt == pt:
delta = pt - encounter.pt
encounter.pt = pt
encounter.note = note
encounter.datetime = date
old_members = set([m.name for m in encounter.attending_members()])
members = set([member.strip().capitalize() for member in members])
remaining = old_members & members
newly_added = members - old_members
removed = old_members - members
for m in remaining:
member = memcache[m]
member.earned += delta
member.balance += delta
member.put()
for m in newly_added:
nm = memcache.get(m.strip().capitalize())
if not nm:
nm = Member(name=m)
memcache[m] = nm
nm.earned += pt
nm.balance += pt
nm.put()
encounter.attendees.append(nm.key())
for m in removed:
dm = memcache[m]
dm.earned -= oldpt
dm.balance -= oldpt
dm.put()
encounter.attendees.remove(dm.key())
encounter.put()
Member.recalculate_attendance()
lset = {}
for l in encounter.loots:
lset[str(l.key())] = l
plset = set(lset.keys())
for piece in loot:
_, name, time, looter, cost, lkey = piece.split(';')
looter = looter.strip().capitalize()
cost = int(cost)*(-1)
time = datetime.strptime(time, '%Y.%m.%d %H:%M')
if lkey in lset:
plset.remove(lkey)
l = lset[lkey]
if not l.looter.name == looter or not l.cost == cost:
m = l.looter
m.spent -= l.cost
m.balance -= l.cost
m.put()
ltr = memcache[looter]
ltr.spent += cost
ltr.balance += cost
ltr.put()
l.looter = ltr
l.cost = cost
l.put()
else:
item = Item.gql('WHERE name = :1', name).get()
if not item:
item = Item(name=name, gid=0, default_cost=cost)
item.put()
looter = memcache[looter]
looter.spent += cost
looter.balance += cost
looter.put()
loot = Loot(encounter=encounter, cost=cost, looter=looter,
datetime=time, item=item)
loot.put()
for rkey in plset:
l = lset[rkey]
m = l.looter
m.spent -= l.cost
m.balance -= l.cost
m.put()
l.delete()
else:
event = Event.get(db.Key(self.request.get('event')))
attendees = []
for member in members:
m = memcache.get(member.strip().capitalize())
if not m:
m = Member(name=member)
memcache[member.strip().capitalize()] = m
m.earned += pt
m.balance += pt
m.put()
attendees.append(m.key())
encounter = Encounter(event=event, note=note, pt=pt, datetime=date,
attendees=attendees)
encounter.put()
Member.recalculate_attendance()
for piece in loot:
logging.info(piece.encode('utf-8'))
id, name, time, looter, cost, _ = piece.split(';')
looter = looter.strip().capitalize()
try:
id = int(id)
except ValueError:
id = 0
time = datetime.strptime(time, '%Y.%m.%d %H:%M')
looter = memcache[looter]
cost = int(cost)*(-1)
item = Item.gql('WHERE name = :1', name).get()
if item:
if id:
item.gid = id
item.put()
else:
item = Item(name=name, gid=id, default_cost=cost)
item.put()
looter.spent += cost
looter.balance += cost
looter.put()
loot = Loot(encounter=encounter, cost=cost, looter=looter,
datetime=time, item=item)
loot.put()
self.response.out.write(simplejson.dumps({'key': str(encounter.key())}))
elif action == "deladjustment":
aid = self.request.get('aid')
adj = Adjustment.get(db.Key(aid))
m = adj.member
m.balance -= adj.pt
m.adjusted -= adj.pt
m.put()
adj.delete()
self.response.out.write(simplejson.dumps({}))
class ImportHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'admin/import.html')
def post(self):
text = self.request.get('import')
for line in text.split('\n'):
line = line.split('\t')
name = line[2].capitalize()
earned = int(float(line[6]))
spent = (-1)*int(float(line[7]))
adjusted = int(float(line[8]))
balance = int(float(line[9]))
m = Member.gql('WHERE name = :1', name).get()
if m:
m.earned = earned
m.spent = spent
m.balance = balance
m.adjusted = adjusted
else:
m = Member(name=name, spent=spent, earned=earned,
balance=balance, adjusted=adjusted)
m.put()
self.get()
class AdjustmentHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'admin/adjust.html', members=Member.all(),
adjustments=Adjustment.all())
def post(self):
member = Member.gql('WHERE name = :1', self.request.get('member').capitalize()).get()
if member:
pt = int(self.request.get('pt'))
reason = self.request.get('reason')
dt = datetime.now()
Adjustment(pt=pt, member=member, reason=reason, datetime=dt).put()
member.adjusted += pt
member.balance += pt
member.usable = min(member.balance, member.attendance * member.balance / 100)
member.put()
self.get()
class YamlHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'dump.html',
dump='<form action="/o/yaml" method="POST"><input type="submit" /> </form>')
def post(self):
stream = file('rps.yaml', 'r')
data = yaml.load(stream)
id_to_key = {}
items = defaultdict(list)
att = defaultdict(list)
memcache = {}
for m in Member.all():
memcache[m.name] = m.key()
for entry in data:
if 'adjustment_id' in entry:
continue
# member = Member.gql('WHERE name = :1', entry['member_name']).get()
# adj = Adjustment(pt=int(entry['adjustment_value']),
# reason=entry['adjustment_reason'],
# datetime=datetime.fromtimestamp(entry['adjustment_date']),
# member=member)
# adj.put()
# elif 'item_id' in entry:
if 'item_id' in entry:
items[entry['raid_id']].append((entry['item_name'],
entry['item_buyer'],
entry['item_value'],
entry['item_date']))
elif 'raid_added_by' in entry:
event = Event.gql('WHERE name = :1', entry['raid_name']).get()
if event:
if not entry['raid_note']:
entry['raid_note'] = ''
raid = Encounter(event=event, note=entry['raid_note'],
pt=int(entry['raid_value']),
datetime=datetime.fromtimestamp(entry['raid_date']))
raid.put()
id_to_key[entry['raid_id']] = raid.key()
else:
logging.error(entry)
elif 'member_lastraid' in entry:
continue
else:
try:
att[entry['raid_id']].append(entry['member_name'])
except KeyError:
logging.error(entry)
for rid, key in id_to_key.items():
r = Encounter.get(key)
for member in att[rid]:
m = memcache[member.capitalize()]
if m:
r.attendees.append(m)
else:
logging.error(member)
r.put()
for name, buyer, value, date in items[rid]:
try:
value = int(float(value))*(-1)
except UnicodeEncodeError:
logging.error(name)
item = Item.gql('WHERE name = :1', name).get()
if not item:
item = Item(name=name, default_cost=value)
item.put()
loot = Loot(item=item, encounter=key, cost=value,
looter=memcache[buyer],
datetime=datetime.fromtimestamp(date))
loot.put()
render_to(self.response, 'dump.html', dump=data)
class EditRaidHandler(webapp.RequestHandler):
def get(self, key):
raid = Encounter.get(db.Key(key))
if raid:
render_to(self.response, 'admin/parseadd.html', key=key,
members=set([m.name for m in raid.attending_members()]),
all_members=Member.all(), events=Event.all(),
datetime=raid.datetime.strftime('%Y.%m.%d %H:%M'),
items=[(i.item.gid, i.item.name, i.datetime.strftime('%Y.%m.%d %H:%M'), i.looter.name, (-1)*i.cost, str(i.key())) for i in raid.loots],
raid=raid)
def main():
application = webapp.WSGIApplication([('/o/', AdminHandler),
('/o/parse', ParseHandler),
('/o/events', EventHandler),
('/o/ajax', AjaxHandler),
('/o/import', ImportHandler),
('/o/adjust', AdjustmentHandler),
('/o/yaml', YamlHandler),
('/o/editraid/(.+)', EditRaidHandler)],
debug=True)
util.run_wsgi_app(application)
if __name__ == '__main__':
main()
| mit | -3,077,891,950,705,445,000 | 38.987245 | 157 | 0.45008 | false | 4.490117 | false | false | false |
googleads/google-ads-python | google/ads/googleads/v7/services/types/campaign_budget_service.py | 1 | 6329 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v7.enums.types import (
response_content_type as gage_response_content_type,
)
from google.ads.googleads.v7.resources.types import (
campaign_budget as gagr_campaign_budget,
)
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
from google.rpc import status_pb2 as status # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v7.services",
marshal="google.ads.googleads.v7",
manifest={
"GetCampaignBudgetRequest",
"MutateCampaignBudgetsRequest",
"CampaignBudgetOperation",
"MutateCampaignBudgetsResponse",
"MutateCampaignBudgetResult",
},
)
class GetCampaignBudgetRequest(proto.Message):
r"""Request message for
[CampaignBudgetService.GetCampaignBudget][google.ads.googleads.v7.services.CampaignBudgetService.GetCampaignBudget].
Attributes:
resource_name (str):
Required. The resource name of the campaign
budget to fetch.
"""
resource_name = proto.Field(proto.STRING, number=1,)
class MutateCampaignBudgetsRequest(proto.Message):
r"""Request message for
[CampaignBudgetService.MutateCampaignBudgets][google.ads.googleads.v7.services.CampaignBudgetService.MutateCampaignBudgets].
Attributes:
customer_id (str):
Required. The ID of the customer whose
campaign budgets are being modified.
operations (Sequence[google.ads.googleads.v7.services.types.CampaignBudgetOperation]):
Required. The list of operations to perform
on individual campaign budgets.
partial_failure (bool):
If true, successful operations will be
carried out and invalid operations will return
errors. If false, all operations will be carried
out in one transaction if and only if they are
all valid. Default is false.
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
response_content_type (google.ads.googleads.v7.enums.types.ResponseContentTypeEnum.ResponseContentType):
The response content type setting. Determines
whether the mutable resource or just the
resource name should be returned post mutation.
"""
customer_id = proto.Field(proto.STRING, number=1,)
operations = proto.RepeatedField(
proto.MESSAGE, number=2, message="CampaignBudgetOperation",
)
partial_failure = proto.Field(proto.BOOL, number=3,)
validate_only = proto.Field(proto.BOOL, number=4,)
response_content_type = proto.Field(
proto.ENUM,
number=5,
enum=gage_response_content_type.ResponseContentTypeEnum.ResponseContentType,
)
class CampaignBudgetOperation(proto.Message):
r"""A single operation (create, update, remove) on a campaign
budget.
Attributes:
update_mask (google.protobuf.field_mask_pb2.FieldMask):
FieldMask that determines which resource
fields are modified in an update.
create (google.ads.googleads.v7.resources.types.CampaignBudget):
Create operation: No resource name is
expected for the new budget.
update (google.ads.googleads.v7.resources.types.CampaignBudget):
Update operation: The campaign budget is
expected to have a valid resource name.
remove (str):
Remove operation: A resource name for the removed budget is
expected, in this format:
``customers/{customer_id}/campaignBudgets/{budget_id}``
"""
update_mask = proto.Field(
proto.MESSAGE, number=4, message=field_mask.FieldMask,
)
create = proto.Field(
proto.MESSAGE,
number=1,
oneof="operation",
message=gagr_campaign_budget.CampaignBudget,
)
update = proto.Field(
proto.MESSAGE,
number=2,
oneof="operation",
message=gagr_campaign_budget.CampaignBudget,
)
remove = proto.Field(proto.STRING, number=3, oneof="operation",)
class MutateCampaignBudgetsResponse(proto.Message):
r"""Response message for campaign budget mutate.
Attributes:
partial_failure_error (google.rpc.status_pb2.Status):
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (e.g. auth errors), we return an RPC
level error.
results (Sequence[google.ads.googleads.v7.services.types.MutateCampaignBudgetResult]):
All results for the mutate.
"""
partial_failure_error = proto.Field(
proto.MESSAGE, number=3, message=status.Status,
)
results = proto.RepeatedField(
proto.MESSAGE, number=2, message="MutateCampaignBudgetResult",
)
class MutateCampaignBudgetResult(proto.Message):
r"""The result for the campaign budget mutate.
Attributes:
resource_name (str):
Returned for successful operations.
campaign_budget (google.ads.googleads.v7.resources.types.CampaignBudget):
The mutated campaign budget with only mutable fields after
mutate. The field will only be returned when
response_content_type is set to "MUTABLE_RESOURCE".
"""
resource_name = proto.Field(proto.STRING, number=1,)
campaign_budget = proto.Field(
proto.MESSAGE, number=2, message=gagr_campaign_budget.CampaignBudget,
)
__all__ = tuple(sorted(__protobuf__.manifest))
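# Illustrative usage sketch (the budget field values below are assumptions,
# not taken from this file): a create operation is wrapped in the oneof and
# sent to CampaignBudgetService.MutateCampaignBudgets.
#
#     operation = CampaignBudgetOperation(
#         create=gagr_campaign_budget.CampaignBudget(
#             name="Example budget",
#             amount_micros=500000000,
#         )
#     )
#     request = MutateCampaignBudgetsRequest(
#         customer_id="1234567890",
#         operations=[operation],
#     )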
| apache-2.0 | -8,298,692,412,461,880,000 | 36.229412 | 128 | 0.681624 | false | 4.070096 | false | false | false |
Micronaet/micronaet-production | production_accounting_external_closed/report/production_parser.py | 1 | 2846 | # -*- coding: utf-8 -*-
###############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2001-2015 Micronaet S.r.l. (<http://www.micronaet.it>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import os
import sys
import logging
import openerp
import openerp.netsvc as netsvc
import openerp.addons.decimal_precision as dp
from openerp.report import report_sxw
from openerp.osv import fields, osv, expression, orm
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from openerp import SUPERUSER_ID, api
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round as round
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
class Parser(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(Parser, self).__init__(cr, uid, name, context)
self.localcontext.update({
'get_closed_object': self.get_closed_object,
'get_date': self.get_date,
})
def get_date(self, ):
''' For report time
'''
return datetime.now().strftime(DEFAULT_SERVER_DATETIME_FORMAT)
def get_closed_object(self, ):
''' List of order
'''
sol_pool = self.pool.get('sale.order.line')
sol_ids = sol_pool.search(self.cr, self.uid, [
('mrp_id.state', 'not in', ('cancel', 'done')),
('mrp_id', '!=', False),
('go_in_production', '=', True),
('mx_closed', '=', True),
])
items = []
for item in sorted(sol_pool.browse(
self.cr, self.uid, sol_ids),
key=lambda x: (x.mrp_id.name,x.mrp_sequence)):
if item.product_uom_qty > item.product_uom_maked_sync_qty:
items.append(item)
return items
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -6,230,255,985,330,158,000 | 35.961039 | 79 | 0.608925 | false | 4.136628 | false | false | false |
PU-Crypto/AES | Rijndael/KeySchedule.py | 1 | 1376 | # -*- coding: utf-8 -*-
#KeySchedule
from Rijndael.SubBytes import *
from Rijndael.Tables import RijndaelRcon
import math
def RotWord(Spalte):
    # Rotate the positions in the array (shift the word left by one byte)
output = list()
output.append(Spalte[1])
output.append(Spalte[2])
output.append(Spalte[3])
output.append(Spalte[0])
return output
def XorRcon(Spalte, SpalteVor4, RconCount):
    # XOR the special case (always the first column of a round key) step by step, including the Rcon table
output = list()
Rcon = RijndaelRcon.Rcon[RconCount]
for i in range(0,4):
output.append(format(int(Spalte[i],16)^int(SpalteVor4[i], 16)^int(format(Rcon[i], '#04x'),16), '#04x'))
return output
def Xor(Spalte, SpalteVor4):
    # XOR the two words value by value
output = list()
for i in range(0,4):
        output.append(format(int(Spalte[i], 16)^int(SpalteVor4[i], 16), '#04x'))  # hexadecimal
return output
def KeySchedule(Key):
    # Expand the key into 10 further round keys that depend on one another (44 words in total)
roundCounter = 0
for i in range(4,41,4):
Key.append(RotWord(Key[i-1]))
Key[i] = TranslateToSBox(Key[i])
Key[i] = XorRcon(Key[i],Key[i-4],roundCounter)
roundCounter += 1
for j in range(i+1,i+4):
Key.append(Xor(Key[j-1],Key[j-4]))
return Key
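# Illustrative usage sketch (the key below is the FIPS-197 AES-128 example key,
# given here as an assumption about the expected input format): the key is
# passed in as four columns of four hex strings and expanded to 44 words,
# i.e. 11 round keys.
#
#     key = [['0x2b', '0x7e', '0x15', '0x16'],
#            ['0x28', '0xae', '0xd2', '0xa6'],
#            ['0xab', '0xf7', '0x15', '0x88'],
#            ['0x09', '0xcf', '0x4f', '0x3c']]
#     expanded = KeySchedule(key)    # len(expanded) == 44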
| lgpl-3.0 | 7,545,850,936,786,939,000 | 28.913043 | 124 | 0.653343 | false | 2.656371 | false | false | false |
sbt9uc/osf.io | tests/api_tests/users/test_views.py | 1 | 32030 | # -*- coding: utf-8 -*-
import urlparse
from nose.tools import * # flake8: noqa
from website.models import Node
from website.util.sanitize import strip_html
from tests.base import ApiTestCase
from tests.factories import AuthUserFactory, DashboardFactory, FolderFactory, ProjectFactory
from api.base.settings.defaults import API_BASE
class TestUsers(ApiTestCase):
def setUp(self):
super(TestUsers, self).setUp()
self.user_one = AuthUserFactory()
self.user_two = AuthUserFactory()
def tearDown(self):
super(TestUsers, self).tearDown()
def test_returns_200(self):
res = self.app.get('/{}users/'.format(API_BASE))
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_find_user_in_users(self):
url = "/{}users/".format(API_BASE)
res = self.app.get(url)
user_son = res.json['data']
ids = [each['id'] for each in user_son]
assert_in(self.user_two._id, ids)
def test_all_users_in_users(self):
url = "/{}users/".format(API_BASE)
res = self.app.get(url)
user_son = res.json['data']
ids = [each['id'] for each in user_son]
assert_in(self.user_one._id, ids)
assert_in(self.user_two._id, ids)
def test_find_multiple_in_users(self):
url = "/{}users/?filter[fullname]=fred".format(API_BASE)
res = self.app.get(url)
user_json = res.json['data']
ids = [each['id'] for each in user_json]
assert_in(self.user_one._id, ids)
assert_in(self.user_two._id, ids)
def test_find_single_user_in_users(self):
url = "/{}users/?filter[fullname]=my".format(API_BASE)
self.user_one.fullname = 'My Mom'
self.user_one.save()
res = self.app.get(url)
user_json = res.json['data']
ids = [each['id'] for each in user_json]
assert_in(self.user_one._id, ids)
assert_not_in(self.user_two._id, ids)
def test_find_no_user_in_users(self):
url = "/{}users/?filter[fullname]=NotMyMom".format(API_BASE)
res = self.app.get(url)
user_json = res.json['data']
ids = [each['id'] for each in user_json]
assert_not_in(self.user_one._id, ids)
assert_not_in(self.user_two._id, ids)
def test_users_list_takes_profile_image_size_param(self):
size = 42
url = "/{}users/?profile_image_size={}".format(API_BASE, size)
res = self.app.get(url)
user_json = res.json['data']
for user in user_json:
profile_image_url = user['attributes']['profile_image_url']
query_dict = urlparse.parse_qs(urlparse.urlparse(profile_image_url).query)
assert_equal(int(query_dict.get('size')[0]), size)
class TestUserDetail(ApiTestCase):
def setUp(self):
super(TestUserDetail, self).setUp()
self.user_one = AuthUserFactory()
self.user_one.social['twitter'] = 'howtopizza'
self.user_one.save()
self.user_two = AuthUserFactory()
def tearDown(self):
super(TestUserDetail, self).tearDown()
def test_gets_200(self):
url = "/{}users/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_get_correct_pk_user(self):
url = "/{}users/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
user_json = res.json['data']
assert_equal(user_json['attributes']['fullname'], self.user_one.fullname)
assert_equal(user_json['attributes']['twitter'], 'howtopizza')
    def test_get_incorrect_pk_user_not_logged_in(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url)
user_json = res.json['data']
assert_not_equal(user_json['attributes']['fullname'], self.user_one.fullname)
    def test_get_incorrect_pk_user_logged_in(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url, auth=self.user_one.auth)
user_json = res.json['data']
assert_not_equal(user_json['attributes']['fullname'], self.user_one.fullname)
assert_equal(user_json['attributes']['fullname'], self.user_two.fullname)
def test_user_detail_takes_profile_image_size_param(self):
size = 42
url = "/{}users/{}/?profile_image_size={}".format(API_BASE, self.user_one._id, size)
res = self.app.get(url)
user_json = res.json['data']
profile_image_url = user_json['attributes']['profile_image_url']
query_dict = urlparse.parse_qs(urlparse.urlparse(profile_image_url).query)
assert_equal(int(query_dict.get('size')[0]), size)
class TestUserNodes(ApiTestCase):
def setUp(self):
super(TestUserNodes, self).setUp()
self.user_one = AuthUserFactory()
self.user_one.social['twitter'] = 'howtopizza'
self.user_one.save()
self.user_two = AuthUserFactory()
self.public_project_user_one = ProjectFactory(title="Public Project User One",
is_public=True,
creator=self.user_one)
self.private_project_user_one = ProjectFactory(title="Private Project User One",
is_public=False,
creator=self.user_one)
self.public_project_user_two = ProjectFactory(title="Public Project User Two",
is_public=True,
creator=self.user_two)
self.private_project_user_two = ProjectFactory(title="Private Project User Two",
is_public=False,
creator=self.user_two)
self.deleted_project_user_one = FolderFactory(title="Deleted Project User One",
is_public=False,
creator=self.user_one,
is_deleted=True)
self.folder = FolderFactory()
self.deleted_folder = FolderFactory(title="Deleted Folder User One",
is_public=False,
creator=self.user_one,
is_deleted=True)
self.dashboard = DashboardFactory()
def tearDown(self):
super(TestUserNodes, self).tearDown()
def test_authorized_in_gets_200(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_anonymous_gets_200(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_get_projects_logged_in(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_one._id, ids)
assert_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_projects_not_logged_in(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_projects_logged_in_as_different_user(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_two._id)
res = self.app.get(url, auth=self.user_one.auth)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_two._id, ids)
assert_not_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
class TestUserRoutesNodeRoutes(ApiTestCase):
def setUp(self):
super(TestUserRoutesNodeRoutes, self).setUp()
self.user_one = AuthUserFactory()
self.user_one.social['twitter'] = 'howtopizza'
self.user_two = AuthUserFactory()
self.public_project_user_one = ProjectFactory(title="Public Project User One", is_public=True, creator=self.user_one)
self.private_project_user_one = ProjectFactory(title="Private Project User One", is_public=False, creator=self.user_one)
self.public_project_user_two = ProjectFactory(title="Public Project User Two", is_public=True, creator=self.user_two)
self.private_project_user_two = ProjectFactory(title="Private Project User Two", is_public=False, creator=self.user_two)
self.deleted_project_user_one = FolderFactory(title="Deleted Project User One", is_public=False, creator=self.user_one, is_deleted=True)
self.folder = FolderFactory()
self.deleted_folder = FolderFactory(title="Deleted Folder User One", is_public=False, creator=self.user_one, is_deleted=True)
self.dashboard = DashboardFactory()
def tearDown(self):
super(TestUserRoutesNodeRoutes, self).tearDown()
Node.remove()
def test_get_200_path_users_me_userone_logged_in(self):
url = "/{}users/me/".format(API_BASE)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
def test_get_200_path_users_me_usertwo_logged_in(self):
url = "/{}users/me/".format(API_BASE)
res = self.app.get(url, auth=self.user_two.auth)
assert_equal(res.status_code, 200)
def test_get_403_path_users_me_no_user(self):
# TODO: change expected exception from 403 to 401 for unauthorized users
url = "/{}users/me/".format(API_BASE)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 401)
def test_get_404_path_users_user_id_me_user_logged_in(self):
url = "/{}users/{}/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_me_no_user(self):
url = "/{}users/{}/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_me_unauthorized_user(self):
url = "/{}users/{}/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_200_path_users_user_id_user_logged_in(self):
url = "/{}users/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
def test_get_200_path_users_user_id_no_user(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
def test_get_200_path_users_user_id_unauthorized_user(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['id'], self.user_two._id)
def test_get_200_path_users_me_nodes_user_logged_in(self):
url = "/{}users/me/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
ids = {each['id'] for each in res.json['data']}
assert_in(self.public_project_user_one._id, ids)
assert_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_403_path_users_me_nodes_no_user(self):
# TODO: change expected exception from 403 to 401 for unauthorized users
url = "/{}users/me/nodes/".format(API_BASE)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 401)
def test_get_200_path_users_user_id_nodes_user_logged_in(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
ids = {each['id'] for each in res.json['data']}
assert_in(self.public_project_user_one._id, ids)
assert_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_200_path_users_user_id_nodes_no_user(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
# an anonymous/unauthorized user can only see the public projects user_one contributes to.
ids = {each['id'] for each in res.json['data']}
assert_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_200_path_users_user_id_nodes_unauthorized_user(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_two.auth)
assert_equal(res.status_code, 200)
# an anonymous/unauthorized user can only see the public projects user_one contributes to.
ids = {each['id'] for each in res.json['data']}
assert_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_404_path_users_user_id_nodes_me_user_logged_in(self):
url = "/{}users/{}/nodes/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_nodes_me_unauthorized_user(self):
url = "/{}users/{}/nodes/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_nodes_me_no_user(self):
url = "/{}users/{}/nodes/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_me_user_logged_in(self):
url = "/{}nodes/me/".format(API_BASE)
res = self.app.get(url, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_me_no_user(self):
url = "/{}nodes/me/".format(API_BASE)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_user_id_user_logged_in(self):
url = "/{}nodes/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_user_id_unauthorized_user(self):
url = "/{}nodes/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_user_id_no_user(self):
url = "/{}nodes/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
class TestUserUpdate(ApiTestCase):
def setUp(self):
super(TestUserUpdate, self).setUp()
self.user_one = AuthUserFactory.build(
fullname='Martin Luther King Jr.',
given_name='Martin',
family_name='King',
suffix='Jr.',
social=dict(
github='userOneGithub',
scholar='userOneScholar',
personal='http://www.useronepersonalwebsite.com',
twitter='userOneTwitter',
linkedIn='userOneLinkedIn',
impactStory='userOneImpactStory',
orcid='userOneOrcid',
researcherId='userOneResearcherId'
)
)
self.user_one.save()
self.user_one_url = "/v2/users/{}/".format(self.user_one._id)
self.user_two = AuthUserFactory()
self.user_two.save()
self.new_user_one_data = {
'id': self.user_one._id,
'fullname': 'el-Hajj Malik el-Shabazz',
'given_name': 'Malcolm',
'middle_names': 'Malik el-Shabazz',
'family_name': 'X',
'suffix': 'Sr.',
'gitHub': 'newGitHub',
'scholar': 'newScholar',
'personal_website': 'http://www.newpersonalwebsite.com',
'twitter': 'http://www.newpersonalwebsite.com',
'linkedIn': 'newLinkedIn',
'impactStory': 'newImpactStory',
'orcid': 'newOrcid',
'researcherId': 'newResearcherId',
}
def tearDown(self):
super(TestUserUpdate, self).tearDown()
def test_patch_user_logged_out(self):
res = self.app.patch_json_api(self.user_one_url, {
'fullname': self.new_user_one_data['fullname'],
}, expect_errors=True)
assert_equal(res.status_code, 401)
def test_patch_user_without_required_field(self):
# PATCH does not require required fields
res = self.app.patch_json_api(self.user_one_url, {
'family_name': self.new_user_one_data['family_name'],
}, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['family_name'], self.new_user_one_data['family_name'])
self.user_one.reload()
assert_equal(self.user_one.family_name, self.new_user_one_data['family_name'])
def test_put_user_without_required_field(self):
# PUT requires all required fields
res = self.app.put_json_api(self.user_one_url, {
'family_name': self.new_user_one_data['family_name'],
}, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_partial_patch_user_logged_in(self):
# Test to make sure new fields are patched and old fields stay the same
res = self.app.patch_json_api(self.user_one_url, {
'id': self.user_one._id,
'fullname': 'new_fullname',
'gitHub': 'even_newer_github',
'suffix': 'The Millionth'
}, auth=self.user_one.auth)
self.user_one.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['fullname'], 'new_fullname')
assert_equal(res.json['data']['suffix'], 'The Millionth')
assert_equal(res.json['data']['gitHub'], 'even_newer_github')
assert_equal(res.json['data']['given_name'], self.user_one.given_name)
assert_equal(res.json['data']['middle_names'], self.user_one.middle_names)
assert_equal(res.json['data']['family_name'], self.user_one.family_name)
assert_equal(res.json['data']['personal_website'], self.user_one.social['personal'])
assert_equal(res.json['data']['twitter'], self.user_one.social['twitter'])
assert_equal(res.json['data']['linkedIn'], self.user_one.social['linkedIn'])
assert_equal(res.json['data']['impactStory'], self.user_one.social['impactStory'])
assert_equal(res.json['data']['orcid'], self.user_one.social['orcid'])
assert_equal(res.json['data']['researcherId'], self.user_one.social['researcherId'])
assert_equal(self.user_one.fullname, 'new_fullname')
assert_equal(self.user_one.suffix, 'The Millionth')
assert_equal(self.user_one.social['github'], 'even_newer_github')
def test_partial_patch_user_logged_in(self):
# Test to make sure new fields are patched and old fields stay the same
res = self.app.patch_json_api(self.user_one_url, {
'id': self.user_one._id,
'fullname': 'new_fullname',
'suffix': 'The Millionth'
}, auth=self.user_one.auth)
self.user_one.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['fullname'], 'new_fullname')
assert_equal(res.json['data']['attributes']['suffix'], 'The Millionth')
assert_equal(res.json['data']['attributes']['gitHub'], self.user_one.social['github'])
assert_equal(res.json['data']['attributes']['given_name'], self.user_one.given_name)
assert_equal(res.json['data']['attributes']['middle_names'], self.user_one.middle_names)
assert_equal(res.json['data']['attributes']['family_name'], self.user_one.family_name)
assert_equal(res.json['data']['attributes']['personal_website'], self.user_one.social['personal'])
assert_equal(res.json['data']['attributes']['twitter'], self.user_one.social['twitter'])
assert_equal(res.json['data']['attributes']['linkedIn'], self.user_one.social['linkedIn'])
assert_equal(res.json['data']['attributes']['impactStory'], self.user_one.social['impactStory'])
assert_equal(res.json['data']['attributes']['orcid'], self.user_one.social['orcid'])
assert_equal(res.json['data']['attributes']['researcherId'], self.user_one.social['researcherId'])
assert_equal(self.user_one.fullname, 'new_fullname')
assert_equal(self.user_one.suffix, 'The Millionth')
assert_equal(self.user_one.social['github'], self.user_one.social['github'])
def test_partial_put_user_logged_in(self):
# Test to make sure new fields are patched and old fields stay the same
res = self.app.put_json_api(self.user_one_url, {
'id': self.user_one._id,
'fullname': 'new_fullname',
'gitHub': 'even_newer_github',
'suffix': 'The Millionth'
}, auth=self.user_one.auth)
self.user_one.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['fullname'], 'new_fullname')
assert_equal(res.json['data']['attributes']['suffix'], 'The Millionth')
assert_equal(res.json['data']['attributes']['gitHub'], 'even_newer_github')
assert_equal(res.json['data']['attributes']['given_name'], self.user_one.given_name)
assert_equal(res.json['data']['attributes']['middle_names'], self.user_one.middle_names)
assert_equal(res.json['data']['attributes']['family_name'], self.user_one.family_name)
assert_equal(self.user_one.fullname, 'new_fullname')
assert_equal(self.user_one.suffix, 'The Millionth')
assert_equal(self.user_one.social['github'], 'even_newer_github')
def test_put_user_logged_in(self):
# Logged in user updates their user information via put
res = self.app.put_json_api(self.user_one_url, self.new_user_one_data, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['fullname'], self.new_user_one_data['fullname'])
assert_equal(res.json['data']['attributes']['given_name'], self.new_user_one_data['given_name'])
assert_equal(res.json['data']['attributes']['middle_names'], self.new_user_one_data['middle_names'])
assert_equal(res.json['data']['attributes']['family_name'], self.new_user_one_data['family_name'])
assert_equal(res.json['data']['attributes']['suffix'], self.new_user_one_data['suffix'])
assert_equal(res.json['data']['attributes']['gitHub'], self.new_user_one_data['gitHub'])
assert_equal(res.json['data']['attributes']['personal_website'], self.new_user_one_data['personal_website'])
assert_equal(res.json['data']['attributes']['twitter'], self.new_user_one_data['twitter'])
assert_equal(res.json['data']['attributes']['linkedIn'], self.new_user_one_data['linkedIn'])
assert_equal(res.json['data']['attributes']['impactStory'], self.new_user_one_data['impactStory'])
assert_equal(res.json['data']['attributes']['orcid'], self.new_user_one_data['orcid'])
assert_equal(res.json['data']['attributes']['researcherId'], self.new_user_one_data['researcherId'])
self.user_one.reload()
assert_equal(self.user_one.fullname, self.new_user_one_data['fullname'])
assert_equal(self.user_one.given_name, self.new_user_one_data['given_name'])
assert_equal(self.user_one.middle_names, self.new_user_one_data['middle_names'])
assert_equal(self.user_one.family_name, self.new_user_one_data['family_name'])
assert_equal(self.user_one.suffix, self.new_user_one_data['suffix'])
assert_equal(self.user_one.social['github'], self.new_user_one_data['gitHub'])
assert_equal(self.user_one.social['personal'], self.new_user_one_data['personal_website'])
assert_equal(self.user_one.social['twitter'], self.new_user_one_data['twitter'])
assert_equal(self.user_one.social['linkedIn'], self.new_user_one_data['linkedIn'])
assert_equal(self.user_one.social['impactStory'], self.new_user_one_data['impactStory'])
assert_equal(self.user_one.social['orcid'], self.new_user_one_data['orcid'])
assert_equal(self.user_one.social['researcherId'], self.new_user_one_data['researcherId'])
def test_put_user_logged_out(self):
res = self.app.put_json_api(self.user_one_url, self.new_user_one_data, expect_errors=True)
assert_equal(res.status_code, 401)
def test_put_wrong_user(self):
# User tries to update someone else's user information via put
res = self.app.put_json_api(self.user_one_url, self.new_user_one_data, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_patch_wrong_user(self):
# User tries to update someone else's user information via patch
res = self.app.patch_json_api(self.user_one_url, {
'fullname': self.new_user_one_data['fullname'],
}, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
self.user_one.reload()
assert_not_equal(self.user_one.fullname, self.new_user_one_data['fullname'])
def test_update_user_sanitizes_html_properly(self):
"""Post request should update resource, and any HTML in fields should be stripped"""
bad_fullname = 'Malcolm <strong>X</strong>'
bad_family_name = 'X <script>alert("is")</script> a cool name'
res = self.app.patch_json_api(self.user_one_url, {
'fullname': bad_fullname,
'family_name': bad_family_name,
}, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['fullname'], strip_html(bad_fullname))
assert_equal(res.json['data']['attributes']['family_name'], strip_html(bad_family_name))
class TestDeactivatedUser(ApiTestCase):
def setUp(self):
super(TestDeactivatedUser, self).setUp()
self.user = AuthUserFactory()
def test_deactivated_user_returns_410_response(self):
url = '/{}users/{}/'.format(API_BASE, self.user._id)
        res = self.app.get(url, auth=self.user.auth, expect_errors=False)
assert_equal(res.status_code, 200)
self.user.is_disabled = True
self.user.save()
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 410)
class TestExceptionFormatting(ApiTestCase):
def setUp(self):
super(TestExceptionFormatting, self).setUp()
self.user = AuthUserFactory.build(
fullname='Martin Luther King Jr.',
given_name='Martin',
family_name='King',
suffix='Jr.',
social=dict(
github='userOneGithub',
scholar='userOneScholar',
personal='http://www.useronepersonalwebsite.com',
twitter='userOneTwitter',
linkedIn='userOneLinkedIn',
impactStory='userOneImpactStory',
orcid='userOneOrcid',
researcherId='userOneResearcherId'
)
)
self.url = '/{}users/{}/'.format(API_BASE, self.user._id)
self.user_two = AuthUserFactory()
def test_updates_user_with_no_fullname(self):
res = self.app.put_json_api(self.url, auth=self.user.auth, expect_errors=True)
errors = res.json['errors']
assert(isinstance(errors, list))
assert('fullname' in res.json['errors'][0]['meta']['field'])
assert('This field is required.' in res.json['errors'][0]['detail'])
def test_updates_user_unauthorized(self):
res = self.app.put_json_api(self.url, expect_errors=True)
errors = res.json['errors']
assert(isinstance(errors, list))
assert_equal(errors[0], {'detail': "Authentication credentials were not provided."})
def test_updates_user_forbidden(self):
res = self.app.put_json_api(self.url, auth=self.user_two.auth, expect_errors=True)
errors = res.json['errors']
assert(isinstance(errors, list))
assert_equal(errors[0], {'detail': 'You do not have permission to perform this action.'})
def test_user_does_not_exist_formatting(self):
url = '/{}users/{}/'.format(API_BASE, '12345')
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
errors = res.json['errors']
assert(isinstance(errors, list))
assert_equal(errors[0], {'detail': 'Not found.'})
def test_basic_auth_me_wrong_password(self):
url = '/{}users/{}/'.format(API_BASE, 'me')
res = self.app.get(url, auth=(self.user.username, 'nottherightone'), expect_errors=True)
assert_equal(res.status_code, 401)
| apache-2.0 | 1,204,231,969,254,728,200 | 46.949102 | 144 | 0.613862 | false | 3.383332 | true | false | false |
griffy/Pyap | pyap/library/db.py | 1 | 2688 | # Pyap - The Python Audio Player Library
#
# Copyright (c) 2012 Joel Griffith
# Copyright (c) 2005 Joe Wreschnig
# Copyright (c) 2002 David I. Lehn
# Copyright (c) 2005-2011 the SQLAlchemy authors and contributors
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from sqlalchemy import create_engine
from sqlalchemy import Table, Column, Integer, Unicode, MetaData
from sqlalchemy.schema import ForeignKey
from sqlalchemy.orm import mapper, relationship, sessionmaker
from pyap.audio import Audio
from pyap.playlist import Playlist
def setup(uri):
# TODO: echo should be false
if uri is None:
engine = create_engine('sqlite:///:memory:', echo=True)
else:
engine = create_engine('sqlite:///' + uri, echo=True)
metadata = MetaData()
#audio_types_table = Table('audio_types', metadata,
# Column('id', Integer, primary_key=True),
# Column('type', Unicode, unique=True)
#)
audio_table = Table('audio', metadata,
Column('id', Integer, primary_key=True),
Column('uri', Unicode, unique=True, index=True),
Column('type', Integer, nullable=False),
Column('artist', Unicode),
Column('title', Unicode),
Column('album', Unicode),
Column('track', Integer),
Column('length', Integer)
)
playlist_table = Table('playlists', metadata,
Column('id', Integer, primary_key=True),
Column('name', Unicode, unique=True, index=True)
)
# many-to-many junction table for audio and playlists
audio_playlist_table = Table('audio_playlists', metadata,
Column('audio_id', Integer, ForeignKey('audio.id')),
Column('playlist_id', Integer, ForeignKey('playlists.id'))
)
metadata.create_all(engine)
mapper(Audio, audio_table)
mapper(Playlist, playlist_table, properties={
'audio': relationship(Audio, secondary=audio_playlist_table,
backref='playlists')}
)
return sessionmaker(bind=engine)
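# Illustrative usage sketch (added note, not part of the original module;
# constructor arguments for Audio/Playlist depend on pyap.audio / pyap.playlist):
#
#   Session = setup('library.db')   # or setup(None) for an in-memory database
#   session = Session()
#   session.add(some_playlist)      # hypothetical Playlist instance
#   session.commit()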
| gpl-2.0 | 4,940,832,737,651,426,000 | 34.84 | 68 | 0.682292 | false | 4.097561 | false | false | false |
magchips/labalyzer | setup.py | 1 | 3470 | #!/usr/bin/env python
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2010 <Atreju Tauschinsky> <[email protected]>
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
###################### DO NOT TOUCH THIS (HEAD TO THE SECOND PART) ######################
import os
import sys
try:
import DistUtilsExtra.auto
except ImportError:
print >> sys.stderr, 'To build labalyzer you need https://launchpad.net/python-distutils-extra'
sys.exit(1)
assert DistUtilsExtra.auto.__version__ >= '2.18', 'needs DistUtilsExtra.auto >= 2.18'
def update_config(values = {}):
oldvalues = {}
try:
fin = file('labalyzer_lib/labalyzerconfig.py', 'r')
fout = file(fin.name + '.new', 'w')
for line in fin:
fields = line.split(' = ') # Separate variable from value
if fields[0] in values:
oldvalues[fields[0]] = fields[1].strip()
line = "%s = %s\n" % (fields[0], values[fields[0]])
fout.write(line)
fout.flush()
fout.close()
fin.close()
os.rename(fout.name, fin.name)
except (OSError, IOError), e:
print ("ERROR: Can't find labalyzer_lib/labalyzerconfig.py")
sys.exit(1)
return oldvalues
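# Illustrative example (assumed file contents, not part of the original script):
# if labalyzer_lib/labalyzerconfig.py contains the line
#     __version__ = 'unknown'
# then update_config({'__version__': "'0.1'"}) rewrites that line in place to
#     __version__ = '0.1'
# and returns {'__version__': "'unknown'"} so the previous value can be restored.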
def update_desktop_file(datadir):
try:
fin = file('labalyzer.desktop.in', 'r')
fout = file(fin.name + '.new', 'w')
for line in fin:
if 'Icon=' in line:
line = "Icon=%s\n" % (datadir + 'media/labalyzer.svg')
fout.write(line)
fout.flush()
fout.close()
fin.close()
os.rename(fout.name, fin.name)
except (OSError, IOError), e:
print ("ERROR: Can't find labalyzer.desktop.in")
sys.exit(1)
class InstallAndUpdateDataDirectory(DistUtilsExtra.auto.install_auto):
def run(self):
values = {'__labalyzer_data_directory__': "'%s'" % (self.prefix + '/share/labalyzer/'),
'__version__': "'%s'" % self.distribution.get_version()}
previous_values = update_config(values)
update_desktop_file(self.prefix + '/share/labalyzer/')
DistUtilsExtra.auto.install_auto.run(self)
update_config(previous_values)
##################################################################################
###################### YOU SHOULD MODIFY ONLY WHAT IS BELOW ######################
##################################################################################
DistUtilsExtra.auto.setup(
name='labalyzer',
version='0.1',
license='GPL-3',
#author='Your Name',
#author_email='[email protected]',
#description='UI for managing …',
#long_description='Here a longer description',
#url='https://launchpad.net/labalyzer',
cmdclass={'install': InstallAndUpdateDataDirectory}
)
| gpl-3.0 | 3,448,645,533,441,506,300 | 34.030303 | 99 | 0.580161 | false | 3.72103 | false | false | false |
olga-perederieieva/pyDEA | pyDEA/main.py | 1 | 3347 | ''' This module contains methods for running pyDEA from terminal.
'''
import sys
from pyDEA.core.data_processing.parameters import parse_parameters_from_file
from pyDEA.core.utils.run_routine import RunMethodTerminal
from pyDEA.core.utils.dea_utils import clean_up_pickled_files, get_logger
def main(filename, output_format='xlsx', output_dir='', sheet_name_usr=''):
''' Main function to run DEA models from terminal.
Args:
filename (str): path to file with parameters.
output_format (str, optional): file format of solution file.
This value is used
only if OUTPUT_FILE in parameters is empty or set to auto.
Defaults to xlsx.
output_dir (str, optional): directory where solution must
be written.
If it is not given, solution will be written to current folder.
This value is used
only if OUTPUT_FILE in parameters is empty or set to auto.
sheet_name_usr (str, optional): name of the sheet in xls- or
xlsx-file with
input data from which data will be read. If input data file is
in csv format,
this value is ignored.
'''
print('Params file', filename, 'output_format', output_format,
'output_dir', output_dir, 'sheet_name_usr', sheet_name_usr)
logger = get_logger()
logger.info('Params file "%s", output format "%s", output directory "%s", sheet name "%s".',
filename, output_format, output_dir, sheet_name_usr)
params = parse_parameters_from_file(filename)
params.print_all_parameters()
run_method = RunMethodTerminal(params, sheet_name_usr, output_format,
output_dir)
run_method.run(params)
clean_up_pickled_files()
logger.info('pyDEA exited.')
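# Illustrative invocations (added note; file and sheet names are placeholders):
#   from Python:     main('params.txt', 'csv', 'results/', 'Sheet1')
#   from a terminal: python -m pyDEA.main params.txt csv results/ Sheet1
#                    (assumes the pyDEA package is importable on the Python path)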
if __name__ == '__main__':
args = sys.argv[1:]
logger = get_logger()
logger.info('pyDEA started as a console application.')
print('args = {0}'.format(args))
if len(args) < 1 or len(args) > 4:
logger.error('Invalid number of input arguments. At least one '
'argument must be given, no more than 4 arguments, but %d were given.',
len(args))
raise ValueError('Invalid number of input arguments. At least one '
'argument must be given, no more than 4 arguments'
' are expected. Input arguments are:\n (1) path to'
' file with parameters (compulsory)\n'
'(2) output file format, possible values: xls, xlsx'
' and csv, default value is xlsx (optional), this'
' value is used only if auto or empty string was set'
' for OUTPUT_FILE in parameters file \n'
'(3) output directory (optional, if not specified,'
' output is written to current directory)\n'
'(4) sheet name from which data should be read '
'(optional, if not specified, data is read from'
' the first sheet)')
try:
main(*args)
except Exception as excinfo:
logger.error(excinfo)
raise
| mit | 2,119,575,361,617,346,300 | 45.486111 | 96 | 0.574544 | false | 4.456724 | false | false | false |
wireservice/csvkit | setup.py | 1 | 2897 | #!/usr/bin/env python
import sys
from setuptools import setup
install_requires = [
'agate>=1.6.1',
'agate-excel>=0.2.2',
'agate-dbf>=0.2.0',
'agate-sql>=0.5.3',
'six>=1.6.1',
'setuptools',
]
if sys.version_info < (2, 7):
install_requires.append('argparse>=1.2.1')
install_requires.append('ordereddict>=1.1')
install_requires.append('simplejson>=3.6.3')
setup(
name='csvkit',
version='1.0.6',
description='A suite of command-line tools for working with CSV, the king of tabular file formats.',
long_description=open('README.rst').read(),
author='Christopher Groskopf',
author_email='[email protected]',
url='https://github.com/wireservice/csvkit',
project_urls={
'Documentation': 'https://csvkit.readthedocs.io/en/latest/',
},
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
],
packages=[
'csvkit',
'csvkit.convert',
'csvkit.utilities'
],
entry_points={
'console_scripts': [
'csvclean = csvkit.utilities.csvclean:launch_new_instance',
'csvcut = csvkit.utilities.csvcut:launch_new_instance',
'csvformat = csvkit.utilities.csvformat:launch_new_instance',
'csvgrep = csvkit.utilities.csvgrep:launch_new_instance',
'csvjoin = csvkit.utilities.csvjoin:launch_new_instance',
'csvjson = csvkit.utilities.csvjson:launch_new_instance',
'csvlook = csvkit.utilities.csvlook:launch_new_instance',
'csvpy = csvkit.utilities.csvpy:launch_new_instance',
'csvsort = csvkit.utilities.csvsort:launch_new_instance',
'csvsql = csvkit.utilities.csvsql:launch_new_instance',
'csvstack = csvkit.utilities.csvstack:launch_new_instance',
'csvstat = csvkit.utilities.csvstat:launch_new_instance',
'in2csv = csvkit.utilities.in2csv:launch_new_instance',
'sql2csv = csvkit.utilities.sql2csv:launch_new_instance'
]
},
install_requires=install_requires
)
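# Note (added for illustration): each console_scripts entry above is installed as a
# command-line tool, so after installing csvkit invocations such as
#   csvcut -c 1,3 data.csv | csvlook
# become available; data.csv is a placeholder file name.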
| mit | 7,940,495,066,647,459,000 | 37.626667 | 104 | 0.622023 | false | 3.752591 | false | false | false |
ZachMassia/platformio | platformio/builder/scripts/nordicnrf51.py | 1 | 2079 | # Copyright 2014-2016 Ivan Kravets <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Builder for Nordic nRF51 series ARM microcontrollers.
"""
from os.path import join
from SCons.Script import (COMMAND_LINE_TARGETS, AlwaysBuild, Default,
DefaultEnvironment, SConscript)
env = DefaultEnvironment()
SConscript(env.subst(join("$PIOBUILDER_DIR", "scripts", "basearm.py")))
if env.subst("$BOARD") == "rfduino":
env.Append(
CPPFLAGS=["-fno-builtin"],
LINKFLAGS=["--specs=nano.specs"]
)
env.Replace(
UPLOADER=join("$PIOPACKAGES_DIR", "tool-rfdloader", "rfdloader"),
UPLOADERFLAGS=["-q", "$UPLOAD_PORT"],
UPLOADCMD='"$UPLOADER" $UPLOADERFLAGS $SOURCES'
)
#
# Target: Build executable and linkable firmware
#
target_elf = env.BuildProgram()
#
# Target: Build the .bin file
#
if "uploadlazy" in COMMAND_LINE_TARGETS:
target_firm = join("$BUILD_DIR", "firmware.hex")
else:
target_firm = env.ElfToHex(join("$BUILD_DIR", "firmware"), target_elf)
#
# Target: Print binary size
#
target_size = env.Alias("size", target_elf, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Upload by default .bin file
#
if env.subst("$BOARD") == "rfduino":
upload = env.Alias(
["upload", "uploadlazy"], target_firm,
[lambda target, source, env: env.AutodetectUploadPort(), "$UPLOADCMD"])
else:
upload = env.Alias(["upload", "uploadlazy"], target_firm, env.UploadToDisk)
AlwaysBuild(upload)
#
# Target: Define targets
#
Default([target_firm, target_size])
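# Illustrative invocations (added note; exact CLI syntax depends on the PlatformIO
# version in use):
#   platformio run                  -> build the firmware and print its size
#   platformio run --target upload  -> additionally run the upload target above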
| apache-2.0 | 3,195,199,141,947,326,000 | 26 | 79 | 0.685426 | false | 3.391517 | false | false | false |